diff --git a/.github/workflows/formatting.yml b/.github/workflows/formatting.yml new file mode 100644 index 000000000..4ed49b6b5 --- /dev/null +++ b/.github/workflows/formatting.yml @@ -0,0 +1,47 @@ +name: Formatting / Lint +on: + push: + branches: [master, main] + pull_request: + branches: [master, main] +jobs: + Format: + name: Test formatting via prettier... + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Use Node.js node + uses: actions/setup-node@v2 + with: + node-version: "18" + - name: before_install + run: + echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf + && sudo sysctl -p + + - name: installing yarn + run: yarn install + - name: prettier:check + run: yarn prettier:check + stage: + name: Test linting... + needs: Format + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Use Node.js node + uses: actions/setup-node@v2 + with: + node-version: "18" + - name: before_install + run: + echo fs.inotify.max_user_watches=524288 | sudo tee -a /etc/sysctl.conf + && sudo sysctl -p + + - name: installing yarn + run: yarn install + - name: lint + run: yarn lint diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 000000000..410750e42 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1,11 @@ +# This is a comment. +# Each line is a file pattern followed by one or more owners. + +# These global owners will be the default owners for everything in +# the repo. Unless a later match takes precedence, will be requested for +# review when someone opens a pull request. +* @nickfrosty + +# This list owns any file in the `/docs` directory in the root of +# the repository and any of its subdirectories. +# /docs/ @nickfrosty diff --git a/content/courses/README.md b/content/courses/README.md deleted file mode 100644 index fe16977d8..000000000 --- a/content/courses/README.md +++ /dev/null @@ -1 +0,0 @@ -# Solana Courses diff --git a/content/courses/solana-course/README.md b/content/courses/solana-course/README.md deleted file mode 100644 index 1da21cd17..000000000 --- a/content/courses/solana-course/README.md +++ /dev/null @@ -1,12 +0,0 @@ -# Solana Course - -This course is designed to be the absolute best starting point for Web -Developers looking to learn Web3 Development. Solana is the ideal network for -starting your Web3 journey because of its high speed, low cost, energy -efficiency, and more. - -## Source - -This Solana Development Course is originally from the -[`Unboxed-Software/solana-course`](https://github.com/Unboxed-Software/solana-course) -repo. 
diff --git a/content/courses/solana-course/assets/anchor-client-structure.png b/content/courses/solana-course/assets/anchor-client-structure.png deleted file mode 100644 index e504f65e8..000000000 Binary files a/content/courses/solana-course/assets/anchor-client-structure.png and /dev/null differ diff --git a/content/courses/solana-course/assets/anchor-frontend-demo.gif b/content/courses/solana-course/assets/anchor-frontend-demo.gif deleted file mode 100644 index ab21143f5..000000000 Binary files a/content/courses/solana-course/assets/anchor-frontend-demo.gif and /dev/null differ diff --git a/content/courses/solana-course/assets/anchor-frontend-increment.png b/content/courses/solana-course/assets/anchor-frontend-increment.png deleted file mode 100644 index 429d9495e..000000000 Binary files a/content/courses/solana-course/assets/anchor-frontend-increment.png and /dev/null differ diff --git a/content/courses/solana-course/assets/anchor-frontend-initialize.png b/content/courses/solana-course/assets/anchor-frontend-initialize.png deleted file mode 100644 index 4219a46b6..000000000 Binary files a/content/courses/solana-course/assets/anchor-frontend-initialize.png and /dev/null differ diff --git a/content/courses/solana-course/assets/connect-wallet-modal.png b/content/courses/solana-course/assets/connect-wallet-modal.png deleted file mode 100644 index 26eb64082..000000000 Binary files a/content/courses/solana-course/assets/connect-wallet-modal.png and /dev/null differ diff --git a/content/courses/solana-course/assets/ed2559-curve.png b/content/courses/solana-course/assets/ed2559-curve.png deleted file mode 100644 index 691e6f5b2..000000000 Binary files a/content/courses/solana-course/assets/ed2559-curve.png and /dev/null differ diff --git a/content/courses/solana-course/assets/env-variables-test-success.png b/content/courses/solana-course/assets/env-variables-test-success.png deleted file mode 100644 index 48aec3202..000000000 Binary files a/content/courses/solana-course/assets/env-variables-test-success.png and /dev/null differ diff --git a/content/courses/solana-course/assets/hello-world-build-deploy.gif b/content/courses/solana-course/assets/hello-world-build-deploy.gif deleted file mode 100644 index b083484d1..000000000 Binary files a/content/courses/solana-course/assets/hello-world-build-deploy.gif and /dev/null differ diff --git a/content/courses/solana-course/assets/hello-world-create-wallet.gif b/content/courses/solana-course/assets/hello-world-create-wallet.gif deleted file mode 100644 index 62942754d..000000000 Binary files a/content/courses/solana-course/assets/hello-world-create-wallet.gif and /dev/null differ diff --git a/content/courses/solana-course/assets/hello-world-program-id.gif b/content/courses/solana-course/assets/hello-world-program-id.gif deleted file mode 100644 index c744da337..000000000 Binary files a/content/courses/solana-course/assets/hello-world-program-id.gif and /dev/null differ diff --git a/content/courses/solana-course/assets/hello-world-program-log.png b/content/courses/solana-course/assets/hello-world-program-log.png deleted file mode 100644 index affe13ff3..000000000 Binary files a/content/courses/solana-course/assets/hello-world-program-log.png and /dev/null differ diff --git a/content/courses/solana-course/assets/intro-frontend-challenge.png b/content/courses/solana-course/assets/intro-frontend-challenge.png deleted file mode 100644 index 499acb433..000000000 Binary files a/content/courses/solana-course/assets/intro-frontend-challenge.png and /dev/null 
differ diff --git a/content/courses/solana-course/assets/intro-frontend-demo.png b/content/courses/solana-course/assets/intro-frontend-demo.png deleted file mode 100644 index dc97c3b9b..000000000 Binary files a/content/courses/solana-course/assets/intro-frontend-demo.png and /dev/null differ diff --git a/content/courses/solana-course/assets/json-rpc-illustration.png b/content/courses/solana-course/assets/json-rpc-illustration.png deleted file mode 100644 index d78a64d16..000000000 Binary files a/content/courses/solana-course/assets/json-rpc-illustration.png and /dev/null differ diff --git a/content/courses/solana-course/assets/movie-review-logs.png b/content/courses/solana-course/assets/movie-review-logs.png deleted file mode 100644 index a3b3750b7..000000000 Binary files a/content/courses/solana-course/assets/movie-review-logs.png and /dev/null differ diff --git a/content/courses/solana-course/assets/movie-review-pt2-build-deploy.gif b/content/courses/solana-course/assets/movie-review-pt2-build-deploy.gif deleted file mode 100644 index e3f9fb1cf..000000000 Binary files a/content/courses/solana-course/assets/movie-review-pt2-build-deploy.gif and /dev/null differ diff --git a/content/courses/solana-course/assets/movie-reviews-frontend.png b/content/courses/solana-course/assets/movie-reviews-frontend.png deleted file mode 100644 index 792147a7b..000000000 Binary files a/content/courses/solana-course/assets/movie-reviews-frontend.png and /dev/null differ diff --git a/content/courses/solana-course/assets/multi-button-connect.png b/content/courses/solana-course/assets/multi-button-connect.png deleted file mode 100644 index fe4bce858..000000000 Binary files a/content/courses/solana-course/assets/multi-button-connect.png and /dev/null differ diff --git a/content/courses/solana-course/assets/multi-button-connected.png b/content/courses/solana-course/assets/multi-button-connected.png deleted file mode 100644 index 20ed97262..000000000 Binary files a/content/courses/solana-course/assets/multi-button-connected.png and /dev/null differ diff --git a/content/courses/solana-course/assets/multi-button-select-wallet.png b/content/courses/solana-course/assets/multi-button-select-wallet.png deleted file mode 100644 index 952121d71..000000000 Binary files a/content/courses/solana-course/assets/multi-button-select-wallet.png and /dev/null differ diff --git a/content/courses/solana-course/assets/scavenger-hunt-screenshot.png b/content/courses/solana-course/assets/scavenger-hunt-screenshot.png deleted file mode 100644 index d6557c108..000000000 Binary files a/content/courses/solana-course/assets/scavenger-hunt-screenshot.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-explorer-devnet.png b/content/courses/solana-course/assets/solana-explorer-devnet.png deleted file mode 100644 index 1959aab3d..000000000 Binary files a/content/courses/solana-course/assets/solana-explorer-devnet.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-explorer-ping-result.png b/content/courses/solana-course/assets/solana-explorer-ping-result.png deleted file mode 100644 index 30f4a6658..000000000 Binary files a/content/courses/solana-course/assets/solana-explorer-ping-result.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-explorer-transaction-overview.png b/content/courses/solana-course/assets/solana-explorer-transaction-overview.png deleted file mode 100644 index f0c76eea0..000000000 Binary files 
a/content/courses/solana-course/assets/solana-explorer-transaction-overview.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-nft-metaplex-metadata.png b/content/courses/solana-course/assets/solana-nft-metaplex-metadata.png deleted file mode 100644 index a1ac6b8fe..000000000 Binary files a/content/courses/solana-course/assets/solana-nft-metaplex-metadata.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-ping-app.png b/content/courses/solana-course/assets/solana-ping-app.png deleted file mode 100644 index 7593b6556..000000000 Binary files a/content/courses/solana-course/assets/solana-ping-app.png and /dev/null differ diff --git a/content/courses/solana-course/assets/solana-send-sol-app.png b/content/courses/solana-course/assets/solana-send-sol-app.png deleted file mode 100644 index 9ce660b5d..000000000 Binary files a/content/courses/solana-course/assets/solana-send-sol-app.png and /dev/null differ diff --git a/content/courses/solana-course/assets/student-intros-frontend.png b/content/courses/solana-course/assets/student-intros-frontend.png deleted file mode 100644 index 5efa12603..000000000 Binary files a/content/courses/solana-course/assets/student-intros-frontend.png and /dev/null differ diff --git a/content/courses/solana-course/assets/token-program-frontend.png b/content/courses/solana-course/assets/token-program-frontend.png deleted file mode 100644 index c6b2f31cd..000000000 Binary files a/content/courses/solana-course/assets/token-program-frontend.png and /dev/null differ diff --git a/content/courses/solana-course/assets/token-program-usdc-mint.png b/content/courses/solana-course/assets/token-program-usdc-mint.png deleted file mode 100644 index d6580680c..000000000 Binary files a/content/courses/solana-course/assets/token-program-usdc-mint.png and /dev/null differ diff --git a/content/courses/solana-course/assets/token-swap-frontend.png b/content/courses/solana-course/assets/token-swap-frontend.png deleted file mode 100644 index a3d2efd12..000000000 Binary files a/content/courses/solana-course/assets/token-swap-frontend.png and /dev/null differ diff --git a/content/courses/solana-course/assets/wallet-connect-prompt.png b/content/courses/solana-course/assets/wallet-connect-prompt.png deleted file mode 100644 index 21aee04a9..000000000 Binary files a/content/courses/solana-course/assets/wallet-connect-prompt.png and /dev/null differ diff --git a/content/courses/solana-course/assets/wallet-transaction-approval-prompt.png b/content/courses/solana-course/assets/wallet-transaction-approval-prompt.png deleted file mode 100644 index d4a178e16..000000000 Binary files a/content/courses/solana-course/assets/wallet-transaction-approval-prompt.png and /dev/null differ diff --git a/content/courses/solana-course/content/account-data-matching.md b/content/courses/solana-course/content/account-data-matching.md deleted file mode 100644 index a4cae95d8..000000000 --- a/content/courses/solana-course/content/account-data-matching.md +++ /dev/null @@ -1,526 +0,0 @@ ---- -title: Account Data Matching -objectives: - - Explain the security risks associated with missing data validation checks - - Implement data validation checks using long-form Rust - - Implement data validation checks using Anchor constraints ---- - -# TL;DR - -- Use **data validation checks** to verify that account data matches an expected - value**.** Without appropriate data validations checks, unexpected accounts - may be used in an instruction. 
-- To implement data validations checks in Rust, simply compare the data stored - on an account to an expected value. - ```rust - if ctx.accounts.user.key() != ctx.accounts.user_data.user { - return Err(ProgramError::InvalidAccountData.into()); - } - ``` -- In Anchor, you can use `constraint` to checks whether the given expression - evaluates to true. Alternatively, you can use `has_one` to check that a target - account field stored on the account matches the key of an account in the - `Accounts` struct. - -# Overview - -Account data matching refers to data validation checks used to verify the data -stored on an account matches an expected value. Data validation checks provide a -way to include additional constraints to ensure the appropriate accounts are -passed into an instruction. - -This can be useful when accounts required by an instruction have dependencies on -values stored in other accounts or if an instruction is dependent on the data -stored in an account. - -### Missing data validation check - -The example below includes an `update_admin` instruction that updates the -`admin` field stored on an `admin_config` account. - -The instruction is missing a data validation check to verify the `admin` account -signing the transaction matches the `admin` stored on the `admin_config` -account. This means any account signing the transaction and passed into the -instruction as the `admin` account can update the `admin_config` account. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod data_validation { - use super::*; - ... - pub fn update_admin(ctx: Context) -> Result<()> { - ctx.accounts.admin_config.admin = ctx.accounts.new_admin.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct UpdateAdmin<'info> { - #[account(mut)] - pub admin_config: Account<'info, AdminConfig>, - #[account(mut)] - pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -### Add data validation check - -The basic Rust approach to solve this problem is to simply compare the passed in -`admin` key to the `admin` key stored in the `admin_config` account, throwing an -error if they don’t match. - -```rust -if ctx.accounts.admin.key() != ctx.accounts.admin_config.admin { - return Err(ProgramError::InvalidAccountData.into()); -} -``` - -By adding a data validation check, the `update_admin` instruction would only -process if the `admin` signer of the transaction matched the `admin` stored on -the `admin_config` account. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod data_validation { - use super::*; - ... - pub fn update_admin(ctx: Context) -> Result<()> { - if ctx.accounts.admin.key() != ctx.accounts.admin_config.admin { - return Err(ProgramError::InvalidAccountData.into()); - } - ctx.accounts.admin_config.admin = ctx.accounts.new_admin.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct UpdateAdmin<'info> { - #[account(mut)] - pub admin_config: Account<'info, AdminConfig>, - #[account(mut)] - pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -### Use Anchor constraints - -Anchor simplifies this with the `has_one` constraint. You can use the `has_one` -constraint to move the data validation check from the instruction logic to the -`UpdateAdmin` struct. 
- -In the example below, `has_one = admin` specifies that the `admin` account -signing the transaction must match the `admin` field stored on the -`admin_config` account. To use the `has_one` constraint, the naming convention -of the data field on the account must be consistent with the naming on the -account validation struct. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod data_validation { - use super::*; - ... - pub fn update_admin(ctx: Context<UpdateAdmin>) -> Result<()> { - ctx.accounts.admin_config.admin = ctx.accounts.new_admin.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct UpdateAdmin<'info> { - #[account( - mut, - has_one = admin - )] - pub admin_config: Account<'info, AdminConfig>, - #[account(mut)] - pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -Alternatively, you can use `constraint` to manually add an expression that must -evaluate to true in order for execution to continue. This is useful when for -some reason naming can’t be consistent or when you need a more complex -expression to fully validate the incoming data. - -```rust -#[derive(Accounts)] -pub struct UpdateAdmin<'info> { - #[account( - mut, - constraint = admin_config.admin == admin.key() - )] - pub admin_config: Account<'info, AdminConfig>, - #[account(mut)] - pub admin: Signer<'info>, - pub new_admin: SystemAccount<'info>, -} -``` - -# Demo - -For this demo we’ll create a simple “vault” program similar to the program we -used in the Signer Authorization lesson and the Owner Check lesson. Similar to -those demos, we’ll show in this demo how a missing data validation check could -allow the vault to be drained. - -### 1. Starter - -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-account-data-matching). -The starter code includes a program with two instructions and the boilerplate -setup for the test file. - -The `initialize_vault` instruction initializes a new `Vault` account and a new -`TokenAccount`. The `Vault` account will store the address of a token account, -the authority of the vault, and a withdraw destination token account. - -The authority of the new token account will be set as the `vault`, a PDA of the -program. This allows the `vault` account to sign for the transfer of tokens from -the token account. - -The `insecure_withdraw` instruction transfers all the tokens in the `vault` -account’s token account to a `withdraw_destination` token account. - -Notice that this instruction **does** have a signer check for -`authority` and an owner check for `vault`. However, nowhere in the account -validation or instruction logic is there code that checks that the `authority` -account passed into the instruction matches the `authority` account on the -`vault`. 
- -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Mint, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod account_data_matching { - use super::*; - - pub fn initialize_vault(ctx: Context<InitializeVault>) -> Result<()> { - ctx.accounts.vault.token_account = ctx.accounts.token_account.key(); - ctx.accounts.vault.authority = ctx.accounts.authority.key(); - ctx.accounts.vault.withdraw_destination = ctx.accounts.withdraw_destination.key(); - Ok(()) - } - - pub fn insecure_withdraw(ctx: Context<InsecureWithdraw>) -> Result<()> { - let amount = ctx.accounts.token_account.amount; - - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.vault.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InitializeVault<'info> { - #[account( - init, - payer = authority, - space = 8 + 32 + 32 + 32, - seeds = [b"vault"], - bump, - )] - pub vault: Account<'info, Vault>, - #[account( - init, - payer = authority, - token::mint = mint, - token::authority = vault, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - pub withdraw_destination: Account<'info, TokenAccount>, - pub mint: Account<'info, Mint>, - #[account(mut)] - pub authority: Signer<'info>, - pub token_program: Program<'info, Token>, - pub system_program: Program<'info, System>, - pub rent: Sysvar<'info, Rent>, -} - -#[derive(Accounts)] -pub struct InsecureWithdraw<'info> { - #[account( - seeds = [b"vault"], - bump, - )] - pub vault: Account<'info, Vault>, - #[account( - mut, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - pub authority: Signer<'info>, -} - -#[account] -pub struct Vault { - token_account: Pubkey, - authority: Pubkey, - withdraw_destination: Pubkey, -} -``` - -### 2. Test `insecure_withdraw` instruction - -To prove that this is a problem, let’s write a test where an account other than -the vault’s `authority` tries to withdraw from the vault. - -The test file includes the code to invoke the `initialize_vault` instruction -using the provider wallet as the `authority` and then mints 100 tokens to the -`vault` token account. - -Add a test to invoke the `insecure_withdraw` instruction. Use -`withdrawDestinationFake` as the `withdrawDestination` account and `walletFake` -as the `authority`. Then send the transaction using `walletFake`. - -Since there are no checks verifying that the `authority` account passed into the -instruction matches the values stored on the `vault` account initialized in the -first test, the instruction will process successfully and the tokens will be -transferred to the `withdrawDestinationFake` account. - -```tsx -describe("account-data-matching", () => { - ... 
- it("Insecure withdraw", async () => { - const tx = await program.methods - .insecureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) -}) -``` - -Run `anchor test` to see that both transactions will complete successfully. - -```bash -account-data-matching - ✔ Initialize Vault (811ms) - ✔ Insecure withdraw (403ms) -``` - -### 3. Add `secure_withdraw` instruction - -Let’s go implement a secure version of this instruction called -`secure_withdraw`. - -This instruction will be identical to the `insecure_withdraw` instruction, -except we’ll use the `has_one` constraint in the account validation struct -(`SecureWithdraw`) to check that the `authority` account passed into the -instruction matches the `authority` account on the `vault` account. That way -only the correct authority account can withdraw the vault’s tokens. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Mint, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod account_data_matching { - use super::*; - ... - pub fn secure_withdraw(ctx: Context) -> Result<()> { - let amount = ctx.accounts.token_account.amount; - - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.vault.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct SecureWithdraw<'info> { - #[account( - seeds = [b"vault"], - bump, - has_one = token_account, - has_one = authority, - has_one = withdraw_destination, - - )] - pub vault: Account<'info, Vault>, - #[account( - mut, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - pub authority: Signer<'info>, -} -``` - -### 4. Test `secure_withdraw` instruction - -Now let’s test the `secure_withdraw` instruction with two tests: one that uses -`walletFake` as the authority and one that uses `wallet` as the authority. We -expect the first invocation to return an error and the second to succeed. - -```tsx -describe("account-data-matching", () => { - ... 
- it("Secure withdraw, expect error", async () => { - try { - const tx = await program.methods - .secureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) - } - }) - - it("Secure withdraw", async () => { - await spl.mintTo( - connection, - wallet.payer, - mint, - tokenPDA, - wallet.payer, - 100 - ) - - await program.methods - .secureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestination, - authority: wallet.publicKey, - }) - .rpc() - - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) -}) -``` - -Run `anchor test` to see that the transaction using an incorrect authority -account will now return an Anchor Error while the transaction using correct -accounts completes successfully. - -```bash -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS invoke [1]', -'Program log: Instruction: SecureWithdraw', -'Program log: AnchorError caused by account: vault. Error Code: ConstraintHasOne. Error Number: 2001. Error Message: A has one constraint was violated.', -'Program log: Left:', -'Program log: DfLZV18rD7wCQwjYvhTFwuvLh49WSbXFeJFPQb5czifH', -'Program log: Right:', -'Program log: 5ovvmG5ntwUC7uhNWfirjBHbZD96fwuXDMGXiyMwPg87', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS consumed 10401 of 200000 compute units', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS failed: custom program error: 0x7d1' -``` - -Note that Anchor specifies in the logs the account that causes the error -(`AnchorError caused by account: vault`). - -```bash -✔ Secure withdraw, expect error (77ms) -✔ Secure withdraw (10073ms) -``` - -And just like that, you've closed up the security loophole. The theme across -most of these potential exploits is that they're quite simple. However, as your -programs grow in scope and complexity, it becomes increasingly easy to miss -possible exploits. It's great to get in the habit of writing tests that send -instructions that _shouldn't_ work. The more the better. That way you catch -problems before you deploy. - -If you want to take a look at the final solution code, you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-account-data-matching/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that proper data checks -are in place to avoid security exploits. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. 
diff --git a/content/courses/solana-course/content/anchor-cpi.md b/content/courses/solana-course/content/anchor-cpi.md deleted file mode 100644 index 1868fcfee..000000000 --- a/content/courses/solana-course/content/anchor-cpi.md +++ /dev/null @@ -1,669 +0,0 @@ ---- -title: Anchor CPIs and Errors -objectives: - - Make Cross Program Invocations (CPIs) from an Anchor program - - Use the `cpi` feature to generate helper functions for invoking instructions - on existing Anchor programs - - Use `invoke` and `invoke_signed` to make CPIs where CPI helper functions are - unavailable - - Create and return custom Anchor errors ---- - -# TL;DR - -- Anchor provides a simplified way to create CPIs using a **`CpiContext`** -- Anchor's **`cpi`** feature generates CPI helper functions for invoking - instructions on existing Anchor programs -- If you do not have access to CPI helper functions, you can still use `invoke` - and `invoke_signed` directly -- The **`error_code`** attribute macro is used to create custom Anchor Errors - -# Overview - -If you think back to the [first CPI lesson](cpi.md), you'll remember that -constructing CPIs can get tricky with vanilla Rust. Anchor makes it a bit -simpler though, especially if the program you're invoking is also an Anchor -program whose crate you can access. - -In this lesson, you'll learn how to construct an Anchor CPI. You'll also learn -how to throw custom errors from an Anchor program so that you can start to write -more sophisticated Anchor programs. - -## Cross Program Invocations (CPIs) with Anchor - -As a refresher, CPIs allow programs to invoke instructions on other programs -using the `invoke` or `invoke_signed` functions. This allows new programs to -build on top of existing programs (we call that composability). - -While making CPIs directly using `invoke` or `invoke_signed` is still an option, -Anchor also provides a simplified way to make CPIs by using a `CpiContext`. - -In this lesson, you'll use the `anchor_spl` crate to make CPIs to the SPL Token -Program. You can explore what's available in the `anchor_spl` crate -[here](https://docs.rs/anchor-spl/latest/anchor_spl/#). - -### `CpiContext` - -The first step in making a CPI is to create an instance of `CpiContext`. -`CpiContext` is very similar to `Context`, the first argument type required by -Anchor instruction functions. They are both declared in the same module and -share similar functionality. - -The `CpiContext` type specifies non-argument inputs for cross program -invocations: - -- `accounts` - the list of accounts required for the instruction being invoked -- `remaining_accounts` - any remaining accounts -- `program` - the program ID of the program being invoked -- `signer_seeds` - if a PDA is signing, include the seeds required to derived - the PDA - -```rust -pub struct CpiContext<'a, 'b, 'c, 'info, T> -where - T: ToAccountMetas + ToAccountInfos<'info>, -{ - pub accounts: T, - pub remaining_accounts: Vec>, - pub program: AccountInfo<'info>, - pub signer_seeds: &'a [&'b [&'c [u8]]], -} -``` - -You use `CpiContext::new` to construct a new instance when passing along the -original transaction signature. - -```rust -CpiContext::new(cpi_program, cpi_accounts) -``` - -```rust -pub fn new( - program: AccountInfo<'info>, - accounts: T - ) -> Self { - Self { - accounts, - program, - remaining_accounts: Vec::new(), - signer_seeds: &[], - } -} -``` - -You use `CpiContext::new_with_signer` to construct a new instance when signing -on behalf of a PDA for the CPI. 
- -```rust -CpiContext::new_with_signer(cpi_program, cpi_accounts, seeds) -``` - -```rust -pub fn new_with_signer( - program: AccountInfo<'info>, - accounts: T, - signer_seeds: &'a [&'b [&'c [u8]]], -) -> Self { - Self { - accounts, - program, - signer_seeds, - remaining_accounts: Vec::new(), - } -} -``` - -### CPI accounts - -One of the main things about `CpiContext` that simplifies cross-program -invocations is that the `accounts` argument is a generic type that lets you pass -in any object that adopts the `ToAccountMetas` and `ToAccountInfos<'info>` -traits. - -These traits are added by the `#[derive(Accounts)]` attribute macro that you've -used before when creating structs to represent instruction accounts. That means -you can use similar structs with `CpiContext`. - -This helps with code organization and type safety. - -### Invoke an instruction on another Anchor program - -When the program you're calling is an Anchor program with a published crate, -Anchor can generate instruction builders and CPI helper functions for you. - -Simply declare your program's dependency on the program you're calling in your -program's `Cargo.toml` file as follows: - -``` -[dependencies] -callee = { path = "../callee", features = ["cpi"]} -``` - -By adding `features = ["cpi"]`, you enable the `cpi` feature and your program -gains access to the `callee::cpi` module. - -The `cpi` module exposes `callee`'s instructions as a Rust function that takes -as arguments a `CpiContext` and any additional instruction data. These functions -use the same format as the instruction functions in your Anchor programs, only -with `CpiContext` instead of `Context`. The `cpi` module also exposes the -accounts structs required for calling the instructions. - -For example, if `callee` has the instruction `do_something` that requires the -accounts defined in the `DoSomething` struct, you could invoke `do_something` as -follows: - -```rust -use anchor_lang::prelude::*; -use callee; -... - -#[program] -pub mod lootbox_program { - use super::*; - - pub fn call_another_program(ctx: Context, params: InitUserParams) -> Result<()> { - callee::cpi::do_something( - CpiContext::new( - ctx.accounts.callee.to_account_info(), - callee::DoSomething { - user: ctx.accounts.user.to_account_info() - } - ) - ) - Ok(()) - } -} -... -``` - -### Invoke an instruction on a non-Anchor program - -When the program you're calling is _not_ an Anchor program, there are two -possible options: - -1. It's possible that the program maintainers have published a crate with their - own helper functions for calling into their program. For example, the - `anchor_spl` crate provides helper functions that are virtually identical - from a call-site perspective to what you would get with the `cpi` module of - an Anchor program. E.g. you can mint using the - [`mint_to` helper function](https://docs.rs/anchor-spl/latest/src/anchor_spl/token.rs.html#36-58) - and use the - [`MintTo` accounts struct](https://docs.rs/anchor-spl/latest/anchor_spl/token/struct.MintTo.html). - ```rust - token::mint_to( - CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::MintTo { - mint: ctx.accounts.mint_account.to_account_info(), - to: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.mint_authority.to_account_info(), - }, - &[&[ - "mint".as_bytes(), - &[*ctx.bumps.get("mint_authority").unwrap()], - ]] - ), - amount, - )?; - ``` -2. 
If there is no helper module for the program whose instruction(s) you need to - invoke, you can fall back to using `invoke` and `invoke_signed`. In fact, the - source code of the `mint_to` helper function referenced above shows an - example us using `invoke_signed` when given a `CpiContext`. You can follow a - similar pattern if you decide to use an accounts struct and `CpiContext` to - organize and prepare your CPI. - ```rust - pub fn mint_to<'a, 'b, 'c, 'info>( - ctx: CpiContext<'a, 'b, 'c, 'info, MintTo<'info>>, - amount: u64, - ) -> Result<()> { - let ix = spl_token::instruction::mint_to( - &spl_token::ID, - ctx.accounts.mint.key, - ctx.accounts.to.key, - ctx.accounts.authority.key, - &[], - amount, - )?; - solana_program::program::invoke_signed( - &ix, - &[ - ctx.accounts.to.clone(), - ctx.accounts.mint.clone(), - ctx.accounts.authority.clone(), - ], - ctx.signer_seeds, - ) - .map_err(Into::into) - } - ``` - -## Throw errors in Anchor - -We're deep enough into Anchor at this point that it's important to know how to -create custom errors. - -Ultimately, all programs return the same error -type: [`ProgramError`](https://docs.rs/solana-program/latest/solana_program/program_error/enum.ProgramError.html). -However, when writing a program using Anchor you can use `AnchorError` as an -abstraction on top of `ProgramError`. This abstraction provides additional -information when a program fails, including: - -- The error name and number -- Location in the code where the error was thrown -- The account that violated a constraint - -```rust -pub struct AnchorError { - pub error_name: String, - pub error_code_number: u32, - pub error_msg: String, - pub error_origin: Option, - pub compared_values: Option, -} -``` - -Anchor Errors can be divided into: - -- Anchor Internal Errors that the framework returns from inside its own code -- Custom errors that you the developer can create - -You can add errors unique to your program by using the `error_code` attribute. -Simply add this attribute to a custom `enum` type. You can then use the variants -of the `enum` as errors in your program. Additionally, you can add an error -message to each variant using the `msg` attribute. Clients can then display this -error message if the error occurs. - -```rust -#[error_code] -pub enum MyError { - #[msg("MyAccount may only hold data below 100")] - DataTooLarge -} -``` - -To return a custom error you can use -the [err](https://docs.rs/anchor-lang/latest/anchor_lang/macro.err.html) or -the [error](https://docs.rs/anchor-lang/latest/anchor_lang/prelude/macro.error.html) -macro from an instruction function. These add file and line information to the -error that is then logged by Anchor to help you with debugging. - -```rust -#[program] -mod hello_anchor { - use super::*; - pub fn set_data(ctx: Context, data: MyAccount) -> Result<()> { - if data.data >= 100 { - return err!(MyError::DataTooLarge); - } - ctx.accounts.my_account.set_inner(data); - Ok(()) - } -} - -#[error_code] -pub enum MyError { - #[msg("MyAccount may only hold data below 100")] - DataTooLarge -} -``` - -Alternatively, you can use -the [require](https://docs.rs/anchor-lang/latest/anchor_lang/macro.require.html) macro -to simplify returning errors. 
The code above can be refactored to the following: - -```rust -#[program] -mod hello_anchor { - use super::*; - pub fn set_data(ctx: Context, data: MyAccount) -> Result<()> { - require!(data.data < 100, MyError::DataTooLarge); - ctx.accounts.my_account.set_inner(data); - Ok(()) - } -} - -#[error_code] -pub enum MyError { - #[msg("MyAccount may only hold data below 100")] - DataTooLarge -} -``` - -# Demo - -Let’s practice the concepts we’ve gone over in this lesson by building on top of -the Movie Review program from previous lessons. - -In this demo we’ll update the program to mint tokens to users when they submit a -new movie review. - -### 1. Starter - -To get started, we will be using the final state of the Anchor Movie Review -program from the previous lesson. So, if you just completed that lesson then -you’re all set and ready to go. If you are just jumping in here, no worries, you -can download the starter code -[here](https://github.com/Unboxed-Software/anchor-movie-review-program/tree/solution-pdas). -We'll be using the `solution-pdas` branch as our starting point. - -### 2. Add dependencies to `Cargo.toml` - -Before we get started we need enable the `init-if-needed` feature and add the -`anchor-spl` crate to the dependencies in `Cargo.toml`. If you need to brush up -on the `init-if-needed` feature take a look at the -[Anchor PDAs and Accounts lesson](anchor-pdas.md). - -```rust -[dependencies] -anchor-lang = { version = "0.25.0", features = ["init-if-needed"] } -anchor-spl = "0.25.0" -``` - -### 3. Initialize reward token - -Next, navigate to `lib.rs` and create an instruction to initialize a new token -mint. This will be the token that is minted each time a user leaves a review. -Note that we don't need to include any custom instruction logic since the -initialization can be handled entirely through Anchor constraints. - -```rust -pub fn initialize_token_mint(_ctx: Context) -> Result<()> { - msg!("Token mint initialized"); - Ok(()) -} -``` - -Now, implement the `InitializeMint` context type and list the accounts and -constraints the instruction requires. Here we initialize a new `Mint` account -using a PDA with the string "mint" as a seed. Note that we can use the same PDA -for both the address of the `Mint` account and the mint authority. Using a PDA -as the mint authority enables our program to sign for the minting of the tokens. - -In order to initialize the `Mint` account, we'll need to include the -`token_program`, `rent`, and `system_program` in the list of accounts. - -```rust -#[derive(Accounts)] -pub struct InitializeMint<'info> { - #[account( - init, - seeds = ["mint".as_bytes()], - bump, - payer = user, - mint::decimals = 6, - mint::authority = mint, - )] - pub mint: Account<'info, Mint>, - #[account(mut)] - pub user: Signer<'info>, - pub token_program: Program<'info, Token>, - pub rent: Sysvar<'info, Rent>, - pub system_program: Program<'info, System> -} -``` - -There may be some constraints above that you haven't seen yet. Adding -`mint::decimals` and `mint::authority` along with `init` ensures that the -account is initialized as a new token mint with the appropriate decimals and -mint authority set. - -### 4. Anchor Error - -Next, let’s create an Anchor Error that we’ll use when validating the `rating` -passed to either the `add_movie_review` or `update_movie_review` instruction. - -```rust -#[error_code] -enum MovieReviewError { - #[msg("Rating must be between 1 and 5")] - InvalidRating -} -``` - -### 5. 
Update `add_movie_review` instruction - -Now that we've done some setup, let’s update the `add_movie_review` instruction -and `AddMovieReview` context type to mint tokens to the reviewer. - -Next, update the `AddMovieReview` context type to add the following accounts: - -- `token_program` - we'll be using the Token Program to mint tokens -- `mint` - the mint account for the tokens that we'll mint to users when they - add a movie review -- `token_account` - the associated token account for the aforementioned `mint` - and reviewer -- `associated_token_program` - required because we'll be using the - `associated_token` constraint on the `token_account` -- `rent` - required because we are using the `init_if_needed` constraint on the - `token_account` - -```rust -#[derive(Accounts)] -#[instruction(title: String, description: String)] -pub struct AddMovieReview<'info> { - #[account( - init, - seeds=[title.as_bytes(), initializer.key().as_ref()], - bump, - payer = initializer, - space = 8 + 32 + 1 + 4 + title.len() + 4 + description.len() - )] - pub movie_review: Account<'info, MovieAccountState>, - #[account(mut)] - pub initializer: Signer<'info>, - pub system_program: Program<'info, System>, - // ADDED ACCOUNTS BELOW - pub token_program: Program<'info, Token>, - #[account( - seeds = ["mint".as_bytes()], - bump, - mut - )] - pub mint: Account<'info, Mint>, - #[account( - init_if_needed, - payer = initializer, - associated_token::mint = mint, - associated_token::authority = initializer - )] - pub token_account: Account<'info, TokenAccount>, - pub associated_token_program: Program<'info, AssociatedToken>, - pub rent: Sysvar<'info, Rent> -} -``` - -Again, some of the above constraints may be unfamiliar to you. The -`associated_token::mint` and `associated_token::authority` constraints along -with the `init_if_needed` constraint ensure that if the account has not already -been initialized, it will be initialized as an associated token account for the -specified mint and authority. - -Next, let’s update the `add_movie_review` instruction to do the following: - -- Check that `rating` is valid. If it is not a valid rating, return the - `InvalidRating` error. -- Make a CPI to the token program’s `mint_to` instruction using the mint - authority PDA as a signer. Note that we'll mint 10 tokens to the user but need - to adjust for the mint decimals by making it `10*10^6`. - -Fortunately, we can use the `anchor_spl` crate to access helper functions and -types like `mint_to` and `MintTo` for constructing our CPI to the Token Program. -`mint_to` takes a `CpiContext` and an integer as arguments, where the integer -represents the number of tokens to mint. `MintTo` can be used for the list of -accounts that the mint instruction needs. 
- -```rust -pub fn add_movie_review(ctx: Context<AddMovieReview>, title: String, description: String, rating: u8) -> Result<()> { - msg!("Movie review account created"); - msg!("Title: {}", title); - msg!("Description: {}", description); - msg!("Rating: {}", rating); - - require!(rating >= 1 && rating <= 5, MovieReviewError::InvalidRating); - - let movie_review = &mut ctx.accounts.movie_review; - movie_review.reviewer = ctx.accounts.initializer.key(); - movie_review.title = title; - movie_review.description = description; - movie_review.rating = rating; - - mint_to( - CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - MintTo { - authority: ctx.accounts.mint.to_account_info(), - to: ctx.accounts.token_account.to_account_info(), - mint: ctx.accounts.mint.to_account_info() - }, - &[&[ - "mint".as_bytes(), - &[*ctx.bumps.get("mint").unwrap()] - ]] - ), - 10*10^6 - )?; - - msg!("Minted tokens"); - - Ok(()) -} -``` - -### 6. Update `update_movie_review` instruction - -Here we are only adding the check that `rating` is valid. - -```rust -pub fn update_movie_review(ctx: Context<UpdateMovieReview>, title: String, description: String, rating: u8) -> Result<()> { - msg!("Movie review account space reallocated"); - msg!("Title: {}", title); - msg!("Description: {}", description); - msg!("Rating: {}", rating); - - require!(rating >= 1 && rating <= 5, MovieReviewError::InvalidRating); - - let movie_review = &mut ctx.accounts.movie_review; - movie_review.description = description; - movie_review.rating = rating; - - Ok(()) -} -``` - -### 7. Test - -Those are all of the changes we need to make to the program! Now, let’s update -our tests. - -Start by making sure your imports and `describe` function look like this: - -```ts -import * as anchor from "@project-serum/anchor" -import { Program } from "@project-serum/anchor" -import { expect } from "chai" -import { getAssociatedTokenAddress, getAccount } from "@solana/spl-token" -import { AnchorMovieReviewProgram } from "../target/types/anchor_movie_review_program" - -describe("anchor-movie-review-program", () => { - // Configure the client to use the local cluster. - const provider = anchor.AnchorProvider.env() - anchor.setProvider(provider) - - const program = anchor.workspace - .AnchorMovieReviewProgram as Program<AnchorMovieReviewProgram> - - const movie = { - title: "Just a test movie", - description: "Wow what a good movie it was real great", - rating: 5, - } - - const [movie_pda] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from(movie.title), provider.wallet.publicKey.toBuffer()], - program.programId - ) - - const [mint] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from("mint")], - program.programId - ) -... -} -``` - -With that done, add a test for the `initializeTokenMint` instruction: - -```ts -it("Initializes the reward token", async () => { - const tx = await program.methods.initializeTokenMint().rpc(); -}); -``` - -Notice that we didn't have to add `.accounts` because they can be inferred, -including the `mint` account (assuming you have seed inference enabled). - -Next, update the test for the `addMovieReview` instruction. The primary -additions are: - -1. Get the associated token address that needs to be passed into the - instruction as an account that cannot be inferred -2. 
Check at the end of the test that the associated token account has 10 tokens - -```ts -it("Movie review is added`", async () => { - const tokenAccount = await getAssociatedTokenAddress( - mint, - provider.wallet.publicKey, - ); - - const tx = await program.methods - .addMovieReview(movie.title, movie.description, movie.rating) - .accounts({ - tokenAccount: tokenAccount, - }) - .rpc(); - - const account = await program.account.movieAccountState.fetch(movie_pda); - expect(movie.title === account.title); - expect(movie.rating === account.rating); - expect(movie.description === account.description); - expect(account.reviewer === provider.wallet.publicKey); - - const userAta = await getAccount(provider.connection, tokenAccount); - expect(Number(userAta.amount)).to.equal((10 * 10) ^ 6); -}); -``` - -After that, neither the test for `updateMovieReview` nor the test for -`deleteMovieReview` need any changes. - -At this point, run `anchor test` and you should see the following output - -```console -anchor-movie-review-program - ✔ Initializes the reward token (458ms) - ✔ Movie review is added (410ms) - ✔ Movie review is updated (402ms) - ✔ Deletes a movie review (405ms) - - 5 passing (2s) -``` - -If you need more time with the concepts from this lesson or got stuck along the -way, feel free to take a look at the -[solution code](https://github.com/Unboxed-Software/anchor-movie-review-program/tree/solution-add-tokens). -Note that the solution to this demo is on the `solution-add-tokens` branch. - -# Challenge - -To apply what you've learned about CPIs in this lesson, think about how you -could incorporate them into the Student Intro program. You could do something -similar to what we did in the demo here and add some functionality to mint -tokens to users when they introduce themselves. - -Try to do this independently if you can! But if you get stuck, feel free to -reference -this [solution code](https://github.com/Unboxed-Software/anchor-student-intro-program/tree/cpi-challenge). -Note that your code may look slightly different than the solution code depending -on your implementation. diff --git a/content/courses/solana-course/content/anchor-pdas.md b/content/courses/solana-course/content/anchor-pdas.md deleted file mode 100644 index 634d2348a..000000000 --- a/content/courses/solana-course/content/anchor-pdas.md +++ /dev/null @@ -1,812 +0,0 @@ ---- -title: Anchor PDAs and Accounts -objectives: - - Use the `seeds` and `bump` constraints to work with PDA accounts in Anchor - - Enable and use the `init_if_needed` constraint - - Use the `realloc` constraint to reallocate space on an existing account - - Use the `close` constraint to close an existing account ---- - -# TL;DR - -- The `seeds` and `bump` constraints are used to initialize and validate PDA - accounts in Anchor -- The `init_if_needed` constraint is used to conditionally initialize a new - account -- The `realloc` constraint is used to reallocate space on an existing account -- The `close` constraint is used to close an account and refund its rent - -# Overview - -In this lesson you'll learn how to work with PDAs, reallocate accounts, and -close accounts in Anchor. - -Recall that Anchor programs separate instruction logic from account validation. -Account validation primarily happens within structs that represent the list of -accounts needed for a given instruction. Each field of the struct represents a -different account, and you can customize the validation performed on the account -using the `#[account(...)]` attribute macro. 
- -In addition to using constraints for account validation, some constraints can -handle repeatable tasks that would otherwise require a lot of boilerplate inside -our instruction logic. This lesson will introduce the `seeds`, `bump`, -`realloc`, and `close` constraints to help you initialize and validate PDAs, -reallocate accounts, and close accounts. - -## PDAs with Anchor - -Recall that -[PDAs](https://github.com/Unboxed-Software/solana-course/blob/main/content/pda.md) -are derived using a list of optional seeds, a bump seed, and a program ID. -Anchor provides a convenient way to validate a PDA with the `seeds` and `bump` -constraints. - -```rust -#[derive(Accounts)] -struct ExampleAccounts { - #[account( - seeds = [b"example_seed"], - bump - )] - pub pda_account: Account<'info, AccountType>, -} -``` - -During account validation, Anchor will derive a PDA using the seeds specified in -the `seeds` constraint and verify that the account passed into the instruction -matches the PDA found using the specified `seeds`. - -When the `bump` constraint is included without specifying a specific bump, -Anchor will default to using the canonical bump (the first bump that results in -a valid PDA). In most cases you should use the canonical bump. - -You can access other fields from within the struct from constraints, so you can -specify seeds that are dependent on other accounts like the signer's public key. - -You can also reference the deserialized instruction data if you add the -`#[instruction(...)]` attribute macro to the struct. - -For example, the following example shows a list of accounts that include -`pda_account` and `user`. The `pda_account` is constrained such that the seeds -must be the string "example_seed," the public key of `user`, and the string -passed into the instruction as `instruction_data`. - -```rust -#[derive(Accounts)] -#[instruction(instruction_data: String)] -pub struct Example<'info> { - #[account( - seeds = [b"example_seed", user.key().as_ref(), instruction_data.as_ref()], - bump - )] - pub pda_account: Account<'info, AccountType>, - #[account(mut)] - pub user: Signer<'info> -} -``` - -If the `pda_account` address provided by the client doesn't match the PDA -derived using the specified seeds and the canonical bump, then the account -validation will fail. - -### Use PDAs with the `init` constraint - -You can combine the `seeds` and `bump` constraints with the `init` constraint to -initialize an account using a PDA. - -Recall that the `init` constraint must be used in combination with the `payer` -and `space` constraints to specify the account that will pay for account -initialization and the space to allocate on the new account. Additionally, you -must include `system_program` as one of the fields of the account validation -struct. - -```rust -#[derive(Accounts)] -pub struct InitializePda<'info> { - #[account( - init, - seeds = [b"example_seed", user.key().as_ref()], - bump, - payer = user, - space = 8 + 8 - )] - pub pda_account: Account<'info, AccountType>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[account] -pub struct AccountType { - pub data: u64, -} -``` - -When using `init` for non-PDA accounts, Anchor defaults to setting the owner of -the initialized account to be the program currently executing the instruction. - -However, when using `init` in combination with `seeds` and `bump`, the owner -_must_ be the executing program. 
This is because initializing an account for the -PDA requires a signature that only the executing program can provide. In other -words, the signature verification for the initialization of the PDA account -would fail if the program ID used to derive the PDA did not match the program ID -of the executing program. - -When determining the value of `space` for an account initialized and owned by -the executing Anchor program, remember that the first 8 bytes are reserved for -the account discriminator. This is an 8-byte value that Anchor calculates and -uses to identify the program account types. You can use this -[reference](https://www.anchor-lang.com/docs/space) to calculate how much space -you should allocate for an account. - -### Seed inference - -The account list for an instruction can get really long for some programs. To -simplify the client-side experience when invoking an Anchor program instruction, -we can turn on seed inference. - -Seed inference adds information about PDA seeds to the IDL so that Anchor can -infer PDA seeds from existing call-site information. In the previous example, -the seeds are `b"example_seed"` and `user.key()`. The first is static and -therefore known, and the second is known because `user` is the transaction -signer. - -If you use seed inference when building your program, then as long as you're -calling the program using Anchor, you don't need to explicitly derive and pass -in the PDA. Instead, the Anchor library will do it for you. - -You can turn on seed inference in the `Anchor.toml` file with `seeds = true` -under `[features]`. - -``` -[features] -seeds = true -``` - -### Use the `#[instruction(...)]` attribute macro - -Let's briefly look at the `#[instruction(...)]` attribute macro before moving -on. When using `#[instruction(...)]`, the instruction data you provide in the -list of arguments must match and be in the same order as the instruction -arguments. You can omit unused arguments at the end of the list, but you must -include all arguments up until the last one you will be using. - -For example, imagine an instruction has arguments `input_one`, `input_two`, and -`input_three`. If your account constraints need to reference `input_one` and -`input_three`, you need to list all three arguments in the `#[instruction(...)]` -attribute macro. - -However, if your constraints only reference `input_one` and `input_two`, you can -omit `input_three`. - -```rust -pub fn example_instruction( - ctx: Context, - input_one: String, - input_two: String, - input_three: String, -) -> Result<()> { - ... - Ok(()) -} - -#[derive(Accounts)] -#[instruction(input_one:String, input_two:String)] -pub struct Example<'info> { - ... -} -``` - -Additionally, you will get an error if you list the inputs in the incorrect -order: - -```rust -#[derive(Accounts)] -#[instruction(input_two:String, input_one:String)] -pub struct Example<'info> { - ... -} -``` - -## Init-if-needed - -Anchor provides an `init_if_needed` constraint that can be used to initialize an -account if the account has not already been initialized. - -This feature is gated behind a feature flag to make sure you are intentional -about using it. For security reasons, it's smart to avoid having one instruction -branch into multiple logic paths. And as the name suggests, `init_if_needed` -executes one of two possible code paths depending on the state of the account in -question. - -When using `init_if_needed`, you need to make sure to properly protect your -program against re-initialization attacks. 
You need to include checks in your -code that check that the initialized account cannot be reset to its initial -settings after the first time it was initialized. - -To use `init_if_needed`, you must first enable the feature in `Cargo.toml`. - -```rust -[dependencies] -anchor-lang = { version = "0.25.0", features = ["init-if-needed"] } -``` - -Once you’ve enabled the feature, you can include the constraint in the -`#[account(…)]` attribute macro. The example below demonstrates using the -`init_if_needed` constraint to initialize a new associated token account if one -does not already exist. - -```rust -#[program] -mod example { - use super::*; - pub fn initialize(ctx: Context) -> Result<()> { - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account( - init_if_needed, - payer = payer, - associated_token::mint = mint, - associated_token::authority = payer - )] - pub token_account: Account<'info, TokenAccount>, - pub mint: Account<'info, Mint>, - #[account(mut)] - pub payer: Signer<'info>, - pub system_program: Program<'info, System>, - pub token_program: Program<'info, Token>, - pub associated_token_program: Program<'info, AssociatedToken>, - pub rent: Sysvar<'info, Rent>, -} -``` - -When the `initialize` instruction is invoked in the previous example, Anchor -will check if the `token_account` exists and initialize it if it does not. If it -already exists, then the instruction will continue without initializing the -account. Just as with the `init` constraint, you can use `init_if_needed` in -conjunction with `seeds` and `bump` if the account is a PDA. - -## Realloc - -The `realloc` constraint provides a simple way to reallocate space for existing -accounts. - -The `realloc` constraint must be used in combination with the following -constraints: - -- `mut` - the account must be set as mutable -- `realloc::payer` - the account to subtract or add lamports to depending on - whether the reallocation is decreasing or increasing account space -- `realloc::zero` - boolean to specify if new memory should be zero initialized - -As with `init`, you must include `system_program` as one of the accounts in the -account validation struct when using `realloc`. - -Below is an example of reallocating space for an account that stores a `data` -field of type `String`. - -```rust -#[derive(Accounts)] -#[instruction(instruction_data: String)] -pub struct ReallocExample<'info> { - #[account( - mut, - seeds = [b"example_seed", user.key().as_ref()], - bump, - realloc = 8 + 4 + instruction_data.len(), - realloc::payer = user, - realloc::zero = false, - )] - pub pda_account: Account<'info, AccountType>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[account] -pub struct AccountType { - pub data: String, -} -``` - -Notice that `realloc` is set to `8 + 4 + instruction_data.len()`. This breaks -down as follows: - -- `8` is for the account discriminator -- `4` is for the 4 bytes of space that BORSH uses to store the length of the - string -- `instruction_data.len()` is the length of the string itself - -If the change in account data length is additive, lamports will be transferred -from the `realloc::payer` to the account in order to maintain rent exemption. -Likewise, if the change is subtractive, lamports will be transferred from the -account back to the `realloc::payer`. - -The `realloc::zero` constraint is required in order to determine whether the new -memory should be zero initialized after reallocation. 
This constraint should be -set to true in cases where you expect the memory of an account to shrink and -expand multiple times. That way you zero out space that would otherwise show as -stale data. - -## Close - -The `close` constraint provides a simple and secure way to close an existing -account. - -The `close` constraint marks the account as closed at the end of the -instruction’s execution by setting its discriminator to -the `CLOSED_ACCOUNT_DISCRIMINATOR` and sends its lamports to a specified -account. Setting the discriminator to a special variant makes account revival -attacks (where a subsequent instruction adds the rent exemption lamports again) -impossible. If someone tries to reinitialize the account, the reinitialization -will fail the discriminator check and be considered invalid by the program. - -The example below uses the `close` constraint to close the `data_account` and -sends the lamports allocated for rent to the `receiver` account. - -```rust -pub fn close(ctx: Context) -> Result<()> { - Ok(()) -} - -#[derive(Accounts)] -pub struct Close<'info> { - #[account(mut, close = receiver)] - pub data_account: Account<'info, AccountType>, - #[account(mut)] - pub receiver: Signer<'info> -} -``` - -# Demo - -Let’s practice the concepts we’ve gone over in this lesson by creating a Movie -Review program using the Anchor framework. - -This program will allow users to: - -- Use a PDA to initialize a new movie review account to store the review -- Update the content of an existing movie review account -- Close an existing movie review account - -### 1. Create a new Anchor project - -To begin, let’s create a new project using `anchor init`. - -```console -anchor init anchor-movie-review-program -``` - -Next, navigate to the `lib.rs` file within the `programs` folder and you should -see the following starter code. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod anchor_movie_review_program { - use super::*; - - pub fn initialize(ctx: Context) -> Result<()> { - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize {} -``` - -Go ahead and remove the `initialize` instruction and `Initialize` type. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod anchor_movie_review_program { - use super::*; - -} -``` - -### 2. `MovieAccountState` - -First, let’s use the `#[account]` attribute macro to define the -`MovieAccountState` that will represent the data structure of the movie review -accounts. As a reminder, the `#[account]` attribute macro implements various -traits that help with serialization and deserialization of the account, set the -discriminator for the account, and set the owner of a new account as the program -ID defined in the `declare_id!` macro. - -Within each movie review account, we’ll store the: - -- `reviewer` - user creating the review -- `rating` - rating for the movie -- `title` - title of the movie -- `description` - content of the review - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod anchor_movie_review_program { - use super::*; - -} - -#[account] -pub struct MovieAccountState { - pub reviewer: Pubkey, // 32 - pub rating: u8, // 1 - pub title: String, // 4 + len() - pub description: String, // 4 + len() -} -``` - -### 3. Add Movie Review - -Next, let’s implement the `add_movie_review` instruction. 
The `add_movie_review` -instruction will require a `Context` of type `AddMovieReview` that we’ll -implement shortly. - -The instruction will require three additional arguments as instruction data -provided by a reviewer: - -- `title` - title of the movie as a `String` -- `description` - details of the review as a `String` -- `rating` - rating for the movie as a `u8` - -Within the instruction logic, we’ll populate the data of the new `movie_review` -account with the instruction data. We’ll also set the `reviewer` field as the -`initializer` account from the instruction context. - -```rust -#[program] -pub mod movie_review{ - use super::*; - - pub fn add_movie_review( - ctx: Context, - title: String, - description: String, - rating: u8, - ) -> Result<()> { - msg!("Movie Review Account Created"); - msg!("Title: {}", title); - msg!("Description: {}", description); - msg!("Rating: {}", rating); - - let movie_review = &mut ctx.accounts.movie_review; - movie_review.reviewer = ctx.accounts.initializer.key(); - movie_review.title = title; - movie_review.rating = rating; - movie_review.description = description; - Ok(()) - } -} -``` - -Next, let’s create the `AddMovieReview` struct that we used as the generic in -the instruction's context. This struct will list the accounts the -`add_movie_review` instruction requires. - -Remember, you'll need the following macros: - -- The `#[derive(Accounts)]` macro is used to deserialize and validate the list - of accounts specified within the struct -- The `#[instruction(...)]` attribute macro is used to access the instruction - data passed into the instruction -- The `#[account(...)]` attribute macro then specifies additional constraints on - the accounts - -The `movie_review` account is a PDA that needs to be initialized, so we'll add -the `seeds` and `bump` constraints as well as the `init` constraint with its -required `payer` and `space` constraints. - -For the PDA seeds, we'll use the movie title and the reviewer's public key. The -payer for the initialization should be the reviewer, and the space allocated on -the account should be enough for the account discriminator, the reviewer's -public key, and the movie review's rating, title, and description. - -```rust -#[derive(Accounts)] -#[instruction(title:String, description:String)] -pub struct AddMovieReview<'info> { - #[account( - init, - seeds = [title.as_bytes(), initializer.key().as_ref()], - bump, - payer = initializer, - space = 8 + 32 + 1 + 4 + title.len() + 4 + description.len() - )] - pub movie_review: Account<'info, MovieAccountState>, - #[account(mut)] - pub initializer: Signer<'info>, - pub system_program: Program<'info, System>, -} -``` - -### 4. Update Movie Review - -Next, let’s implement the `update_movie_review` instruction with a context whose -generic type is `UpdateMovieReview`. - -Just as before, the instruction will require three additional arguments as -instruction data provided by a reviewer: - -- `title` - title of the movie -- `description` - details of the review -- `rating` - rating for the movie - -Within the instruction logic we’ll update the `rating` and `description` stored -on the `movie_review` account. - -While the `title` doesn't get used in the instruction function itself, we'll -need it for account validation of `movie_review` in the next step. - -```rust -#[program] -pub mod anchor_movie_review_program { - use super::*; - - ... 
- - pub fn update_movie_review( - ctx: Context, - title: String, - description: String, - rating: u8, - ) -> Result<()> { - msg!("Movie review account space reallocated"); - msg!("Title: {}", title); - msg!("Description: {}", description); - msg!("Rating: {}", rating); - - let movie_review = &mut ctx.accounts.movie_review; - movie_review.rating = rating; - movie_review.description = description; - - Ok(()) - } - -} -``` - -Next, let’s create the `UpdateMovieReview` struct to define the accounts that -the `update_movie_review` instruction needs. - -Since the `movie_review` account will have already been initialized by this -point, we no longer need the `init` constraint. However, since the value of -`description` may now be different, we need to use the `realloc` constraint to -reallocate the space on the account. Accompanying this, we need the `mut`, -`realloc::payer`, and `realloc::zero` constraints. - -We'll also still need the `seeds` and `bump` constraints as we had them in -`AddMovieReview`. - -```rust -#[derive(Accounts)] -#[instruction(title:String, description:String)] -pub struct UpdateMovieReview<'info> { - #[account( - mut, - seeds = [title.as_bytes(), initializer.key().as_ref()], - bump, - realloc = 8 + 32 + 1 + 4 + title.len() + 4 + description.len(), - realloc::payer = initializer, - realloc::zero = true, - )] - pub movie_review: Account<'info, MovieAccountState>, - #[account(mut)] - pub initializer: Signer<'info>, - pub system_program: Program<'info, System>, -} -``` - -Note that the `realloc` constraint is set to the new space required by the -`movie_review` account based on the updated value of `description`. - -Additionally, the `realloc::payer` constraint specifies that any additional -lamports required or refunded will come from or be send to the `initializer` -account. - -Finally, we set the `realloc::zero` constraint to `true` because the -`movie_review` account may be updated multiple times either shrinking or -expanding the space allocated to the account. - -### 5. Delete Movie Review - -Lastly, let’s implement the `delete_movie_review` instruction to close an -existing `movie_review` account. - -We'll use a context whose generic type is `DeleteMovieReview` and won't include -any additional instruction data. Since we are only closing an account, we -actually don't need any instruction logic inside the body of the function. The -closing itself will be handled by the Anchor constraint in the -`DeleteMovieReview` type. - -```rust -#[program] -pub mod anchor_movie_review_program { - use super::*; - - ... - - pub fn delete_movie_review(_ctx: Context, title: String) -> Result<()> { - msg!("Movie review for {} deleted", title); - Ok(()) - } - -} -``` - -Next, let’s implement the `DeleteMovieReview` struct. - -```rust -#[derive(Accounts)] -#[instruction(title: String)] -pub struct DeleteMovieReview<'info> { - #[account( - mut, - seeds=[title.as_bytes(), initializer.key().as_ref()], - bump, - close=initializer - )] - pub movie_review: Account<'info, MovieAccountState>, - #[account(mut)] - pub initializer: Signer<'info>, - pub system_program: Program<'info, System> -} -``` - -Here we use the `close` constraint to specify we are closing the `movie_review` -account and that the rent should be refunded to the `initializer` account. We -also include the `seeds` and `bump` constraints for the the `movie_review` -account for validation. Anchor then handles the additional logic required to -securely close the account. - -### 6. Testing - -The program should be good to go! 
Now let's test it out. Navigate to -`anchor-movie-review-program.ts` and replace the default test code with the -following. - -Here we: - -- Create default values for the movie review instruction data -- Derive the movie review account PDA -- Create placeholders for tests - -```ts -import * as anchor from "@project-serum/anchor"; -import { Program } from "@project-serum/anchor"; -import { assert, expect } from "chai"; -import { AnchorMovieReviewProgram } from "../target/types/anchor_movie_review_program"; - -describe("anchor-movie-review-program", () => { - // Configure the client to use the local cluster. - const provider = anchor.AnchorProvider.env(); - anchor.setProvider(provider); - - const program = anchor.workspace - .AnchorMovieReviewProgram as Program; - - const movie = { - title: "Just a test movie", - description: "Wow what a good movie it was real great", - rating: 5, - }; - - const [moviePda] = anchor.web3.PublicKey.findProgramAddressSync( - [Buffer.from(movie.title), provider.wallet.publicKey.toBuffer()], - program.programId, - ); - - it("Movie review is added`", async () => {}); - - it("Movie review is updated`", async () => {}); - - it("Deletes a movie review", async () => {}); -}); -``` - -Next, let's create the first test for the `addMovieReview` instruction. Note -that we don't explicitly add `.accounts`. This is because the `Wallet` from -`AnchorProvider` is automatically included as a signer, Anchor can infer certain -accounts like `SystemProgram`, and Anchor can also infer the `movieReview` PDA -from the `title` instruction argument and the signer's public key. - -Once the instruction runs, we then fetch the `movieReview` account and check -that the data stored on the account match the expected values. - -```ts -it("Movie review is added`", async () => { - // Add your test here. - const tx = await program.methods - .addMovieReview(movie.title, movie.description, movie.rating) - .rpc(); - - const account = await program.account.movieAccountState.fetch(moviePda); - expect(movie.title === account.title); - expect(movie.rating === account.rating); - expect(movie.description === account.description); - expect(account.reviewer === provider.wallet.publicKey); -}); -``` - -Next, let's create the test for the `updateMovieReview` instruction following -the same process as before. - -```ts -it("Movie review is updated`", async () => { - const newDescription = "Wow this is new"; - const newRating = 4; - - const tx = await program.methods - .updateMovieReview(movie.title, newDescription, newRating) - .rpc(); - - const account = await program.account.movieAccountState.fetch(moviePda); - expect(movie.title === account.title); - expect(newRating === account.rating); - expect(newDescription === account.description); - expect(account.reviewer === provider.wallet.publicKey); -}); -``` - -Next, create the test for the `deleteMovieReview` instruction - -```ts -it("Deletes a movie review", async () => { - const tx = await program.methods.deleteMovieReview(movie.title).rpc(); -}); -``` - -Lastly, run `anchor test` and you should see the following output in the -console. - -```console - anchor-movie-review-program - ✔ Movie review is added` (139ms) - ✔ Movie review is updated` (404ms) - ✔ Deletes a movie review (403ms) - - - 3 passing (950ms) -``` - -If you need more time with this project to feel comfortable with these concepts, -feel free to have a look at -the [solution code](https://github.com/Unboxed-Software/anchor-movie-review-program/tree/solution-pdas) before -continuing. 
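-
-As an optional cleanup, note that the space calculation
-`8 + 32 + 1 + 4 + title.len() + 4 + description.len()` is repeated in both
-`AddMovieReview` and `UpdateMovieReview`. One way to keep the two in sync is a
-small helper on `MovieAccountState`. This is only a sketch of one possible
-refactor, not part of the lesson's solution code:
-
-```rust
-impl MovieAccountState {
-    // 8 discriminator + 32 reviewer pubkey + 1 rating
-    // + (4 + len) for each String field
-    pub fn space(title: &str, description: &str) -> usize {
-        8 + 32 + 1 + (4 + title.len()) + (4 + description.len())
-    }
-}
-```
-
-With a helper like this in place, the account constraints could read
-`space = MovieAccountState::space(&title, &description)` and
-`realloc = MovieAccountState::space(&title, &description)` instead of repeating
-the arithmetic in two places.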
- -# Challenge - -Now it’s your turn to build something independently. Equipped with the concepts -introduced in this lesson, try to recreate the Student Intro program that we've -used before using the Anchor framework. - -The Student Intro program is a Solana Program that lets students introduce -themselves. The program takes a user's name and a short message as the -instruction data and creates an account to store the data on-chain. - -Using what you've learned in this lesson, build out this program. The program -should include instructions to: - -1. Initialize a PDA account for each student that stores the student's name and - their short message -2. Update the message on an existing account -3. Close an existing account - -Try to do this independently if you can! But if you get stuck, feel free to -reference -the [solution code](https://github.com/Unboxed-Software/anchor-student-intro-program). diff --git a/content/courses/solana-course/content/arbitrary-cpi.md b/content/courses/solana-course/content/arbitrary-cpi.md deleted file mode 100644 index 3b0bc53d6..000000000 --- a/content/courses/solana-course/content/arbitrary-cpi.md +++ /dev/null @@ -1,457 +0,0 @@ ---- -title: Arbitrary CPI -objectives: - - Explain the security risks associated with invoking a CPI to an unknown - program - - Showcase how Anchor’s CPI module prevents this from happening when making a - CPI from one Anchor program to another - - Safely and securely make a CPI from an Anchor program to an arbitrary - non-anchor program ---- - -# TL;DR - -- To generate a CPI, the target program must be passed into the invoking - instruction as an account. This means that any target program could be passed - into the instruction. Your program should check for incorrect or unexpected - programs. -- Perform program checks in native programs by simply comparing the public key - of the passed-in program to the progam you expected. -- If a program is written in Anchor, then it may have a publicly available CPI - module. This makes invoking the program from another Anchor program simple and - secure. The Anchor CPI module automatically checks that the address of the - program passed in matches the address of the program stored in the module. - -# Overview - -A cross program invocation (CPI) is when one program invokes an instruction on -another program. An “arbitrary CPI” is when a program is structured to issue a -CPI to whatever program is passed into the instruction rather than expecting to -perform a CPI to one specific program. Given that the callers of your program's -instruction can pass any program they'd like into the instruction's list of -accounts, failing to verify the address of a passed-in program results in your -program performing CPIs to arbitrary programs. - -This lack of program checks creates an opportunity for a malicious user to pass -in a different program than expected, causing the original program to call an -instruction on this mystery program. There’s no telling what the consequences of -this CPI could be. It depends on the program logic (both that of the original -program and the unexpected program), as well as what other accounts are passed -into the original instruction. - -## Missing program checks - -Take the following program as an example. The `cpi` instruction invokes the -`transfer` instruction on `token_program`, but there is no code that checks -whether or not the `token_program` account passed into the instruction is, in -fact, the SPL Token Program. 
- -```rust -use anchor_lang::prelude::*; -use anchor_lang::solana_program; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod arbitrary_cpi_insecure { - use super::*; - - pub fn cpi(ctx: Context, amount: u64) -> ProgramResult { - solana_program::program::invoke( - &spl_token::instruction::transfer( - ctx.accounts.token_program.key, - ctx.accounts.source.key, - ctx.accounts.destination.key, - ctx.accounts.authority.key, - &[], - amount, - )?, - &[ - ctx.accounts.source.clone(), - ctx.accounts.destination.clone(), - ctx.accounts.authority.clone(), - ], - ) - } -} - -#[derive(Accounts)] -pub struct Cpi<'info> { - source: UncheckedAccount<'info>, - destination: UncheckedAccount<'info>, - authority: UncheckedAccount<'info>, - token_program: UncheckedAccount<'info>, -} -``` - -An attacker could easily call this instruction and pass in a duplicate token -program that they created and control. - -## Add program checks - -It's possible to fix this vulnerabilty by simply adding a few lines to the `cpi` -instruction to check whether or not `token_program`'s public key is that of the -SPL Token Program. - -```rust -pub fn cpi_secure(ctx: Context, amount: u64) -> ProgramResult { - if &spl_token::ID != ctx.accounts.token_program.key { - return Err(ProgramError::IncorrectProgramId); - } - solana_program::program::invoke( - &spl_token::instruction::transfer( - ctx.accounts.token_program.key, - ctx.accounts.source.key, - ctx.accounts.destination.key, - ctx.accounts.authority.key, - &[], - amount, - )?, - &[ - ctx.accounts.source.clone(), - ctx.accounts.destination.clone(), - ctx.accounts.authority.clone(), - ], - ) -} -``` - -Now, if an attacker passes in a different token program, the instruction will -return the `ProgramError::IncorrectProgramId` error. - -Depending on the program you’re invoking with your CPI, you can either hard code -the address of the expected program ID or use the program’s Rust crate to get -the address of the program, if available. In the example above, the `spl_token` -crate provides the address of the SPL Token Program. - -## Use an Anchor CPI module - -A simpler way to manage program checks is to use Anchor CPI modules. We learned -in a -[previous lesson](https://github.com/Unboxed-Software/solana-course/blob/main/content/anchor-cpi.md) -that Anchor can automatically generate CPI modules to make CPIs into the program -simpler. These modules also enhance security by verifying the public key of the -program that’s passed into one of its public instructions. - -Every Anchor program uses the `declare_id()` macro to define the address of the -program. When a CPI module is generated for a specific program, it uses the -address passed into this macro as the "source of truth" and will automatically -verify that all CPIs made using its CPI module target this program id. - -While at the core no different than manual program checks, using CPI modules -avoids the possibility of forgetting to perform a program check or accidentally -typing in the wrong program ID when hard-coding it. - -The program below shows an example of using a CPI module for the SPL Token -Program to perform the transfer shown in the previous examples. 
- -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod arbitrary_cpi_recommended { - use super::*; - - pub fn cpi(ctx: Context, amount: u64) -> ProgramResult { - token::transfer(ctx.accounts.transfer_ctx(), amount) - } -} - -#[derive(Accounts)] -pub struct Cpi<'info> { - source: Account<'info, TokenAccount>, - destination: Account<'info, TokenAccount>, - authority: Signer<'info>, - token_program: Program<'info, Token>, -} - -impl<'info> Cpi<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.source.to_account_info(), - to: self.destination.to_account_info(), - authority: self.authority.to_account_info(), - }; - CpiContext::new(program, accounts) - } -} -``` - -Note that, like the example above, Anchor has created a few -[wrappers for popular native programs](https://github.com/coral-xyz/anchor/tree/master/spl/src) -that allow you to issue CPIs into them as if they were Anchor programs. - -Additionally and depending on the program you’re making the CPI to, you may be -able to use Anchor’s -[`Program` account type](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/program/struct.Program.html) -to validate the passed-in program in your account validation struct. Between -the [`anchor_lang`](https://docs.rs/anchor-lang/latest/anchor_lang) and [`anchor_spl`](https://docs.rs/anchor_spl/latest/) crates, -the following `Program` types are provided out of the box: - -- [`System`](https://docs.rs/anchor-lang/latest/anchor_lang/struct.System.html) -- [`AssociatedToken`](https://docs.rs/anchor-spl/latest/anchor_spl/associated_token/struct.AssociatedToken.html) -- [`Token`](https://docs.rs/anchor-spl/latest/anchor_spl/token/struct.Token.html) - -If you have access to an Anchor program's CPI module, you typically can import -its program type with the following, replacing the program name with the name of -the actual program: - -```rust -use other_program::program::OtherProgram; -``` - -# Demo - -To show the importance of checking with program you use for CPIs, we're going to -work with a simplified and somewhat contrived game. This game represents -characters with PDA accounts, and uses a separate "metadata" program to manage -character metadata and attributes like health and power. - -While this example is somewhat contrived, it's actually almost identical -architecture to how NFTs on Solana work: the SPL Token Program manages the token -mints, distribution, and transfers, and a separate metadata program is used to -assign metadata to tokens. So the vulnerability we go through here could also be -applied to real tokens. - -### 1. Setup - -We'll start with the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-arbitrary-cpi/tree/starter). -Clone the repository and then open it on the `starter` branch. - -Notice that there are three programs: - -1. `gameplay` -2. `character-metadata` -3. `fake-metadata` - -Additionally, there is already a test in the `tests` directory. - -The first program, `gameplay`, is the one that our test directly uses. Take a -look at the program. It has two instructions: - -1. `create_character_insecure` - creates a new character and CPI's into the - metadata program to set up the character's initial attributes -2. 
`battle_insecure` - pits two characters against each other, assigning a "win" - to the character with the highest attributes - -The second program, `character-metadata`, is meant to be the "approved" program -for handling character metadata. Have a look at this program. It has a single -instruction for `create_metadata` that creates a new PDA and assigns a -pseudo-random value between 0 and 20 for the character's health and power. - -The last program, `fake-metadata` is a "fake" metadata program meant to -illustrate what an attacker might make to exploit our `gameplay` program. This -program is almost identical to the `character-metadata` program, only it assigns -a character's initial health and power to be the max allowed: 255. - -### 2. Test `create_character_insecure` instruction - -There is already a test in the `tests` directory for this. It's long, but take a -minute to look at it before we talk through it together: - -```ts -it("Insecure instructions allow attacker to win every time", async () => { - // Initialize player one with real metadata program - await gameplayProgram.methods - .createCharacterInsecure() - .accounts({ - metadataProgram: metadataProgram.programId, - authority: playerOne.publicKey, - }) - .signers([playerOne]) - .rpc(); - - // Initialize attacker with fake metadata program - await gameplayProgram.methods - .createCharacterInsecure() - .accounts({ - metadataProgram: fakeMetadataProgram.programId, - authority: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - - // Fetch both player's metadata accounts - const [playerOneMetadataKey] = getMetadataKey( - playerOne.publicKey, - gameplayProgram.programId, - metadataProgram.programId, - ); - - const [attackerMetadataKey] = getMetadataKey( - attacker.publicKey, - gameplayProgram.programId, - fakeMetadataProgram.programId, - ); - - const playerOneMetadata = await metadataProgram.account.metadata.fetch( - playerOneMetadataKey, - ); - - const attackerMetadata = await fakeMetadataProgram.account.metadata.fetch( - attackerMetadataKey, - ); - - // The regular player should have health and power between 0 and 20 - expect(playerOneMetadata.health).to.be.lessThan(20); - expect(playerOneMetadata.power).to.be.lessThan(20); - - // The attacker will have health and power of 255 - expect(attackerMetadata.health).to.equal(255); - expect(attackerMetadata.power).to.equal(255); -}); -``` - -This test effectively walks through the scenario where a regular player and an -attacker both create their characters. Only the attacker passes in the program -ID of the fake metadata program rather than the actual metadata program. And -since the `create_character_insecure` instruction has no program checks, it -still executes. - -The result is that the regular character has the appropriate amount of health -and power: each a value between 0 and 20. But the attacker's health and power -are each 255, making the attacker unbeatable. - -If you haven't already, run `anchor test` to see that this test in fact behaves -as described. - -### 3. Create a `create_character_secure` instruction - -Let's fix this by creating a secure instruction for creating a new character. -This instruction should implement proper program checks and use the -`character-metadata` program's `cpi` crate to do the CPI rather than just using -`invoke`. - -If you want to test out your skills, try this on your own before moving ahead. - -We'll start by updating our `use` statement at the top of the `gameplay` -programs `lib.rs` file. 
We're giving ourselves access to the program's type for -account validation, and the helper function for issuing the `create_metadata` -CPI. - -```rust -use character_metadata::{ - cpi::accounts::CreateMetadata, - cpi::create_metadata, - program::CharacterMetadata, -}; -``` - -Next let's create a new account validation struct called -`CreateCharacterSecure`. This time, we make `metadata_program` a `Program` type: - -```rust -#[derive(Accounts)] -pub struct CreateCharacterSecure<'info> { - #[account(mut)] - pub authority: Signer<'info>, - #[account( - init, - payer = authority, - space = 8 + 32 + 32 + 64, - seeds = [authority.key().as_ref()], - bump - )] - pub character: Account<'info, Character>, - #[account( - mut, - seeds = [character.key().as_ref()], - seeds::program = metadata_program.key(), - bump, - )] - /// CHECK: manual checks - pub metadata_account: AccountInfo<'info>, - pub metadata_program: Program<'info, CharacterMetadata>, - pub system_program: Program<'info, System>, -} -``` - -Lastly, we add the `create_character_secure` instruction. It will be the same as -before but will use the full functionality of Anchor CPIs rather than using -`invoke` directly: - -```rust -pub fn create_character_secure(ctx: Context) -> Result<()> { - let character = &mut ctx.accounts.character; - character.metadata = ctx.accounts.metadata_account.key(); - character.auth = ctx.accounts.authority.key(); - character.wins = 0; - - let context = CpiContext::new( - ctx.accounts.metadata_program.to_account_info(), - CreateMetadata { - character: ctx.accounts.character.to_account_info(), - metadata: ctx.accounts.metadata_account.to_owned(), - authority: ctx.accounts.authority.to_account_info(), - system_program: ctx.accounts.system_program.to_account_info(), - }, - ); - - create_metadata(context)?; - - Ok(()) -} -``` - -### 4. Test `create_character_secure` - -Now that we have a secure way of initializing a new character, let's create a -new test. This test just needs to attempt to initialize the attacker's character -and expect an error to be thrown. - -```ts -it("Secure character creation doesn't allow fake program", async () => { - try { - await gameplayProgram.methods - .createCharacterSecure() - .accounts({ - metadataProgram: fakeMetadataProgram.programId, - authority: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - } catch (error) { - expect(error); - console.log(error); - } -}); -``` - -Run `anchor test` if you haven't already. Notice that an error was thrown as -expected, detailing that the program ID passed into the instruction is not the -expected program ID: - -```bash -'Program log: AnchorError caused by account: metadata_program. Error Code: InvalidProgramId. Error Number: 3008. Error Message: Program ID was not as expected.', -'Program log: Left:', -'Program log: FKBWhshzcQa29cCyaXc1vfkZ5U985gD5YsqfCzJYUBr', -'Program log: Right:', -'Program log: D4hPnYEsAx4u3EQMrKEXsY3MkfLndXbBKTEYTwwm25TE' -``` - -That's all you need to do to protect against arbitrary CPIs! - -There may be times where you want more flexibility in your program's CPIs. We -certainly won't stop you from architecting the program you need, but please take -every precaution possible to ensure no vulnerabilities in your program. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-arbitrary-cpi/tree/solution). 
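-
-One final note: if the program you need to invoke doesn't ship an Anchor CPI
-module or a `Program` type (for example, a non-Anchor program), you can still
-move the ID check out of the instruction body and into the account validation
-struct. The sketch below leans on the `address` constraint and a hypothetical
-`other_program::ID` constant; treat it as one possible pattern under those
-assumptions rather than part of this lesson's solution:
-
-```rust
-use anchor_lang::prelude::*;
-
-#[derive(Accounts)]
-pub struct CpiToNonAnchorProgram<'info> {
-    // `other_program::ID` stands in for the target program's published ID
-    /// CHECK: validated against the expected program ID by the constraints below
-    #[account(
-        executable,
-        address = other_program::ID @ CpiError::InvalidTargetProgram
-    )]
-    pub target_program: UncheckedAccount<'info>,
-    // ...plus whatever accounts the CPI itself requires
-}
-
-#[error_code]
-pub enum CpiError {
-    #[msg("The program passed in is not the expected target program")]
-    InvalidTargetProgram,
-}
-```
-
-You would still build and `invoke` the instruction by hand as in the earlier
-examples; the constraint simply guarantees the program check can't be
-forgotten.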
- -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that program checks are -in place for every program passed into the instructions, particularly those that -are invoked via CPI. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. diff --git a/content/courses/solana-course/content/bump-seed-canonicalization.md b/content/courses/solana-course/content/bump-seed-canonicalization.md deleted file mode 100644 index 294c686b7..000000000 --- a/content/courses/solana-course/content/bump-seed-canonicalization.md +++ /dev/null @@ -1,606 +0,0 @@ ---- -title: Bump Seed Canonicalization -objectives: - - Explain the vulnerabilities associated with using PDAs derived without the - canonical bump - - Initialize a PDA using Anchor’s `seeds` and `bump` constraints to - automatically use the canonical bump - - Use Anchor's `seeds` and `bump` constraints to ensure the canonical bump is - always used in future instructions when deriving a PDA ---- - -# TL;DR - -- The - [**`create_program_address`**](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.create_program_address) - function derives a PDA without searching for the **canonical bump**. This - means there are multiple valid bumps, all of which will produce different - addresses. -- Using - [**`find_program_address`**](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.find_program_address) - ensures that the highest valid bump, or canonical bump, is used for the - derivation, thus creating a deterministic way to find an address given - specific seeds. -- Upon initialization, you can use Anchor's `seeds` and `bump` constraint to - ensure that PDA derivations in the account validation struct always use the - canonical bump -- Anchor allows you to **specify a bump** with the `bump = ` - constraint when verifying the address of a PDA -- Because `find_program_address` can be expensive, best practice is to store the - derived bump in an account’s data field to be referenced later on when - re-deriving the address for verification - ```rust - #[derive(Accounts)] - pub struct VerifyAddress<'info> { - #[account( - seeds = [DATA_PDA_SEED.as_bytes()], - bump = data.bump - )] - data: Account<'info, Data>, - } - ``` - -# Overview - -Bump seeds are a number between 0 and 255, inclusive, used to ensure that an -address derived using -[`create_program_address`](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.create_program_address) -is a valid PDA. The **canonical bump** is the highest bump value that produces a -valid PDA. The standard in Solana is to _always use the canonical bump_ when -deriving PDAs, both for security and convenience. - -## Insecure PDA derivation using `create_program_address` - -Given a set of seeds, the `create_program_address` function will produce a valid -PDA about 50% of the time. The bump seed is an additional byte added as a seed -to "bump" the derived address into valid territory. Since there are 256 possible -bump seeds and the function produces valid PDAs approximately 50% of the time, -there are many valid bumps for a given set of input seeds. 
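-
-To make that concrete, the standalone sketch below (using the `solana_program`
-crate directly, with an arbitrary placeholder seed and program ID) tries every
-possible bump with `create_program_address` and counts how many produce a valid
-PDA, then asks `find_program_address` for the single canonical bump:
-
-```rust
-use solana_program::pubkey::Pubkey;
-
-fn main() {
-    // Arbitrary placeholders, only for illustration
-    let program_id = Pubkey::new_from_array([7u8; 32]);
-    let seed = b"example_seed";
-
-    // Roughly half of the 256 possible bump values land on a valid PDA
-    let valid_bumps: Vec<u8> = (0u8..=255)
-        .filter(|bump| {
-            Pubkey::create_program_address(&[seed.as_ref(), &[*bump]], &program_id)
-                .is_ok()
-        })
-        .collect();
-
-    // find_program_address starts at 255 and walks down, so it always returns
-    // the highest valid bump, i.e. the canonical bump
-    let (canonical_pda, canonical_bump) =
-        Pubkey::find_program_address(&[seed.as_ref()], &program_id);
-
-    println!("{} of 256 bumps produce a valid PDA", valid_bumps.len());
-    println!("canonical bump {} -> PDA {}", canonical_bump, canonical_pda);
-}
-```
-
-Running something like this will usually report well over a hundred valid bumps
-for a single seed, which is exactly why "any valid bump" is not a safe standard
-to rely on.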
- -You can imagine that this could cause confusion for locating accounts when using -seeds as a way of mapping between known pieces of information to accounts. Using -the canonical bump as the standard ensures that you can always find the right -account. More importantly, it avoids security exploits caused by the open-ended -nature of allowing multiple bumps. - -In the example below, the `set_value` instruction uses a `bump` that was passed -in as instruction data to derive a PDA. The instruction then derives the PDA -using `create_program_address` function and checks that the `address` matches -the public key of the `data` account. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod bump_seed_canonicalization_insecure { - use super::*; - - pub fn set_value(ctx: Context, key: u64, new_value: u64, bump: u8) -> Result<()> { - let address = - Pubkey::create_program_address(&[key.to_le_bytes().as_ref(), &[bump]], ctx.program_id).unwrap(); - if address != ctx.accounts.data.key() { - return Err(ProgramError::InvalidArgument.into()); - } - - ctx.accounts.data.value = new_value; - - Ok(()) - } -} - -#[derive(Accounts)] -pub struct BumpSeed<'info> { - data: Account<'info, Data>, -} - -#[account] -pub struct Data { - value: u64, -} -``` - -While the instruction derives the PDA and checks the passed-in account, which is -good, it allows the caller to pass in an arbitrary bump. Depending on the -context of your program, this could result in undesired behavior or potential -exploit. - -If the seed mapping was meant to enforce a one-to-one relationship between PDA -and user, for example, this program would not properly enforce that. A user -could call the program multiple times with many valid bumps, each producing a -different PDA. - -## Recommended derivation using `find_program_address` - -A simple way around this problem is to have the program expect only the -canonical bump and use `find_program_address` to derive the PDA. - -The -[`find_program_address`](https://docs.rs/solana-program/latest/solana_program/pubkey/struct.Pubkey.html#method.find_program_address) -_always uses the canonical bump_. This function iterates through calling -`create_program_address`, starting with a bump of 255 and decrementing the bump -by one with each iteration. As soon as a valid address is found, the function -returns both the derived PDA and the canonical bump used to derive it. - -This ensures a one-to-one mapping between your input seeds and the address they -produce. - -```rust -pub fn set_value_secure( - ctx: Context, - key: u64, - new_value: u64, - bump: u8, -) -> Result<()> { - let (address, expected_bump) = - Pubkey::find_program_address(&[key.to_le_bytes().as_ref()], ctx.program_id); - - if address != ctx.accounts.data.key() { - return Err(ProgramError::InvalidArgument.into()); - } - if expected_bump != bump { - return Err(ProgramError::InvalidArgument.into()); - } - - ctx.accounts.data.value = new_value; - Ok(()) -} -``` - -## Use Anchor’s `seeds` and `bump` constraints - -Anchor provides a convenient way to derive PDAs in the account validation struct -using the `seeds` and `bump` constraints. These can even be combined with the -`init` constraint to initialize the account at the intended address. To protect -the program from the vulnerability we’ve been discussing throughout this lesson, -Anchor does not even allow you to initialize an account at a PDA using anything -but the canonical bump. 
Instead, it uses `find_program_address` to derive the -PDA and subsequently performs the initialization. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod bump_seed_canonicalization_recommended { - use super::*; - - pub fn set_value(ctx: Context, _key: u64, new_value: u64) -> Result<()> { - ctx.accounts.data.value = new_value; - Ok(()) - } -} - -// initialize account at PDA -#[derive(Accounts)] -#[instruction(key: u64)] -pub struct BumpSeed<'info> { - #[account(mut)] - payer: Signer<'info>, - #[account( - init, - seeds = [key.to_le_bytes().as_ref()], - // derives the PDA using the canonical bump - bump, - payer = payer, - space = 8 + 8 - )] - data: Account<'info, Data>, - system_program: Program<'info, System> -} - -#[account] -pub struct Data { - value: u64, -} -``` - -If you aren't initializing an account, you can still validate PDAs with the -`seeds` and `bump` constraints. This simply rederives the PDA and compares the -derived address with the address of the account passed in. - -In this scenario, Anchor _does_ allow you to specify the bump to use to derive -the PDA with `bump = `. The intent here is not for you to use -arbitrary bumps, but rather to let you optimize your program. The iterative -nature of `find_program_address` makes it expensive, so best practice is to -store the canonical bump in the PDA account's data upon initializing a PDA, -allowing you to reference the bump stored when validating the PDA in subsequent -instructions. - -When you specify the bump to use, Anchor uses `create_program_address` with the -provided bump instead of `find_program_address`. This pattern of storing the -bump in the account data ensures that your program always uses the canonical -bump without degrading performance. - -```rust -use anchor_lang::prelude::*; - -declare_id!("CVwV9RoebTbmzsGg1uqU1s4a3LvTKseewZKmaNLSxTqc"); - -#[program] -pub mod bump_seed_canonicalization_recommended { - use super::*; - - pub fn set_value(ctx: Context, _key: u64, new_value: u64) -> Result<()> { - ctx.accounts.data.value = new_value; - // store the bump on the account - ctx.accounts.data.bump = *ctx.bumps.get("data").unwrap(); - Ok(()) - } - - pub fn verify_address(ctx: Context, _key: u64) -> Result<()> { - msg!("PDA confirmed to be derived with canonical bump: {}", ctx.accounts.data.key()); - Ok(()) - } -} - -// initialize account at PDA -#[derive(Accounts)] -#[instruction(key: u64)] -pub struct BumpSeed<'info> { - #[account(mut)] - payer: Signer<'info>, - #[account( - init, - seeds = [key.to_le_bytes().as_ref()], - // derives the PDA using the canonical bump - bump, - payer = payer, - space = 8 + 8 + 1 - )] - data: Account<'info, Data>, - system_program: Program<'info, System> -} - -#[derive(Accounts)] -#[instruction(key: u64)] -pub struct VerifyAddress<'info> { - #[account( - seeds = [key.to_le_bytes().as_ref()], - // guranteed to be the canonical bump every time - bump = data.bump - )] - data: Account<'info, Data>, -} - -#[account] -pub struct Data { - value: u64, - // bump field - bump: u8 -} -``` - -If you don't specify the bump on the `bump` constraint, Anchor will still use -`find_program_address` to derive the PDA using the canonical bump. As a -consequence, your instruction will incur a variable amount of compute budget. -Programs that are already at risk of exceeding their compute budget should use -this with care since there is a chance that the program’s budget may be -occasionally and unpredictably exceeded. 
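-
-For reference, the stored-bump check that `bump = data.bump` performs is
-roughly equivalent to the manual logic sketched below: a single
-`create_program_address` call using the saved bump, rather than an iterative
-search. The `ManualVerify` context is made up for this sketch and reuses the
-`Data` type from the example above:
-
-```rust
-pub fn verify_with_stored_bump(ctx: Context<ManualVerify>, key: u64) -> Result<()> {
-    // Re-create the address from the seeds plus the bump saved at
-    // initialization; no looping over candidate bumps is needed
-    let expected = Pubkey::create_program_address(
-        &[key.to_le_bytes().as_ref(), &[ctx.accounts.data.bump]],
-        ctx.program_id,
-    )
-    .unwrap();
-
-    if expected != ctx.accounts.data.key() {
-        return Err(ProgramError::InvalidArgument.into());
-    }
-
-    Ok(())
-}
-
-#[derive(Accounts)]
-pub struct ManualVerify<'info> {
-    // No seeds constraint here, since the check is done by hand above
-    data: Account<'info, Data>,
-}
-```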
- -On the other hand, if you only need to verify the address of a PDA passed in -without initializing an account, you'll be forced to either let Anchor derive -the canonical bump or expose your program to unecessary risks. In that case, -please use the canonical bump despite the slight mark against performance. - -# Demo - -To demonstrate the security exploits possible when you don't check for the -canonical bump, let's work with a program that lets each program user "claim" -rewards on time. - -### 1. Setup - -Start by getting the code on the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-bump-seed-canonicalization/tree/starter). - -Notice that there are two instructions on the program and a single test in the -`tests` directory. - -The instructions on the program are: - -1. `create_user_insecure` -2. `claim_insecure` - -The `create_user_insecure` instruction simply creates a new account at a PDA -derived using the signer's public key and a passed-in bump. - -The `claim_insecure` instruction mints 10 tokens to the user and then marks the -account's rewards as claimed so that they can't claim again. - -However, the program doesn't explicitly check that the PDAs in question are -using the canonical bump. - -Have a look at the program to understand what it does before proceeding. - -### 2. Test insecure instructions - -Since the instructions don't explicitly require the `user` PDA to use the -canonical bump, an attacker can create multiple accounts per wallet and claim -more rewards than should be allowed. - -The test in the `tests` directory creates a new keypair called `attacker` to -represent an attacker. It then loops through all possible bumps and calls -`create_user_insecure` and `claim_insecure`. By the end, the test expects that -the attacker has been able to claim rewards multiple times and has earned more -than the 10 tokens allotted per user. - -```ts -it("Attacker can claim more than reward limit with insecure instructions", async () => { - const attacker = Keypair.generate(); - await safeAirdrop(attacker.publicKey, provider.connection); - const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); - - let numClaims = 0; - - for (let i = 0; i < 256; i++) { - try { - const pda = createProgramAddressSync( - [attacker.publicKey.toBuffer(), Buffer.from([i])], - program.programId, - ); - await program.methods - .createUserInsecure(i) - .accounts({ - user: pda, - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - await program.methods - .claimInsecure(i) - .accounts({ - user: pda, - mint, - payer: attacker.publicKey, - userAta: ataKey, - }) - .signers([attacker]) - .rpc(); - - numClaims += 1; - } catch (error) { - if (error.message !== "Invalid seeds, address must fall off the curve") { - console.log(error); - } - } - } - - const ata = await getAccount(provider.connection, ataKey); - - console.log( - `Attacker claimed ${numClaims} times and got ${Number(ata.amount)} tokens`, - ); - - expect(numClaims).to.be.greaterThan(1); - expect(Number(ata.amount)).to.be.greaterThan(10); -}); -``` - -Run `anchor test` to see that this test passes, showing that the attacker is -successful. Since the test calles the instructions for every valid bump, it -takes a bit to run, so be patient. - -```bash - bump-seed-canonicalization -Attacker claimed 129 times and got 1290 tokens - ✔ Attacker can claim more than reward limit with insecure instructions (133840ms) -``` - -### 3. 
Create secure instructions - -Let's demonstrate patching the vulnerability by creating two new instructions: - -1. `create_user_secure` -2. `claim_secure` - -Before we write the account validation or instruction logic, let's create a new -user type, `UserSecure`. This new type will add the canonical bump as a field on -the struct. - -```rust -#[account] -pub struct UserSecure { - auth: Pubkey, - bump: u8, - rewards_claimed: bool, -} -``` - -Next, let's create account validation structs for each of the new instructions. -They'll be very similar to the insecure versions but will let Anchor handle the -derivation and deserialization of the PDAs. - -```rust -#[derive(Accounts)] -pub struct CreateUserSecure<'info> { - #[account(mut)] - payer: Signer<'info>, - #[account( - init, - seeds = [payer.key().as_ref()], - // derives the PDA using the canonical bump - bump, - payer = payer, - space = 8 + 32 + 1 + 1 - )] - user: Account<'info, UserSecure>, - system_program: Program<'info, System>, -} - -#[derive(Accounts)] -pub struct SecureClaim<'info> { - #[account( - seeds = [payer.key().as_ref()], - bump = user.bump, - constraint = !user.rewards_claimed @ ClaimError::AlreadyClaimed, - constraint = user.auth == payer.key() - )] - user: Account<'info, UserSecure>, - #[account(mut)] - payer: Signer<'info>, - #[account( - init_if_needed, - payer = payer, - associated_token::mint = mint, - associated_token::authority = payer - )] - user_ata: Account<'info, TokenAccount>, - #[account(mut)] - mint: Account<'info, Mint>, - /// CHECK: mint auth PDA - #[account(seeds = ["mint".as_bytes().as_ref()], bump)] - pub mint_authority: UncheckedAccount<'info>, - token_program: Program<'info, Token>, - associated_token_program: Program<'info, AssociatedToken>, - system_program: Program<'info, System>, - rent: Sysvar<'info, Rent>, -} -``` - -Finally, let's implement the instruction logic for the two new instructions. The -`create_user_secure` instruction simply needs to set the `auth`, `bump` and -`rewards_claimed` fields on the `user` account data. - -```rust -pub fn create_user_secure(ctx: Context) -> Result<()> { - ctx.accounts.user.auth = ctx.accounts.payer.key(); - ctx.accounts.user.bump = *ctx.bumps.get("user").unwrap(); - ctx.accounts.user.rewards_claimed = false; - Ok(()) -} -``` - -The `claim_secure` instruction needs to mint 10 tokens to the user and set the -`user` account's `rewards_claimed` field to `true`. - -```rust -pub fn claim_secure(ctx: Context) -> Result<()> { - token::mint_to( - CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - MintTo { - mint: ctx.accounts.mint.to_account_info(), - to: ctx.accounts.user_ata.to_account_info(), - authority: ctx.accounts.mint_authority.to_account_info(), - }, - &[&[ - b"mint".as_ref(), - &[*ctx.bumps.get("mint_authority").unwrap()], - ]], - ), - 10, - )?; - - ctx.accounts.user.rewards_claimed = true; - - Ok(()) -} -``` - -### 4. Test secure instructions - -Let's go ahead and write a test to show that the attacker can no longer claim -more than once using the new instructions. - -Notice that if you start to loop through using multiple PDAs like the old test, -you can't even pass the non-canonical bump to the instructions. However, you can -still loop through using the various PDAs and at the end check that only 1 claim -happened for a total of 10 tokens. 
Your final test will look something like -this: - -```ts -it.only("Attacker can only claim once with secure instructions", async () => { - const attacker = Keypair.generate(); - await safeAirdrop(attacker.publicKey, provider.connection); - const ataKey = await getAssociatedTokenAddress(mint, attacker.publicKey); - const [userPDA] = findProgramAddressSync( - [attacker.publicKey.toBuffer()], - program.programId, - ); - - await program.methods - .createUserSecure() - .accounts({ - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - - await program.methods - .claimSecure() - .accounts({ - payer: attacker.publicKey, - userAta: ataKey, - mint, - user: userPDA, - }) - .signers([attacker]) - .rpc(); - - let numClaims = 1; - - for (let i = 0; i < 256; i++) { - try { - const pda = createProgramAddressSync( - [attacker.publicKey.toBuffer(), Buffer.from([i])], - program.programId, - ); - await program.methods - .createUserSecure() - .accounts({ - user: pda, - payer: attacker.publicKey, - }) - .signers([attacker]) - .rpc(); - - await program.methods - .claimSecure() - .accounts({ - payer: attacker.publicKey, - userAta: ataKey, - mint, - user: pda, - }) - .signers([attacker]) - .rpc(); - - numClaims += 1; - } catch {} - } - - const ata = await getAccount(provider.connection, ataKey); - - expect(Number(ata.amount)).to.equal(10); - expect(numClaims).to.equal(1); -}); -``` - -```bash - bump-seed-canonicalization -Attacker claimed 119 times and got 1190 tokens - ✔ Attacker can claim more than reward limit with insecure instructions (128493ms) - ✔ Attacker can only claim once with secure instructions (1448ms) -``` - -If you use Anchor for all of the PDA derivations, this particular exploit is -pretty simple to avoid. However, if you end up doing anything "non-standard," be -careful to design your program to explicitly use the canonical bump! - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-bump-seed-canonicalization/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that all PDA -derivations and checks are using the canonical bump. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. diff --git a/content/courses/solana-course/content/closing-accounts.md b/content/courses/solana-course/content/closing-accounts.md deleted file mode 100644 index 771172fb6..000000000 --- a/content/courses/solana-course/content/closing-accounts.md +++ /dev/null @@ -1,563 +0,0 @@ ---- -title: Closing Accounts and Revival Attacks -objectives: - - Explain the various security vulnerabilities associated with closing program - accounts incorrectly - - Close program accounts safely and securely using native Rust - - Close program accounts safely and securely using the Anchor `close` - constraint ---- - -# TL;DR - -- **Closing an account** improperly creates an opportunity for - reinitialization/revival attacks -- The Solana runtime **garbage collects accounts** when they are no longer rent - exempt. Closing accounts involves transferring the lamports stored in the - account for rent exemption to another account of your choosing. 
-- You can use the Anchor `#[account(close = )]` - constraint to securely close accounts and set the account discriminator to the - `CLOSED_ACCOUNT_DISCRIMINATOR` - ```rust - #[account(mut, close = receiver)] - pub data_account: Account<'info, MyData>, - #[account(mut)] - pub receiver: SystemAccount<'info> - ``` - -# Overview - -While it sounds simple, closing accounts properly can be tricky. There are a -number of ways an attacker could circumvent having the account closed if you -don't follow specific steps. - -To get a better understanding of these attack vectors, let’s explore each of -these scenarios in depth. - -## Insecure account closing - -At its core, closing an account involves transferring its lamports to a separate -account, thus triggering the Solana runtime to garbage collect the first -account. This resets the owner from the owning program to the system program. - -Take a look at the example below. The instruction requires two accounts: - -1. `account_to_close` - the account to be closed -2. `destination` - the account that should receive the closed account’s lamports - -The program logic is intended to close an account by simply increasing the -`destination` account’s lamports by the amount stored in the `account_to_close` -and setting the `account_to_close` lamports to 0. With this program, after a -full transaction is processed, the `account_to_close` will be garbage collected -by the runtime. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod closing_accounts_insecure { - use super::*; - - pub fn close(ctx: Context) -> ProgramResult { - let dest_starting_lamports = ctx.accounts.destination.lamports(); - - **ctx.accounts.destination.lamports.borrow_mut() = dest_starting_lamports - .checked_add(ctx.accounts.account_to_close.to_account_info().lamports()) - .unwrap(); - **ctx.accounts.account_to_close.to_account_info().lamports.borrow_mut() = 0; - - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Close<'info> { - account_to_close: Account<'info, Data>, - destination: AccountInfo<'info>, -} - -#[account] -pub struct Data { - data: u64, -} -``` - -However, the garbage collection doesn't occur until the transaction completes. -And since there can be multiple instructions in a transaction, this creates an -opportunity for an attacker to invoke the instruction to close the account but -also include in the transaction a transfer to refund the account's rent -exemption lamports. The result is that the account _will not_ be garbage -collected, opening up a path for the attacker to cause unintended behavior in -the program and even drain a protocol. - -## Secure account closing - -The two most important things you can do to close this loophole are to zero out -the account data and add an account discriminator that represents the account -has been closed. You need _both_ of these things to avoid unintended program -behavior. - -An account with zeroed out data can still be used for some things, especially if -it's a PDA whose address derivation is used within the program for verification -purposes. However, the damage may be potentially limited if the attacker can't -access the previously-stored data. - -To further secure the program, however, closed accounts should be given an -account discriminator that designates it as "closed," and all instructions -should perform checks on all passed-in accounts that return an error if the -account is marked closed. - -Look at the example below. 
This program transfers the lamports out of an -account, zeroes out the account data, and sets an account discriminator in a -single instruction in hopes of preventing a subsequent instruction from -utilizing this account again before it has been garbage collected. Failing to do -any one of these things would result in a security vulnerability. - -```rust -use anchor_lang::prelude::*; -use std::io::Write; -use std::ops::DerefMut; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod closing_accounts_insecure_still_still { - use super::*; - - pub fn close(ctx: Context) -> ProgramResult { - let account = ctx.accounts.account.to_account_info(); - - let dest_starting_lamports = ctx.accounts.destination.lamports(); - - **ctx.accounts.destination.lamports.borrow_mut() = dest_starting_lamports - .checked_add(account.lamports()) - .unwrap(); - **account.lamports.borrow_mut() = 0; - - let mut data = account.try_borrow_mut_data()?; - for byte in data.deref_mut().iter_mut() { - *byte = 0; - } - - let dst: &mut [u8] = &mut data; - let mut cursor = std::io::Cursor::new(dst); - cursor - .write_all(&anchor_lang::__private::CLOSED_ACCOUNT_DISCRIMINATOR) - .unwrap(); - - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Close<'info> { - account: Account<'info, Data>, - destination: AccountInfo<'info>, -} - -#[account] -pub struct Data { - data: u64, -} -``` - -Note that the example above is using Anchor's `CLOSED_ACCOUNT_DISCRIMINATOR`. -This is simply an account discriminator where each byte is `255`. The -discriminator doesn't have any inherent meaning, but if you couple it with -account validation checks that return errors any time an account with this -discriminator is passed to an instruction, you'll stop your program from -unintentionally processing an instruction with a closed account. - -### Manual Force Defund - -There is still one small issue. While the practice of zeroing out account data -and adding a "closed" account discriminator will stop your program from being -exploited, a user can still keep an account from being garbage collected by -refunding the account's lamports before the end of an instruction. This results -in one or potentially many accounts existing in a limbo state where they cannot -be used but also cannot be garbage collected. - -To handle this edge case, you may consider adding an instruction that will allow -_anyone_ to defund accounts tagged with the "closed" account discriminator. The -only account validation this instruction would perform is to ensure that the -account being defunded is marked as closed. It may look something like this: - -```rust -use anchor_lang::__private::CLOSED_ACCOUNT_DISCRIMINATOR; -use anchor_lang::prelude::*; -use std::io::{Cursor, Write}; -use std::ops::DerefMut; - -... - - pub fn force_defund(ctx: Context) -> ProgramResult { - let account = &ctx.accounts.account; - - let data = account.try_borrow_data()?; - assert!(data.len() > 8); - - let mut discriminator = [0u8; 8]; - discriminator.copy_from_slice(&data[0..8]); - if discriminator != CLOSED_ACCOUNT_DISCRIMINATOR { - return Err(ProgramError::InvalidAccountData); - } - - let dest_starting_lamports = ctx.accounts.destination.lamports(); - - **ctx.accounts.destination.lamports.borrow_mut() = dest_starting_lamports - .checked_add(account.lamports()) - .unwrap(); - **account.lamports.borrow_mut() = 0; - - Ok(()) - } - -... 
- -#[derive(Accounts)] -pub struct ForceDefund<'info> { - account: AccountInfo<'info>, - destination: AccountInfo<'info>, -} -``` - -Since anyone can call this instruction, this can act as a deterrent to attempted -revival attacks since the attacker is paying for account rent exemption but -anyone else can claim the lamports in a refunded account for themselves. - -While not necessary, this can help eliminate the waste of space and lamports -associated with these "limbo" accounts. - -## Use the Anchor `close` constraint - -Fortunately, Anchor makes all of this much simpler with the -`#[account(close = )]` constraint. This constraint handles -everything required to securely close an account: - -1. Transfers the account’s lamports to the given `` -2. Zeroes out the account data -3. Sets the account discriminator to the `CLOSED_ACCOUNT_DISCRIMINATOR` variant - -All you have to do is add it in the account validation struct to the account you -want closed: - -```rust -#[derive(Accounts)] -pub struct CloseAccount { - #[account( - mut, - close = receiver - )] - pub data_account: Account<'info, MyData>, - #[account(mut)] - pub receiver: SystemAccount<'info> -} -``` - -The `force_defund` instruction is an optional addition that you’ll have to -implement on your own if you’d like to utilize it. - -# Demo - -To clarify how an attacker might take advantage of a revival attack, let's work -with a simple lottery program that uses program account state to manage a user's -participation in the lottery. - -## 1. Setup - -Start by getting the code on the `starter` branch from the -[following repo](https://github.com/Unboxed-Software/solana-closing-accounts/tree/starter). - -The code has two instructions on the program and two tests in the `tests` -directory. - -The program instructions are: - -1. `enter_lottery` -2. `redeem_rewards_insecure` - -When a user calls `enter_lottery`, the program will initialize an account to -store some state about the user's lottery entry. - -Since this is a simplified example rather than a fully-fledge lottery program, -once a user has entered the lottery they can call the `redeem_rewards_insecure` -instruction at any time. This instruction will mint the user an amount of Reward -tokens proportional to the amount of times the user has entered the lottery. -After minting the rewards, the program closes the user's lottery entry. - -Take a minute to familiarize yourself with the program code. The `enter_lottery` -instruction simply creates an account at a PDA mapped to the user and -initializes some state on it. - -The `redeem_rewards_insecure` instruction performs some account and data -validation, mints tokens to the given token account, then closes the lottery -account by removing its lamports. - -However, notice the `redeem_rewards_insecure` instruction _only_ transfers out -the account's lamports, leaving the account open to revival attacks. - -## 2. Test Insecure Program - -An attacker that successfully keeps their account from closing can then call -`redeem_rewards_insecure` multiple times, claiming more rewards than they are -owed. - -Some starter tests have already been written that showcase this vulnerability. -Take a look at the `closing-accounts.ts` file in the `tests` directory. There is -some setup in the `before` function, then a test that simply creates a new -lottery entry for `attacker`. - -Finally, there's a test that demonstrates how an attacker can keep the account -alive even after claiming rewards and then claim rewards again. 
That test looks -like this: - -```typescript -it("attacker can close + refund lottery acct + claim multiple rewards", async () => { - // claim multiple times - for (let i = 0; i < 2; i++) { - const tx = new Transaction(); - // instruction claims rewards, program will try to close account - tx.add( - await program.methods - .redeemWinningsInsecure() - .accounts({ - lotteryEntry: attackerLotteryEntry, - user: attacker.publicKey, - userAta: attackerAta, - rewardMint: rewardMint, - mintAuth: mintAuth, - tokenProgram: TOKEN_PROGRAM_ID, - }) - .instruction(), - ); - - // user adds instruction to refund dataAccount lamports - const rentExemptLamports = - await provider.connection.getMinimumBalanceForRentExemption( - 82, - "confirmed", - ); - tx.add( - SystemProgram.transfer({ - fromPubkey: attacker.publicKey, - toPubkey: attackerLotteryEntry, - lamports: rentExemptLamports, - }), - ); - // send tx - await sendAndConfirmTransaction(provider.connection, tx, [attacker]); - await new Promise(x => setTimeout(x, 5000)); - } - - const ata = await getAccount(provider.connection, attackerAta); - const lotteryEntry = await program.account.lotteryAccount.fetch( - attackerLotteryEntry, - ); - - expect(Number(ata.amount)).to.equal( - lotteryEntry.timestamp.toNumber() * 10 * 2, - ); -}); -``` - -This test does the following: - -1. Calls `redeem_rewards_insecure` to redeem the user's rewards -2. In the same transaction, adds an instruction to refund the user's - `lottery_entry` before it can actually be closed -3. Successfully repeats steps 1 and 2, redeeming rewards for a second time. - -You can theoretically repeat steps 1-2 infinitely until either a) the program -has no more rewards to give or b) someone notices and patches the exploit. This -would obviously be a severe problem in any real program as it allows a malicious -attacker to drain an entire rewards pool. - -## 3. Create a `redeem_rewards_secure` instruction - -To prevent this from happening we're going to create a new instruction that -closes the lottery account seucrely using the Anchor `close` constraint. Feel -free to try this out on your own if you'd like. - -The new account validation struct called `RedeemWinningsSecure` should look like -this: - -```rust -#[derive(Accounts)] -pub struct RedeemWinningsSecure<'info> { - // program expects this account to be initialized - #[account( - mut, - seeds = [user.key().as_ref()], - bump = lottery_entry.bump, - has_one = user, - close = user - )] - pub lottery_entry: Account<'info, LotteryAccount>, - #[account(mut)] - pub user: Signer<'info>, - #[account( - mut, - constraint = user_ata.key() == lottery_entry.user_ata - )] - pub user_ata: Account<'info, TokenAccount>, - #[account( - mut, - constraint = reward_mint.key() == user_ata.mint - )] - pub reward_mint: Account<'info, Mint>, - ///CHECK: mint authority - #[account( - seeds = [MINT_SEED.as_bytes()], - bump - )] - pub mint_auth: AccountInfo<'info>, - pub token_program: Program<'info, Token> -} -``` - -It should be the exact same as the original `RedeemWinnings` account validation -struct, except there is an additional `close = user` constraint on the -`lottery_entry` account. This will tell Anchor to close the account by zeroing -out the data, transferring its lamports to the `user` account, and setting the -account discriminator to the `CLOSED_ACCOUNT_DISCRIMINATOR`. This last step is -what will prevent the account from being used again if the program has attempted -to close it already. 
- -Then, we can create a `mint_ctx` method on the new `RedeemWinningsSecure` struct -to help with the minting CPI to the token program. - -```Rust -impl<'info> RedeemWinningsSecure <'info> { - pub fn mint_ctx(&self) -> CpiContext<'_, '_, '_, 'info, MintTo<'info>> { - let cpi_program = self.token_program.to_account_info(); - let cpi_accounts = MintTo { - mint: self.reward_mint.to_account_info(), - to: self.user_ata.to_account_info(), - authority: self.mint_auth.to_account_info() - }; - - CpiContext::new(cpi_program, cpi_accounts) - } -} -``` - -Finally, the logic for the new secure instruction should look like this: - -```rust -pub fn redeem_winnings_secure(ctx: Context) -> Result<()> { - - msg!("Calculating winnings"); - let amount = ctx.accounts.lottery_entry.timestamp as u64 * 10; - - msg!("Minting {} tokens in rewards", amount); - // program signer seeds - let auth_bump = *ctx.bumps.get("mint_auth").unwrap(); - let auth_seeds = &[MINT_SEED.as_bytes(), &[auth_bump]]; - let signer = &[&auth_seeds[..]]; - - // redeem rewards by minting to user - mint_to(ctx.accounts.mint_ctx().with_signer(signer), amount)?; - - Ok(()) -} -``` - -This logic simply calculates the rewards for the claiming user and transfers the -rewards. However, because of the `close` constraint in the account validation -struct, the attacker shouldn't be able to call this instruction multiple times. - -## 4. Test the Program - -To test our new secure instruction, let's create a new test that trys to call -`redeemingWinningsSecure` twice. We expect the second call to throw an error. - -```typescript -it("attacker cannot claim multiple rewards with secure claim", async () => { - const tx = new Transaction(); - // instruction claims rewards, program will try to close account - tx.add( - await program.methods - .redeemWinningsSecure() - .accounts({ - lotteryEntry: attackerLotteryEntry, - user: attacker.publicKey, - userAta: attackerAta, - rewardMint: rewardMint, - mintAuth: mintAuth, - tokenProgram: TOKEN_PROGRAM_ID, - }) - .instruction(), - ); - - // user adds instruction to refund dataAccount lamports - const rentExemptLamports = - await provider.connection.getMinimumBalanceForRentExemption( - 82, - "confirmed", - ); - tx.add( - SystemProgram.transfer({ - fromPubkey: attacker.publicKey, - toPubkey: attackerLotteryEntry, - lamports: rentExemptLamports, - }), - ); - // send tx - await sendAndConfirmTransaction(provider.connection, tx, [attacker]); - - try { - await program.methods - .redeemWinningsSecure() - .accounts({ - lotteryEntry: attackerLotteryEntry, - user: attacker.publicKey, - userAta: attackerAta, - rewardMint: rewardMint, - mintAuth: mintAuth, - tokenProgram: TOKEN_PROGRAM_ID, - }) - .signers([attacker]) - .rpc(); - } catch (error) { - console.log(error.message); - expect(error); - } -}); -``` - -Run `anchor test` to see that the test passes. The output will look something -like this: - -```bash - closing-accounts - ✔ Enter lottery (451ms) - ✔ attacker can close + refund lottery acct + claim multiple rewards (18760ms) -AnchorError caused by account: lottery_entry. Error Code: AccountDiscriminatorMismatch. Error Number: 3002. Error Message: 8 byte discriminator did not match what was expected. - ✔ attacker cannot claim multiple rewards with secure claim (414ms) -``` - -Note, this does not prevent the malicious user from refunding their account -altogether - it just protects our program from accidentally re-using the account -when it should be closed. 
We haven't implemented a `force_defund` instruction so -far, but we could. If you're feeling up for it, give it a try yourself! - -The simplest and most secure way to close accounts is using Anchor's `close` -constraint. If you ever need more custom behavior and can't use this constraint, -make sure to replicate its functionality to ensure your program is secure. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-closing-accounts/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that when accounts are -closed they're not susceptible to revival attacks. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. diff --git a/content/courses/solana-course/content/cpi.md b/content/courses/solana-course/content/cpi.md deleted file mode 100644 index 9f4baeac3..000000000 --- a/content/courses/solana-course/content/cpi.md +++ /dev/null @@ -1,844 +0,0 @@ ---- -title: Cross Program Invocations -objectives: - - Explain Cross-Program Invocations (CPIs) - - Describe how to construct and use CPIs - - Explain how a program provides a signature for a PDA - - Avoid common pitfalls and troubleshoot common errors associated with CPIs ---- - -# TL;DR - -- A **Cross-Program Invocation (CPI)** is a call from one program to another, - targeting a specific instruction on the program called -- CPIs are made using the commands `invoke` or `invoke_signed`, the latter being - how programs provide signatures for PDAs that they own -- CPIs make programs in the Solana ecosystem completely interoperable because - all public instructions of a program can be invoked by another program via a - CPI -- Because we have no control over the accounts and data submitted to a program, - it's important to verify all of the parameters passed into a CPI to ensure - program security - -# Overview - -## What is a CPI? - -A Cross-Program Invocation (CPI) is a direct call from one program into another. -Just as any client can call any program using the JSON RPC, any program can call -any other program directly. The only requirement for invoking an instruction on -another program from within your program is that you construct the instruction -correctly. You can make CPIs to native programs, other programs you've created, -and third party programs. CPIs essentially turn the entire Solana ecosystem into -one giant API that is at your disposal as a developer. - -CPIs have a similar make up to instructions that you are used to creating client -side. There are some intricacies and differences depending on if you are using -`invoke` or `invoke_signed`. We'll be covering both of these later in this -lesson. - -## How to make a CPI - -CPIs are made using the -[`invoke`](https://docs.rs/solana-program/1.10.19/solana_program/program/fn.invoke.html) -or -[`invoke_signed`](https://docs.rs/solana-program/1.10.19/solana_program/program/fn.invoke_signed.html) -function from the `solana_program` crate. You use `invoke` to essentially pass -through the original transaction signature that was passed into your program. -You use `invoke_signed` to have your program "sign" for its PDAs. 
- -```rust -// Used when there are not signatures for PDAs needed -pub fn invoke( - instruction: &Instruction, - account_infos: &[AccountInfo<'_>] -) -> ProgramResult - -// Used when a program must provide a 'signature' for a PDA, hence the signer_seeds parameter -pub fn invoke_signed( - instruction: &Instruction, - account_infos: &[AccountInfo<'_>], - signers_seeds: &[&[&[u8]]] -) -> ProgramResult -``` - -CPIs extend the privileges of the caller to the callee. If the instruction the -callee program is processing contains an account that was marked as a signer or -writable when originally passed into the caller program, then it will be -considered a signer or writable account in the invoked program as well. - -It's important to note that you as the developer decide which accounts to pass -into the CPI. You can think of a CPI as building another instruction from -scratch with only information that was passed into your program. - -### CPI with `invoke` - -```rust -invoke( - &Instruction { - program_id: calling_program_id, - accounts: accounts_meta, - data, - }, - &account_infos[account1.clone(), account2.clone(), account3.clone()], -)?; -``` - -- `program_id` - the public key of the program you are going to invoke -- `account` - a list of account metadata as a vector. You need to include every - account that the invoked program will read from or write to -- `data` - a byte buffer representing the data being passed to the callee - program as a vector - -The `Instruction` type has the following definition: - -```rust -pub struct Instruction { - pub program_id: Pubkey, - pub accounts: Vec, - pub data: Vec, -} -``` - -Depending on the program you're making the call to, there may be a crate -available with helper functions for creating the `Instruction` object. Many -individuals and organizations create publicly available crates alongside their -programs that expose these sorts of functions to simplify calling their -programs. This is similar to the Typescript libraries we've used in this course -(e.g. [@solana/web3.js](https://solana-labs.github.io/solana-web3.js/), -[@solana/spl-token](https://solana-labs.github.io/solana-program-library/token/js/)). -For example, in this lesson's demo we'll be using the `spl_token` crate to -create minting instructions. In all other cases, you'll need to create the -`Instruction` instance from scratch. - -While the `program_id` field is fairly straightforward, the `accounts` and -`data` fields require some explanation. - -Both the `accounts` and `data` fields are of type `Vec`, or vector. You can use -the [`vec`](https://doc.rust-lang.org/std/macro.vec.html) macro to construct a -vector using array notation, like so: - -```rust -let v = vec![1, 2, 3]; -assert_eq!(v[0], 1); -assert_eq!(v[1], 2); -assert_eq!(v[2], 3); -``` - -The `accounts` field of the `Instruction` struct expects a vector of type -[`AccountMeta`](https://docs.rs/solana-program/latest/solana_program/instruction/struct.AccountMeta.html). 
-The `AccountMeta` struct has the following definition: - -```rust -pub struct AccountMeta { - pub pubkey: Pubkey, - pub is_signer: bool, - pub is_writable: bool, -} -``` - -Putting these two pieces together looks like this: - -```rust -use solana_program::instruction::AccountMeta; - -vec![ - AccountMeta::new(account1_pubkey, true), - AccountMeta::read_only(account2_pubkey, false), - AccountMeta::read_only(account3_pubkey, true), - AccountMeta::new(account4_pubkey, false), -] -``` - -The final field of the instruction object is the data, as a byte buffer of -course. You can create a byte buffer in Rust using the `vec` macro again, which -has an implemented function allowing you to create a vector of certain length. -Once you have initialized an empty vector, you would construct the byte buffer -similar to how you would client-side. Determine the data required by the callee -program and the serialization format used and write your code to match. Feel -free to read up on some of the -[features of the `vec` macro available to you here](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#). - -```rust -let mut vec = Vec::with_capacity(3); -vec.push(1); -vec.push(2); -vec.extend_from_slice(&number_variable.to_le_bytes()); -``` - -The -[`extend_from_slice`](https://doc.rust-lang.org/alloc/vec/struct.Vec.html#method.extend_from_slice) -method is probably new to you. It's a method on vectors that takes a slice as -input, iterates over the slice, clones each element, and then appends it to the -`Vec`. - -### Pass a list of accounts - -In addition to the instruction, both `invoke` and `invoke_signed` also require a -list of `account_info` objects. Just like the list of `AccountMeta` objects you -added to the instruction, you must include all of the accounts that the program -you're calling will read from or write to. - -By the time you make a CPI in your program, you should have already grabbed all -the `account_info` objects that were passed into your program and stored them in -variables. You'll construct your list of `account_info` objects for the CPI by -choosing which of these accounts to copy and send along. - -You can copy each `account_info` object that you need to pass into the CPI using -the -[`Clone`](https://docs.rs/solana-program/1.10.19/solana_program/account_info/struct.AccountInfo.html#impl-Clone) -trait that is implemented on the `account_info` struct in the `solana_program` -crate. This `Clone` trait returns a copy of the -[`account_info`](https://docs.rs/solana-program/1.10.19/solana_program/account_info/struct.AccountInfo.html) -instance. - -```rust -&[first_account.clone(), second_account.clone(), third_account.clone()] -``` - -### CPI with `invoke` - -With both the instruction and the list of accounts created, you can perform a -call to `invoke`. - -```rust -invoke( - &Instruction { - program_id: calling_program_id, - accounts: accounts_meta, - data, - }, - &[account1.clone(), account2.clone(), account3.clone()], -)?; -``` - -There's no need to include a signature because the Solana runtime passes along -the original signature passed into your program. Remember, `invoke` won't work -if a signature is required on behalf of a PDA. For that, you'll need to use -`invoke_signed`. - -### CPI with `invoke_signed` - -Using `invoke_signed` is a little different just because there is an additional -field that requires the seeds used to derive any PDAs that must sign the -transaction. 
You may recall from previous lessons that PDAs do not lie on the -Ed25519 curve and, therefore, do not have a corresponding secret key. You’ve -been told that programs can provide signatures for their PDAs, but have not -learned how that actually happens - until now. Programs provide signatures for -their PDAs with the `invoke_signed` function. The first two fields of -`invoke_signed` are the same as `invoke`, but there is an additional -`signers_seeds` field that comes into play here. - -```rust -invoke_signed( - &instruction, - accounts, - &[&["First addresses seed"], - &["Second addresses first seed", - "Second addresses second seed"]], -)?; -``` - -While PDAs have no secret keys of their own, they can be used by a program to -issue an instruction that includes the PDA as a signer. The only way for the -runtime to verify that the PDA belongs to the calling program is for the calling -program to supply the seeds used to generate the address in the `signers_seeds` -field. - -The Solana runtime will internally -call [`create_program_address`](https://docs.rs/solana-program/1.4.4/solana_program/pubkey/struct.Pubkey.html#method.create_program_address) -using the seeds provided and the `program_id` of the calling program. It can -then compare the result against the addresses supplied in the instruction. If -any of the addresses match, then the runtime knows that indeed the program -associated with this address is the caller and thus is authorized to be a -signer. - -## Best Practices and common pitfalls - -### Security checks - -There are some common mistakes and things to remember when utilizing CPIs that -are important to your program’s security and robustness. The first thing to -remember is that, as we know by now, we have no control over what information is -passed into our programs. For this reason, it’s important to always verify the -`program_id`, accounts, and data passed into the CPI. Without these security -checks, someone could submit a transaction that invokes an instruction on a -completely different program than was expected, which is not ideal. - -Fortunately, there are inherent checks on the validity of any PDAs that are -marked as signers within the `invoke_signed` function. All other accounts and -`instruction_data` should be verified somewhere in your program code before -making the CPI. It's also important to make sure you’re targeting the intended -instruction on the program you are invoking. The easiest way to do this is to -read the source code of the program you will be invoking just as you would if -you were constructing an instruction from the client side. - -### Common errors - -There are some common errors you might receive when executing a CPI, they -usually mean you are constructing the CPI with incorrect information. For -example, you may come across an error message similar to this: - -```text -EF1M4SPfKcchb6scq297y8FPCaLvj5kGjwMzjTM68wjA's signer privilege escalated -Program returned error: "Cross-program invocation with unauthorized signer or writable account" -``` - -This message is a little misleading, because “signer privilege escalated” does -not seem like a problem but, in reality, it means that you are incorrectly -signing for the address in the message. If you are using `invoke_signed` and -receive this error, then it likely means that the seeds you are providing are -incorrect. 
An example transaction that failed with this error can be found -[here](https://explorer.solana.com/tx/3mxbShkerH9ZV1rMmvDfaAhLhJJqrmMjcsWzanjkARjBQurhf4dounrDCUkGunH1p9M4jEwef9parueyHVw6r2Et?cluster=devnet). - -Another similar error is thrown when an account that's written to isn't marked -as `writable` inside the `AccountMeta` struct. - -```text -2qoeXa9fo8xVHzd2h9mVcueh6oK3zmAiJxCTySM5rbLZ's writable privilege escalated -Program returned error: "Cross-program invocation with unauthorized signer or writable account" -``` - -Remember, any account whose data may be mutated by the program during execution -must be specified as writable. During execution, writing to an account that was -not specified as writable will cause the transaction to fail. Writing to an -account that is not owned by the program will cause the transaction to fail. Any -account whose lamport balance may be mutated by the program during execution -must be specified as writable. During execution, mutating the lamports of an -account that was not specified as writable will cause the transaction to fail. -While subtracting lamports from an account not owned by the program will cause -the transaction to fail, adding lamports to any account is allowed, as long is -it is mutable. - -To see this in action, view this -[transaction in the explorer](https://explorer.solana.com/tx/ExB9YQJiSzTZDBqx4itPaa4TpT8VK4Adk7GU5pSoGEzNz9fa7PPZsUxssHGrBbJRnCvhoKgLCWnAycFB7VYDbBg?cluster=devnet). - -## Why CPIs matter? - -CPIs are a very important feature of the Solana ecosystem and they make all -programs deployed interoperable with each other. With CPIs there is no need to -re-invent the wheel when it comes to development. This creates the opportunity -for building new protocols and applications on top of what’s already been built, -just like building blocks or Lego bricks. It’s important to remember that CPIs -are a two-way street and the same is true for any programs that you deploy! If -you build something cool and useful, developers have the ability to build on top -of what you’ve done or just plug your protocol into whatever it is that they are -building. Composability is a big part of what makes crypto so unique and CPIs -are what makes this possible on Solana. - -Another important aspect of CPIs is that they allow programs to sign for their -PDAs. As you have probably noticed by now, PDAs are used very frequently in -Solana development because they allow programs to control specific addresses in -such a way that no external user can generate transactions with valid signatures -for those addresses. This can be _very_ useful for many applications in Web3 -(e.g. DeFi, NFTs, etc.) Without CPIs, PDAs would not be nearly as useful because -there would be no way for a program to sign transactions involving them - -essentially turning them black holes (once something is sent to a PDA, there -would be no way to get it back out w/o CPIs!) - -# Demo - -Now let's get some hands on experience with CPIs by making some additions to the -Movie Review program again. If you're dropping into this lesson without having -gone through prior lessons, the Movie Review program allows users to submit -movie reviews and have them stored in PDA accounts. - -Last lesson, we added the ability to leave comments on other movie reviews using -PDAs. In this lesson, we’re going to work on having the program mint tokens to -the reviewer or commenter anytime a review or comment is submitted. 
- -To implement this, we'll have to invoke the SPL Token Program's `MintTo` -instruction using a CPI. If you need a refresher on tokens, token mints, and -minting new tokens, have a look at the -[Token Program lesson](./token-program.md) before moving forward with this demo. - -### 1. Get starter code and add dependencies - -To get started, we will be using the final state of the Movie Review program -from the previous PDA lesson. So, if you just completed that lesson then you’re -all set and ready to go. If you are just jumping in here, no worries, you can -[download the starter code here](https://github.com/Unboxed-Software/solana-movie-program/tree/solution-add-comments). -We'll be using the `solution-add-comments` branch as our starting point. - -### 2. Add dependencies to `Cargo.toml` - -Before we get started we need to add two new dependencies to the `Cargo.toml` -file underneath `[dependencies]`. We'll be using the `spl-token` and -`spl-associated-token-account` crates in addition to the existing dependencies. - -```text -spl-token = { version="~3.2.0", features = [ "no-entrypoint" ] } -spl-associated-token-account = { version="=1.0.5", features = [ "no-entrypoint" ] } -``` - -After adding the above, run `cargo check` in your console to have cargo resolve -your dependencies and ensure that you are ready to continue. Depending on your -setup you may need to modify crate versions before moving on. - -### 3. Add necessary accounts to `add_movie_review` - -Because we want users to be minted tokens upon creating a review, it makes sense -to add minting logic inside the `add_movie_review` function. Since we'll be -minting tokens, the `add_movie_review` instruction requires a few new accounts -to be passed in: - -- `token_mint` - the mint address of the token -- `mint_auth` - address of the authority of the token mint -- `user_ata` - user’s associated token account for this mint (where the tokens - will be minted) -- `token_program` - address of the token program - -We'll start by adding these new accounts to the area of the function that -iterates through the passed in accounts: - -```rust -// Inside add_movie_review -msg!("Adding movie review..."); -msg!("Title: {}", title); -msg!("Rating: {}", rating); -msg!("Description: {}", description); - -let account_info_iter = &mut accounts.iter(); - -let initializer = next_account_info(account_info_iter)?; -let pda_account = next_account_info(account_info_iter)?; -let pda_counter = next_account_info(account_info_iter)?; -let token_mint = next_account_info(account_info_iter)?; -let mint_auth = next_account_info(account_info_iter)?; -let user_ata = next_account_info(account_info_iter)?; -let system_program = next_account_info(account_info_iter)?; -let token_program = next_account_info(account_info_iter)?; -``` - -There is no additional `instruction_data` required for the new functionality, so -no changes need to be made to how data is deserialized. The only additional -information that’s needed is the extra accounts. - -### 4. Mint tokens to the reviewer in `add_movie_review` - -Before we dive into the minting logic, let's import the address of the Token -program and the constant `LAMPORTS_PER_SOL` at the top of the file. - -```rust -// Inside processor.rs -use solana_program::native_token::LAMPORTS_PER_SOL; -use spl_associated_token_account::get_associated_token_address; -use spl_token::{instruction::initialize_mint, ID as TOKEN_PROGRAM_ID}; -``` - -Now we can move on to the logic that handles the actual minting of the tokens! 
-We’ll be adding this to the very end of the `add_movie_review` function right -before `Ok(())` is returned. - -Minting tokens requires a signature by the mint authority. Since the program -needs to be able to mint tokens, the mint authority needs to be an account that -the program can sign for. In other words, it needs to be a PDA account owned by -the program. - -We'll also be structuring our token mint such that the mint account is a PDA -account that we can derive deterministically. This way we can always verify that -the `token_mint` account passed into the program is the expected account. - -Let's go ahead and derive the token mint and mint authority addresses using the -`find_program_address` function with the seeds “token_mint” and "token_auth," -respectively. - -```rust -// Mint tokens here -msg!("deriving mint authority"); -let (mint_pda, mint_bump) = Pubkey::find_program_address(&[b"token_mint"], program_id); -let (mint_auth_pda, _mint_auth_bump) = - Pubkey::find_program_address(&[b"token_auth"], program_id); -``` - -Next, we'll perform security checks against each of the new accounts passed into -the program. Always remember to verify accounts! - -```rust -if *token_mint.key != mint_pda { - msg!("Incorrect token mint"); - return Err(ReviewError::IncorrectAccountError.into()); -} - -if *mint_auth.key != mint_auth_pda { - msg!("Mint passed in and mint derived do not match"); - return Err(ReviewError::InvalidPDA.into()); -} - -if *user_ata.key != get_associated_token_address(initializer.key, token_mint.key) { - msg!("Incorrect token mint"); - return Err(ReviewError::IncorrectAccountError.into()); -} - -if *token_program.key != TOKEN_PROGRAM_ID { - msg!("Incorrect token program"); - return Err(ReviewError::IncorrectAccountError.into()); -} -``` - -Finally, we can issue a CPI to the `mint_to` function of the token program with -the correct accounts using `invoke_signed`. The `spl_token` crate provides a -`mint_to` helper function for creating the minting instruction. This is great -because it means we don't have to manually build the entire instruction from -scratch. Rather, we can simply pass in the arguments required by the function. -Here's the function signature: - -```rust -// Inside the token program, returns an Instruction object -pub fn mint_to( - token_program_id: &Pubkey, - mint_pubkey: &Pubkey, - account_pubkey: &Pubkey, - owner_pubkey: &Pubkey, - signer_pubkeys: &[&Pubkey], - amount: u64, -) -> Result -``` - -Then we provide copies of the `token_mint`, `user_ata`, and `mint_auth` -accounts. And, most relevant to this lesson, we provide the seeds used to find -the `token_mint` address, including the bump seed. - -```rust -msg!("Minting 10 tokens to User associated token account"); -invoke_signed( - // Instruction - &spl_token::instruction::mint_to( - token_program.key, - token_mint.key, - user_ata.key, - mint_auth.key, - &[], - 10*LAMPORTS_PER_SOL, - )?, - // Account_infos - &[token_mint.clone(), user_ata.clone(), mint_auth.clone()], - // Seeds - &[&[b"token_mint", &[mint_bump]]], -)?; - -Ok(()) -``` - -Note that we are using `invoke_signed` and not `invoke` here. The Token program -requires the `mint_auth` account to sign for this transaction. Since the -`mint_auth` account is a PDA, only the program it was derived from can sign on -its behalf. When `invoke_signed` is called, the Solana runtime calls -`create_program_address` with the seeds and bump provided and then compares the -derived address with all of the addresses of the provided `AccountInfo` objects. 
-If any of the addresses match the derived address, the runtime knows that the -matching account is a PDA of this program and that the program is signing this -transaction for this account. - -At this point, the `add_movie_review` instruction should be fully functional and -will mint ten tokens to the reviewer when a review is created. - -### 5. Repeat for `add_comment` - -Our updates to the `add_comment` function will be almost identical to what we -did for the `add_movie_review` function above. The only difference is that we’ll -change the amount of tokens minted for a comment from ten to five so that adding -reviews are weighted above commenting. First, update the accounts with the same -four additional accounts as in the `add_movie_review` function. - -```rust -// Inside add_comment -let account_info_iter = &mut accounts.iter(); - -let commenter = next_account_info(account_info_iter)?; -let pda_review = next_account_info(account_info_iter)?; -let pda_counter = next_account_info(account_info_iter)?; -let pda_comment = next_account_info(account_info_iter)?; -let token_mint = next_account_info(account_info_iter)?; -let mint_auth = next_account_info(account_info_iter)?; -let user_ata = next_account_info(account_info_iter)?; -let system_program = next_account_info(account_info_iter)?; -let token_program = next_account_info(account_info_iter)?; -``` - -Next, move to the bottom of the `add_comment` function just before the `Ok(())`. -Then derive the token mint and mint authority accounts. Remember, both are PDAs -derived from seeds "token_mint" and "token_authority" respectively. - -```rust -// Mint tokens here -msg!("deriving mint authority"); -let (mint_pda, mint_bump) = Pubkey::find_program_address(&[b"token_mint"], program_id); -let (mint_auth_pda, _mint_auth_bump) = - Pubkey::find_program_address(&[b"token_auth"], program_id); -``` - -Next, verify that each of the new accounts is the correct account. - -```rust -if *token_mint.key != mint_pda { - msg!("Incorrect token mint"); - return Err(ReviewError::IncorrectAccountError.into()); -} - -if *mint_auth.key != mint_auth_pda { - msg!("Mint passed in and mint derived do not match"); - return Err(ReviewError::InvalidPDA.into()); -} - -if *user_ata.key != get_associated_token_address(commenter.key, token_mint.key) { - msg!("Incorrect token mint"); - return Err(ReviewError::IncorrectAccountError.into()); -} - -if *token_program.key != TOKEN_PROGRAM_ID { - msg!("Incorrect token program"); - return Err(ReviewError::IncorrectAccountError.into()); -} -``` - -Finally, use `invoke_signed` to send the `mint_to` instruction to the Token -program, sending five tokens to the commenter. - -```rust -msg!("Minting 5 tokens to User associated token account"); -invoke_signed( - // Instruction - &spl_token::instruction::mint_to( - token_program.key, - token_mint.key, - user_ata.key, - mint_auth.key, - &[], - 5 * LAMPORTS_PER_SOL, - )?, - // Account_infos - &[token_mint.clone(), user_ata.clone(), mint_auth.clone()], - // Seeds - &[&[b"token_mint", &[mint_bump]]], -)?; - -Ok(()) -``` - -### 6. Set up the token mint - -We've written all the code needed to mint tokens to reviewers and commenters, -but all of it assumes that there is a token mint at the PDA derived with the -seed "token_mint." For this to work, we're going to set up an additional -instruction for initializing the token mint. It will be written such that it can -only be called once and it doesn't particularly matter who calls it. 
- -Given that throughout this lesson we've already hammered home all of the -concepts associated with PDAs and CPIs multiple times, we're going to walk -through this bit with less explanation than the prior steps. Start by adding a -fourth instruction variant to the `MovieInstruction` enum in `instruction.rs`. - -```rust -pub enum MovieInstruction { - AddMovieReview { - title: String, - rating: u8, - description: String, - }, - UpdateMovieReview { - title: String, - rating: u8, - description: String, - }, - AddComment { - comment: String, - }, - InitializeMint, -} -``` - -Be sure to add it to the `match` statement in the `unpack` function in the same -file under the variant `3`. - -```rust -impl MovieInstruction { - pub fn unpack(input: &[u8]) -> Result { - let (&variant, rest) = input - .split_first() - .ok_or(ProgramError::InvalidInstructionData)?; - Ok(match variant { - 0 => { - let payload = MovieReviewPayload::try_from_slice(rest).unwrap(); - Self::AddMovieReview { - title: payload.title, - rating: payload.rating, - description: payload.description, - } - } - 1 => { - let payload = MovieReviewPayload::try_from_slice(rest).unwrap(); - Self::UpdateMovieReview { - title: payload.title, - rating: payload.rating, - description: payload.description, - } - } - 2 => { - let payload = CommentPayload::try_from_slice(rest).unwrap(); - Self::AddComment { - comment: payload.comment, - } - } - 3 => Self::InitializeMint, - _ => return Err(ProgramError::InvalidInstructionData), - }) - } -} -``` - -In the `process_instruction` function in the `processor.rs` file, add the new -instruction to the `match` statement and call a function -`initialize_token_mint`. - -```rust -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8], -) -> ProgramResult { - let instruction = MovieInstruction::unpack(instruction_data)?; - match instruction { - MovieInstruction::AddMovieReview { - title, - rating, - description, - } => add_movie_review(program_id, accounts, title, rating, description), - MovieInstruction::UpdateMovieReview { - title, - rating, - description, - } => update_movie_review(program_id, accounts, title, rating, description), - MovieInstruction::AddComment { comment } => add_comment(program_id, accounts, comment), - MovieInstruction::InitializeMint => initialize_token_mint(program_id, accounts), - } -} -``` - -Lastly, declare and implement the `initialize_token_mint` function. This -function will derive the token mint and mint authority PDAs, create the token -mint account, and then initialize the token mint. We won't explain all of this -in detail, but it's worth reading through the code, especially given that the -creation and initialization of the token mint both involve CPIs. Again, if you -need a refresher on tokens and mints, have a look at the -[Token Program lesson](./token-program.md). 
- -```rust -pub fn initialize_token_mint(program_id: &Pubkey, accounts: &[AccountInfo]) -> ProgramResult { - let account_info_iter = &mut accounts.iter(); - - let initializer = next_account_info(account_info_iter)?; - let token_mint = next_account_info(account_info_iter)?; - let mint_auth = next_account_info(account_info_iter)?; - let system_program = next_account_info(account_info_iter)?; - let token_program = next_account_info(account_info_iter)?; - let sysvar_rent = next_account_info(account_info_iter)?; - - let (mint_pda, mint_bump) = Pubkey::find_program_address(&[b"token_mint"], program_id); - let (mint_auth_pda, _mint_auth_bump) = - Pubkey::find_program_address(&[b"token_auth"], program_id); - - msg!("Token mint: {:?}", mint_pda); - msg!("Mint authority: {:?}", mint_auth_pda); - - if mint_pda != *token_mint.key { - msg!("Incorrect token mint account"); - return Err(ReviewError::IncorrectAccountError.into()); - } - - if *token_program.key != TOKEN_PROGRAM_ID { - msg!("Incorrect token program"); - return Err(ReviewError::IncorrectAccountError.into()); - } - - if *mint_auth.key != mint_auth_pda { - msg!("Incorrect mint auth account"); - return Err(ReviewError::IncorrectAccountError.into()); - } - - let rent = Rent::get()?; - let rent_lamports = rent.minimum_balance(82); - - invoke_signed( - &system_instruction::create_account( - initializer.key, - token_mint.key, - rent_lamports, - 82, - token_program.key, - ), - &[ - initializer.clone(), - token_mint.clone(), - system_program.clone(), - ], - &[&[b"token_mint", &[mint_bump]]], - )?; - - msg!("Created token mint account"); - - invoke_signed( - &initialize_mint( - token_program.key, - token_mint.key, - mint_auth.key, - Option::None, - 9, - )?, - &[token_mint.clone(), sysvar_rent.clone(), mint_auth.clone()], - &[&[b"token_mint", &[mint_bump]]], - )?; - - msg!("Initialized token mint"); - - Ok(()) -} -``` - -### 7. Build and deploy - -Now we’re ready to build and deploy our program! You can build the program by -running `cargo build-bpf` and then running the command that is returned, it -should look something like `solana program deploy `. - -Before you can start testing whether or not adding a review or comment sends you -tokens, you need to initialize the program's token mint. You can use -[this script](https://github.com/Unboxed-Software/solana-movie-token-client) to -do that. Once you'd cloned that repository, replace the `PROGRAM_ID` in -`index.ts` with your program's ID. Then run `npm install` and then `npm start`. -The script assumes you're deploying to Devnet. If you're deploying locally, then -make sure to tailor the script accordingly. - -Once you've initialized your token mint, you can use the -[Movie Review frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-add-tokens) -to test adding reviews and comments. Again, the code assumes you're on Devnet so -please act accordingly. - -After submitting a review, you should see 10 new tokens in your wallet! When you -add a comment, you should receive 5 tokens. They won't have a fancy name or -image since we didn't add any metadata to the token, but you get the idea. - -If you need more time with the concepts from this lesson or got stuck along the -way, feel free to -[take a look at the solution code](https://github.com/Unboxed-Software/solana-movie-program/tree/solution-add-tokens). -Note that the solution to this demo is on the `solution-add-tokens` branch. 
- -# Challenge - -To apply what you've learned about CPIs in this lesson, think about how you -could incorporate them into the Student Intro program. You could do something -similar to what we did in the demo here and add some functionality to mint -tokens to users when they introduce themselves. Or if you're feeling really -ambitious, think about how you could take all that you have learned so far in -the course and create something completely new from scratch. - -A great example would be to build a decentralized Stack Overflow. The program -could use tokens to determine a user's overall rating, mint tokens when -questions are answered correctly, allow users to upvote answers, etc. All of -that is possible and you now have the skills and knowledge to go and build -something like it on your own! - -Congratulations on reaching the end of Module 4! Feel free to share some quick -feedback -[here](https://airtable.com/shrOsyopqYlzvmXSC?prefill_Module=Module%204), so -that we can continue to improve the course. diff --git a/content/courses/solana-course/content/deserialize-custom-data.md b/content/courses/solana-course/content/deserialize-custom-data.md deleted file mode 100644 index 0c04b96a4..000000000 --- a/content/courses/solana-course/content/deserialize-custom-data.md +++ /dev/null @@ -1,394 +0,0 @@ ---- -title: Deserialize Custom Account Data -objectives: - - Explain Program Derived Accounts - - Derive PDAs given specific seeds - - Fetch a program’s accounts - - Use Borsh to deserialize custom data ---- - -# TL;DR - -- **Program Derived Addresses**, or PDAs, are addresses that do not have a - corresponding secret key. The concept of PDAs allows for programs to sign for - transactions themselves and allows for storing and locating data. -- You can derive a PDA using the `findProgramAddress(seeds, programid)` method. -- You can get an array of all the accounts belonging to a program using - `getProgramAccounts(programId)`. -- Account data needs to be deserialized using the same layout used to store it - in the first place. You can use `@project-serum/borsh` to create a schema. - -# Overview - -In the last lesson, we serialized custom instruction data that was subsequently -stored on-chain by a Solana program. In this lesson, we’ll cover in greater -detail how programs use accounts, how to retrieve them, and how to deserialize -the data they store. - -## Programs - -As the saying goes, everything in Solana is an account. Even programs. Programs -are accounts that store code and are marked as executable. This code can be -executed by the Solana runtime when instructed to do so. - -Programs themselves, however, are stateless. They cannot modify the data within -their account. They can only persist state by storing data in other accounts -that can be referenced at some other point in time. Understanding how these -accounts are used and how to find them is crucial to client-side Solana -development. - -### PDA - -PDA stands for Program Derived Address. As the name suggests, it refers to an -address (public key) derived from a program and some seeds. In a previous -lesson, we discussed public/secret keys and how they are used on Solana. Unlike -a keypair, a PDA _does not_ have a corresponding secret key. The purpose of a -PDA is to create an address that a program can sign for in the same way a user -may sign for a transaction with their wallet. 
- -When you submit a transaction to a program and expect the program to then update -state or store data in some way, that program is using one or more PDAs. This is -important to understand when developing client-side for two reasons: - -1. When submitting a transaction to a program, the client needs to include all - addresses for accounts that will be written to or read from. This means that - unlike more traditional client-server architectures, the client needs to have - implementation-specific knowledge about the Solana program. The client needs - to know which PDA is going to be used to store data so that it can include - that address in the transaction. -2. Similarly, when reading data from a program, the client needs to know which - account(s) to read from. - -### Finding PDAs - -PDAs are not technically created. Rather, they are _found_ or _derived_ based on -one or more input seeds. - -Regular Solana keypairs lie on the ed2559 Elliptic Curve. This cryptographic -function ensures that every point along the curve has a corresponding point -somewhere else on the curve, allowing for public/secret keys. PDAs are addresses -that lie _off_ the ed2559 Elliptic curve and therefore cannot be signed for by a -secret key (since there isn’t one). This ensures that the program is the only -valid signer for that address. - -To find a public key that does not lie on the ed2559 curve, the program ID and -seeds of the developer’s choice (like a string of text) are passed through the -function -[`findProgramAddress(seeds, programid)`](https://solana-labs.github.io/solana-web3.js/classes/PublicKey.html#findProgramAddress). -This function combines the program ID, seeds, and a bump seed into a buffer and -passes it into a SHA256 hash to see whether or not the resulting address is on -the curve. If the address is on the curve (~50% chance it is), then the bump -seed is decremented by 1 and the address is calculated again. The bump seed -starts at 255 and progressively iterates down to `bump = 254`, `bump = 253`, -etc. until an address is found with the given seeds and bump that does not lie -on the ed2559 curve. The `findProgramAddress` function returns the resulting -address and the bump used to kick it off the curve. This way, the address can be -generated anywhere as long as you have the bump and seeds. - -![Screenshot of ed2559 curve](../assets/ed2559-curve.png) - -PDAs are a unique concept and are one of the hardest parts of Solana development -to understand. If you don’t get it right away, don’t worry. It’ll make more -sense the more you practice. - -### Why Does This Matter? - -The derivation of PDAs is important because the seeds used to find a PDA are -what we use to locate the data. For example, a simple program that only uses a -single PDA to store global program state might use a simple seed phrase like -“GLOBAL_STATE”. If the client wanted to read data from this PDA, it could derive -the address using the program ID and this same seed. - -```tsx -const [pda, bump] = await findProgramAddress( - Buffer.from("GLOBAL_STATE"), - programId, -); -``` - -In more complex programs that store user-specific data, it’s common to use a -user’s public key as the seed. This separates each user’s data into its own PDA. -The separation makes it possible for the client to locate each user’s data by -finding the address using the program ID and the user’s public key. 
- -```tsx -const [pda, bump] = await web3.PublicKey.findProgramAddress( - [publicKey.toBuffer()], - programId, -); -``` - -Also, when there are multiple accounts per user, a program may use one or more -additional seeds to create and identify accounts. For example, in a note-taking -app there may be one account per note where each PDA is derived with the user’s -public key and the note’s title. - -```tsx -const [pda, bump] = await web3.PublicKey.findProgramAddress( - [publicKey.toBuffer(), Buffer.from("First Note")], - programId, -); -``` - -### Getting Multiple Program Accounts - -In addition to deriving addresses, you can fetch all accounts created by a -program using `connection.getProgramAccounts(programId)`. This returns an array -of objects where each object has `pubkey` property representing the public key -of the account and an `account` property of type `AccountInfo`. You can use the -`account` property to get the account data. - -```tsx -const accounts = connection.getProgramAccounts(programId).then(accounts => { - accounts.map(({ pubkey, account }) => { - console.log("Account:", pubkey); - console.log("Data buffer:", account.data); - }); -}); -``` - -## Deserializing custom account data - -The `data` property on an `AccountInfo` object is a buffer. To use it -efficiently, you’ll need to write code that deserializes it into something more -usable. This is similar to the serialization process we covered last lesson. -Just as before, we’ll use [Borsh](https://borsh.io/) and `@project-serum/borsh`. -If you need a refresher on either of these, have a look at the previous lesson. - -Deserializing requires knowledge of the account layout ahead of time. When -creating your own programs, you will define how this is done as part of that -process. Many programs also have documentation on how to deserialize the account -data. Otherwise, if the program code is available you can look at the source and -determine the structure that way. - -To properly deserialize data from an on-chain program, you will have to create a -client-side schema mirroring how the data is stored in the account. For example, -the following might be the schema for an account storing metadata about a player -in an on-chain game. - -```tsx -import * as borsh from "@project-serum/borsh"; - -borshAccountSchema = borsh.struct([ - borsh.bool("initialized"), - borsh.u16("playerId"), - borsh.str("name"), -]); -``` - -Once you have your layout defined, simply call `.decode(buffer)` on the schema. - -```tsx -import * as borsh from "@project-serum/borsh"; - -borshAccountSchema = borsh.struct([ - borsh.bool("initialized"), - borsh.u16("playerId"), - borsh.str("name"), -]); - -const { playerId, name } = borshAccountSchema.decode(buffer); -``` - -# Demo - -Let’s practice this together by continuing to work on the Movie Review app from -the last lesson. No worries if you’re just jumping into this lesson - it should -be possible to follow either way. - -As a refresher, this project uses a Solana program deployed on Devnet which lets -users review movies. Last lesson, we added functionality to the frontend -skeleton letting users submit movie reviews but the list of reviews is still -showing mock data. Let’s fix that by fetching the program’s storage accounts and -deserializing the data stored there. - -![Screenshot of movie review frontend](../assets/movie-reviews-frontend.png) - -### 1. 
Download the starter code - -If you didn’t complete the demo from the last lesson or just want to make sure -that you didn’t miss anything, you can download the -[starter code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-serialize-instruction-data). - -The project is a fairly simple Next.js application. It includes the -`WalletContextProvider` we created in the Wallets lesson, a `Card` component for -displaying a movie review, a `MovieList` component that displays reviews in a -list, a `Form` component for submitting a new review, and a `Movie.ts` file that -contains a class definition for a `Movie` object. - -Note that when you run `npm run dev`, the reviews displayed on the page are -mocks. We’ll be swapping those out for the real deal. - -### 2. Create the buffer layout - -Remember that to properly interact with a Solana program, you need to know how -its data is structured. - -The Movie Review program creates a separate account for each movie review and -stores the following data in the account’s `data`: - -1. `initialized` as a boolean representing whether or not the account has been - initialized. -2. `rating` as an unsigned, 8-bit integer representing the rating out of 5 that - the reviewer gave the movie. -3. `title` as a string representing the title of the reviewed movie. -4. `description` as a string representing the written portion of the review. - -Let’s configure a `borsh` layout in the `Movie` class to represent the movie -account data layout. Start by importing `@project-serum/borsh`. Next, create a -`borshAccountSchema` static property and set it to the appropriate `borsh` -struct containing the properties listed above. - -```tsx -import * as borsh from '@project-serum/borsh' - -export class Movie { - title: string; - rating: number; - description: string; - - ... - - static borshAccountSchema = borsh.struct([ - borsh.bool('initialized'), - borsh.u8('rating'), - borsh.str('title'), - borsh.str('description'), - ]) -} -``` - -Remember, the order here _matters_. It needs to match how the account data is -structured. - -### 3. Create a method to deserialize data - -Now that we have the buffer layout set up, let’s create a static method in -`Movie` called `deserialize` that will take an optional `Buffer` and return a -`Movie` object or `null`. - -```tsx -import * as borsh from '@project-serum/borsh' - -export class Movie { - title: string; - rating: number; - description: string; - - ... - - static borshAccountSchema = borsh.struct([ - borsh.bool('initialized'), - borsh.u8('rating'), - borsh.str('title'), - borsh.str('description'), - ]) - - static deserialize(buffer?: Buffer): Movie|null { - if (!buffer) { - return null - } - - try { - const { title, rating, description } = this.borshAccountSchema.decode(buffer) - return new Movie(title, rating, description) - } catch(error) { - console.log('Deserialization error:', error) - return null - } - } -} -``` - -The method first checks whether or not the buffer exists and returns `null` if -it doesn’t. Next, it uses the layout we created to decode the buffer, then uses -the data to construct and return an instance of `Movie`. If the decoding fails, -the method logs the error and returns `null`. - -### 4. Fetch movie review accounts - -Now that we have a way to deserialize account data, we need to actually fetch -the accounts. Open `MovieList.tsx` and import `@solana/web3.js`. Then, create a -new `Connection` inside the `MovieList` component. 
Finally, replace the line
-`setMovies(Movie.mocks)` inside `useEffect` with a call to
-`connection.getProgramAccounts`. Convert the resulting array into an array of
-movies (dropping any accounts that fail to deserialize) and call `setMovies`.
-
-```tsx
-import { Card } from "./Card";
-import { FC, useEffect, useState } from "react";
-import { Movie } from "../models/Movie";
-import * as web3 from "@solana/web3.js";
-
-const MOVIE_REVIEW_PROGRAM_ID = "CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN";
-
-export const MovieList: FC = () => {
-  const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
-  const [movies, setMovies] = useState<Movie[]>([]);
-
-  useEffect(() => {
-    connection
-      .getProgramAccounts(new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID))
-      .then(accounts => {
-        const movies: Movie[] = accounts
-          .map(({ account }) => Movie.deserialize(account.data))
-          .filter((movie): movie is Movie => movie !== null);
-
-        setMovies(movies);
-      });
-  }, []);
-
-  return (
-    <div>
-      {movies.map((movie, i) => (
-        <Card key={i} movie={movie} />
-      ))}
-    </div>
-  );
-};
-```
-
-At this point, you should be able to run the app and see the list of movie
-reviews retrieved from the program!
-
-Depending on how many reviews have been submitted, this may take a long time to
-load or may lock up your browser entirely. But don’t worry: next lesson we’ll
-learn how to paginate and filter accounts so you can be more surgical with what
-you load.
-
-If you need more time with this project to feel comfortable with these concepts,
-have a look at the
-[solution code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-deserialize-account-data)
-before continuing.
-
-# Challenge
-
-Now it’s your turn to build something independently. Last lesson, you worked on
-the Student Intros app to serialize instruction data and send a new intro to the
-network. Now, it's time to fetch and deserialize the program's account data.
-Remember, the Solana program that supports this is at
-`HdE95RSVsdb315jfJtaykXhXY478h53X6okDupVfY9yf`.
-
-![Screenshot of Student Intros frontend](../assets/student-intros-frontend.png)
-
-1. You can build this from scratch or you can download the starter code
-   [here](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-serialize-instruction-data).
-2. Create the account buffer layout in `StudentIntro.ts`. The account data
-   contains:
-   1. `initialized` as a boolean representing whether or not the account has
-      been initialized.
-   2. `name` as a string representing the student's name.
-   3. `message` as a string representing the message the student shared about
-      their Solana journey.
-3. Create a static method in `StudentIntro.ts` that will use the buffer layout
-   to deserialize an account data buffer into a `StudentIntro` object.
-4. In the `StudentIntroList` component's `useEffect`, get the program's accounts
-   and deserialize their data into a list of `StudentIntro` objects.
-5. Instead of mock data, you should now be seeing student introductions from the
-   network!
-
-If you get really stumped, feel free to check out the solution code
-[here](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-deserialize-account-data).
-
-As always, get creative with these challenges and take them beyond the
-instructions if you want!
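-
-If you'd like a nudge before opening the solution, here is one possible shape
-for the layout and deserialization logic in `StudentIntro.ts`. Treat it as a
-sketch rather than the official solution: the class and field names mirror the
-`Movie` class from the demo and are assumptions about how you structure your own
-code.
-
-```tsx
-import * as borsh from "@project-serum/borsh";
-
-export class StudentIntro {
-  name: string;
-  message: string;
-
-  constructor(name: string, message: string) {
-    this.name = name;
-    this.message = message;
-  }
-
-  // Mirrors the account data: a boolean flag followed by two strings
-  static borshAccountSchema = borsh.struct([
-    borsh.bool("initialized"),
-    borsh.str("name"),
-    borsh.str("message"),
-  ]);
-
-  // Returns null when the buffer is missing or can't be decoded
-  static deserialize(buffer?: Buffer): StudentIntro | null {
-    if (!buffer) {
-      return null;
-    }
-
-    try {
-      const { name, message } = this.borshAccountSchema.decode(buffer);
-      return new StudentIntro(name, message);
-    } catch (error) {
-      console.log("Deserialization error:", error);
-      return null;
-    }
-  }
-}
-```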
diff --git a/content/courses/solana-course/content/deserialize-instruction-data.md b/content/courses/solana-course/content/deserialize-instruction-data.md deleted file mode 100644 index b96303352..000000000 --- a/content/courses/solana-course/content/deserialize-instruction-data.md +++ /dev/null @@ -1,633 +0,0 @@ ---- -title: Create a Basic Program, Part 1 - Handle Instruction Data -objectives: - - Assign mutable and immutable variables in Rust - - Create and use Rust structs and enums - - Use Rust match statements - - Add implementations to Rust types - - Deserialize instruction data into Rust data types - - Execute different program logic for different types of instructions - - Explain the structure of an on-chain program on Solana ---- - -# TL;DR - -- Most programs support **multiple discrete instructions** - you decide when - writing your program what these instructions are and what data must accompany - them -- Rust **enums** are often used to represent discrete program instructions -- You can use the `borsh` crate and the `derive` attribute to provide Borsh - deserialization and serialization functionality to Rust structs -- Rust `match` expressions help create conditional code paths based on the - provided instruction - -# Overview - -One of the most basic elements of a Solana program is the logic for handling -instruction data. Most programs support multiple related functions and use -differences in instruction data to determine which code path to execute. For -example, two different data formats in the instruction data passed to the -program may represent instructions for creating a new piece of data vs deleting -the same piece of data. - -Since instruction data is provided to your program's entry point as a byte -array, it's common to create a Rust data type to represent instructions in a way -that's more usable throughout your code. This lesson will walk through how to -set up such a type, how to deserialize the instruction data into this format, -and how to execute the proper code path based on the instruction passed into the -program's entry point. - -## Rust basics - -Before we dive into the specifics of a basic Solana program, let's talk about -the Rust basics we'll be using throughout this lesson. - -### Variables - -Variable assignment in Rust happens with the `let` keyword. - -```rust -let age = 33; -``` - -Variables in Rust by default are immutable, meaning a variable's value cannot be -changed once it has been set. In order to create a variable that we'd like to -change at some point in the future, we use the `mut` keyword. Defining a -variable with this keyword means that the value stored in it can change. - -```rust -// compiler will throw error -let age = 33; -age = 34; - -// this is allowed -let mut mutable_age = 33; -mutable_age = 34; -``` - -The Rust compiler guarantees that immutable variables truly cannot change so -that you don’t have to keep track of it yourself. This makes your code easier to -reason through and simplifies debugging. - -### Structs - -A struct, or structure, is a custom data type that lets you package together and -name multiple related values that make up a meaningful group. Each piece of data -in a struct can be of different types and each has a name associated with it. -These pieces of data are called **fields**. They behave similarly to properties -in other languages. 
- -```rust -struct User { - active: bool, - email: String, - age: u64 -} -``` - -To use a struct after we’ve defined it, we create an instance of that struct by -specifying concrete values for each of the fields. - -```rust -let mut user1 = User { - active: true, - email: String::from("test@test.com"), - age: 36 -}; -``` - -To get or set a specific value from a struct, we use dot notation. - -```rust -user1.age = 37; -``` - -### Enumerations - -Enumerations (or Enums) are a data struct that allow you to define a type by -enumerating its possible variants. An example of an enum may look like: - -```rust -enum LightStatus { - On, - Off -} -``` - -The `LightStatus` enum has two possible variants in this situation: it's either -`On` or `Off`. - -You can also embed values into enum variants, similar to adding fields to a -struct. - -```rust -enum LightStatus { - On { - color: String - }, - Off -} - -let light_status = LightStatus::On { color: String::from("red") }; -``` - -In this example, setting a variable to the `On` variant of `LightStatus` -requires also setting the value of `color`. - -### Match statements - -Match statements are very similar to `switch` statements in C/C++. The `match` -statement allows you to compare a value against a series of patterns and then -execute code based on which pattern matches the value. Patterns can be made of -literal values, variable names, wildcards, and more. The match statement must -include all possible scenarios, otherwise the code will not compile. - -```rust -enum Coin { - Penny, - Nickel, - Dime, - Quarter -} - -fn value_in_cents(coin: Coin) -> u8 { - match coin { - Coin::Penny => 1, - Coin::Nickel => 5, - Coin::Dime => 10, - Coin::Quarter => 25 - } -} -``` - -### Implementations - -The `impl` keyword is used in Rust to define a type's implementations. Functions -and constants can both be defined in an implementation. - -```rust -struct Example { - number: i32 -} - -impl Example { - fn boo() { - println!("boo! Example::boo() was called!"); - } - - fn answer(&mut self) { - self.number += 42; - } - - fn get_number(&self) -> i32 { - self.number - } -} -``` - -The function `boo` here can only be called on the type itself rather than an -instance of the type, like so: - -```rust -Example::boo(); -``` - -Meanwhile, `answer` requires a mutable instance of `Example` and can be called -with dot syntax: - -```rust -let mut example = Example { number: 3 }; -example.answer(); -``` - -### Traits and attributes - -You won't be creating your own traits or attributes at this stage, so we won't -provide an in depth explanation of either. However, you will be using the -`derive` attribute macro and some traits provided by the `borsh` crate, so it's -important you have a high level understanding of each. - -Traits describe an abstract interface that types can implement. If a trait -defines a function `bark()` and a type then adopts that trait, the type must -then implement the `bark()` function. - -[Attributes](https://doc.rust-lang.org/rust-by-example/attribute.html) add -metadata to a type and can be used for many different purposes. - -When you add the -[`derive` attribute](https://doc.rust-lang.org/rust-by-example/trait/derive.html) -to a type and provide one or more supported traits, code is generated under the -hood to automatically implement the traits for that type. We'll provide a -concrete example of this shortly. - -## Representing instructions as a Rust data type - -Now that we've covered the Rust basics, let's apply them to Solana programs. 
- -More often than not, programs will have more than one function. For example, you -may have a program that acts as the backend for a note-taking app. Assume this -program accepts instructions for creating a new note, updating an existing note, -and deleting an existing note. - -Since instructions have discrete types, they're usually a great fit for an enum -data type. - -```rust -enum NoteInstruction { - CreateNote { - title: String, - body: String, - id: u64 - }, - UpdateNote { - title: String, - body: String, - id: u64 - }, - DeleteNote { - id: u64 - } -} -``` - -Notice that each variant of the `NoteInstruction` enum comes with embedded data -that will be used by the program to accomplish the tasks of creating, updating, -and deleting a note, respectively. - -## Deserialize instruction data - -Instruction data is passed to the program as a byte array, so you need a way to -deterministically convert that array into an instance of the instruction enum -type. - -In previous modules, we used Borsh for client-side serialization and -deserialization. To use Borsh program-side, we use the `borsh` crate. This crate -provides traits for `BorshDeserialize` and `BorshSerialize` that you can apply -to your types using the `derive` attribute. - -To make deserializing instruction data simple, you can create a struct -representing the data and use the `derive` attribute to apply the -`BorshDeserialize` trait to the struct. This implements the methods defined in -`BorshDeserialize`, including the `try_from_slice` method that we'll be using to -deserialize the instruction data. - -Remember, the struct itself needs to match the structure of the data in the byte -array. - -```rust -#[derive(BorshDeserialize)] -struct NoteInstructionPayload { - id: u64, - title: String, - body: String -} -``` - -Once this struct has been created, you can create an implementation for your -instruction enum to handle the logic associated with deserializing instruction -data. It's common to see this done inside a function called `unpack` that -accepts the instruction data as an argument and returns the appropriate instance -of the enum with the deserialized data. - -It's standard practice to structure your program to expect the first byte (or -other fixed number of bytes) to be an identifier for which instruction the -program should run. This could be an integer or a string identifier. For this -example, we'll use the first byte and map integers 0, 1, and 2 to instructions -create, update, and delete, respectively. - -```rust -impl NoteInstruction { - // Unpack inbound buffer to associated Instruction - // The expected format for input is a Borsh serialized vector - pub fn unpack(input: &[u8]) -> Result { - // Take the first byte as the variant to - // determine which instruction to execute - let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?; - // Use the temporary payload struct to deserialize - let payload = NoteInstructionPayload::try_from_slice(rest).unwrap(); - // Match the variant to determine which data struct is expected by - // the function and return the TestStruct or an error - Ok(match variant { - 0 => Self::CreateNote { - title: payload.title, - body: payload.body, - id: payload.id - }, - 1 => Self::UpdateNote { - title: payload.title, - body: payload.body, - id: payload.id - }, - 2 => Self::DeleteNote { - id: payload.id - }, - _ => return Err(ProgramError::InvalidInstructionData) - }) - } -} -``` - -There's a lot in this example so let's take it one step at a time: - -1. 
This function starts by using the `split_first` function on the `input` - parameter to return a tuple. The first element, `variant`, is the first byte - from the byte array and the second element, `rest`, is the rest of the byte - array. -2. The function then uses the `try_from_slice` method on - `NoteInstructionPayload` to deserialize the rest of the byte array into an - instance of `NoteInstructionPayload` called `payload` -3. Finally, the function uses a `match` statement on `variant` to create and - return the appropriate enum instance using information from `payload` - -Note that there is Rust syntax in this function that we haven't explained yet. -The `ok_or` and `unwrap` functions are used for error handling and will be -discussed in detail in another lesson. - -## Program logic - -With a way to deserialize instruction data into a custom Rust type, you can then -use appropriate control flow to execute different code paths in your program -based on which instruction is passed into your program's entry point. - -```rust -entrypoint!(process_instruction); - -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8] -) -> ProgramResult { - // Call unpack to deserialize instruction_data - let instruction = NoteInstruction::unpack(instruction_data)?; - // Match the returned data struct to what you expect - match instruction { - NoteInstruction::CreateNote { title, body, id } => { - // Execute program code to create a note - }, - NoteInstruction::UpdateNote { title, body, id } => { - // Execute program code to update a note - }, - NoteInstruction::DeleteNote { id } => { - // Execute program code to delete a note - } - } -} -``` - -For simple programs where there are only one or two instructions to execute, it -may be fine to write the logic inside the match statement. For programs with -many different possible instructions to match against, your code will be much -more readable if the logic for each instruction is written in a separate -function and simply called from inside the `match` statement. - -## Program file structure - -The [Hello World lesson’s](hello-world-program.md) program was simple enough -that it could be confined to one file. But as the complexity of a program grows, -it's important to maintain a project structure that remains readable and -extensible. This involves encapsulating code into functions and data structures -as we've done so far. But it also involves grouping related code into separate -files. - -For example, a good portion of the code we've worked through so far has to do -with defining and deserializing instructions. That code should live in its own -file rather than be written in the same file as the entry point. By doing so, we -would then have 2 files, one with the program entry point and the other with the -instruction code: - -- **lib.rs** -- **instruction.rs** - -Once you start splitting your program up like this you will need to make sure -you register all of the files in one central location. We’ll be doing this in -`lib.rs`. **You must register every file in your program like this.** - -```rust -// This would be inside lib.rs -pub mod instruction; -``` - -Additionally, any declarations that you would like to be available through `use` -statements in other files will need to be prefaced with the `pub` keyword: - -```rust -pub enum NoteInstruction { ... } -``` - -## Demo - -For this lesson’s demo, we’ll be building out the first half of the Movie Review -program that we worked with in Module 1. 
This program stores movie reviews
-submitted by users.
-
-For now, we'll focus on deserializing the instruction data. The following lesson
-will focus on the second half of this program.
-
-### 1. Entry point
-
-We’ll be using [Solana Playground](https://beta.solpg.io/) again to build out
-this program. Solana Playground saves state in your browser, so everything you
-did in the previous lesson may still be there. If it is, let's clear everything
-out from the current `lib.rs` file.
-
-Inside lib.rs, we’re going to bring in the following crates and define where
-we’d like our entry point to the program to be with the `entrypoint` macro.
-
-```rust
-use solana_program::{
-    entrypoint,
-    entrypoint::ProgramResult,
-    pubkey::Pubkey,
-    msg,
-    account_info::AccountInfo,
-};
-
-// Entry point is a function called process_instruction
-entrypoint!(process_instruction);
-
-// Inside lib.rs
-pub fn process_instruction(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    instruction_data: &[u8]
-) -> ProgramResult {
-
-    Ok(())
-}
-```
-
-### 2. Deserialize instruction data
-
-Before we continue with the processor logic, we should define our supported
-instructions and implement our deserialization function.
-
-For readability, let's create a new file called `instruction.rs`. Inside this
-new file, add `use` statements for `BorshDeserialize` and `ProgramError`, then
-create a `MovieInstruction` enum with an `AddMovieReview` variant. This variant
-should have embedded values for `title`, `rating`, and `description`.
-
-```rust
-use borsh::{BorshDeserialize};
-use solana_program::{program_error::ProgramError};
-
-pub enum MovieInstruction {
-    AddMovieReview {
-        title: String,
-        rating: u8,
-        description: String
-    }
-}
-```
-
-Next, define a `MovieReviewPayload` struct. This will act as an intermediary
-type for deserialization, so it should use the `derive` attribute macro to
-provide a default implementation for the `BorshDeserialize` trait.
-
-```rust
-#[derive(BorshDeserialize)]
-struct MovieReviewPayload {
-    title: String,
-    rating: u8,
-    description: String
-}
-```
-
-Finally, create an implementation for the `MovieInstruction` enum that defines
-and implements a function called `unpack` that takes a byte array as an argument
-and returns a `Result` type. This function should:
-
-1. Use the `split_first` function to split the first byte of the array from the
-   rest of the array
-2. Deserialize the rest of the array into an instance of `MovieReviewPayload`
-3. Use a `match` statement to return the `AddMovieReview` variant of
-   `MovieInstruction` if the first byte of the array was a 0 or return a program
-   error otherwise
-
-```rust
-impl MovieInstruction {
-    // Unpack inbound buffer to associated Instruction
-    // The expected format for input is a Borsh serialized vector
-    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
-        // Split the first byte of data
-        let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?;
-        // `try_from_slice` is one of the implementations from the BorshDeserialize trait
-        // Deserializes instruction byte data into the payload struct
-        let payload = MovieReviewPayload::try_from_slice(rest).unwrap();
-        // Match the first byte and return the AddMovieReview struct
-        Ok(match variant {
-            0 => Self::AddMovieReview {
-                title: payload.title,
-                rating: payload.rating,
-                description: payload.description },
-            _ => return Err(ProgramError::InvalidInstructionData)
-        })
-    }
-}
-```
-
-### 3.
Program logic
-
-With the instruction deserialization handled, we can return to the `lib.rs` file
-to handle some of our program logic.
-
-Remember, since we added code to a different file, we need to register it in the
-`lib.rs` file using `pub mod instruction;`. Then we can add a `use` statement to
-bring the `MovieInstruction` type into scope.
-
-```rust
-pub mod instruction;
-use instruction::{MovieInstruction};
-```
-
-Next, let's define a new function `add_movie_review` that takes as arguments
-`program_id`, `accounts`, `title`, `rating`, and `description`. It should also
-return a `ProgramResult`. Inside this function, let's simply log our values for
-now and we'll revisit the rest of the implementation of the function in the next
-lesson.
-
-```rust
-pub fn add_movie_review(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    title: String,
-    rating: u8,
-    description: String
-) -> ProgramResult {
-
-    // Logging instruction data that was passed in
-    msg!("Adding movie review...");
-    msg!("Title: {}", title);
-    msg!("Rating: {}", rating);
-    msg!("Description: {}", description);
-
-    Ok(())
-}
-```
-
-With that done, we can call `add_movie_review` from `process_instruction` (the
-function we set as our entry point). In order to pass all the required arguments
-to the function, we'll first need to call the `unpack` function we created on
-`MovieInstruction`, then use a `match` statement to ensure that the instruction
-we've received is the `AddMovieReview` variant.
-
-```rust
-pub fn process_instruction(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    instruction_data: &[u8]
-) -> ProgramResult {
-    // Unpack called
-    let instruction = MovieInstruction::unpack(instruction_data)?;
-    // Match against the data struct returned into `instruction` variable
-    match instruction {
-        MovieInstruction::AddMovieReview { title, rating, description } => {
-            // Make a call to the `add_movie_review` function
-            add_movie_review(program_id, accounts, title, rating, description)
-        }
-    }
-}
-```
-
-And just like that, your program should be functional enough to log the
-instruction data passed in when a transaction is submitted!
-
-Build and deploy your program from Solana Playground just like in the last
-lesson. If you haven't changed the program ID since going through the last
-lesson, it will automatically deploy to the same ID. If you'd like it to have a
-separate address you can generate a new program ID from the playground before
-deploying.
-
-You can test your program by submitting a transaction with the right instruction
-data. For that, feel free to use
-[this script](https://github.com/Unboxed-Software/solana-movie-client) or
-[the frontend](https://github.com/Unboxed-Software/solana-movie-frontend) we
-built in the
-[Serialize Custom Instruction Data lesson](serialize-instruction-data.md). In
-both cases, make sure you copy and paste the program ID for your program into
-the appropriate area of the source code to make sure you're testing the right
-program.
-
-If you need to spend some more time with this demo before moving on, please do!
-You can also have a look at the program
-[solution code](https://beta.solpg.io/62aa9ba3b5e36a8f6716d45b) if you get
-stuck.
-
-# Challenge
-
-For this lesson's challenge, try replicating the Student Intro program from
-Module 1. Recall that we created a frontend application that lets students
-introduce themselves! The program takes a user's name and a short message as the
-`instruction_data` and creates an account to store the data on-chain.
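-
-As a rough sketch (the enum and field names here are assumptions, not the
-program's actual source), the instruction type for that program can follow the
-exact same pattern as `MovieInstruction`:
-
-```rust
-use borsh::BorshDeserialize;
-use solana_program::program_error::ProgramError;
-
-pub enum IntroInstruction {
-    InitUserInput { name: String, message: String },
-}
-
-#[derive(BorshDeserialize)]
-struct IntroInstructionPayload {
-    name: String,
-    message: String,
-}
-
-impl IntroInstruction {
-    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
-        // First byte selects the instruction, the rest is the Borsh payload
-        let (&variant, rest) = input
-            .split_first()
-            .ok_or(ProgramError::InvalidInstructionData)?;
-        let payload = IntroInstructionPayload::try_from_slice(rest).unwrap();
-        Ok(match variant {
-            0 => Self::InitUserInput {
-                name: payload.name,
-                message: payload.message,
-            },
-            _ => return Err(ProgramError::InvalidInstructionData),
-        })
-    }
-}
-```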
- -Using what you've learned in this lesson, build the Student Intro program to the -point where you can print the `name` and `message` provided by the user to the -program logs when the program is invoked. - -You can test your program by building the -[frontend](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-serialize-instruction-data) -we created in the -[Serialize Custom Instruction Data lesson](serialize-instruction-data.md) and -then checking the program logs on Solana Explorer. Remember to replace the -program ID in the frontend code with the one you've deployed. - -Try to do this independently if you can! But if you get stuck, feel free to -reference the [solution code](https://beta.solpg.io/62b0ce53f6273245aca4f5b0). diff --git a/content/courses/solana-course/content/duplicate-mutable-accounts.md b/content/courses/solana-course/content/duplicate-mutable-accounts.md deleted file mode 100644 index 6c9d9a5ea..000000000 --- a/content/courses/solana-course/content/duplicate-mutable-accounts.md +++ /dev/null @@ -1,435 +0,0 @@ ---- -title: Duplicate Mutable Accounts -objectives: - - Explain the security risks associated with instructions that require two - mutable accounts of the same type and how to avoid them - - Implement a check for duplicate mutable accounts using long-form Rust - - Implement a check for duplicate mutable accounts using Anchor constraints ---- - -# TL;DR - -- When an instruction requires two mutable accounts of the same type, an - attacker can pass in the same account twice, causing the account to be mutated - in unintended ways. -- To check for duplicate mutable accounts in Rust, simply compare the public - keys of the two accounts and throw an error if they are the same. - - ```rust - if ctx.accounts.account_one.key() == ctx.accounts.account_two.key() { - return Err(ProgramError::InvalidArgument) - } - ``` - -- In Anchor, you can use `constraint` to add an explicit constraint to an - account checking that it is not the same as another account. - -# Overview - -Duplicate Mutable Accounts refers to an instruction that requires two mutable -accounts of the same type. When this occurs, you should validate that two -accounts are different to prevent the same account from being passed into the -instruction twice. - -Since the program treats each account as separate, passing in the same account -twice could result in the second account being mutated in unintended ways. This -could result in very minor issues, or catastrophic ones - it really depends on -what data the code changes and how these accounts are used. Regardless, this is -a vulnerability all developers should be aware of. - -### No check - -For example, imagine a program that updates a `data` field for `user_a` and -`user_b` in a single instruction. The value that the instruction sets for -`user_a` is different from `user_b`. Without verifying that `user_a` and -`user_b` are different, the program would update the `data` field on the -`user_a` account, then update the `data` field a second time with a different -value under the assumption that `user_b` is a separate account. - -You can see this example in the code below.Tthere is no check to verify that -`user_a` and `user_b` are not the same account. Passing in the same account for -`user_a` and `user_b` will result in the `data` field for the account being set -to `b` even though the intent is to set both values `a` and `b` on separate -accounts. 
Depending on what `data` represents, allowing `user_a` and `user_b` to be the
-same account could result in a minor unintended side-effect, or it could mean a
-severe security risk.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod duplicate_mutable_accounts_insecure {
-    use super::*;
-
-    pub fn update(ctx: Context<Update>, a: u64, b: u64) -> Result<()> {
-        let user_a = &mut ctx.accounts.user_a;
-        let user_b = &mut ctx.accounts.user_b;
-
-        user_a.data = a;
-        user_b.data = b;
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Update<'info> {
-    user_a: Account<'info, User>,
-    user_b: Account<'info, User>,
-}
-
-#[account]
-pub struct User {
-    data: u64,
-}
-```
-
-### Add check in instruction
-
-To fix this problem with plain Rust, simply add a check in the instruction logic
-to verify that the public key of `user_a` isn't the same as the public key of
-`user_b`, returning an error if they are the same.
-
-```rust
-if ctx.accounts.user_a.key() == ctx.accounts.user_b.key() {
-    return Err(ProgramError::InvalidArgument)
-}
-```
-
-This check ensures that `user_a` and `user_b` are not the same account.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod duplicate_mutable_accounts_secure {
-    use super::*;
-
-    pub fn update(ctx: Context<Update>, a: u64, b: u64) -> Result<()> {
-        if ctx.accounts.user_a.key() == ctx.accounts.user_b.key() {
-            return Err(ProgramError::InvalidArgument.into())
-        }
-        let user_a = &mut ctx.accounts.user_a;
-        let user_b = &mut ctx.accounts.user_b;
-
-        user_a.data = a;
-        user_b.data = b;
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Update<'info> {
-    user_a: Account<'info, User>,
-    user_b: Account<'info, User>,
-}
-
-#[account]
-pub struct User {
-    data: u64,
-}
-```
-
-### Use Anchor `constraint`
-
-An even better solution if you're using Anchor is to add the check to the
-account validation struct instead of the instruction logic.
-
-You can use the `#[account(..)]` attribute macro and the `constraint` keyword to
-add a manual constraint to an account. The `constraint` keyword will check
-whether the expression that follows evaluates to true or false, returning an
-error if the expression evaluates to false.
-
-The example below moves the check from the instruction logic to the account
-validation struct by adding a `constraint` to the `#[account(..)]` attribute.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod duplicate_mutable_accounts_recommended {
-    use super::*;
-
-    pub fn update(ctx: Context<Update>, a: u64, b: u64) -> Result<()> {
-        let user_a = &mut ctx.accounts.user_a;
-        let user_b = &mut ctx.accounts.user_b;
-
-        user_a.data = a;
-        user_b.data = b;
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Update<'info> {
-    #[account(constraint = user_a.key() != user_b.key())]
-    user_a: Account<'info, User>,
-    user_b: Account<'info, User>,
-}
-
-#[account]
-pub struct User {
-    data: u64,
-}
-```
-
-# Demo
-
-Let’s practice by creating a simple Rock Paper Scissors program to demonstrate
-how failing to check for duplicate mutable accounts can cause undefined behavior
-within your program.
-
-This program will initialize “player” accounts and have a separate instruction
-that requires two player accounts to represent starting a game of rock paper
-scissors.
-
-- An `initialize` instruction to initialize a `PlayerState` account
-- A `rock_paper_scissors_shoot_insecure` instruction that requires two
-  `PlayerState` accounts, but does not check that the accounts passed into the
-  instruction are different
-- A `rock_paper_scissors_shoot_secure` instruction that is the same as the
-  `rock_paper_scissors_shoot_insecure` instruction but adds a constraint that
-  ensures the two player accounts are different
-
-### 1. Starter
-
-To get started, download the starter code on the `starter` branch
-of [this repository](https://github.com/unboxed-software/solana-duplicate-mutable-accounts/tree/starter).
-The starter code includes a program with two instructions and the boilerplate
-setup for the test file.
-
-The `initialize` instruction initializes a new `PlayerState` account that stores
-the public key of a player and a `choice` field that is set to `None`.
-
-The `rock_paper_scissors_shoot_insecure` instruction requires two `PlayerState`
-accounts and requires a choice from the `RockPaperScissors` enum for each
-player, but does not check that the accounts passed into the instruction are
-different. This means a single account can be used for both `PlayerState`
-accounts in the instruction.
-
-```rust
-use anchor_lang::prelude::*;
-use borsh::{BorshDeserialize, BorshSerialize};
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod duplicate_mutable_accounts {
-    use super::*;
-
-    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
-        ctx.accounts.new_player.player = ctx.accounts.payer.key();
-        ctx.accounts.new_player.choice = None;
-        Ok(())
-    }
-
-    pub fn rock_paper_scissors_shoot_insecure(
-        ctx: Context<RockPaperScissorsInsecure>,
-        player_one_choice: RockPaperScissors,
-        player_two_choice: RockPaperScissors,
-    ) -> Result<()> {
-        ctx.accounts.player_one.choice = Some(player_one_choice);
-
-        ctx.accounts.player_two.choice = Some(player_two_choice);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(
-        init,
-        payer = payer,
-        space = 8 + 32 + 8
-    )]
-    pub new_player: Account<'info, PlayerState>,
-    #[account(mut)]
-    pub payer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-
-#[derive(Accounts)]
-pub struct RockPaperScissorsInsecure<'info> {
-    #[account(mut)]
-    pub player_one: Account<'info, PlayerState>,
-    #[account(mut)]
-    pub player_two: Account<'info, PlayerState>,
-}
-
-#[account]
-pub struct PlayerState {
-    player: Pubkey,
-    choice: Option<RockPaperScissors>,
-}
-
-#[derive(Clone, Copy, BorshDeserialize, BorshSerialize)]
-pub enum RockPaperScissors {
-    Rock,
-    Paper,
-    Scissors,
-}
-```
-
-### 2. Test `rock_paper_scissors_shoot_insecure` instruction
-
-The test file includes the code to invoke the `initialize` instruction twice to
-create two player accounts.
-
-Add a test to invoke the `rock_paper_scissors_shoot_insecure` instruction by
-passing in `playerOne.publicKey` as both `playerOne` and `playerTwo`.
-
-```ts
-describe("duplicate-mutable-accounts", () => {
-  ...
-
-  it("Invoke insecure instruction", async () => {
-    await program.methods
-      .rockPaperScissorsShootInsecure({ rock: {} }, { scissors: {} })
-      .accounts({
-        playerOne: playerOne.publicKey,
-        playerTwo: playerOne.publicKey,
-      })
-      .rpc()
-
-    const p1 = await program.account.playerState.fetch(playerOne.publicKey)
-    assert.equal(JSON.stringify(p1.choice), JSON.stringify({ scissors: {} }))
-    assert.notEqual(JSON.stringify(p1.choice), JSON.stringify({ rock: {} }))
-  })
-})
-```
-
-Run `anchor test` to see that the transaction completes successfully, even
-though the same account is used as two accounts in the instruction. Since the
-`playerOne` account is used as both players in the instruction, note that the
-`choice` stored on the `playerOne` account is also overwritten and incorrectly
-set to `scissors`.
-
-```bash
-duplicate-mutable-accounts
-  ✔ Initialized Player One (461ms)
-  ✔ Initialized Player Two (404ms)
-  ✔ Invoke insecure instruction (406ms)
-```
-
-Not only does allowing duplicate accounts not make a whole lot of sense for the
-game, it also causes undefined behavior. If we were to build out this program
-further, only one choice would actually be stored, so there would be nothing to
-compare it against. The game would end in a draw every time. It's also unclear
-to a human whether `playerOne`'s choice should be rock or scissors, so the
-program behavior is strange.
-
-### 3. Add `rock_paper_scissors_shoot_secure` instruction
-
-Next, return to `lib.rs` and add a `rock_paper_scissors_shoot_secure`
-instruction that uses the `#[account(...)]` macro to add an additional
-`constraint` to check that `player_one` and `player_two` are different accounts.
-
-```rust
-#[program]
-pub mod duplicate_mutable_accounts {
-    use super::*;
-    ...
-    pub fn rock_paper_scissors_shoot_secure(
-        ctx: Context<RockPaperScissorsSecure>,
-        player_one_choice: RockPaperScissors,
-        player_two_choice: RockPaperScissors,
-    ) -> Result<()> {
-        ctx.accounts.player_one.choice = Some(player_one_choice);
-
-        ctx.accounts.player_two.choice = Some(player_two_choice);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct RockPaperScissorsSecure<'info> {
-    #[account(
-        mut,
-        constraint = player_one.key() != player_two.key()
-    )]
-    pub player_one: Account<'info, PlayerState>,
-    #[account(mut)]
-    pub player_two: Account<'info, PlayerState>,
-}
-```
-
-### 4. Test `rock_paper_scissors_shoot_secure` instruction
-
-To test the `rock_paper_scissors_shoot_secure` instruction, we’ll invoke the
-instruction twice. First, we’ll invoke the instruction using two different
-player accounts to check that the instruction works as intended. Then, we’ll
-invoke the instruction using the `playerOne.publicKey` as both player accounts,
-which we expect to fail.
-
-```ts
-describe("duplicate-mutable-accounts", () => {
-  ...
- it("Invoke secure instruction", async () => { - await program.methods - .rockPaperScissorsShootSecure({ rock: {} }, { scissors: {} }) - .accounts({ - playerOne: playerOne.publicKey, - playerTwo: playerTwo.publicKey, - }) - .rpc() - - const p1 = await program.account.playerState.fetch(playerOne.publicKey) - const p2 = await program.account.playerState.fetch(playerTwo.publicKey) - assert.equal(JSON.stringify(p1.choice), JSON.stringify({ rock: {} })) - assert.equal(JSON.stringify(p2.choice), JSON.stringify({ scissors: {} })) - }) - - it("Invoke secure instruction - expect error", async () => { - try { - await program.methods - .rockPaperScissorsShootSecure({ rock: {} }, { scissors: {} }) - .accounts({ - playerOne: playerOne.publicKey, - playerTwo: playerOne.publicKey, - }) - .rpc() - } catch (err) { - expect(err) - console.log(err) - } - }) -}) -``` - -Run `anchor test` to see that the instruction works as intended and using the -`playerOne` account twice returns the expected error. - -```bash -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS invoke [1]', -'Program log: Instruction: RockPaperScissorsShootSecure', -'Program log: AnchorError caused by account: player_one. Error Code: ConstraintRaw. Error Number: 2003. Error Message: A raw constraint was violated.', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS consumed 5104 of 200000 compute units', -'Program Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS failed: custom program error: 0x7d3' -``` - -The simple constraint is all it takes to close this loophole. While somewhat -contrived, this example illustrates the odd behavior that can occur if you write -your program under the assumption that two same-typed accounts will be different -instances of an account but don't explicitly write that constraint into your -program. Always think about the behavior you're expecting from the program and -whether that is explicit. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-duplicate-mutable-accounts/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that any instructions -with two same-typed mutable accounts are properly constrained to avoid -duplicates. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. diff --git a/content/courses/solana-course/content/env-variables.md b/content/courses/solana-course/content/env-variables.md deleted file mode 100644 index 65eb2f4f2..000000000 --- a/content/courses/solana-course/content/env-variables.md +++ /dev/null @@ -1,1072 +0,0 @@ ---- -title: Environment Variables in Solana Programs -objectives: - - Define program features in the `Cargo.toml` file - - Use the Rust `cfg` attribute to conditionally compile code based on which - features are or are not enabled - - Use the Rust `cfg!` macro to conditionally compile code based on which - features are or are not enabled - - Create an admin-only instruction to set up a program account that can be - used to store program configuration values ---- - -# TL;DR - -- There are no "out of the box" solutions for creating distinct environments in - an on-chain program, but you can achieve something similar to environment - variables if you get creative. 
-- You can use the `cfg` attribute with **Rust features** - (`#[cfg(feature = ...)]`) to run different code or provide different variable - values based on the Rust feature provided. _This happens at compile-time and - doesn't allow you to swap values after a program has been deployed_. -- Similarly, you can use the `cfg!` **macro** to compile different code paths - based on the features that are enabled. -- Alternatively, you can achieve something similar to environment variables that - can be modified after deployment by creating accounts and instructions that - are only accessible by the program’s upgrade authority. - -# Overview - -One of the difficulties engineers face across all types of software development -is that of writing testable code and creating distinct environments for local -development, testing, production, etc. - -This can be particularly difficult in Solana program development. For example, -imagine creating an NFT staking program that rewards each staked NFT with 10 -reward tokens per day. How do you test the ability to claim rewards when tests -run in a few hundred milliseconds, not nearly long enough to earn rewards? - -Traditional web development solves some of this with environment variables whose -values can differ in each distinct "environment." Currently, there's no formal -concept of environment variables in a Solana program. If there were, you could -just make it so that rewards in your test environment are 10,000,000 tokens per -day and it would be easier to test the ability to claim rewards. - -Fortunately, you can achieve similar functionality if you get creative. The best -approach is probably a combination of two things: - -1. Rust feature flags that allow you to specify in your build command the - "environment" of the build, coupled with code that adjusts specific values - accordingly -2. Program "admin-only" accounts and instructions that are only accessible by - the program's upgrade authority - -## Rust feature flags - -One of the simplest ways to create environments is to use Rust features. -Features are defined in the `[features]` table of the program’s `Cargo.toml` -file. You may define multiple features for different use cases. - -```toml -[features] -feature-one = [] -feature-two = [] -``` - -It's important to note that the above simply defines a feature. To enable a -feature when testing your program, you can use the `--features` flag with the -`anchor test` command. - -```bash -anchor test -- --features "feature-one" -``` - -You can also specify multiple features by separating them with a comma. - -```bash -anchor test -- --features "feature-one", "feature-two" -``` - -### Make code conditional using the `cfg` attribute - -With a feature defined, you can then use the `cfg` attribute within your code to -conditionally compile code based on the whether or not a given feature is -enabled. This allows you to include or exclude certain code from your program. - -The syntax for using the `cfg` attribute is like any other attribute macro: -`#[cfg(feature=[FEATURE_HERE])]`. 
For example, the following code compiles the -function `function_for_testing` when the `testing` feature is enabled and the -`function_when_not_testing` otherwise: - -```rust -#[cfg(feature = "testing")] -fn function_for_testing() { - // code that will be included only if the "testing" feature flag is enabled -} - -#[cfg(not(feature = "testing"))] -fn function_when_not_testing() { - // code that will be included only if the "testing" feature flag is not enabled -} -``` - -This allows you to enable or disable certain functionality in your Anchor -program at compile time by enabling or disabling the feature. - -It's not a stretch to imagine wanting to use this to create distinct -"environments" for different program deployments. For example, not all tokens -have deployments across both Mainnet and Devnet. So you might hard-code one -token address for Mainnet deployments but hard-code a different address for -Devnet and Localnet deployments. That way you can quickly switch between between -different environments without requiring any changes to the code itself. - -The code below shows an example of an Anchor program that uses the `cfg` -attribute to include different token addresses for local testing compared to -other deployments: - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{Mint, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[cfg(feature = "local-testing")] -pub mod constants { - use solana_program::{pubkey, pubkey::Pubkey}; - pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("WaoKNLQVDyBx388CfjaVeyNbs3MT2mPgAhoCfXyUvg8"); -} - -#[cfg(not(feature = "local-testing"))] -pub mod constants { - use solana_program::{pubkey, pubkey::Pubkey}; - pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v"); -} - -#[program] -pub mod test_program { - use super::*; - - pub fn initialize_usdc_token_account(ctx: Context) -> Result<()> { - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account( - init, - payer = payer, - token::mint = mint, - token::authority = payer, - )] - pub token: Account<'info, TokenAccount>, - #[account(address = constants::USDC_MINT_PUBKEY)] - pub mint: Account<'info, Mint>, - #[account(mut)] - pub payer: Signer<'info>, - pub token_program: Program<'info, Token>, - pub system_program: Program<'info, System>, - pub rent: Sysvar<'info, Rent>, -} -``` - -In this example, the `cfg` attribute is used to conditionally compile two -different implementations of the `constants` module. This allows the program to -use different values for the `USDC_MINT_PUBKEY` constant depending on whether or -not the `local-testing` feature is enabled. - -### Make code conditional using the `cfg!` macro - -Similar to the `cfg` attribute, the `cfg!` **macro** in Rust allows you to check -the values of certain configuration flags at runtime. This can be useful if you -want to execute different code paths depending on the values of certain -configuration flags. - -You could use this to bypass or adjust the time-based constraints required in -the NFT staking app we mentioned previously. When running a test, you can -execute code that provides far higher staking rewards when compared to running a -production build. 
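-
-As a minimal sketch of that idea (the feature name matches the examples in this
-lesson, but the function and reward values are hypothetical), the reward rate
-could be selected at runtime like this:
-
-```rust
-/// Tokens rewarded per staked NFT per day.
-pub fn reward_per_nft_per_day() -> u64 {
-    if cfg!(feature = "local-testing") {
-        // Inflated reward so tests can observe a payout almost immediately
-        10_000_000
-    } else {
-        // Production reward rate
-        10
-    }
-}
-```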
- -To use the `cfg!` macro in an Anchor program, you simply add a `cfg!` macro call -to the conditional statement in question: - -```rust -#[program] -pub mod my_program { - use super::*; - - pub fn test_function(ctx: Context) -> Result<()> { - if cfg!(feature = "local-testing") { - // This code will be executed only if the "local-testing" feature is enabled - // ... - } else { - // This code will be executed only if the "local-testing" feature is not enabled - // ... - } - // Code that should always be included goes here - ... - Ok(()) - } -} -``` - -In this example, the `test_function` uses the `cfg!` macro to check the value of -the `local-testing` feature at runtime. If the `local-testing` feature is -enabled, the first code path is executed. If the `local-testing` feature is not -enabled, the second code path is executed instead. - -## Admin-only instructions - -Feature flags are great for adjusting values and code paths at compilation, but -they don't help much if you end up needing to adjust something after you've -already deployed your program. - -For example, if your NFT staking program has to pivot and use a different -rewards token, there'd be no way to update the program without redeploying. If -only there were a way for program admins to update certain program values... -Well, it's possible! - -First, you need to structure your program to store the values you anticipate -changing in an account rather than hard-coding them into the program code. - -Next, you need to ensure that this account can only be updated by some known -program authority, or what we're calling an admin. That means any instructions -that modify the data on this account need to have constraints limiting who can -sign for the instruction. This sounds fairly straightforward in theory, but -there is one main issues: how does the program know who is an authorized admin? - -Well, there are a few solutions, each with their own benefits and drawbacks: - -1. Hard-code an admin public key that can be used in the admin-only instruction - constraints. -2. Make the program's upgrade authority the admin. -3. Store the admin in the config account and set the first admin in an - `initialize` instruction. - -### Create the config account - -The first step is adding what we'll call a "config" account to your program. You -can customize this to best suit your needs, but we suggest a single global PDA. -In Anchor, that simply means creating an account struct and using a single seed -to derive the account's address. - -```rust -pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config"; - -#[account] -pub struct ProgramConfig { - reward_token: Pubkey, - rewards_per_day: u64, -} -``` - -The example above shows a hypothetical config account for the NFT staking -program example we've referenced throughout the lesson. It stores data -representing the token that should be used for rewards and the amount of tokens -to give out for each day of staking. - -With the config account defined, simply ensure that the rest of your code -references this account when using these values. That way, if the data in the -account changes, the program adapts accordingly. - -### Constrain config updates to hard-coded admins - -You'll need a way to initialize and update the config account data. That means -you need to have one or more instructions that only an admin can invoke. 
The -simplest way to do this is to hard-code an admin's public key in your code and -then add a simple signer check into your instruction's account validation -comparing the signer to this public key. - -In Anchor, constraining an `update_program_config` instruction to only be usable -by a hard-coded admin might look like this: - -```rust -#[program] -mod my_program { - pub fn update_program_config( - ctx: Context, - reward_token: Pubkey, - rewards_per_day: u64 - ) -> Result<()> { - ctx.accounts.program_config.reward_token = reward_token; - ctx.accounts.program_config.rewards_per_day = rewards_per_day; - - Ok(()) - } -} - -pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config"; - -#[constant] -pub const ADMIN_PUBKEY: Pubkey = pubkey!("ADMIN_WALLET_ADDRESS_HERE"); - -#[derive(Accounts)] -pub struct UpdateProgramConfig<'info> { - #[account(mut, seeds = SEED_PROGRAM_CONFIG, bump)] - pub program_config: Account<'info, ProgramConfig>, - #[account(constraint = authority.key() == ADMIN_PUBKEY)] - pub authority: Signer<'info>, -} -``` - -Before instruction logic even executes, a check will be performed to make sure -the instruction's signer matches the hard-coded `ADMIN_PUBKEY`. Notice that the -example above doesn't show the instruction that initializes the config account, -but it should have similar constraints to ensure that an attacker can't -initialize the account with unexpected values. - -While this approach works, it also means keeping track of an admin wallet on top -of keeping track of a program's upgrade authority. With a few more lines of -code, you could simply restrict an instruction to only be callable by the -upgrade authority. The only tricky part is getting a program's upgrade authority -to compare against. - -### Constrain config updates to the program's upgrade authority - -Fortunately, every program has a program data account that translates to the -Anchor `ProgramData` account type and has the `upgrade_authority_address` field. -The program itself stores this account's address in its data in the field -`programdata_address`. - -So in addition to the two accounts required by the instruction in the hard-coded -admin example, this instruction requires the `program` and the `program_data` -accounts. - -The accounts then need the following constraints: - -1. A constraint on `program` ensuring that the provided `program_data` account - matches the program's `programdata_address` field -2. A constraint on the `program_data` account ensuring that the instruction's - signer matches the `program_data` account's `upgrade_authority_address` - field. - -When completed, that looks like this: - -```rust -... - -#[derive(Accounts)] -pub struct UpdateProgramConfig<'info> { - #[account(mut, seeds = SEED_PROGRAM_CONFIG, bump)] - pub program_config: Account<'info, ProgramConfig>, - #[account(constraint = program.programdata_address()? == Some(program_data.key()))] - pub program: Program<'info, MyProgram>, - #[account(constraint = program_data.upgrade_authority_address == Some(authority.key()))] - pub program_data: Account<'info, ProgramData>, - pub authority: Signer<'info>, -} -``` - -Again, the example above doesn't show the instruction that initializes the -config account, but it should have the same constraints to ensure that an -attacker can't initialize the account with unexpected values. - -If this is the first time you've heard about the program data account, it's -worth reading through -[this Notion doc](https://www.notion.so/29780c48794c47308d5f138074dd9838) about -program deploys. 
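-
-On the client side, the `program_data` account to pass into an instruction like
-this can be derived rather than looked up: for upgradeable programs it is a PDA
-of the upgradeable BPF loader, seeded with the program ID. A small sketch (the
-helper name is illustrative):
-
-```tsx
-import * as web3 from "@solana/web3.js";
-
-// Owner of every upgradeable program's ProgramData account
-const BPF_LOADER_UPGRADEABLE_ID = new web3.PublicKey(
-  "BPFLoaderUpgradeab1e11111111111111111111111",
-);
-
-// Derive the ProgramData address for a given program ID
-async function getProgramDataAddress(programId: web3.PublicKey) {
-  const [programDataAddress] = await web3.PublicKey.findProgramAddress(
-    [programId.toBuffer()],
-    BPF_LOADER_UPGRADEABLE_ID,
-  );
-  return programDataAddress;
-}
-```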
- -### Constrain config updates to a provided admin - -Both of the previous options are fairly secure but also inflexible. What if you -want to update the admin to be someone else? For that, you can store the admin -on the config account. - -```rust -pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config"; - -#[account] -pub struct ProgramConfig { - admin: Pubkey, - reward_token: Pubkey, - rewards_per_day: u64, -} -``` - -Then you can constrain your "update" instructions with a signer check matching -against the config account's `admin` field. - -```rust -... - -pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config"; - -#[derive(Accounts)] -pub struct UpdateProgramConfig<'info> { - #[account(mut, seeds = SEED_PROGRAM_CONFIG, bump)] - pub program_config: Account<'info, ProgramConfig>, - #[account(constraint = authority.key() == program_config.admin)] - pub authority: Signer<'info>, -} -``` - -There's one catch here: in the time between deploying a program and initializing -the config account, _there is no admin_. Which means that the instruction for -initializing the config account can't be constrained to only allow admins as -callers. That means it could be called by an attacker looking to set themselves -as the admin. - -While this sounds bad, it really just means that you shouldn't treat your -program as "initialized" until you've initialized the config account yourself -and verified that the admin listed on the account is who you expect. If your -deploy script deploys and then immediately calls `initialize`, it's very -unlikely that an attacker is even aware of your program's existence much less -trying to make themselves the admin. If by some crazy stroke of bad luck someone -"intercepts" your program, you can close the program with the upgrade authority -and redeploy. - -# Demo - -Now let's go ahead and try this out together. For this demo, we'll be working -with a simple program that enables USDC payments. The program collects a small -fee for facilitating the transfer. Note that this is somewhat contrived since -you can do direct transfers without an intermediary contract, but it simulates -how some complex DeFi programs work. - -We'll quickly learn while testing our program that it could benefit from the -flexibility provided by an admin-controlled configuration account and some -feature flags. - -### 1. Starter - -Download the starter code from the `starter` branch -of [this repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/starter). -The code contains a program with a single instruction and a single test in the -`tests` directory. - -Let's quickly walk through how the program works. - -The `lib.rs` file includes a constant for the USDC address and a single -`payment` instruction. The `payment` instruction simply called the -`payment_handler` function in the `instructions/payment.rs` file where the -instruction logic is contained. - -The `instructions/payment.rs` file contains both the `payment_handler` function -as well as the `Payment` account validation struct representing the accounts -required by the `payment` instruction. The `payment_handler` function calculates -a 1% fee from the payment amount, transfers the fee to a designated token -account, and transfers the remaining amount to the payment recipient. - -Finally, the `tests` directory has a single test file, `config.ts` that simply -invokes the `payment` instruction and asserts that the corresponding token -account balances have been debited and credited accordingly. 
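If it helps to see the starter's fee math in isolation before reading the code, the split amounts to simple basis-point arithmetic. This is a simplified sketch assuming a hard-coded 100 basis points (1%), not the verbatim starter code:

```rust
/// Split a payment into (fee, remainder) using a hard-coded 1% fee.
fn split_fee(amount: u64) -> (u64, u64) {
    let fee_amount = amount
        .checked_mul(100) // 100 basis points = 1%
        .unwrap()
        .checked_div(10_000)
        .unwrap();
    let remaining_amount = amount.checked_sub(fee_amount).unwrap();
    (fee_amount, remaining_amount) // e.g. 10_000 tokens -> (100, 9_900)
}
```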
- -Before we continue, take a few minutes to familiarize yourself with these files -and their contents. - -### 2. Run the existing test - -Let's start by running the existing test. - -Make sure you use `yarn` or `npm install` to install the dependencies laid out -in the `package.json` file. Then be sure to run `anchor keys list` to get the -public key for your program printed to the console. This differs based on the -keypair you have locally, so you need to update `lib.rs` and `Anchor.toml` to -use _your_ key. - -Finally, run `anchor test` to start the test. It should fail with the following -output: - -``` -Error: failed to send transaction: Transaction simulation failed: Error processing Instruction 0: incorrect program id for instruction -``` - -The reason for this error is that we're attempting to use the mainnet USDC mint -address (as hard-coded in the `lib.rs` file of the program), but that mint -doesn't exist in the local environment. - -### 3. Adding a `local-testing` feature - -To fix this, we need a mint we can use locally _and_ hard-code into the program. -Since the local environment is reset often during testing, you'll need to store -a keypair that you can use to recreate the same mint address every time. - -Additionally, you don't want to have to change the hard-coded address between -local and mainnet builds since that could introduce human error (and is just -annoying). So we'll create a `local-testing` feature that, when enabled, will -make the program use our local mint but otherwise use the production USDC mint. - -Generate a new keypair by running `solana-keygen grind`. Run the following -command to generate a keypair with a public key that begins with "env". - -``` -solana-keygen grind --starts-with env:1 -``` - -Once a keypair is found, you should see an output similar to the following: - -``` -Wrote keypair to env9Y3szLdqMLU9rXpEGPqkjdvVn8YNHtxYNvCKXmHe.json -``` - -The keypair is written to a file in your working directory. Now that we have a -placeholder USDC address, let's modify the `lib.rs` file. Use the `cfg` -attribute to define the `USDC_MINT_PUBKEY` constant depending on whether the -`local-testing` feature is enabled or disabled. Remember to set the -`USDC_MINT_PUBKEY` constant for `local-testing` with the one generated in the -previous step rather than copying the one below. - -```rust -use anchor_lang::prelude::*; -use solana_program::{pubkey, pubkey::Pubkey}; -mod instructions; -use instructions::*; - -declare_id!("BC3RMBvVa88zSDzPXnBXxpnNYCrKsxnhR3HwwHhuKKei"); - -#[cfg(feature = "local-testing")] -#[constant] -pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("..."); - -#[cfg(not(feature = "local-testing"))] -#[constant] -pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v"); - -#[program] -pub mod config { - use super::*; - - pub fn payment(ctx: Context, amount: u64) -> Result<()> { - instructions::payment_handler(ctx, amount) - } -} -``` - -Next, add the `local-testing` feature to the `Cargo.toml` file located in -`/programs`. - -``` -[features] -... -local-testing = [] -``` - -Next, update the `config.ts` test file to create a mint using the generated -keypair. Start by deleting the `mint` constant. - -```ts -const mint = new anchor.web3.PublicKey( - "EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v", -); -``` - -Next, update the test to create a mint using the keypair, which will enable us -to reuse the same mint address each time the tests are run. 
Remember to replace -the file name with the one generated in the previous step. - -```ts -let mint: anchor.web3.PublicKey - -before(async () => { - let data = fs.readFileSync( - "env9Y3szLdqMLU9rXpEGPqkjdvVn8YNHtxYNvCKXmHe.json" - ) - - let keypair = anchor.web3.Keypair.fromSecretKey( - new Uint8Array(JSON.parse(data)) - ) - - const mint = await spl.createMint( - connection, - wallet.payer, - wallet.publicKey, - null, - 0, - keypair - ) -... -``` - -Lastly, run the test with the `local-testing` feature enabled. - -``` -anchor test -- --features "local-testing" -``` - -You should see the following output: - -``` -config - ✔ Payment completes successfully (406ms) - - -1 passing (3s) -``` - -Boom. Just like that, you've used features to run two different code paths for -different environments. - -### 4. Program Config - -Features are great for setting different values at compilation, but what if you -wanted to be able to dynamically update the fee percentage used by the program? -Let's make that possible by creating a Program Config account that allows us to -update the fee without upgrading the program. - -To begin, let's first update the `lib.rs` file to: - -1. Include a `SEED_PROGRAM_CONFIG` constant, which will be used to generate the - PDA for the program config account. -2. Include an `ADMIN` constant, which will be used as a constraint when - initializing the program config account. Run the `solana address` command to - get your address to use as the constant's value. -3. Include a `state` module that we'll implement shortly. -4. Include the `initialize_program_config` and `update_program_config` - instructions and calls to their "handlers," both of which we'll implement in - another step. - -```rust -use anchor_lang::prelude::*; -use solana_program::{pubkey, pubkey::Pubkey}; -mod instructions; -mod state; -use instructions::*; - -declare_id!("BC3RMBvVa88zSDzPXnBXxpnNYCrKsxnhR3HwwHhuKKei"); - -#[cfg(feature = "local-testing")] -#[constant] -pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("envgiPXWwmpkHFKdy4QLv2cypgAWmVTVEm71YbNpYRu"); - -#[cfg(not(feature = "local-testing"))] -#[constant] -pub const USDC_MINT_PUBKEY: Pubkey = pubkey!("EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v"); - -pub const SEED_PROGRAM_CONFIG: &[u8] = b"program_config"; - -#[constant] -pub const ADMIN: Pubkey = pubkey!("..."); - -#[program] -pub mod config { - use super::*; - - pub fn initialize_program_config(ctx: Context) -> Result<()> { - instructions::initialize_program_config_handler(ctx) - } - - pub fn update_program_config( - ctx: Context, - new_fee: u64, - ) -> Result<()> { - instructions::update_program_config_handler(ctx, new_fee) - } - - pub fn payment(ctx: Context, amount: u64) -> Result<()> { - instructions::payment_handler(ctx, amount) - } -} -``` - -### 5. Program Config State - -Next, let's define the structure for the `ProgramConfig` state. This account -will store the admin, the token account where fees are sent, and the fee rate. -We'll also specify the number of bytes required to store this structure. - -Create a new file called `state.rs` in the `/src` directory and add the -following code. - -```rust -use anchor_lang::prelude::*; - -#[account] -pub struct ProgramConfig { - pub admin: Pubkey, - pub fee_destination: Pubkey, - pub fee_basis_points: u64, -} - -impl ProgramConfig { - pub const LEN: usize = 8 + 32 + 32 + 8; -} -``` - -### 6. Add Initialize Program Config Account Instruction - -Now let's create the instruction logic for initializing the program config -account. 
It should only be callable by a transaction signed by the `ADMIN` key -and should set all the properties on the `ProgramConfig` account. - -Create a folder called `program_config` at the path -`/src/instructions/program_config`. This folder will store all instructions -related to the program config account. - -Within the `program_config` folder, create a file called -`initialize_program_config.rs` and add the following code. - -```rust -use crate::state::ProgramConfig; -use crate::ADMIN; -use crate::SEED_PROGRAM_CONFIG; -use crate::USDC_MINT_PUBKEY; -use anchor_lang::prelude::*; -use anchor_spl::token::TokenAccount; - -#[derive(Accounts)] -pub struct InitializeProgramConfig<'info> { - #[account(init, seeds = [SEED_PROGRAM_CONFIG], bump, payer = authority, space = ProgramConfig::LEN)] - pub program_config: Account<'info, ProgramConfig>, - #[account( token::mint = USDC_MINT_PUBKEY)] - pub fee_destination: Account<'info, TokenAccount>, - #[account(mut, address = ADMIN)] - pub authority: Signer<'info>, - pub system_program: Program<'info, System>, -} - -pub fn initialize_program_config_handler(ctx: Context) -> Result<()> { - ctx.accounts.program_config.admin = ctx.accounts.authority.key(); - ctx.accounts.program_config.fee_destination = ctx.accounts.fee_destination.key(); - ctx.accounts.program_config.fee_basis_points = 100; - Ok(()) -} -``` - -### 7. Add Update Program Config Fee Instruction - -Next, implement the instruction logic for updating the config account. The -instruction should require that the signer match the `admin` stored in the -`program_config` account. - -Within the `program_config` folder, create a file called -`update_program_config.rs` and add the following code. - -```rust -use crate::state::ProgramConfig; -use crate::SEED_PROGRAM_CONFIG; -use crate::USDC_MINT_PUBKEY; -use anchor_lang::prelude::*; -use anchor_spl::token::TokenAccount; - -#[derive(Accounts)] -pub struct UpdateProgramConfig<'info> { - #[account(mut, seeds = [SEED_PROGRAM_CONFIG], bump)] - pub program_config: Account<'info, ProgramConfig>, - #[account( token::mint = USDC_MINT_PUBKEY)] - pub fee_destination: Account<'info, TokenAccount>, - #[account( - mut, - address = program_config.admin, - )] - pub admin: Signer<'info>, - /// CHECK: arbitrarily assigned by existing admin - pub new_admin: UncheckedAccount<'info>, -} - -pub fn update_program_config_handler( - ctx: Context, - new_fee: u64, -) -> Result<()> { - ctx.accounts.program_config.admin = ctx.accounts.new_admin.key(); - ctx.accounts.program_config.fee_destination = ctx.accounts.fee_destination.key(); - ctx.accounts.program_config.fee_basis_points = new_fee; - Ok(()) -} -``` - -### 8. Add mod.rs and update instructions.rs - -Next, let's expose the instruction handlers we created so that the call from -`lib.rs` doesn't show an error. Start by adding a file `mod.rs` in the -`program_config` folder. Add the code below to make the two modules, -`initialize_program_config` and `update_program_config` accessible. - -```rust -mod initialize_program_config; -pub use initialize_program_config::*; - -mod update_program_config; -pub use update_program_config::*; -``` - -Now, update `instructions.rs` at the path `/src/instructions.rs`. Add the code -below to make the two modules, `program_config` and `payment` accessible. - -```rust -mod program_config; -pub use program_config::*; - -mod payment; -pub use payment::*; -``` - -### 9. 
Update Payment Instruction - -Lastly, let's update the payment instruction to check that the `fee_destination` -account in the instruction matches the `fee_destination` stored in the program -config account. Then update the instruction's fee calculation to be based on the -`fee_basis_point` stored in the program config account. - -```rust -use crate::state::ProgramConfig; -use crate::SEED_PROGRAM_CONFIG; -use crate::USDC_MINT_PUBKEY; -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Token, TokenAccount}; - -#[derive(Accounts)] -pub struct Payment<'info> { - #[account( - seeds = [SEED_PROGRAM_CONFIG], - bump, - has_one = fee_destination - )] - pub program_config: Account<'info, ProgramConfig>, - #[account( - mut, - token::mint = USDC_MINT_PUBKEY - )] - pub fee_destination: Account<'info, TokenAccount>, - #[account( - mut, - token::mint = USDC_MINT_PUBKEY - )] - pub sender_token_account: Account<'info, TokenAccount>, - #[account( - mut, - token::mint = USDC_MINT_PUBKEY - )] - pub receiver_token_account: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - #[account(mut)] - pub sender: Signer<'info>, -} - -pub fn payment_handler(ctx: Context, amount: u64) -> Result<()> { - let fee_amount = amount - .checked_mul(ctx.accounts.program_config.fee_basis_points) - .unwrap() - .checked_div(10000) - .unwrap(); - let remaining_amount = amount.checked_sub(fee_amount).unwrap(); - - msg!("Amount: {}", amount); - msg!("Fee Amount: {}", fee_amount); - msg!("Remaining Transfer Amount: {}", remaining_amount); - - token::transfer( - CpiContext::new( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.sender_token_account.to_account_info(), - authority: ctx.accounts.sender.to_account_info(), - to: ctx.accounts.fee_destination.to_account_info(), - }, - ), - fee_amount, - )?; - - token::transfer( - CpiContext::new( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.sender_token_account.to_account_info(), - authority: ctx.accounts.sender.to_account_info(), - to: ctx.accounts.receiver_token_account.to_account_info(), - }, - ), - remaining_amount, - )?; - - Ok(()) -} -``` - -### 10. Test - -Now that we're done implementing our new program configuration struct and -instructions, let's move on to testing our updated program. To begin, add the -PDA for the program config account to the test file. - -```ts -describe("config", () => { - ... - const programConfig = findProgramAddressSync( - [Buffer.from("program_config")], - program.programId - )[0] -... -``` - -Next, update the test file with three more tests testing that: - -1. The program config account is initialized correctly -2. The payment instruction is functioning as intended -3. The config account can be updated successfully by the admin -4. The config account cannot be updated by another other than the admin - -The first test initializes the program config account and verifies that the -correct fee is set and that the correct admin is stored on the program config -account. 
- -```typescript -it("Initialize Program Config Account", async () => { - const tx = await program.methods - .initializeProgramConfig() - .accounts({ - programConfig: programConfig, - feeDestination: feeDestination, - authority: wallet.publicKey, - systemProgram: anchor.web3.SystemProgram.programId, - }) - .rpc(); - - assert.strictEqual( - ( - await program.account.programConfig.fetch(programConfig) - ).feeBasisPoints.toNumber(), - 100, - ); - assert.strictEqual( - (await program.account.programConfig.fetch(programConfig)).admin.toString(), - wallet.publicKey.toString(), - ); -}); -``` - -The second test verifies that the payment instruction is working correctly, with -the fee being sent to the fee destination and the remaining balance being -transferred to the receiver. Here we update the existing test to include the -`programConfig` account. - -```typescript -it("Payment completes successfully", async () => { - const tx = await program.methods - .payment(new anchor.BN(10000)) - .accounts({ - programConfig: programConfig, - feeDestination: feeDestination, - senderTokenAccount: senderTokenAccount, - receiverTokenAccount: receiverTokenAccount, - sender: sender.publicKey, - }) - .transaction(); - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [sender]); - - assert.strictEqual( - (await connection.getTokenAccountBalance(senderTokenAccount)).value - .uiAmount, - 0, - ); - - assert.strictEqual( - (await connection.getTokenAccountBalance(feeDestination)).value.uiAmount, - 100, - ); - - assert.strictEqual( - (await connection.getTokenAccountBalance(receiverTokenAccount)).value - .uiAmount, - 9900, - ); -}); -``` - -The third test attempts to update the fee on the program config account, which -should be successful. - -```typescript -it("Update Program Config Account", async () => { - const tx = await program.methods - .updateProgramConfig(new anchor.BN(200)) - .accounts({ - programConfig: programConfig, - admin: wallet.publicKey, - feeDestination: feeDestination, - newAdmin: sender.publicKey, - }) - .rpc(); - - assert.strictEqual( - ( - await program.account.programConfig.fetch(programConfig) - ).feeBasisPoints.toNumber(), - 200, - ); -}); -``` - -The fourth test tries to update the fee on the program config account, where the -admin is not the one stored on the program config account, and this should fail. - -```typescript -it("Update Program Config Account with unauthorized admin (expect fail)", async () => { - try { - const tx = await program.methods - .updateProgramConfig(new anchor.BN(300)) - .accounts({ - programConfig: programConfig, - admin: sender.publicKey, - feeDestination: feeDestination, - newAdmin: sender.publicKey, - }) - .transaction(); - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [sender]); - } catch (err) { - expect(err); - } -}); -``` - -Finally, run the test using the following command: - -``` -anchor test -- --features "local-testing" -``` - -You should see the following output: - -``` -config - ✔ Initialize Program Config Account (199ms) - ✔ Payment completes successfully (405ms) - ✔ Update Program Config Account (403ms) - ✔ Update Program Config Account with unauthorized admin (expect fail) - -4 passing (8s) -``` - -And that's it! You've made the program a lot easier to work with moving forward. -If you want to take a look at the final solution code you can find it on -the `solution` branch -of [the same repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/solution). 
- -# Challenge - -Now it's time for you to do some of this on your own. We mentioned being able to -use the program's upgrade authority as the initial admin. Go ahead and update -the demo's `initialize_program_config` so that only the upgrade authority can -call it rather than having a hardcoded `ADMIN`. - -Note that the `anchor test` command, when run on a local network, starts a new -test validator using `solana-test-validator`. This test validator uses a -non-upgradeable loader. The non-upgradeable loader makes it so the program's -`program_data` account isn't initialized when the validator starts. You'll -recall from the lesson that this account is how we access the upgrade authority -from the program. - -To work around this, you can add a `deploy` function to the test file that runs -the deploy command for the program with an upgradeable loader. To use it, run -`anchor test --skip-deploy`, and call the `deploy` function within the test to -run the deploy command after the test validator has started. - -```typescript -import { execSync } from "child_process" - -... - -const deploy = () => { - const deployCmd = `solana program deploy --url localhost -v --program-id $(pwd)/target/deploy/config-keypair.json $(pwd)/target/deploy/config.so` - execSync(deployCmd) -} - -... - -before(async () => { - ... - deploy() -}) -``` - -For example, the command to run the test with features would look like this: - -``` -anchor test --skip-deploy -- --features "local-testing" -``` - -Try doing this on your own, but if you get stuck, feel free to reference the -`challenge` branch of -[the same repository](https://github.com/Unboxed-Software/solana-admin-instructions/tree/challenge) -to see one possible solution. diff --git a/content/courses/solana-course/content/getting-started.md b/content/courses/solana-course/content/getting-started.md deleted file mode 100644 index 24eb5cf2b..000000000 --- a/content/courses/solana-course/content/getting-started.md +++ /dev/null @@ -1,57 +0,0 @@ ---- -title: Course Guide -objectives: - - explain how this Solana course is structured ---- - -## How is this course structured? - -Glad you asked. Each lesson starts by listing the lesson objectives. These are -short statements that indicate what you'll be learning throughout the lesson. - -In addition to listing objectives, each lesson provides a brief TL;DR so that -you can glance through, get a sense of what the lesson covers, and decide if the -lesson is for you or not. - -If you decide to continue your journey, you'll then encounter three sections: - -- **Overview** - the overview contains explanatory text, examples, and code - snippets. You are _not_ expected to code along with any of the examples shown - here. The goal is to simply read through and get initial exposure to the - lesson topics. -- **Demo** - the demo is a tutorial-style project. You _absolutely should_ code - along with this section. This is your second exposure to the content as well - as your first opportunity to dive in and _do the thing_. -- **Challenge** - the challenge contains a similar project to the demo only - instead of walking you through it, the lesson leaves just a few simple prompts - that you should then take and implement independently. - -This structure leans into a pedagogical technique call IWY loops. IWY stands for -"I do, We do, You do." Each step along the way increases your exposure to the -topic _and_ reduces the amount of handholding you're given. - -## How do I use it effectively? - -Again, glad you asked. 
The lesson structure is pretty great, but everyone comes -into this with different backgrounds and aptitudes that can't be taken into -account by static text. With that in mind, here are three recommendations for -how to get the most out of the course: - -1. **Be brutally honest with yourself** - this may sound a little vague, but - being honest with yourself about how well you understand a certain topic is - essential to mastering it. It's really easy to read a thing and think "yeah, - yeah I get it," only to realize later that you actually didn't. Be honest - with yourself while going through each lesson. Please don't hesitate to - repeat sections if you need to or do outside research when the lesson - phrasing doesn't quite work for you. -2. **Do every demo and challenge** - this supports the first point. It's pretty - tough to lie to yourself about how well you know something when you make - yourself try to do it. Do every demo and every challenge to test where you're - at and repeat them as needed. We provide solution code for everything, but be - sure to use it as a helpful resource rather than a crutch. -3. **Go above and beyond** - sounds cliche, I know, but don't just stop at what - the demo and challenges ask you to do. Get creative! Take the projects and - make them your own. Build past them. The more you practice the better you - get. - -Alright, that's it for my pep talk. Get after it! diff --git a/content/courses/solana-course/content/hello-world-program.md b/content/courses/solana-course/content/hello-world-program.md deleted file mode 100644 index 5c073ea1e..000000000 --- a/content/courses/solana-course/content/hello-world-program.md +++ /dev/null @@ -1,389 +0,0 @@ ---- -title: Hello World -objectives: - - Use the Rust module system - - Define a function in Rust - - Explain the `Result` type - - Explain the entry point to a Solana program - - Build and deploy a basic Solana program - - Submit a transaction to invoke our “Hello, world!” program ---- - -# TL;DR - -- **Programs** on Solana are a particular type of account that stores and - executes instruction logic -- Solana programs have a single **entry point** to process instructions -- A program processes an instruction using the **program_id**, list of - **accounts**, and **instruction_data** included with the instruction - -# Overview - -Solana's ability to run arbitrary executable code is part of what makes it so -powerful. Solana programs, similar to "smart contracts" in other blockchain -environments, are quite literally the backbone of the Solana ecosystem. And the -collection of programs grows daily as developers and creators dream up and -deploy new programs. - -This lesson will give you a basic introduction to writing and deploying a Solana -program using the Rust programming language. To avoid the distraction of setting -up a local development environment, we'll be using a browser-based IDE called -Solana Playground. - -## Rust Basics - -Before we dive into the building our "Hello, world!" program, let’s first go -over some Rust basics. If you want to dig deeper into Rust, have a look at -the [Rust language book](https://doc.rust-lang.org/book/ch00-00-introduction.html). - -### Module System - -Rust organizes code using what is collectively referred to as the “module -system”. - -This includes: - -- **Modules** - A module separates code into logical units to provide isolated - namespaces for organization, scope, and privacy of paths -- **Crates** - A crate is either a library or an executable program. 
The source - code for a crate is usually subdivided into multiple modules. -- **Packages** - A package contains a collection of crates as well as a manifest - file for specifying metadata and dependencies between packages - -Throughout this lesson, we’ll focus on using crates and modules. - -### Paths and scope - -Crates in Rust contain modules that define functionality which can be shared -with multiple projects. If we want to access an item within a module, then we -need to know its "path" (like when we're navigating a filesystem). - -Think of the crate structure as a tree where the crate is the base and modules -are branches, each of which can have submodules or items that are additional -branches. - -The path to a particular module or item is the name of each step from the crate -to that module where each is separated by `::`. As an example, let's look at the -following structure: - -1. The base crate is `solana_program` -2. `solana_program` contains a module named `account_info` -3. `account_info` contains a struct named `AccountInfo` - -The path to `AccountInfo` would be `solana_program::account_info::AccountInfo`. - -Absent of any other keywords, we would need to reference this entire path to use -`AccountInfo` in our code. - -However, with the -[`use`](https://doc.rust-lang.org/stable/book/ch07-04-bringing-paths-into-scope-with-the-use-keyword.html) -keyword we can bring an item into scope so that it can be reused throughout a -file without specifying the full path each time. It's common to see a series of -`use` commands at the top of a Rust file. - -```rust -use solana_program::account_info::AccountInfo -``` - -### Declaring Functions in Rust - -We define a function in Rust by using the `fn` keyword followed by a function -name and a set of parentheses. - -```rust -fn process_instruction() -``` - -We can then add arguments to our function by including variable names and -specifying its corresponding data type within the parentheses. - -Rust is known as a ”statically typed” language and every value in Rust is of a -certain ”data type”. This meaning that Rust must know the types of all variables -at compile time. In cases when multiple types are possible, we must add a type -annotation to our variables. - -In the example below, we create a function named `process_instruction` that -requires the following arguments: - -- `program_id` - required to be type `&Pubkey` -- `accounts` - required to be type `&[AccountInfo]` -- `instruction_data` - required to be type `&[u8]` - -Note the `&` in front of the type for each argument listed in the -`process_instruction` function. In Rust, `&` represents a ”reference” to another -variable. This allows you to refer to some value without taking ownership of it. -The “reference” is guaranteed to point to a valid value of a particular type. -The action of creating a reference in Rust is called “borrowing”. - -In this example, when the `process_instruction` function is called, a user must -pass in values for the required arguments. The `process_instruction` function -then references the values passed in by the user, and guarantees that each value -is the correct data type specified in the `process_instruction` function. - -Additionally, note the brackets `[]` around `&[AccountInfo]` and `&[u8]`. This -means that the `accounts` and `instruction_data` arguments expect “slices” of -types `AccountInfo` and `u8`, respectively. A “slice” is similar to an array -(collection of objects of the same type), except the length is not known at -compile time. 
In other words, the `accounts` and `instruction_data` arguments -expect inputs of unknown length. - -```rust -fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8], -) -``` - -We can then have our functions return values by declaring the return type using -an arrow `->` after the function. - -In the example below, the `process_instruction` function will now return a value -of type `ProgramResult`. We will go over this in the next section. - -```rust -fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8], -) -> ProgramResult -``` - -### Result enum - -`Result` is a standard library type that represents two discrete outcomes: -success (`Ok`) or failure (`Err`). We'll talk more about enums in a future -lesson, but you'll see `Ok` used later in this lesson so it's important to cover -the basics. - -When you use `Ok` or `Err`, you must include a value, the type of which is -determined by the context of the code. For example, a function that requires a -return value of type `Result` is saying that the function can -either return `Ok` with an embedded string value or `Err` with an embedded -integer. In this example, the integer is an error code that can be used to -appropriately handle the error. - -To return a success case with a string value, you would do the following: - -```rust -Ok(String::from("Success!")); -``` - -To return an error with an integer, you would do the following: - -```rust -Err(404); -``` - -## Solana Programs - -Recall that all data stored on the Solana network are contained in what are -referred to as accounts. Each account has its own unique address which is used -to identify and access the account data. Solana programs are just a particular -type of Solana account that store and execute instructions. - -### Solana Program Crate - -To write Solana programs with Rust, we use the `solana_program` library crate. -The `solana_program` crate acts as a standard library for Solana programs. This -standard library contains the modules and macros that we'll use to develop our -Solana programs. If you want to dig deeper `solana_program` crate, have a look -[here](https://docs.rs/solana-program/latest/solana_program/index.html). - -For a basic program we will need to bring into scope the following items from -the `solana_program` crate: - -```rust -use solana_program::{ - account_info::AccountInfo, - entrypoint, - entrypoint::ProgramResult, - pubkey::Pubkey, - msg -}; -``` - -- `AccountInfo` - a struct within the `account_info` module that allows us to - access account information -- `entrypoint` - a macro that declares the entry point of the program -- `ProgramResult` - a type within the `entrypoint` module that returns either - a `Result` or `ProgramError` -- `Pubkey` - a struct within the `pubkey` module that allows us to access - addresses as a public key -- `msg` - a macro that allows us to print messages to the program log - -### Solana Program Entry Point - -Solana programs require a single entry point to process program instructions. -The entry point is declared using the `entrypoint!` macro. 
- -The entry point to a Solana program requires a `process_instruction` function -with the following arguments: - -- `program_id` - the address of the account where the program is stored -- `accounts` - the list of accounts required to process the instruction -- `instruction_data` - the serialized, instruction-specific data - -```rust -entrypoint!(process_instruction); - -fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8], -) -> ProgramResult; -``` - -Recall that Solana program accounts only store the logic to process -instructions. This means program accounts are "read-only" and “stateless”. The -“state” (the set of data) that a program requires in order to process an -instruction is stored in data accounts (separate from the program account). - -In order to process an instruction, the data accounts that an instruction -requires must be explicitly passed into the program through -the `accounts` argument. Any additional inputs must be passed in through -the `instruction_data` argument. - -Following program execution, the program must return a value of type -`ProgramResult`. This type is a `Result` where the embedded value of a success -case is `()` and the embedded value of a failure case is `ProgramError`. `()` is -effectively an empty value and `ProgramError` is an error type defined in the -`solana_program` crate. - -...and there you have it - you now know all the things you need for the -foundations of creating a Solana program using Rust. Let’s practice what we’ve -learned so far! - -# Demo - -We're going to build a "Hello, World!" program using Solana Playground. Solana -Playground is a tool that allows you to write and deploy Solana programs from -the browser. - -### 1. Setup - -Click [here](https://beta.solpg.io/) to open Solana Playground. Next, go ahead -and delete everything in the default `lib.rs` file and create a Playground -wallet. - -![Gif Solana Playground Create Wallet](../assets/hello-world-create-wallet.gif) - -### 2. Solana Program Crate - -First, let's bring into scope everything we’ll need from the `solana_program` -crate. - -```rust -use solana_program::{ - account_info::AccountInfo, - entrypoint, - entrypoint::ProgramResult, - pubkey::Pubkey, - msg -}; -``` - -Next, let's set up the entry point to our program using the `entrypoint!` macro -and create the `process_instruction` function. The `msg!` macro then allows us -to print “Hello, world!” to the program log when the program is invoked. - -### 3. Entry Point - -```rust -entrypoint!(process_instruction); - -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8] -) -> ProgramResult{ - msg!("Hello, world!"); - - Ok(()) -} -``` - -All together, the “Hello, world!” program will look like this: - -```rust -use solana_program::{ - account_info::AccountInfo, - entrypoint, - entrypoint::ProgramResult, - pubkey::Pubkey, - msg -}; - -entrypoint!(process_instruction); - -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8] -) -> ProgramResult{ - msg!("Hello, world!"); - - Ok(()) -} -``` - -### 4. Build and Deploy - -Now let's build and deploy our program using Solana Playground. - -![Gif Solana Playground Build and Deploy](../assets/hello-world-build-deploy.gif) - -### 5. Invoke Program - -Finally, let's invoke our program from the client side. Download the code -[here](https://github.com/Unboxed-Software/solana-hello-world-client). 
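If you're curious what invoking a program that takes no accounts and no instruction data actually involves, a stripped-down sketch of such a helper might look like the following. This is illustrative only — the provided client differs in its details, and the connection and payer setup here are assumptions:

```typescript
import * as web3 from "@solana/web3.js";

// Minimal sketch: invoke a program that reads no accounts and takes no data.
// Assumes `payer` is a funded devnet keypair and `programId` is your deployed program.
async function sayHello(
  connection: web3.Connection,
  payer: web3.Keypair,
  programId: web3.PublicKey,
): Promise<string> {
  const instruction = new web3.TransactionInstruction({
    keys: [], // "Hello, world!" doesn't read or write any accounts
    programId,
  });

  const transaction = new web3.Transaction().add(instruction);
  return await web3.sendAndConfirmTransaction(connection, transaction, [payer]);
}
```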
- -The focus of this lesson is to build our Solana program, so we’ve gone ahead and -provided the client code to invoke our “Hello, world!” program. The code -provided includes a `sayHello` helper function that builds and submits our -transaction. We then call `sayHello` in the main function and print a Solana -Explorer URL to view our transaction details in the browser. - -Open the `index.ts` file you should see a variable named `programId`. Go ahead -and update this with the program ID of the “Hello, world!" program you just -deployed using Solana Playground. - -```tsx -let programId = new web3.PublicKey(""); -``` - -You can locate the program ID on Solana Playground referencing the image below. - -![Gif Solana Playground Program ID](../assets/hello-world-program-id.gif) - -Next, install the Node modules with `npm i`. - -Now, go ahead and run `npm start`. This command will: - -1. Generate a new keypair and create a `.env` file if one does not already exist -2. Airdrop devnet SOL -3. Invoke the “Hello, world!” program -4. Output the transaction URL to view on Solana Explorer - -Copy the transaction URL printed in the console into your browser. Scroll down -to see “Hello, world!” under Program Instruction Logs. - -![Screenshot Solana Explorer Program Log](../assets/hello-world-program-log.png) - -Congratulations, you’ve just successfully built and deployed a Solana program! - -# Challenge - -Now it’s your turn to build something independently. Because we're starting with -very simple programs, yours will look almost identical to what we just created. -It's useful to try and get to the point where you can write it from scratch -without referencing prior code, so try not to copy and paste here. - -1. Write a new program that uses the `msg!` macro to print your own message to - the program log. -2. Build and deploy your program like we did in the demo. -3. Invoke your newly deployed program and use Solana Explorer to check that your - message was printed in the program log. - -As always, get creative with these challenges and take them beyond the basic -instructions if you want - and have fun! diff --git a/content/courses/solana-course/content/interact-with-wallets.md b/content/courses/solana-course/content/interact-with-wallets.md deleted file mode 100644 index 91f46345c..000000000 --- a/content/courses/solana-course/content/interact-with-wallets.md +++ /dev/null @@ -1,612 +0,0 @@ ---- -title: Interact With Wallets -objectives: - - Explain Wallets - - Install Phantom extension - - Set Phantom wallet to [Devnet](https://api.devnet.solana.com/) - - Use wallet-adapter to have users sign transactions ---- - -# TL;DR - -- **Wallets** store your secret key and handle secure transaction signing -- **Hardware wallets** store your secret key on a separate device -- **Software wallets** use your computer for secure storage -- Software wallets are often **browser extensions** that facilitate connecting - to websites -- Solana’s **Wallet-Adapter library** simplifies the support of wallet browser - extensions, allowing you to build websites that can request a user’s wallet - address and propose transactions for them to sign - -# Overview - -## Wallets - -In the previous two lessons we discussed keypairs. Keypairs are used to locate -accounts and sign transactions. While the public key of a keypair is perfectly -safe to share, the secret key should always be kept in a secure location. 
If a -user’s secret key is exposed, then a malicious actor could drain their account -of all assets and execute transactions with the authority of that user. - -A “wallet” refers to anything that stores a secret key in order to keep it -secure. These secure storage options can generally be described as either -“hardware” or “software” wallets. Hardware wallets are storage devices that are -separate from your computer. Software wallets are application you can install on -your existing device(s). - -Software wallets often come in the form of a browser extension. This makes it -possible for websites to interact easily with the wallet. Such interactions are -usually limited to: - -1. Seeing the wallet’s public key (address) -2. Submitting transactions for a user's approval -3. Sending an approved transaction to the network - -Once a transaction is submitted, the end user can “confirm” the transaction and -send it to the network with their “signature.” - -Signing transactions requires using your secret key. By letting a site submit a -transaction to your wallet and having the wallet handle the signing, you ensure -that you never expose your secret key to the website. Instead, you only share -the secret key with the wallet application. - -Unless you’re creating a wallet application yourself, your code should never -need to ask a user for their secret key. Instead, you can ask users to connect -to your site using a reputable wallet. - -## Phantom Wallet - -One of the most widely used software wallets in the Solana ecosystem is -[Phantom](https://phantom.app). Phantom supports a few of the most popular -browsers and has a mobile app for connecting on the go. You’ll likely want your -decentralized applications to support multiple wallets, but this course will -focus on Phantom. - -## Solana’s Wallet-Adapter - -Solana’s Wallet-Adapter is a suite of libraries you can use to simplify the -process of supporting wallet browser extensions. - -Solana’s Wallet-Adapter comprises multiple modular packages. The core -functionality is found in `@solana/wallet-adapter-base` and -`@solana/wallet-adapter-react`. - -There are also packages that provide components for common UI frameworks. In -this lesson and throughout this course, we’ll be using components from -`@solana/wallet-adapter-react-ui`. - -Finally, there are packages that are adapters for specific wallets, including -Phantom. You can use `@solana/wallet-adapter-wallets` to include all of the -supported wallets, or you can choose a specific wallet package like -`@solana/wallet-adapter-phantom`. - -### Install Wallet-Adapter Libraries - -When adding wallet support to an existing react app, you start by installing the -appropriate packages. You’ll need `@solana/wallet-adapter-base`, -`@solana/wallet-adapter-react`, the package(s) for the wallet(s) you want to -support, and `@solana/wallet-adapter-react-ui` if you plan to use the provided -react components, e.g. - -``` -npm install @solana/wallet-adapter-base \ - @solana/wallet-adapter-react \ - @solana/wallet-adapter-phantom \ - @solana/wallet-adapter-react-ui -``` - -### Connect To Wallets - -`@solana/wallet-adapter-react` allows us to persist and access wallet connection -states through hooks and context providers, namely: - -- `useWallet` -- `WalletProvider` -- `useConnection` -- `ConnectionProvider` - -For these to work properly, any use of `useWallet` and `useConnection` should be -wrapped in `WalletProvider` and `ConnectionProvider`. 
One of the best ways to -ensure this is to wrap your entire app in `ConnectionProvider` and -`WalletProvider`: - -```tsx -import { NextPage } from "next"; -import { FC, ReactNode } from "react"; -import { - ConnectionProvider, - WalletProvider, -} from "@solana/wallet-adapter-react"; -import { PhantomWalletAdapter } from "@solana/wallet-adapter-phantom"; -import * as web3 from "@solana/web3.js"; - -export const Home: NextPage = props => { - const endpoint = web3.clusterApiUrl("devnet"); - const wallet = new PhantomWalletAdapter(); - - return ( - - -
    <ConnectionProvider endpoint={endpoint}>
      <WalletProvider wallets={[wallet]}>
        <p>Put the rest of your app here</p>
      </WalletProvider>
    </ConnectionProvider>
- ); -}; -``` - -Note that `ConnectionProvider` requires an `endpoint` property and that -`WalletProvider` requires a `wallets` property. We’re continuing to use the -endpoint for the Devnet cluster, and for now we’re only using the -`PhantomWalletAdapter` for `wallets`. - -At this point you can connect with `wallet.connect()`, which will effectively -instruct the wallet to prompt the user for permission to view their public key -and request approval for transactions. - -![Screenshot of wallet connection prompt](../assets/wallet-connect-prompt.png) - -While you could do this in a `useEffect` hook, you’ll usually want to provide -more sophisticated functionality. For example, you may want users to be able to -choose from a list of supported wallets, or disconnect after they’ve already -connected. - -### `@solana/wallet-adapter-react-ui` - -You can create custom components for this, or you can leverage components -provided by `@solana/wallet-adapter-react-ui`. The simplest way to provide -extensive options is to use `WalletModalProvider` and `WalletMultiButton`: - -```tsx -import { NextPage } from "next"; -import { FC, ReactNode } from "react"; -import { - ConnectionProvider, - WalletProvider, -} from "@solana/wallet-adapter-react"; -import { - WalletModalProvider, - WalletMultiButton, -} from "@solana/wallet-adapter-react-ui"; -import { PhantomWalletAdapter } from "@solana/wallet-adapter-phantom"; -import * as web3 from "@solana/web3.js"; - -const Home: NextPage = props => { - const endpoint = web3.clusterApiUrl("devnet"); - const wallet = new PhantomWalletAdapter(); - - return ( - - - - -
    <ConnectionProvider endpoint={endpoint}>
      <WalletProvider wallets={[wallet]}>
        <WalletModalProvider>
          <WalletMultiButton />
          <p>Put the rest of your app here</p>
        </WalletModalProvider>
      </WalletProvider>
    </ConnectionProvider>
- ); -}; - -export default Home; -``` - -The `WalletModalProvider` adds functionality for presenting a modal screen for -users to select which wallet they’d like to use. The `WalletMultiButton` changes -behavior to match the connection status: - -![Screenshot of multi button select wallet option](../assets/multi-button-select-wallet.png) - -![Screenshot of connect wallet modal](../assets/connect-wallet-modal.png) - -![Screenshot of multi button connect options](../assets/multi-button-connect.png) - -![Screenshot of multi button connected state](../assets/multi-button-connected.png) - -You can also use more granular components if you need more specific -functionality: - -- `WalletConnectButton` -- `WalletModal` -- `WalletModalButton` -- `WalletDisconnectButton` -- `WalletIcon` - -### Access Account Info - -Once your site is connected to a wallet, `useConnection` will retrieve a -`Connection` object and `useWallet` will get the `WalletContextState`. -`WalletContextState` has a property `publicKey` that is `null` when not -connected to a wallet and has the public key of the user’s account when a wallet -is connected. With a public key and a connection, you can fetch account info and -more. - -```tsx -import { useConnection, useWallet } from "@solana/wallet-adapter-react"; -import { LAMPORTS_PER_SOL } from "@solana/web3.js"; -import { FC, useEffect, useState } from "react"; - -export const BalanceDisplay: FC = () => { - const [balance, setBalance] = useState(0); - const { connection } = useConnection(); - const { publicKey } = useWallet(); - - useEffect(() => { - if (!connection || !publicKey) { - return; - } - - connection.getAccountInfo(publicKey).then(info => { - setBalance(info.lamports); - }); - }, [connection, publicKey]); - - return ( -
    <div>
      <p>{publicKey ? `Balance: ${balance / LAMPORTS_PER_SOL} SOL` : ""}</p>
    </div>
- ); -}; -``` - -### Send Transactions - -`WalletContextState` also provides a `sendTransaction` function that you can use -to submit transactions for approval. - -```tsx -const { publicKey, sendTransaction } = useWallet(); -const { connection } = useConnection(); - -const sendSol = event => { - event.preventDefault(); - - const transaction = new web3.Transaction(); - const recipientPubKey = new web3.PublicKey(event.target.recipient.value); - - const sendSolInstruction = web3.SystemProgram.transfer({ - fromPubkey: publicKey, - toPubkey: recipientPubKey, - lamports: LAMPORTS_PER_SOL * 0.1, - }); - - transaction.add(sendSolInstruction); - sendTransaction(transaction, connection).then(sig => { - console.log(sig); - }); -}; -``` - -When this function is called, the connected wallet will display the transaction -for the user’s approval. If approved, then the transaction will be sent. - -![Screenshot of wallet transaction approval prompt](../assets/wallet-transaction-approval-prompt.png) - -# Demo - -Let’s take the Ping program from last lesson and build a frontend that lets -users approve a transaction that pings the program. As a reminder, the program’s -public key is `ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa` and the public key -for the data account is `Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod`. - -![Screenshot of Solana Ping App](../assets/solana-ping-app.png) - -### 1. Download the Phantom browser extension and set it to Devnet - -If you don’t already have it, download the -[Phantom browser extension](https://phantom.app/download). At the time of -writing, it supports Chrome, Brave, Firefox, and Edge browsers, so you’ll also -need to have one of those browsers installed. Follow Phantom’s instructions for -creating a new account and a new wallet. - -Once you have a wallet, click the settings gear on the bottom right in the -Phantom UI. Scroll down and click on the line item “Change Network” and select -“Devnet.” This ensures that Phantom will be connected to the same network we’ll -be using in this demo. - -### 2. Download the starter code - -Download the starter code for this project -[here](https://github.com/Unboxed-Software/solana-ping-frontend/tree/starter). -This project is a simple Next.js application. It’s mostly empty except for the -`AppBar` component. We’ll build the rest throughout this demo. - -You can see its current state with the command `npm run dev` in the console. - -### 3. Wrap the app in context providers - -To start, we’re going to create a new component to contain the various -Wallet-Adapter providers that we’ll be using. Create a new file inside the -`components` folder called `WalletContextProvider.tsx`. - -Let’s start with some of the boilerplate for a functional component: - -```tsx -import { FC, ReactNode } from 'react' - -const WalletContextProvider: FC<{ children: ReactNode }> = ({ children }) => { - - return ( - - ) -} - -export default WalletContextProvider -``` - -To properly connect to the user’s wallet, we’ll need a `ConnectionProvider`, -`WalletProvider` and `WalletModalProvider`. Start by importing these components -from `@solana/wallet-adapter-react` and `@solana/wallet-adapter-react-ui`. Then -add them to the `WalletContextProvider` component. Note that -`ConnectionProvider` requires an `endpoint` parameter and `WalletProvider` -requires an array of `wallets`. For now, just use an empty string and an empty -array, respectively. 
- -```tsx -import { FC, ReactNode } from "react"; -import { - ConnectionProvider, - WalletProvider, -} from "@solana/wallet-adapter-react"; -import { WalletModalProvider } from "@solana/wallet-adapter-react-ui"; - -const WalletContextProvider: FC<{ children: ReactNode }> = ({ children }) => { - return ( - - - {children} - - - ); -}; - -export default WalletContextProvider; -``` - -The last things we need are an actual endpoint for `ConnectionProvider` and the -supported wallets for `WalletProvider`. - -For the endpoint, we’ll use the same `clusterApiUrl` function from the -`@solana/web3.js` library that we’ve used before so you’ll need to import it. -For the array of wallets you’ll also need to import the -`@solana/wallet-adapter-wallets` library. - -After importing these libraries, create a constant `endpoint` that uses the -`clusterApiUrl` function to get the url for Devnet. Then create a constant -`wallets` and set it to an array that contains a newly constructed -`PhantomWalletAdapter`. Finally, replace the empty string and empty array in -`ConnectionProvider` and `WalletProvider`, respectively. - -To complete this component, add -`require('@solana/wallet-adapter-react-ui/styles.css');` below your imports to -ensure proper styling and behavior of the Wallet Adapter library components. - -```tsx -import { FC, ReactNode } from "react"; -import { - ConnectionProvider, - WalletProvider, -} from "@solana/wallet-adapter-react"; -import { WalletModalProvider } from "@solana/wallet-adapter-react-ui"; -import * as web3 from "@solana/web3.js"; -import * as walletAdapterWallets from "@solana/wallet-adapter-wallets"; -require("@solana/wallet-adapter-react-ui/styles.css"); - -const WalletContextProvider: FC<{ children: ReactNode }> = ({ children }) => { - const endpoint = web3.clusterApiUrl("devnet"); - const wallets = [new walletAdapterWallets.PhantomWalletAdapter()]; - - return ( - - - {children} - - - ); -}; - -export default WalletContextProvider; -``` - -### 4. Add wallet multi-button - -Next let’s set up the Connect button. The current button is just a placeholder -because rather than using a standard button or creating a custom component, -we’ll be using Wallet-Adapter’s “multi-button.” This button interfaces with the -providers we set up in `WalletContextProvider` and let’s users choose a wallet, -connect to a wallet, and disconnect from a wallet. If you ever need more custom -functionality, you can create a custom component to handle this. - -Before we add the “multi-button,” we need to wrap the app in the -`WalletContextProvider`. Do this by importing it in `index.tsx` and adding it -after the closing `` tag: - -```tsx -import { NextPage } from "next"; -import styles from "../styles/Home.module.css"; -import WalletContextProvider from "../components/WalletContextProvider"; -import { AppBar } from "../components/AppBar"; -import Head from "next/head"; -import { PingButton } from "../components/PingButton"; - -const Home: NextPage = props => { - return ( -
    <div className={styles.App}>
      <Head>
        <title>Wallet-Adapter Example</title>
      </Head>
      <WalletContextProvider>
        <AppBar />
        <div className={styles.AppBody}>
          <PingButton />
        </div>
      </WalletContextProvider>
    </div>
- ); -}; - -export default Home; -``` - -If you run the app, everything should still look the same since the current -button on the top right is still just a placeholder. To remedy this, open -`AppBar.tsx` and replace `` with ``. -You’ll need to import `WalletMultiButton` from -`@solana/wallet-adapter-react-ui`. - -```tsx -import { FC } from "react"; -import styles from "../styles/Home.module.css"; -import Image from "next/image"; -import { WalletMultiButton } from "@solana/wallet-adapter-react-ui"; - -export const AppBar: FC = () => { - return ( -
    <div className={styles.AppHeader}>
      <Image src="/solanaLogo.png" height={30} width={200} />
      <span>Wallet-Adapter Example</span>
      <WalletMultiButton />
    </div>
- ); -}; -``` - -At this point, you should be able to run the app and interact with the -multi-button at the top-right of the screen. It should now read, "Select -Wallet." If you have the Phantom extension and are signed in, you should be able -to connect your Phantom wallet to the site using this new button. - -### 5. Create button to ping program - -Now that our app can connect to the Phantom wallet, let’s make the “Ping!” -button actually do something. - -Start by opening the `PingButton.tsx` file. We’re going to replace the -`console.log` inside of `onClick` with code that will create a transaction and -submit it to the Phantom extension for the end user’s approval. - -First, we need a connection, the wallet’s public key, and Wallet-Adapter’s -`sendTransaction` function. To get this, we need to import `useConnection` and -`useWallet` from `@solana/wallet-adapter-react`. While we’re here, let’s also -import `@solana/web3.js` since we’ll need it to create our transaction. - -```tsx -import { useConnection, useWallet } from "@solana/wallet-adapter-react"; -import * as web3 from "@solana/web3.js"; -import { FC, useState } from "react"; -import styles from "../styles/PingButton.module.css"; - -export const PingButton: FC = () => { - const onClick = () => { - console.log("Ping!"); - }; - - return ( -
-    <div className={styles.buttonContainer} onClick={onClick}>
-      <button className={styles.button}>Ping!</button>
-    </div>
- ); -}; -``` - -Now use the `useConnection` hook to create a `connection` constant and the -`useWallet` hook to create `publicKey` and `sendTransaction` constants. - -```tsx -import { useConnection, useWallet } from "@solana/wallet-adapter-react"; -import * as web3 from "@solana/web3.js"; -import { FC, useState } from "react"; -import styles from "../styles/PingButton.module.css"; - -export const PingButton: FC = () => { - const { connection } = useConnection(); - const { publicKey, sendTransaction } = useWallet(); - - const onClick = () => { - console.log("Ping!"); - }; - - return ( -
-    <div className={styles.buttonContainer} onClick={onClick}>
-      <button className={styles.button}>Ping!</button>
-    </div>
- ); -}; -``` - -With that, we can fill in the body of `onClick`. - -First, check that both `connection` and `publicKey` exist (if either does not -then the user’s wallet isn’t connected yet). - -Next, construct two instances of `PublicKey`, one for the program ID -`ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa` and one for the data account -`Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod`. - -Next, construct a `Transaction`, then a new `TransactionInstruction` that -includes the data account as a writable key. - -Next, add the instruction to the transaction. - -Finally, call `sendTransaction`. - -```tsx -const onClick = () => { - if (!connection || !publicKey) { - return; - } - - const programId = new web3.PublicKey(PROGRAM_ID); - const programDataAccount = new web3.PublicKey(DATA_ACCOUNT_PUBKEY); - const transaction = new web3.Transaction(); - - const instruction = new web3.TransactionInstruction({ - keys: [ - { - pubkey: programDataAccount, - isSigner: false, - isWritable: true, - }, - ], - programId, - }); - - transaction.add(instruction); - sendTransaction(transaction, connection).then(sig => { - console.log(sig); - }); -}; -``` - -And that’s it! If you refresh the page, connect your wallet, and click the ping -button, Phantom should present you with a popup for confirming the transaction. - -### 6. Add some polish around the edges - -There’s a lot you could do to make the user experience here even better. For -example, you could change the UI to only show you the Ping button when a wallet -is connected and display some other prompt otherwise. You could link to the -transaction on Solana Explorer after a user confirms a transaction so they can -easily go look at the transaction details. The more you experiment with it, the -more comfortable you’ll get, so get creative! - -If you need to spend some time looking at the full source code from this demo to -understand all of this in context, check that out -[here](https://github.com/Unboxed-Software/solana-ping-frontend). - -# Challenge - -Now it’s your turn to build something independently. Create an application that -lets a user connect their Phantom wallet and send SOL to another account. - -![Screenshot of Send Sol App](../assets/solana-send-sol-app.png) - -1. You can build this from scratch or you can download the starter code - [here](https://github.com/Unboxed-Software/solana-send-sol-frontend/tree/starter). -2. Wrap the starter application in the appropriate context providers. -3. In the form component, set up the transaction and send it to the user’s - wallet for approval. -4. Get creative with the user experience. Add a link to let the user view the - transaction on Solana Explorer or something else that seems cool to you! - -If you get really stumped, feel free to check out the solution code -[here](https://github.com/Unboxed-Software/solana-send-sol-frontend/tree/main). 
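For step 3 of this challenge, the core of the transfer is a single `SystemProgram.transfer` instruction sent through Wallet-Adapter. Below is a minimal sketch of that logic, assuming the `connection` comes from `useConnection` and the wallet state from `useWallet` inside your form component; the function name and parameters are illustrative rather than part of the starter code.

```tsx
import { WalletContextState } from "@solana/wallet-adapter-react";
import * as web3 from "@solana/web3.js";

// Builds a SOL transfer and asks the connected wallet (e.g. Phantom) to
// approve and send it. `connection` comes from useConnection() and `wallet`
// from useWallet() inside the form component; the names here are illustrative.
export async function sendSol(
  connection: web3.Connection,
  wallet: WalletContextState,
  recipient: string,
  amountInSol: number,
): Promise<string | undefined> {
  if (!wallet.publicKey) {
    return;
  }

  const transaction = new web3.Transaction().add(
    web3.SystemProgram.transfer({
      fromPubkey: wallet.publicKey,
      toPubkey: new web3.PublicKey(recipient),
      lamports: amountInSol * web3.LAMPORTS_PER_SOL,
    }),
  );

  // The returned signature can be linked to Solana Explorer for step 4.
  return wallet.sendTransaction(transaction, connection);
}
```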
diff --git a/content/courses/solana-course/content/intro-to-anchor-frontend.md b/content/courses/solana-course/content/intro-to-anchor-frontend.md deleted file mode 100644 index 931bf550b..000000000 --- a/content/courses/solana-course/content/intro-to-anchor-frontend.md +++ /dev/null @@ -1,660 +0,0 @@ ---- -title: Intro to client-side Anchor development -objectives: - - Use an IDL to interact with a Solana program from the client - - Explain an Anchor `Provider` object - - Explain an Anchor `Program` object - - Use the Anchor `MethodsBuilder` to build instructions and transactions - - Use Anchor to fetch accounts - - Set up a frontend to invoke instructions using Anchor and an IDL ---- - -# TL;DR - -- An **IDL** is a file representing the structure of a Solana program. Programs - written and built using Anchor automatically generate a corresponding IDL. IDL - stands for Interface Description Language. -- `@project-serum/anchor` is a Typescript client that includes everything you’ll - need to interact with Anchor programs -- An **Anchor `Provider`** object combines a `connection` to a cluster and a - specified `wallet` to enable transaction signing -- An **Anchor `Program`** object provides a custom API to interact with a - specific program. You create a `Program` instance using a program's IDL and - `Provider`. -- The **Anchor `MethodsBuilder`** provides a simple interface through `Program` - for building instructions and transactions - -# Overview - -Anchor simplifies the process of interacting with Solana programs from the -client by providing an Interface Description Language (IDL) file that reflects -the structure of a program. Using the IDL in conjunction with Anchor's -Typescript library (`@project-serum/anchor`) provides a simplified format for -building instructions and transactions. - -```tsx -// sends transaction -await program.methods - .instructionName(instructionDataInputs) - .accounts({}) - .signers([]) - .rpc(); -``` - -This works from any Typescript client, whether it's a frontend or integration -tests. In this lesson we'll go over how to use `@project-serum/anchor` to -simplify your client-side program interaction. - -## Anchor client-side structure - -Let's start by going over the basic structure of Anchor's Typescript library. -The primary object you'll be using is the `Program` object. A `Program` instance -represents a specific Solana program and provides a custom API for reading and -writing to the program. - -To create an instance of `Program`, you'll need the following: - -- IDL - file representing the structure of a program -- `Connection` - the cluster connection -- `Wallet` - default keypair used to pay for and sign transactions -- `Provider` - encapsulates the `Connection` to a Solana cluster and a `Wallet` -- `ProgramId` - the program’s on-chain address - -![Anchor structure](../assets/anchor-client-structure.png) - -The above image shows how each of these pieces are combined to create a -`Program` instance. We'll go over each of them individually to get a better idea -of how everything ties together. - -### Interface Description Language (IDL) - -When you build an Anchor program, Anchor generates both a JSON and Typescript -file representing your program's IDL. The IDL represents the structure of the -program and can be used by a client to infer how to interact with a specific -program. - -While it isn't automatic, you can also generate an IDL from a native Solana -program using tools like [shank](https://github.com/metaplex-foundation/shank) -by Metaplex. 
- -To get an idea of the information an IDL provides, here is the IDL for the -counter program you built previously: - -```json -{ - "version": "0.1.0", - "name": "counter", - "instructions": [ - { - "name": "initialize", - "accounts": [ - { "name": "counter", "isMut": true, "isSigner": true }, - { "name": "user", "isMut": true, "isSigner": true }, - { "name": "systemProgram", "isMut": false, "isSigner": false } - ], - "args": [] - }, - { - "name": "increment", - "accounts": [ - { "name": "counter", "isMut": true, "isSigner": false }, - { "name": "user", "isMut": false, "isSigner": true } - ], - "args": [] - } - ], - "accounts": [ - { - "name": "Counter", - "type": { - "kind": "struct", - "fields": [{ "name": "count", "type": "u64" }] - } - } - ] -} -``` - -Inspecting the IDL, you can see that this program contains two instructions -(`initialize` and `increment`). - -Notice that in addition to specifying the instructions, it species the accounts -and inputs for each instruction. The `initialize` instruction requires three -accounts: - -1. `counter` - the new account being initialized in the instruction -2. `user` - the payer for the transaction and initialization -3. `systemProgram` - the system program is invoked to initialize a new account - -And the `increment` instruction requires two accounts: - -1. `counter` - an existing account to increment the count field -2. `user` - the payer from the transaction - -Looking at the IDL, you can see that in both instructions the `user` is required -as a signer because the `isSigner` flag is marked as `true`. Additionally, -neither instructions require any additional instruction data since the `args` -section is blank for both. - -Looking further down at the `accounts` section, you can see that the program -contains one account type named `Counter` with a single `count` field of type -`u64`. - -Although the IDL does not provide the implementation details for each -instruction, we can get a basic idea of how the on-chain program expects -instructions to be constructed and see the structure of the program accounts. - -Regardless of how you get it, you _need_ an IDL file to interact with a program -using the `@project-serum/anchor` package. To use the IDL, you'll need to -include the IDL file in your project and then import the file. - -```tsx -import idl from "./idl.json"; -``` - -### Provider - -Before you can create a `Program` object using the IDL, you first need to create -an Anchor `Provider` object. - -The `Provider` object combines two things: - -- `Connection` - the connection to a Solana cluster (i.e. localhost, devnet, - mainnet) -- `Wallet` - a specified address used to pay for and sign transactions - -The `Provider` is then able to send transactions to the Solana blockchain on -behalf of a `Wallet` by including the wallet’s signature to outgoing -transactions. When using a frontend with a Solana wallet provider, all outgoing -transactions must still be approved by the user via their wallet browser -extension. - -Setting up the `Wallet` and `Connection` would look something like this: - -```tsx -import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react"; - -const { connection } = useConnection(); -const wallet = useAnchorWallet(); -``` - -To set up the connection, you can use the `useConnection` hook from -`@solana/wallet-adapter-react` to get the `Connection` to a Solana cluster. 
- -Note that the `Wallet` object provided by the `useWallet` hook from -`@solana/wallet-adapter-react` is not compatible with the `Wallet` object that -the Anchor `Provider` expects. However, `@solana/wallet-adapter-react` also -provides a `useAnchorWallet` hook. - -For comparison, here is the `AnchorWallet` from `useAnchorWallet`: - -```tsx -export interface AnchorWallet { - publicKey: PublicKey; - signTransaction(transaction: Transaction): Promise; - signAllTransactions(transactions: Transaction[]): Promise; -} -``` - -And the `WalletContextState` from `useWallet`: - -```tsx -export interface WalletContextState { - autoConnect: boolean; - wallets: Wallet[]; - wallet: Wallet | null; - publicKey: PublicKey | null; - connecting: boolean; - connected: boolean; - disconnecting: boolean; - select(walletName: WalletName): void; - connect(): Promise; - disconnect(): Promise; - sendTransaction( - transaction: Transaction, - connection: Connection, - options?: SendTransactionOptions, - ): Promise; - signTransaction: SignerWalletAdapterProps["signTransaction"] | undefined; - signAllTransactions: - | SignerWalletAdapterProps["signAllTransactions"] - | undefined; - signMessage: MessageSignerWalletAdapterProps["signMessage"] | undefined; -} -``` - -The `WalletContextState` provides much more functionality compared to the -`AnchorWallet`, but the `AnchorWallet` is required to set up the `Provider` -object. - -To create the `Provider` object you use `AnchorProvider` from -`@project-serum/anchor`. - -The `AnchorProvider` constructor takes three parameters: - -- `connection` - the `Connection` to the Solana cluster -- `wallet` - the `Wallet` object -- `opts` - optional parameter that specifies the confirmation options, using a - default setting if one is not provided - -Once you’ve create the `Provider` object, you then set it as the default -provider using `setProvider`. - -```tsx -import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react"; -import { AnchorProvider, setProvider } from "@project-serum/anchor"; - -const { connection } = useConnection(); -const wallet = useAnchorWallet(); -const provider = new AnchorProvider(connection, wallet, {}); -setProvider(provider); -``` - -### Program - -Once you have the IDL and a provider, you can create an instance of `Program`. -The constructor requires three parameters: - -- `idl` - the IDL as type `Idl` -- `programId` - the on-chain address of the program as a `string` or `PublicKey` -- `Provider` - the provider discussed in the previous section - -The `Program` object creates a custom API you can use to interact with a Solana -program. This API is the one stop shop for all things related to communicating -with on-chain programs. Among other things, you can send transactions, fetch -deserialized accounts, decode instruction data, subscribe to account changes, -and listen to events. You can learn more about the `Program` class -[here](https://coral-xyz.github.io/anchor/ts/classes/Program.html#constructor). - -To create the `Program` object, first import `Program` and `Idl` from -`@project-serum/anchor`. `Idl` is a type you can used when working with -Typescript. - -Next, specify the `programId` of the program. We have to explicitly state the -`programId` since there can be multiple programs with the same IDL structure -(i.e. if the same program is deployed multiple times using different addresses). -When creating the `Program` object, the default `Provider` is used if one is not -explicitly specified. 
- -All together, the final setup looks something like this: - -```tsx -import idl from "./idl.json"; -import { useAnchorWallet, useConnection } from "@solana/wallet-adapter-react"; -import { - Program, - Idl, - AnchorProvider, - setProvider, -} from "@project-serum/anchor"; - -const { connection } = useConnection(); -const wallet = useAnchorWallet(); - -const provider = new AnchorProvider(connection, wallet, {}); -setProvider(provider); - -const programId = new PublicKey("JPLockxtkngHkaQT5AuRYow3HyUv5qWzmhwsCPd653n"); -const program = new Program(idl as Idl, programId); -``` - -## Anchor `MethodsBuilder` - -Once the `Program` object is set up, you can use the Anchor Methods Builder to -build instructions and transactions related to the program. The `MethodsBuilder` -uses the IDL to provide a simplified format for building transactions that -invoke program instructions. - -Note that the camel case naming convention is used when interacting with a -program from the client, compared to the snake case naming convention used when -the writing the program in rust. - -The basic `MethodsBuilder` format looks like this: - -```tsx -// sends transaction -await program.methods - .instructionName(instructionDataInputs) - .accounts({}) - .signers([]) - .rpc(); -``` - -Going step by step, you: - -1. Call `methods` on `program` - this is the builder API for creating - instruction calls related to the program's IDL -2. Call the instruction name as `.instructionName(instructionDataInputs)` - - simply call the instruction using dot syntax and the instruction's name, - passing in any instruction arguments as comma-separated values -3. Call `accounts` - using dot syntax, call `.accounts`, passing in an object - with each account the instruction expects based on the IDL -4. Optionally call `signers` - using dot syntax, call `.signers`, passing in an - array of additional signers required by the instruction -5. Call `rpc` - this method creates and sends a signed transaction with the - specified instruction and returns a `TransactionSignature`. When using - `.rpc`, the `Wallet` from the `Provider` is automatically included as a - signer and does not have to be listed explicitly. - -Note that if no additional signers are required by the instruction other than -the `Wallet` specified with the `Provider`, the `.signer([])` line can be -excluded. - -You can also build the transaction directly by changing `.rpc()` to -`.transaction()`. This builds a `Transaction` object using the instruction -specified. - -```tsx -// creates transaction -const transaction = await program.methods - .instructionName(instructionDataInputs) - .accounts({}) - .transaction(); - -await sendTransaction(transaction, connection); -``` - -Similarly, you can use the same format to build an instruction using -`.instruction()` and then manually add the instructions to a new transaction. -This builds a `TransactionInstruction` object using the instruction specified. 
- -```tsx -// creates first instruction -const instructionOne = await program.methods - .instructionOneName(instructionOneDataInputs) - .accounts({}) - .instruction(); - -// creates second instruction -const instructionTwo = await program.methods - .instructionTwoName(instructionTwoDataInputs) - .accounts({}) - .instruction(); - -// add both instruction to one transaction -const transaction = new Transaction().add(instructionOne, instructionTwo); - -// send transaction -await sendTransaction(transaction, connection); -``` - -In summary, the Anchor `MethodsBuilder` provides a simplified and more flexible -way to interact with on-chain programs. You can build an instruction, a -transaction, or build and send a transaction using basically the same format -without having to manually serialize or deserialize the accounts or instruction -data. - -## Fetch program accounts - -The `Program` object also allows you to easily fetch and filter program -accounts. Simply call `account` on `program` and then specify the name of the -account type as reflected on the IDL. Anchor then deserializes and returns all -accounts as specified. - -The example below shows how you can fetch all existing `counter` accounts for -the Counter program. - -```tsx -const accounts = await program.account.counter.all(); -``` - -You can also apply a filter by using `memcmp` and then specifying an `offset` -and the `bytes` to filter for. - -The example below fetches all `counter` accounts with a `count` of 0. Note that -the `offset` of 8 is for the 8 byte discriminator Anchor uses to identify -account types. The 9th byte is where the `count` field begins. You can refer to -the IDL to see that the next byte stores the `count` field of type `u64`. Anchor -then filters for and returns all accounts with matching bytes in the same -position. - -```tsx -const accounts = await program.account.counter.all([ - { - memcmp: { - offset: 8, - bytes: bs58.encode(new BN(0, "le").toArray()), - }, - }, -]); -``` - -Alternatively, you can also get the deserialized account data for a specific -account using `fetch` if you know the address of the account you're looking for. - -```tsx -const account = await program.account.counter.fetch(ACCOUNT_ADDRESS); -``` - -Similarly, you can fetch for multiple accounts using `fetchMultiple`. - -```tsx -const accounts = await program.account.counter.fetchMultiple([ - ACCOUNT_ADDRESS_ONE, - ACCOUNT_ADDRESS_TWO, -]); -``` - -# Demo - -Let’s practice this together by building a frontend for the Counter program from -last lesson. As a reminder, the Counter program has two instructions: - -- `initialize` - initializes a new `Counter` account and sets the `count` to `0` -- `increment` - increments the `count` on an existing `Counter` account - -### 1. Download the starter code - -Download the starter code for this -project [here](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/starter). -Once you have the starter code, take a look around. Install the dependencies -with `npm install` and then run the app with `npm run dev`. - -This project is a simple Next.js application. It includes the -`WalletContextProvider` we created in the -[Wallets lesson](https://github.com/Unboxed-Software/solana-course/blob/main/content/interact-with-wallets.md), -the `idl.json` file for the Counter program, and the `Initialize` and -`Increment` components we’ll be building throughout this demo. The `programId` -of the program we’ll be invoking is also included in the starter code. - -### 2. 
`Initialize` - -To begin, let’s complete the setup to create the `Program` object in -`Initialize.tsx` component. - -Remember, we’ll need an instance of `Program` to use the Anchor `MethodsBuilder` -to invoke the instructions on our program. For that, we'll need an Anchor wallet -and a connection, which we can get from the `useAnchorWallet` and -`useConnection` hooks. Let's also create a `useState` to capture the program -instance. - -```tsx -export const Initialize: FC = ({ setCounter }) => { - const [program, setProgram] = useState("") - - const { connection } = useConnection() - const wallet = useAnchorWallet() - - ... -} -``` - -With that, we can work on creating the actual `Program` instance. Let's do this -in a `useEffect`. - -First we need to either get the default provider if it already exists, or create -it if it doesn't. We can do that by calling `getProvider` inside a try/catch -block. If an error is thrown, that means there is no default provider and we -need to create one. - -Once we have a provider, we can construct a `Program` instance. - -```tsx -useEffect(() => { - let provider: anchor.Provider; - - try { - provider = anchor.getProvider(); - } catch { - provider = new anchor.AnchorProvider(connection, wallet, {}); - anchor.setProvider(provider); - } - - const program = new anchor.Program(idl as anchor.Idl, PROGRAM_ID); - setProgram(program); -}, []); -``` - -Now that we've finished the Anchor setup, we can actually invoke the program's -`initialize` instruction. We'll do this inside the `onClick` function. - -First, we’ll need to generate a new `Keypair` for the new `Counter` account -since we are initializing an account for the first time. - -Then we can use the Anchor `MethodsBuilder` to create and send a new -transaction. Remember, Anchor can infer some of the accounts required, like the -`user` and `systemAccount` accounts. However, it can't infer the `counter` -account because we generate that dynamically, so you'll need to add it with -`.accounts`. You'll also need to add that keypair as a sign with `.signers`. -Lastly, you can use `.rpc()` to submit the transaction to the user's wallet. - -Once the transaction goes through, call `setUrl` with the explorer URL and then -call `setCounter`, passing in the counter account. - -```tsx -const onClick = async () => { - const sig = await program.methods - .initialize() - .accounts({ - counter: newAccount.publicKey, - user: wallet.publicKey, - systemAccount: anchor.web3.SystemProgram.programId, - }) - .signers([newAccount]) - .rpc(); - - setTransactionUrl(`https://explorer.solana.com/tx/${sig}?cluster=devnet`); - setCounter(newAccount.publicKey); -}; -``` - -### 3. `Increment` - -Next, let’s move on the the `Increment.tsx` component. Just as before, complete -the setup to create the `Program` object. In addition to calling `setProgram`, -the `useEffect` should call `refreshCount`. - -Add the following code for the initial set up: - -```tsx -export const Increment: FC = ({ counter, setTransactionUrl }) => { - const [count, setCount] = useState(0) - const [program, setProgram] = useState() - const { connection } = useConnection() - const wallet = useAnchorWallet() - - useEffect(() => { - let provider: anchor.Provider - - try { - provider = anchor.getProvider() - } catch { - provider = new anchor.AnchorProvider(connection, wallet, {}) - anchor.setProvider(provider) - } - - const program = new anchor.Program(idl as anchor.Idl, PROGRAM_ID) - setProgram(program) - refreshCount(program) - }, []) - ... 
-} -``` - -Next, let’s use the Anchor `MethodsBuilder` to build a new instruction to invoke -the `increment` instruction. Again, Anchor can infer the `user` account from the -wallet so we only need to include the `counter` account. - -```tsx -const onClick = async () => { - const sig = await program.methods - .increment() - .accounts({ - counter: counter, - user: wallet.publicKey, - }) - .rpc(); - - setTransactionUrl(`https://explorer.solana.com/tx/${sig}?cluster=devnet`); -}; -``` - -### 5. Display the correct count - -Now that we can initialize the counter program and increment the count, we need -to get our UI to show the count stored in the counter account. - -We'll show how to observe account changes in a future lesson, but for now we -just have a button that calls `refreshCount` so you can click it to show the new -count after each `increment` invocation. - -Inside `refreshCount`, let's use `program` to fetch the counter account, then -use `setCount` to set the count to the number stored on the program: - -```tsx -const refreshCount = async program => { - const counterAccount = await program.account.counter.fetch(counter); - setCount(counterAccount.count.toNumber()); -}; -``` - -Super simple with Anchor! - -### 5. Test the frontend - -At this point, everything should work! You can test the frontend by running -`npm run dev`. - -1. Connect your wallet and you should see the `Initialize Counter` button -2. Click the `Initialize Counter` button, and then approve the transaction -3. You should then see a link at the bottom of the screen to Solana Explorer for - the `initialize` transaction. The `Increment Counter` button, `Refresh Count` - button, and the count should also all appear. -4. Click the `Increment Counter` button, and then approve the transaction -5. Wait a few seconds and click `Refresh Count`. The count should increment on - the screen. - -![Gif of Anchor Frontend Demo](../assets/anchor-frontend-demo.gif) - -Feel free to click the links to inspect the program logs from each transaction! - -![Screenshot of Initialize Program Log](../assets/anchor-frontend-initialize.png) - -![Screenshot of Increment Program Log](../assets/anchor-frontend-increment.png) - -Congratulations, you now know how to set up a frontend to invoke a Solana -program using an Anchor IDL. - -If you need more time with this project to feel comfortable with these concepts, -feel free to have a look at -the [solution code on the `solution-increment` branch](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/solution-increment) before -continuing. - -# Challenge - -Now it’s your turn to build something independently. Building on top of what -we’ve done in the demo, try to create a new component in the frontend that -implements a button to decrements the counter. - -Before building the component in the frontend, you’ll first need to: - -1. Build and deploy a new program that implements a `decrement` instruction -2. Update the IDL file in the frontend with the one from your new program -3. Update the `programId` with the one from your new program - -If you need some help, feel free to reference this program -[here](https://github.com/Unboxed-Software/anchor-counter-program/tree/solution-decrement). - -Try to do this independently if you can! But if you get stuck, feel free to -reference -the [solution code](https://github.com/Unboxed-Software/anchor-ping-frontend/tree/solution-decrement). 
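Once the new program and IDL are in place, the frontend call mirrors `Increment.tsx`. Here is a minimal sketch of the click handler, assuming your program exposes a `decrement` instruction that takes the same `counter` and `user` accounts, and that `program`, `counter`, `wallet`, and `setTransactionUrl` are set up exactly as in the demo component:

```tsx
// Assumes the same component setup as Increment.tsx: `program` built from the
// updated IDL, `counter` passed in as a prop, and `wallet` from useAnchorWallet().
const onClick = async () => {
  const sig = await program.methods
    .decrement()
    .accounts({
      counter: counter,
      user: wallet.publicKey,
    })
    .rpc();

  setTransactionUrl(`https://explorer.solana.com/tx/${sig}?cluster=devnet`);
};
```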
diff --git a/content/courses/solana-course/content/intro-to-anchor.md b/content/courses/solana-course/content/intro-to-anchor.md deleted file mode 100644 index 28d2ff6e1..000000000 --- a/content/courses/solana-course/content/intro-to-anchor.md +++ /dev/null @@ -1,712 +0,0 @@ ---- -title: Intro to Anchor development -objectives: - - Use the Anchor framework to build a basic program - - Describe the basic structure of an Anchor program - - Explain how to implement basic account validation and security checks with - Anchor ---- - -# TL;DR - -- **Anchor** is a framework for building Solana programs -- **Anchor macros** speed up the process of building Solana programs by - abstracting away a significant amount of boilerplate code -- Anchor allows you to build **secure programs** more easily by performing - certain security checks, requiring account validation, and providing a simple - way to implement additional checks. - -# Overview - -## What is Anchor? - -Anchor is a development framework that makes writing Solana programs easier, -faster, and more secure. It's the "go to" framework for Solana development for -very good reason. It makes it easier to organize and reason about your code, -implements common security checks automatically, and abstracts away a -significant amount of boilerplate associated with writing a Solana program. - -## Anchor program structure - -Anchor uses macros and traits to generate boilerplate Rust code for you. These -provide a clear structure to your program so you can more easily reason about -your code. The main high level macros and attributes are: - -- `declare_id` - a macro for declaring the program’s on-chain address -- `#[program]` - an attribute macro used to denote the module containing the - program’s instruction logic -- `Accounts` - a trait applied to structs representing the list of accounts - required for an instruction -- `#[account]` - an attribute macro used to define custom account types for the - program - -Let's talk about each of them before putting all the pieces together. - -## Declare your program ID - -The `declare_id` macro is used to specify the on-chain address of the program -(i.e. the `programId`). When you build an Anchor program for the first time, the -framework will generate a new keypair. This becomes the default keypair used to -deploy the program unless specified otherwise. The corresponding public key -should be used as the `programId` specified in the `declare_id!` macro. - -```rust -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); -``` - -## Define instruction logic - -The `#[program]` attribute macro defines the module containing all of your -program's instructions. This is where you implement the business logic for each -instruction in your program. - -Each public function in the module with the `#[program]` attribute will be -treated as a separate instruction. - -Each instruction function requires a parameter of type `Context` and can -optionally include additional function parameters representing instruction data. -Anchor will automatically handle instruction data deserialization so that you -can work with instruction data as Rust types. - -```rust -#[program] -mod program_module_name { - use super::*; - - pub fn instruction_one(ctx: Context, instruction_data: u64) -> Result<()> { - ctx.accounts.account_name.data = instruction_data; - Ok(()) - } -} -``` - -### Instruction `Context` - -The `Context` type exposes instruction metadata and accounts to your instruction -logic. 
- -```rust -pub struct Context<'a, 'b, 'c, 'info, T> { - /// Currently executing program id. - pub program_id: &'a Pubkey, - /// Deserialized accounts. - pub accounts: &'b mut T, - /// Remaining accounts given but not deserialized or validated. - /// Be very careful when using this directly. - pub remaining_accounts: &'c [AccountInfo<'info>], - /// Bump seeds found during constraint validation. This is provided as a - /// convenience so that handlers don't have to recalculate bump seeds or - /// pass them in as arguments. - pub bumps: BTreeMap, -} -``` - -`Context` is a generic type where `T` defines the list of accounts an -instruction requires. When you use `Context`, you specify the concrete type of -`T` as a struct that adopts the `Accounts` trait (e.g. -`Context`). Through this context argument the -instruction can then access: - -- The accounts passed into the instruction (`ctx.accounts`) -- The program ID (`ctx.program_id`) of the executing program -- The remaining accounts (`ctx.remaining_accounts`). The `remaining_accounts` is - a vector that contains all accounts that were passed into the instruction but - are not declared in the `Accounts` struct. -- The bumps for any PDA accounts in the `Accounts` struct (`ctx.bumps`) - -## Define instruction accounts - -The `Accounts` trait defines a data structure of validated accounts. Structs -adopting this trait define the list of accounts required for a given -instruction. These accounts are then exposed through an instruction's `Context` -so that manual account iteration and deserialization is no longer necessary. - -You typically apply the `Accounts` trait through the `derive` macro (e.g. -`#[derive(Accounts)]`). This implements an `Accounts` deserializer on the given -struct and removes the need to deserialize each account manually. - -Implementations of the `Accounts` trait are responsible for performing all -requisite constraint checks to ensure the accounts meet conditions required for -the program to run securely. Constraints are provided for each field using the -`#account(..)` attribute (more on that shortly). - -For example, `instruction_one` requires a `Context` argument of type -`InstructionAccounts`. The `#[derive(Accounts)]` macro is used to implement the -`InstructionAccounts` struct which includes three accounts: `account_name`, -`user`, and `system_program`. - -```rust -#[program] -mod program_module_name { - use super::*; - pub fn instruction_one(ctx: Context, instruction_data: u64) -> Result<()> { - ... - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InstructionAccounts { - #[account(init, payer = user, space = 8 + 8)] - pub account_name: Account<'info, AccountStruct>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, - -} -``` - -When `instruction_one` is invoked, the program: - -- Checks that the accounts passed into the instruction match the account types - specified in the `InstructionAccounts` struct -- Checks the accounts against any additional constraints specified - -If any accounts passed into `instruction_one` fail the account validation or -security checks specified in the `InstructionAccounts` struct, then the -instruction fails before even reaching the program logic. - -## Account validation - -You may have noticed in the previous example that one of the accounts in -`InstructionAccounts` was of type `Account`, one was of type `Signer`, and one -was of type `Program`. - -Anchor provides a number of account types that can be used to represent -accounts. 
Each type implements different account validation. We’ll go over a few -of the common types you may encounter, but be sure to look through the -[full list of account types](https://docs.rs/anchor-lang/latest/anchor_lang/accounts/index.html). - -### `Account` - -`Account` is a wrapper around `AccountInfo` that verifies program ownership and -deserializes the underlying data into a Rust type. - -```rust -// Deserializes this info -pub struct AccountInfo<'a> { - pub key: &'a Pubkey, - pub is_signer: bool, - pub is_writable: bool, - pub lamports: Rc>, - pub data: Rc>, // <---- deserializes account data - pub owner: &'a Pubkey, // <---- checks owner program - pub executable: bool, - pub rent_epoch: u64, -} -``` - -Recall the previous example where `InstructionAccounts` had a field -`account_name`: - -```rust -pub account_name: Account<'info, AccountStruct> -``` - -The `Account` wrapper here does the following: - -- Deserializes the account `data` in the format of type `AccountStruct` -- Checks that the program owner of the account matches the program owner - specified for the `AccountStruct` type. - -When the account type specified in the `Account` wrapper is defined within the -same crate using the `#[account]` attribute macro, the program ownership check -is against the `programId` defined in the `declare_id!` macro. - -The following are the checks performed: - -```rust -// Checks -Account.info.owner == T::owner() -!(Account.info.owner == SystemProgram && Account.info.lamports() == 0) -``` - -### `Signer` - -The `Signer` type validates that the given account signed the transaction. No -other ownership or type checks are done. You should only use the `Signer` when -the underlying account data is not required in the instruction. - -For the `user` account in the previous example, the `Signer` type specifies that -the `user` account must be a signer of the instruction. - -The following check is performed for you: - -```rust -// Checks -Signer.info.is_signer == true -``` - -### `Program` - -The `Program` type validates that the account is a certain program. - -For the `system_program` account in the previous example, the `Program` type is -used to specify the program should be the system program. Anchor provides a -`System` type which includes the `programId` of the system program to check -against. - -The following checks are performed for you: - -```rust -//Checks -account_info.key == expected_program -account_info.executable == true -``` - -## Add constraints with `#[account(..)]` - -The `#[account(..)]` attribute macro is used to apply constraints to accounts. -We'll go over a few constraint examples in this and future lessons, but at some -point be sure to look at the full -[list of possible constraints](https://docs.rs/anchor-lang/latest/anchor_lang/derive.Accounts.html). - -Recall again the `account_name` field from the `InstructionAccounts` example. - -```rust -#[account(init, payer = user, space = 8 + 8)] -pub account_name: Account<'info, AccountStruct>, -#[account(mut)] -pub user: Signer<'info>, -``` - -Notice that the `#[account(..)]` attribute contains three comma-separated -values: - -- `init` - creates the account via a CPI to the system program and initializes - it (sets its account discriminator) -- `payer` - specifies the payer for the account initialization to be the `user` - account defined in the struct -- `space`- specifies that the space allocated for the account should be `8 + 8` - bytes. 
The first 8 bytes is for a discriminator that Anchor automatically adds - to identify the account type. The next 8 bytes allocates space for the data - stored on the account as defined in the `AccountStruct` type. - -For `user` we use the `#[account(..)]` attribute to specify that the given -account is mutable. The `user` account must be marked as mutable because -lamports will be deducted from the account to pay for the initialization of -`account_name`. - -```rust -#[account(mut)] -pub user: Signer<'info>, -``` - -Note that the `init` constraint placed on `account_name` automatically includes -a `mut` constraint so that both `account_name` and `user` are mutable accounts. - -## `#[account]` - -The `#[account]` attribute is applied to structs representing the data structure -of a Solana account. It implements the following traits: - -- `AccountSerialize` -- `AccountDeserialize` -- `AnchorSerialize` -- `AnchorDeserialize` -- `Clone` -- `Discriminator` -- `Owner` - -You can read more about the details of each trait -[here](https://docs.rs/anchor-lang/latest/anchor_lang/attr.account.html). -However, mostly what you need to know is that the `#[account]` attribute enables -serialization and deserialization, and implements the discriminator and owner -traits for an account. - -The discriminator is an 8 byte unique identifier for an account type derived -from the first 8 bytes of the SHA256 hash of the account type's name. When -implementing account serialization traits, the first 8 bytes are reserved for -the account discriminator. - -As a result, any calls to `AccountDeserialize`’s `try_deserialize` will check -this discriminator. If it doesn’t match, an invalid account was given, and the -account deserialization will exit with an error. - -The `#[account]` attribute also implements the `Owner` trait for a struct using -the `programId` declared by `declareId` of the crate `#[account]` is used in. In -other words, all accounts initialized using an account type defined using the -`#[account]` attribute within the program are also owned by the program. - -As an example, let's look at `AccountStruct` used by the `account_name` of -`InstructionAccounts` - -```rust -#[derive(Accounts)] -pub struct InstructionAccounts { - #[account(init, payer = user, space = 8 + 8)] - pub account_name: Account<'info, AccountStruct>, - ... -} - -#[account] -pub struct AccountStruct { - data: u64 -} -``` - -The `#[account]` attribute ensures that it can be used as an account in -`InstructionAccounts`. - -When the `account_name` account is initialized: - -- The first 8 bytes is set as the `AccountStruct` discriminator -- The data field of the account will match `AccountStruct` -- The account owner is set as the `programId` from `declare_id` - -## Bring it all together - -When you combine all of these Anchor types you end up with a complete program. 
-Below is an example of a basic Anchor program with a single instruction that: - -- Initializes a new account -- Updates the data field on the account with the instruction data passed into - the instruction - -```rust -// Use this import to gain access to common anchor features -use anchor_lang::prelude::*; - -// Program on-chain address -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -// Instruction logic -#[program] -mod program_module_name { - use super::*; - pub fn instruction_one(ctx: Context, instruction_data: u64) -> Result<()> { - ctx.accounts.account_name.data = instruction_data; - Ok(()) - } -} - -// Validate incoming accounts for instructions -#[derive(Accounts)] -pub struct InstructionAccounts<'info> { - #[account(init, payer = user, space = 8 + 8)] - pub account_name: Account<'info, AccountStruct>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, - -} - -// Define custom program account type -#[account] -pub struct AccountStruct { - data: u64 -} -``` - -You are now ready to build your own Solana program using the Anchor framework! - -# Demo - -Before we begin, install Anchor by following the steps -[here](https://www.anchor-lang.com/docs/installation). - -For this demo we'll create a simple counter program with two instructions: - -- The first instruction will initialize a counter account -- The second instruction will increment the count stored on a counter account - -### 1. Setup - -Create a new project called `anchor-counter` by running `anchor init`: - -```console -anchor init anchor-counter -``` - -Next, run `anchor-build` - -```console -anchor-build -``` - -Then, run `anchor keys list` - -```console -anchor keys list -``` - -Copy the program ID output from `anchor keys list` - -``` -anchor_counter: BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr -``` - -Then update `declare_id!` in `lib.rs` - -```rust -declare_id!("BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr"); -``` - -And also update `Anchor.toml` - -``` -[programs.localnet] -anchor_counter = "BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr" -``` - -Finally, delete the default code in `lib.rs` until all that is left is the -following: - -```rust -use anchor_lang::prelude::*; - -declare_id!("BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr"); - -#[program] -pub mod anchor_counter { - use super::*; - -} -``` - -### 2. Add the `initialize` instruction - -First, let’s implement the `initialize` instruction within `#[program]`. This -instruction requires a `Context` of type `Initialize` and takes no additional -instruction data. In the instruction logic, we are simply setting the `counter` -account’s `count` field to `0`. - -```rust -pub fn initialize(ctx: Context) -> Result<()> { - let counter = &mut ctx.accounts.counter; - counter.count = 0; - msg!("Counter Account Created"); - msg!("Current Count: { }", counter.count); - Ok(()) -} -``` - -### 3. Implement `Context` type `Initialize` - -Next, using the `#[derive(Accounts)]` macro, let’s implement the `Initialize` -type that lists and validates the accounts used by the `initialize` instruction. 
-It'll need the following accounts: - -- `counter` - the counter account initialized in the instruction -- `user` - payer for the initialization -- `system_program` - the system program is required for the initialization of - any new accounts - -```rust -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account(init, payer = user, space = 8 + 8)] - pub counter: Account<'info, Counter>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, -} -``` - -### 4. Implement `Counter` - -Next, use the `#[account]` attribute to define a new `Counter` account type. The -`Counter` struct defines one `count` field of type `u64`. This means that we can -expect any new accounts initialized as a `Counter` type to have a matching data -structure. The `#[account]` attribute also automatically sets the discriminator -for a new account and sets the owner of the account as the `programId` from the -`declare_id!` macro. - -```rust -#[account] -pub struct Counter { - pub count: u64, -} -``` - -### 5. Add `increment` instruction - -Within `#[program]`, let’s implement an `increment` instruction to increment the -`count` once a `counter` account is initialized by the first instruction. This -instruction requires a `Context` of type `Update` (implemented in the next step) -and takes no additional instruction data. In the instruction logic, we are -simply incrementing an existing `counter` account’s `count` field by `1`. - -```rust -pub fn increment(ctx: Context) -> Result<()> { - let counter = &mut ctx.accounts.counter; - msg!("Previous counter: {}", counter.count); - counter.count = counter.count.checked_add(1).unwrap(); - msg!("Counter incremented. Current count: {}", counter.count); - Ok(()) -} -``` - -### 6. Implement `Context` type `Update` - -Lastly, using the `#[derive(Accounts)]` macro again, let’s create the `Update` -type that lists the accounts that the `increment` instruction requires. It'll -need the following accounts: - -- `counter` - an existing counter account to increment -- `user` - payer for the transaction fee - -Again, we’ll need to specify any constraints using the `#[account(..)]` -attribute: - -```rust -#[derive(Accounts)] -pub struct Update<'info> { - #[account(mut)] - pub counter: Account<'info, Counter>, - pub user: Signer<'info>, -} -``` - -### 7. Build - -All together, the complete program will look like this: - -```rust -use anchor_lang::prelude::*; - -declare_id!("BouTUP7a3MZLtXqMAm1NrkJSKwAjmid8abqiNjUyBJSr"); - -#[program] -pub mod anchor_counter { - use super::*; - - pub fn initialize(ctx: Context) -> Result<()> { - let counter = &mut ctx.accounts.counter; - counter.count = 0; - msg!("Counter account created. Current count: {}", counter.count); - Ok(()) - } - - pub fn increment(ctx: Context) -> Result<()> { - let counter = &mut ctx.accounts.counter; - msg!("Previous counter: {}", counter.count); - counter.count = counter.count.checked_add(1).unwrap(); - msg!("Counter incremented. Current count: {}", counter.count); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account(init, payer = user, space = 8 + 8)] - pub counter: Account<'info, Counter>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[derive(Accounts)] -pub struct Update<'info> { - #[account(mut)] - pub counter: Account<'info, Counter>, - pub user: Signer<'info>, -} - -#[account] -pub struct Counter { - pub count: u64, -} -``` - -Run `anchor build` to build the program. - -### 8. 
Testing - -Anchor tests are typically Typescript integration tests that use the mocha test -framework. We'll learn more about testing later, but for now navigate to -`anchor-counter.ts` and replace the default test code with the following: - -```ts -import * as anchor from "@project-serum/anchor"; -import { Program } from "@project-serum/anchor"; -import { expect } from "chai"; -import { AnchorCounter } from "../target/types/anchor_counter"; - -describe("anchor-counter", () => { - // Configure the client to use the local cluster. - const provider = anchor.AnchorProvider.env(); - anchor.setProvider(provider); - - const program = anchor.workspace.AnchorCounter as Program; - - const counter = anchor.web3.Keypair.generate(); - - it("Is initialized!", async () => {}); - - it("Incremented the count", async () => {}); -}); -``` - -The above code generates a new keypair for the `counter` account we'll be -initializing and creates placeholders for a test of each instruction. - -Next, create the first test for the `initialize` instruction: - -```ts -it("Is initialized!", async () => { - // Add your test here. - const tx = await program.methods - .initialize() - .accounts({ counter: counter.publicKey }) - .signers([counter]) - .rpc(); - - const account = await program.account.counter.fetch(counter.publicKey); - expect(account.count.toNumber() === 0); -}); -``` - -Next, create the second test for the `increment` instruction: - -```ts -it("Incremented the count", async () => { - const tx = await program.methods - .increment() - .accounts({ counter: counter.publicKey, user: provider.wallet.publicKey }) - .rpc(); - - const account = await program.account.counter.fetch(counter.publicKey); - expect(account.count.toNumber() === 1); -}); -``` - -Lastly, run `anchor test` and you should see the following output: - -```console -anchor-counter -✔ Is initialized! (290ms) -✔ Incremented the count (403ms) - - -2 passing (696ms) -``` - -Running `anchor test` automatically spins up a local test validator, deploys -your program, and runs your mocha tests against it. Don't worry if you're -confused by the tests for now - we'll dig in more later. - -Congratulations, you just built a Solana program using the Anchor framework! -Feel free to reference the -[solution code](https://github.com/Unboxed-Software/anchor-counter-program/tree/solution-increment) -if you need some more time with it. - -# Challenge - -Now it’s your turn to build something independently. Because we're starting with -very simple programs, yours will look almost identical to what we just created. -It's useful to try and get to the point where you can write it from scratch -without referencing prior code, so try not to copy and paste here. - -1. Write a new program that initializes a `counter` account -2. Implement both an `increment` and `decrement` instruction -3. Build and deploy your program like we did in the demo -4. Test your newly deployed program and use Solana Explorer to check the program - logs - -As always, get creative with these challenges and take them beyond the basic -instructions if you want - and have fun! - -Try to do this independently if you can! But if you get stuck, feel free to -reference -the [solution code](https://github.com/Unboxed-Software/anchor-counter-program/tree/solution-decrement). 
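A quick way to exercise the new `decrement` instruction in step 4 is to extend the mocha tests from the demo. Here is a minimal sketch, assuming your program names the instruction `decrement`, reuses the same accounts as `increment`, and that `program`, `provider`, and `counter` are set up as in the earlier test file:

```ts
// Runs after the increment test, so the count should drop from 1 back to 0.
it("Decremented the count", async () => {
  const tx = await program.methods
    .decrement()
    .accounts({ counter: counter.publicKey, user: provider.wallet.publicKey })
    .rpc();

  const account = await program.account.counter.fetch(counter.publicKey);
  expect(account.count.toNumber()).to.equal(0);
});
```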
diff --git a/content/courses/solana-course/content/intro-to-reading-data.md b/content/courses/solana-course/content/intro-to-reading-data.md deleted file mode 100644 index 33df8e780..000000000 --- a/content/courses/solana-course/content/intro-to-reading-data.md +++ /dev/null @@ -1,319 +0,0 @@ ---- -title: Read Data From The Solana Network -objectives: - - Explain accounts - - Explain SOL and lamports - - Explain public keys - - Explain the JSON RPC API - - Explain web3.js - - Install web3.js - - Use web3.js to create a connection to a Solana node - - Use web3.js to read data from the blockchain (balance, account info, etc.) ---- - -## TL;DR - -- **Accounts** are like the files in Solana’s network ledger. All state data is - stored in an account. Accounts can be used for many things, but for now we’ll - focus on the aspect of accounts which store SOL. -- **SOL** is the name of Solana’s native token. -- **Lamports** are fractional SOL and are named after - [Leslie Lamport](https://en.wikipedia.org/wiki/Leslie_Lamport). -- **Public keys**, often referred to as addresses, point to accounts on the - Solana network. While you must have a specific secret key to perform certain - functions within accounts, anyone can read account data with a public key. -- **JSON RPC API**: all interactions with the Solana network happens through the - [JSON RPC API](https://docs.solana.com/developing/clients/jsonrpc-api). This - is effectively an HTTP POST with a JSON body that represents the method you - want to call. -- **@solana/web3.js** is an abstraction on top of the JSON RPC API. It can be - installed with `npm` and allows you to call Solana methods as JavaScript - functions. For example, you can use it to query the SOL balance of any - account: - - ```tsx - async function getBalanceUsingWeb3(address: PublicKey): Promise { - const connection = new Connection(clusterApiUrl("devnet")); - return connection.getBalance(address); - } - - const publicKey = new PublicKey( - "7C4jsPZpht42Tw6MjXWF56Q5RQUocjBBmciEjDa8HRtp", - ); - getBalanceUsingWeb3(publicKey).then(balance => { - console.log(balance); - }); - ``` - -# Overview - -## Accounts - -Solana accounts are similar to files in operating systems such as Linux. They -hold arbitrary, persistent data and are flexible enough to be used in many -different ways. - -In this lesson we won’t consider much about accounts beyond their ability to -store SOL (Solana’s native token - more on that later). However, accounts are -also used to store custom data structures and executable code that can be run as -programs. Accounts will be involved in everything you do with Solana. - -### Public Keys - -Public keys are often referred to as addresses. The addresses point to accounts -on the Solana network. If you want to run a specific program or transfer SOL, -you’ll need to provide the necessary public key (or keys) to do so. - -Public keys are 256-bit and they are often shown as base-58 encoded strings like -`7C4jsPZpht42Tw6MjXWF56Q5RQUocjBBmciEjDa8HRtp`. - -## The Solana JSON RPC API - -![Illustration depicting how client-side interaction with the Solana network happens through the JSON RPC API](../assets/json-rpc-illustration.png) - -All client interaction with the Solana network happens through Solana’s -[JSON RPC API](https://docs.solana.com/developing/clients/jsonrpc-api). - -Per the [JSON-RPC 2.0 specification](https://www.jsonrpc.org/specification) - -> _JSON-RPC is a stateless, light-weight remote procedure call (RPC) protocol. 
-> Primarily this specification defines several data structures and the rules -> around their processing. It is transport agnostic in that the concepts can be -> used within the same process, over sockets, over http, or in many various -> message passing environments. It -> uses [JSON](http://www.json.org/) ([RFC 4627](http://www.ietf.org/rfc/rfc4627.txt)) -> as data format._ - -In practice, this specification simply involves sending a JSON object -representing a method you want to call. You can do this with sockets, http, and -more. - -This JSON object needs four members: - -- `jsonrpc` - The JSON RPC version number. This needs to be _exactly_ `"2.0"`. -- `id` - An identifier that you choose for identifying the call. This can be a - string or a whole number. -- `method` - The name of the method you want to invoke. -- `params` - An array containing the parameters to use during the method - invocation. - -So, if you want to call the `getBalance` method on the Solana network, you could -send an HTTP call to a Solana cluster as follows: - -```tsx -async function getBalanceUsingJSONRPC(address: string): Promise { - const url = clusterApiUrl("devnet"); - console.log(url); - return fetch(url, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ - jsonrpc: "2.0", - id: 1, - method: "getBalance", - params: [address], - }), - }) - .then(response => response.json()) - .then(json => { - if (json.error) { - throw json.error; - } - - return json["result"]["value"] as number; - }) - .catch(error => { - throw error; - }); -} -``` - -## Solana’s Web3.js SDK - -While the JSON-RPC API is simple enough, it involves a significant amount of -tedious boilerplate. To simplify the process of communication, Solana Labs -created the `@solana/web3.js` SDK as an abstraction on top of the JSON-RPC API. - -Web3.js allows you to call the JSON-RPC API methods using JavaScript functions. -The SDK provides a suite of helper functions and objects. We’ll cover a lot of -the SDK gradually throughout this course, but we won’t go over everything in -depth, so be sure to check out the -[documentation](https://docs.solana.com/developing/clients/javascript-reference) -at some point. - -### Installation - -Throughout this course, we’ll mostly be using `npm`. How to use `npm` is outside -the scope of this course and we’ll assume it’s a tool you use regularly. -[Check this out](https://nodesource.com/blog/an-absolute-beginners-guide-to-using-npm/) -if that’s not the case. - -To install `@solana/web3.js`, set up your project the way you normally would -then use: - -`npm install @solana/web3.js`. - -### Connect to the Network - -Every interaction with the Solana network using `@solana/web3.js` is going to -happen through a `Connection` object. This object establishes a JSON-RPC -connection with a Solana cluster (more on clusters later). For now, we’re going -to use the url for the Devnet cluster rather than Mainnet. As the name suggests, -this cluster is designed for developer use and testing. - -```tsx -const connection = new Connection(clusterApiUrl("devnet")); -``` - -### Read from the Network - -Once you have a `Connection` object, querying the network is as simple as -calling the appropriate methods. 
For example, to get the balance of a particular
-address, you do the following:
-
-```tsx
-async function getBalanceUsingWeb3(address: PublicKey): Promise<number> {
-  const connection = new Connection(clusterApiUrl("devnet"));
-  return connection.getBalance(address);
-}
-```
-
-The balance returned is in fractional SOL called lamports. A single lamport
-represents 0.000000001 SOL. Most of the time when dealing with SOL, the system
-will use lamports instead of SOL. Web3.js provides the constant
-`LAMPORTS_PER_SOL` for making quick conversions.
-
-...and just like that, now you know how to read data from the Solana blockchain!
-Once we get into custom data things will get more complicated. But for now,
-let’s practice what we’ve learned so far.
-
-# Demo
-
-Let’s create a simple website that lets users check the balance at a particular
-address.
-
-It’ll look something like this:
-
-![Screenshot of demo solution](../assets/intro-frontend-demo.png)
-
-In the interest of staying on topic, we won’t be working entirely from scratch.
-You can find the starter code
-[here](https://github.com/Unboxed-Software/solana-intro-frontend/tree/starter).
-The starter project uses Next.js and Typescript. If you’re used to a different
-stack, don’t worry! The web3 and Solana principles you’ll learn throughout these
-lessons are applicable to whichever frontend stack you’re most comfortable with.
-
-### 1. Get oriented
-
-Once you’ve got the starter code, take a look around. Install the dependencies
-with `npm install` and then run the app with `npm run dev`. Notice that no
-matter what you put into the address field, when you click “Check SOL Balance”
-the balance will be a placeholder value of 1000.
-
-Structurally, the app is composed of `index.tsx` and `AddressForm.tsx`. When a
-user submits the form, the `addressSubmittedHandler` in `index.tsx` gets called.
-That’s where we’ll be adding the logic to update the rest of the UI.
-
-### 2. Install dependencies
-
-Use `npm install @solana/web3.js` to install our dependency on Solana’s Web3
-library.
-
-### 3. Set the address balance
-
-First, import `@solana/web3.js` at the top of `index.tsx`.
-
-Now that the library is available, let’s go into the `addressSubmittedHandler`
-and create an instance of `PublicKey` using the address value from the form
-input. Next, create an instance of `Connection` and use it to call `getBalance`.
-Pass in the value of the public key you just created. Finally, call
-`setBalance`, passing in the result from `getBalance`. If you’re up to it, try
-this independently instead of copying from the code snippet below.
-
-```tsx
-import type { NextPage } from 'next'
-import { useState } from 'react'
-import styles from '../styles/Home.module.css'
-import AddressForm from '../components/AddressForm'
-import * as Web3 from '@solana/web3.js'
-
-const Home: NextPage = () => {
-  const [balance, setBalance] = useState(0)
-  const [address, setAddress] = useState('')
-
-  const addressSubmittedHandler = (address: string) => {
-    setAddress(address)
-    const key = new Web3.PublicKey(address)
-    const connection = new Web3.Connection(Web3.clusterApiUrl('devnet'))
-    connection.getBalance(key).then(balance => {
-      setBalance(balance / Web3.LAMPORTS_PER_SOL)
-    })
-  }
-
-...
-
-}
-```
-
-Notice that we are taking the balance returned by Solana and dividing it by
-`LAMPORTS_PER_SOL`. Lamports are fractional SOL (0.000000001 SOL). Most of the
-time when dealing with SOL, the system will use lamports instead of SOL.
In this -case, the balance returned by the network is in lamports. Before setting it to -our state, we convert it to SOL using the `LAMPORTS_PER_SOL` constant. - -At this point you should be able to put a valid address into the form field and -click “Check SOL Balance” to see both the Address and Balance populate below. - -### 4. Handle invalid addresses - -We’re just about done. The only remaining issue is that using an invalid address -doesn’t show any error message or change the balance shown. If you open the -developer console, you’ll see `Error: Invalid public key input`. When using the -`PublicKey` constructor, you need to pass in a valid address or you’ll get this -error. - -To fix this, let’s wrap everything in a `try-catch` block and alert the user if -their input is invalid. - -```tsx -const addressSubmittedHandler = (address: string) => { - try { - setAddress(address); - const key = new Web3.PublicKey(address); - const connection = new Web3.Connection(Web3.clusterApiUrl("devnet")); - connection.getBalance(key).then(balance => { - setBalance(balance / Web3.LAMPORTS_PER_SOL); - }); - } catch (error) { - setAddress(""); - setBalance(0); - alert(error); - } -}; -``` - -Notice that in the catch block we also cleared out the address and balance to -avoid confusion. - -We did it! We have a functioning site that reads SOL balances from the Solana -network. You’re well on your way to achieving your grand ambitions on Solana. If -you need to spend some more time looking at this code to better understand it, -have a look at the complete -[solution code](https://github.com/Unboxed-Software/solana-intro-frontend). Hang -on tight, these lessons will ramp up quickly. - -# Challenge - -Since this is the first challenge, we’ll keep it simple. Go ahead and add on to -the frontend we’ve already created by including a line item after “Balance”. -Have the line item display whether or not the account is an executable account -or not. Hint: there’s a `getAccountInfo` method. - -Your standard wallet address will _not_ be executable, so if you want an address -that _will_ be executable for testing, use -`CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN`. - -![Screenshot of final challenge solution](../assets/intro-frontend-challenge.png) - -If you get stuck feel free to take a look at the -[solution code](https://github.com/Unboxed-Software/solana-intro-frontend/tree/challenge-solution). diff --git a/content/courses/solana-course/content/intro-to-writing-data.md b/content/courses/solana-course/content/intro-to-writing-data.md deleted file mode 100644 index ca7fe7176..000000000 --- a/content/courses/solana-course/content/intro-to-writing-data.md +++ /dev/null @@ -1,586 +0,0 @@ ---- -title: Write Data To The Solana Network -objectives: - - Explain keypair - - Use `@solana/web3.js` to generate a keypair - - Use `@solana/web3.js` to create a keypair using a secret key - - Explain transactions - - Explain transaction fees - - Use `@solana/web3.js` to send sol - - Use `@solana/web3.js` to sign transactions - - Use Solana explorer to view transactions ---- - -# TL;DR - -- **Keypair** refers to a pairing of public and secret keys. The public key is - used as an “address” that points to an account on the Solana network. The - secret key is used to verify identity or authority. As the name suggests, you - should always keep secret keys _private_. `@solana/web3.js` provides helper - functions for creating a brand new keypair, or for constructing a keypair - using an existing secret key. 
-- **Transactions** are effectively a bundle of instructions that invoke Solana - programs. The result of each transaction depends on the program being called. - All modifications to on-chain data happen through transactions. Example: - - ```tsx - const transaction = new Transaction(); - - const sendSolInstruction = SystemProgram.transfer({ - fromPubkey: sender, - toPubkey: recipient, - lamports: LAMPORTS_PER_SOL * amount, - }); - - transaction.add(sendSolInstruction); - - const signature = sendAndConfirmTransaction(connection, transaction, [ - senderKeypair, - ]); - ``` - -# Overview - -## Keypair - -As the name suggests, a keypair is a pair of keys: a public key and a secret -key. - -- The public key is used as an “address” that points to an account on the Solana - network. -- The secret key is used to verify identity or authority. As the name suggests, - you should always keep secret keys _private_. - -A keypair is _required_ for the vast majority of interactions within the Solana -network. If you don’t already have a keypair, or if you want to generate a new -one for a specific purpose, `@solana/web3.js` provides a helper function for -creating a brand new keypair. - -```tsx -const ownerKeypair = Keypair.generate(); -``` - -A keypair is of the data type `Keypair` and can be deconstructed into a public -key: - -```tsx -const publicKey = ownerKeypair.publicKey; -``` - -... or the secret key: - -```tsx -const secretKey = ownerKeypair.secretKey; -``` - -If you already have a keypair you’d like to use, you can create a `Keypair` from -the secret key using the `Keypair.fromSecretKey()` function. To ensure that your -secret key stays secure, we recommend injecting it through an environment -variable and not committing your `.env` file. - -```tsx -const secret = JSON.parse(process.env.PRIVATE_KEY ?? "") as number[]; -const secretKey = Uint8Array.from(secret); -const keypairFromSecretKey = Keypair.fromSecretKey(secretKey); -``` - -## Transactions - -Any modification to on-chain data happens through transactions sent to programs. - -Transaction instructions contain: - -- an identifier of the program you intend to invoke -- an array of accounts that will be read from and/or written to -- data structured as a byte array that is specified to the program being invoked - -When you send a transaction to a Solana cluster, a Solana program is invoked -with the instructions included in the transaction. - -As you might expect, `@solana/web3.js` provides helper functions for creating -transactions and instructions. You can create a new transaction with the -constructor, `new Transaction()`. Once created, then you can add instructions to -the transaction with the `add()` method. - -Instructions can get complicated when working with custom programs. Fortunately, -`@solana/web3.js` has convenience functions for some of Solana’s native programs -and basic operations, like transferring SOL: - -```tsx -const transaction = new Transaction(); - -const sendSolInstruction = SystemProgram.transfer({ - fromPubkey: sender, - toPubkey: recipient, - lamports: LAMPORTS_PER_SOL * amount, -}); - -transaction.add(sendSolInstruction); -``` - -The `SystemProgram.transfer()` function requires that you pass as parameters: - -- a public key corresponding to the sender account -- a public key corresponding to the recipient account -- the amount of SOL to send in lamports. - -This function then returns the instruction for sending SOL from the sender to -the recipient, after which the instruction can be added to the transaction. 
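-
-For instance, with concrete values filled in, building such a transfer might
-look like the following sketch (the sender keypair and recipient address here
-are placeholders for illustration; 0.1 SOL is expressed in lamports using
-`LAMPORTS_PER_SOL`):
-
-```tsx
-import {
-  Keypair,
-  PublicKey,
-  SystemProgram,
-  Transaction,
-  LAMPORTS_PER_SOL,
-} from "@solana/web3.js";
-
-// Placeholder sender and recipient, for illustration only
-const senderKeypair = Keypair.generate();
-const recipient = new PublicKey("7C4jsPZpht42Tw6MjXWF56Q5RQUocjBBmciEjDa8HRtp");
-
-// Build a transaction that sends 0.1 SOL (the amount is given in lamports)
-const transaction = new Transaction().add(
-  SystemProgram.transfer({
-    fromPubkey: senderKeypair.publicKey,
-    toPubkey: recipient,
-    lamports: 0.1 * LAMPORTS_PER_SOL,
-  }),
-);
-```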
-
-Once created, a transaction needs to be sent to the cluster and confirmed:
-
-```tsx
-const signature = sendAndConfirmTransaction(connection, transaction, [
-  senderKeypair,
-]);
-```
-
-The `sendAndConfirmTransaction()` function takes as parameters
-
-- a cluster connection
-- a transaction
-- an array of keypairs that will act as signers on the transaction - in this
-  example, we only have the one signer: the sender.
-
-### Instructions
-
-The example of sending SOL is great for introducing you to sending transactions,
-but a lot of web3 development will involve calling non-native programs. In the
-example above, the `SystemProgram.transfer()` function ensures that you pass all
-the necessary data required to create the instruction, then it creates the
-instruction for you. When working with non-native programs, however, you’ll need
-to be very specific about creating instructions that are structured to match the
-corresponding program.
-
-With `@solana/web3.js`, you can create non-native instructions with the
-`TransactionInstruction` constructor. This constructor takes a single argument
-of the data type `TransactionInstructionCtorFields`.
-
-```tsx
-export type TransactionInstructionCtorFields = {
-  keys: Array<AccountMeta>;
-  programId: PublicKey;
-  data?: Buffer;
-};
-```
-
-Per the definition above, the object passed to the `TransactionInstruction`
-constructor requires:
-
-- an array of keys of type `AccountMeta`
-- the public key for the program being called
-- an optional `Buffer` containing data to pass to the program.
-
-We’ll be ignoring the `data` field for now and will revisit it in a future
-lesson.
-
-The `programId` field is fairly self-explanatory: it’s the public key associated
-with the program. You’ll need to know this in advance of calling the program in
-the same way that you’d need to know the public key of someone to whom you want
-to send SOL.
-
-The `keys` array requires a bit more explanation. Each object in this array
-represents an account that will be read from or written to during a
-transaction's execution. This means you need to know the behavior of the program
-you are calling and ensure that you provide all of the necessary accounts in the
-array.
-
-Each object in the `keys` array must include the following:
-
-- `pubkey` - the public key of the account
-- `isSigner` - a boolean representing whether or not the account is a signer on
-  the transaction
-- `isWritable` - a boolean representing whether or not the account is written to
-  during the transaction's execution
-
-Putting this all together, we might end up with something like the following:
-
-```tsx
-async function callProgram(
-  connection: web3.Connection,
-  payer: web3.Keypair,
-  programId: web3.PublicKey,
-  programDataAccount: web3.PublicKey,
-) {
-  const instruction = new web3.TransactionInstruction({
-    keys: [
-      {
-        pubkey: programDataAccount,
-        isSigner: false,
-        isWritable: true,
-      },
-    ],
-    programId,
-  });
-
-  const signature = await web3.sendAndConfirmTransaction(
-    connection,
-    new web3.Transaction().add(instruction),
-    [payer],
-  );
-
-  console.log(signature);
-}
-```
-
-### Transaction Fees
-
-Transaction fees are built into the Solana economy as compensation to the
-validator network for the CPU and GPU resources required in processing
-transactions. Unlike many networks that have a fee market where users can pay
-higher fees to increase their chances of being included in the next block,
-Solana transaction fees are deterministic.
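-
-As an aside, if you want to see what a given transaction will cost before
-sending it, recent releases of `@solana/web3.js` can ask the cluster for the
-fee of a compiled message. A minimal sketch, assuming a version that exposes
-`getLatestBlockhash` and `getFeeForMessage`:
-
-```tsx
-async function estimateFee(
-  connection: web3.Connection,
-  transaction: web3.Transaction,
-  payer: web3.PublicKey,
-): Promise<number | null> {
-  // compileMessage() needs a fee payer and a recent blockhash to be set
-  transaction.feePayer = payer;
-  transaction.recentBlockhash = (await connection.getLatestBlockhash()).blockhash;
-
-  // The fee is returned in lamports (or null if the blockhash has expired)
-  const { value } = await connection.getFeeForMessage(transaction.compileMessage());
-  return value;
-}
-```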
- -The first signer included in the array of signers on a transaction is -responsible for paying the transaction fee. If this signer does not have enough -SOL in their account to cover the transaction fee the transaction will be -dropped. - -When testing, whether locally or on devnet, you can use the Solana CLI command -`solana airdrop 1` to get free test SOL in your account for paying transaction -fees. - -### Solana Explorer - -![Screenshot of Solana Explorer set to Devnet](../assets/solana-explorer-devnet.png) - -All transactions on the blockchain are publicly viewable on the -[Solana Explorer](http://explorer.solana.com). For example, you could take the -signature returned by `sendAndConfirmTransaction()` in the example above, search -for that signature in the Solana Explorer, then see: - -- when it occurred -- which block it was included in -- the transaction fee -- and more! - -![Screenshot of Solana Explorer with details about a transaction](../assets/solana-explorer-transaction-overview.png) - -# Demo - -We’re going to create a script to ping a simple program that increments a -counter each time it has been pinged. This program exists on the Solana Devnet -at address `ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa`. The program stores -the count data in a specific account at the address -`Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod`. - -### 1. Basic scaffolding - -Let’s start with some basic scaffolding. You’re welcome to set up your project -however feels most appropriate, but we’ll be using a simple Typescript project -with a dependency on the @solana/web3.js package. If you want to use our -scaffolding, you can use the following commands in the command line: - -```bash -mkdir -p solana-ping-client/src && \ - cd solana-ping-client && \ - touch src/index.ts && \ - git init && touch .gitignore && \ - npm init -y && \ - npm install --save-dev typescript && \ - npm install --save-dev ts-node && \ - npx tsc --init && \ - npm install @solana/web3.js && \ - npm install dotenv && \ - touch .env -``` - -This will: - -1. create a new directory for the project with a subdirectory `src` -2. move the command line prompt inside the project directory -3. create an `index.ts` file inside of `src` -4. initialize a git repository with a `.gitignore` file -5. create a new `npm` package -6. add a developer dependency on typescript -7. add a developer dependency on `ts-node` -8. create a `.tsconfig` file -9. install the `@solana/web3.js` dependency -10. install the `.dotenv` dependency -11. create a `.env` file - -If you want to match our code exactly, replace the contents of `tsconfig.json` -with the following: - -```json -{ - "compilerOptions": { - "target": "es5", - "module": "commonjs", - "strict": true, - "esModuleInterop": true, - "skipLibCheck": true, - "forceConsistentCasingInFileNames": true, - "outDir": "dist" - }, - "include": ["./src/**/*"] -} -``` - -Add the following to the `.gitignore`: - -``` -node_modules/ -dist/ -.env -``` - -And finally, add the following to the `scripts` object in `package.json`: - -```json -"start": "ts-node src/index.ts" -``` - -### 2. Generate a new keypair - -Before you can do anything, you’ll need a keypair. 
Let’s jump into the -`index.ts` file and generate one: - -```tsx -import web3 = require("@solana/web3.js"); -import Dotenv from "dotenv"; -Dotenv.config(); - -async function main() { - const newKeypair = web3.Keypair.generate(); - console.log(newKeypair.secretKey.toString()); -} - -main() - .then(() => { - console.log("Finished successfully"); - }) - .catch(error => { - console.error(error); - }); -``` - -Most of this code is just boilerplate to run the file properly. The lines inside -of the `main()` function generate a new keypair and log the secret key to the -console. - -Run `npm start` after saving this file and you should see an array of numbers -printed to the console. This array represents the secret key for your new -keypair. **Do not** use this keypair for Mainnet operations. **Only use this -keypair for testing.** - -Copy the secret key array from the console log and paste it into the `.env` file -as an environment variable called, `PRIVATE_KEY`. This way we can reuse this -keypair in future development instead of generating a new keypair every time we -run something. It should look something like this but with different numbers: - -``` -PRIVATE_KEY=[56,83,31,62,66,154,33,74,106,59,111,224,176,237,89,224,10,220,28,222,128,36,138,89,30,252,100,209,206,155,154,65,98,194,97,182,98,162,107,238,61,183,163,215,44,6,10,49,218,156,5,131,125,253,247,190,181,196,0,249,40,149,119,246] -``` - -### 3. Initialize Keypair from secret - -Now that we’ve successfully generated a keypair and copied it to the `.env` -file, we can remove the code inside of the `main()` function. - -We’ll return to the `main()` function soon, but for now let’s create a new -function outside of `main()` called `initializeKeypair()`. Inside of this new -function: - -1. parse the `PRIVATE_KEY` environment variable as `number[]` -2. use it to initialize a `Uint8Array` -3. initialize and return a `Keypair` using that `Uint8Array`. - -```tsx -function initializeKeypair(): web3.Keypair { - const secret = JSON.parse(process.env.PRIVATE_KEY ?? "") as number[]; - const secretKey = Uint8Array.from(secret); - const keypairFromSecretKey = web3.Keypair.fromSecretKey(secretKey); - return keypairFromSecretKey; -} -``` - -### 4. Ping program - -Now that we have a way of initializing our keypair, we need to establish a -connection with Solana’s Devnet. In `main()`, let’s invoke `initializeKeypair()` -and create a connection: - -```tsx -async function main() { - const payer = initializeKeypair(); - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); -} -``` - -Now create an async function outside of `main()` called `pingProgram()` with two -parameters requiring a connection and a payer’s keypair as arguments: - -```tsx -async function pingProgram(connection: web3.Connection, payer: web3.Keypair) {} -``` - -Inside this function, we need to: - -1. create a transaction -2. create an instruction -3. add the instruction to the transaction -4. send the transaction. - -Remember, the most challenging piece here is including the right information in -the instruction. We know the address of the program that we are calling. We also -know that the program writes data to a separate account whose address we also -have. 
Let’s add the string versions of both of those as constants at the top of -the `index.ts` file: - -```tsx -const PROGRAM_ADDRESS = "ChT1B39WKLS8qUrkLvFDXMhEJ4F1XZzwUNHUt4AU9aVa"; -const PROGRAM_DATA_ADDRESS = "Ah9K7dQ8EHaZqcAsgBW8w37yN2eAy3koFmUn4x3CJtod"; -``` - -Now, in the `pingProgram()` function, let’s create a new transaction, then -initialize a `PublicKey` for the program account, and another for the data -account. - -```tsx -async function pingProgram(connection: web3.Connection, payer: web3.Keypair) { - const transaction = new web3.Transaction(); - - const programId = new web3.PublicKey(PROGRAM_ADDRESS); - const programDataPubkey = new web3.PublicKey(PROGRAM_DATA_ADDRESS); -} -``` - -Next, let’s create the instruction. Remember, the instruction needs to include -the public key for the program and it also needs to include an array with all -the accounts that will be read from or written to. In this example program, only -the data account referenced above is needed. - -```tsx -async function pingProgram(connection: web3.Connection, payer: web3.Keypair) { - const transaction = new web3.Transaction(); - - const programId = new web3.PublicKey(PROGRAM_ADDRESS); - const programDataPubkey = new web3.PublicKey(PROGRAM_DATA_ADDRESS); - - const instruction = new web3.TransactionInstruction({ - keys: [ - { - pubkey: programDataPubkey, - isSigner: false, - isWritable: true, - }, - ], - programId, - }); -} -``` - -Next, let’s add the instruction to the transaction we created at the start of -the function. Then, call upon `sendAndConfirmTransaction()` by passing in the -connection, transaction, and payer. Finally, let’s log the result of that -function call so we can look it up on the Solana Explorer. - -```tsx -async function pingProgram(connection: web3.Connection, payer: web3.Keypair) { - const transaction = new web3.Transaction(); - - const programId = new web3.PublicKey(PROGRAM_ADDRESS); - const programDataPubkey = new web3.PublicKey(PROGRAM_DATA_ADDRESS); - - const instruction = new web3.TransactionInstruction({ - keys: [ - { - pubkey: programDataPubkey, - isSigner: false, - isWritable: true, - }, - ], - programId, - }); - - transaction.add(instruction); - - const signature = await web3.sendAndConfirmTransaction( - connection, - transaction, - [payer], - ); - - console.log(signature); -} -``` - -Finally, let's invoke `pingProgram()` within `main()` using `connection` and -`payer`: - -```tsx -async function main() { - const payer = initializeKeypair(); - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - await pingProgram(connection, payer); -} -``` - -### 5. Airdrop - -Now run the code with `npm start` and see if it works. You may end up with the -following error in the console: - -> Transaction simulation failed: Attempt to debit an account but found no record -> of a prior credit. - -If you get this error, it’s because your keypair is brand new and doesn’t have -any SOL to cover the transaction fees. Let’s fix this by adding the following -line in `main()` before the call to `pingProgram()`: - -```tsx -await connection.requestAirdrop(payer.publicKey, web3.LAMPORTS_PER_SOL * 1); -``` - -This will deposit 1 SOL into your account which you can use for testing. This -won’t work on Mainnet where it would actually have value. But it's incredibly -convenient for testing locally and on Devnet. - -### 6. Check the Solana explorer - -Now run the code again. 
It may take a moment or two, but now the code should -work and you should see a long string printed to the console, like the -following: - -``` -55S47uwMJprFMLhRSewkoUuzUs5V6BpNfRx21MpngRUQG3AswCzCSxvQmS3WEPWDJM7bhHm3bYBrqRshj672cUSG -``` - -Copy this confirmation signature. Open a browser and go to -[https://explorer.solana.com/?cluster=devnet](https://explorer.solana.com/?cluster=devnet) -(the query parameter at the end of the URL will ensure that you’ll explore -transactions on Devnet instead of Mainnet). Paste the signature into the search -bar at the top of Solana’s Devnet explorer and hit enter. You should see all the -details about the transaction. If you scroll all the way to the bottom, then you -will see `Program Logs`, which show how many times the program has been pinged -including your ping. - -![Screenshot of Solana Explorer with logs from calling the Ping program](../assets/solana-explorer-ping-result.png) - -If you want to make it easier to look at Solana Explorer for transactions in the -future, simply change your `console.log` in `pingProgram()` to the following: - -```tsx -console.log( - `You can view your transaction on the Solana Explorer at:\nhttps://explorer.solana.com/tx/${sig}?cluster=devnet`, -); -``` - -And just like that you’re calling programs on the Solana network and writing -data to chain! - -In the next few lessons you’ll learn how to - -1. do this safely from the browser instead of from running a script -2. add custom data to your instructions -3. deserialize data from the chain - -# Challenge - -Go ahead and create a script from scratch that will allow you to transfer SOL -from one account to another on Devnet. Be sure to print out the transaction -signature so you can look at it on the Solana Explorer. - -If you get stuck feel free to glance at the -[solution code](https://github.com/Unboxed-Software/solana-send-sol-client). diff --git a/content/courses/solana-course/content/local-setup.md b/content/courses/solana-course/content/local-setup.md deleted file mode 100644 index 3f4c213ab..000000000 --- a/content/courses/solana-course/content/local-setup.md +++ /dev/null @@ -1,473 +0,0 @@ ---- -title: Local Program Development -objectives: - - Set up a local environment for Solana program development - - Use basic Solana CLI commands - - Run a local test validator - - Use Rust and the Solana CLI to deploy a Solana program from your local - development environment - - Use the Solana CLI to view program logs ---- - -# TL;DR - -- To get started with Solana locally, you’ll first need to install **Rust** and - the **Solana CLI** -- Using the Solana CLI you can run a **local test validator** using the - `solana-test-validator` command -- Once you have Rust and Solana CLI installed, you’ll be able to build and - deploy your programs locally using the `cargo build-bpf` and - `solana program deploy` commands -- You can view program logs using the `solana logs` command - -# Overview - -So far in this course, we've used Solana Playground to develop and deploy Solana -programs. And while it's a great tool, for certain complex projects you may -prefer to have a local development environment set up. This may be in order to -use crates not supported by Solana Playground, to take advantage of custom -scripts or tooling you've created, or simply out of personal preference. - -With that said, this lesson will be slightly different from the others. 
Instead -of covering a lot of ground on how to write a program or interact with the -Solana network, this lesson will primarily focus on the less glamorous task of -setting up your local development environment. - -In order to build, test, and deploy Solana programs from your machine, you'll -need to install the Rust compiler and the Solana Command Line Interface (CLI). -We'll start by guiding you through these installation processes, then cover how -to use what you'll have just installed. - -The installation instructions below contain the steps for installing Rust and -the Solana CLI at the time of writing. They may have changed by the time you're -reading this, so if you run into issues please consult the official installation -pages for each: - -- [Install Rust](https://www.rust-lang.org/tools/install) -- [Install the Solana Tool Suite](https://docs.solana.com/cli/install-solana-cli-tools) - -## Setup on Windows (with Linux) - -### Download Windows Subsystem for Linux (WSL) - -If you are on a Windows computer, it is recommended to use Windows Subsystem for -Linux (WSL) to build your Solana Programs. - -Open an **administrator** PowerShell or Windows Command Prompt and check Windows -version - -```bash -winver -``` - -If you are on Windows 10 version 2004 and higher (Build 19041 and higher) or -Windows 11, run the following command. - -```bash -wsl --install -``` - -If you are running an older version of Windows, follow the instructions -[here](https://docs.microsoft.com/en-us/windows/wsl/install-manual). - -You can read more about installing WSL -[here](https://docs.microsoft.com/en-us/windows/wsl/install). - -### Download Ubuntu - -Next, download Ubuntu -[here](https://apps.microsoft.com/store/detail/ubuntu-2004/9N6SVWS3RX71?hl=en-us&gl=US). -Ubuntu provides a terminal that allows you to run Linux on a Windows computer. -This is where you’ll be running Solana CLI commands. - -### Download Rust (for WSL) - -Next, open an Ubuntu terminal and download Rust for WSL using the following -command. You can read more about downloading Rust -[here](https://www.rust-lang.org/learn/get-started). - -```bash -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -``` - -### Download Solana CLI - -Now we are ready to download Solana CLI for Linux. Go ahead and run the -following command in an Ubuntu terminal. You can read more about downloading -Solana CLI [here](https://docs.solana.com/cli/install-solana-cli-tools). - -```bash -sh -c "$(curl -sSfL https://release.solana.com/v1.10.31/install)" -``` - -## Setup on macOS - -### Download Rust - -First, download Rust by following the instructions -[here](https://www.rust-lang.org/tools/install) - -### Download the Solana CLI - -Next, download the Solana CLI by running the following command in your terminal. - -```bash -sh -c "$(curl -sSfL https://release.solana.com/v1.10.31/install)" -``` - -You can read more about downloading the Solana CLI -[here](https://docs.solana.com/cli/install-solana-cli-tools). - -## Solana CLI basics - -The Solana CLI is a command-line interface tool that provides a collection of -commands for interacting with a Solana cluster. - -We'll cover some of the most common commands in this lesson, but you can always -view the list of all possible Solana CLI commands by running `solana --help`. - -### Solana CLI configuration - -The Solana CLI stores a number of configuration settings that impact the -behavior of certain commands. 
You can use the following command to view the -current configuration: - -```bash -solana config get -``` - -The `solana config get` command will return the following: - -- `Config File` - the file Solana CLI is located on your computer -- `RPC URL` - endpoint you are using, connecting you to localhost, Devnet, or - Mainnet -- `WebSocket URL` - the websocket to listen for events from the cluster you are - targeting (computed when you set the `RPC URL`) -- `Keypair Path` - the keypair path used when running Solana CLI subcommands -- `Commitment` - provides a measure of the network confirmation and describes - how finalized a block is at that point in time - -You can change your Solana CLI configuration at any time by using the -`solana config set` command followed by the setting you want to update. - -The most common change will be to the cluster you are targeting. Use the -`solana config set --url` command to change the `RPC URL`. - -```bash -solana config set --url localhost -``` - -```bash -solana config set --url devnet -``` - -```bash -solana config set --url mainnet-beta -``` - -Similarly, you can use the `solana config set --keypair` command to change the -`Keypair Path`. Solana CLI will then use the keypair from the specified path -when running commands. - -```bash -solana config set --keypair ~/ -``` - -### Test validators - -You'll often find it helpful to run a local validator for testing and debugging -rather than deploying to Devnet. - -You can run a local test validator using the `solana-test-validator` command. -This command creates an ongoing process that will require its own command line -window. - -### Stream program logs - -It's often helpful to open a new console and run the `solana logs` command -alongside the test validator. This creates another ongoing process that will -stream the logs associated with your configuration's cluster. - -If your CLI configuration is pointed to `localhost` then the logs will always be -associated with the test validator you've created, but you can also stream logs -from other clusters like Devnet and Mainnet Beta. When streaming logs from other -clusters, you'll want to include a program ID with the command to limit the logs -you see to your specific program. - -### Keypairs - -You can generate a new keypair using the `solana-keygen new --outfile` command -followed by the file path to store the keypair. - -```bash -solana-keygen new --outfile ~/ -``` - -At times you may need to check which keypair your configuration is pointed to. -To view the `publickey` of the current keypair set in `solana config`, use the -`solana address` command. - -```bash -solana address -``` - -To view the SOL balance of the current keypair set in `solana config`, use the -`solana balance` command. - -```bash -solana balance -``` - -To airdrop SOL on Devnet or localhost, use the `solana airdrop` command. Note -that while on Devnet you are limited to 2 SOL per airdrop. - -```bash -solana airdrop 2 -``` - -As you develop and test programs in your local environment, you'll likely -encounter errors that are caused by: - -- Using the wrong keypair -- Not having enough SOL to deploy your program or perform a transaction -- Pointing to the wrong cluster - -The CLI commands we've covered so far should help you quickly resolve those -issues. - -## Develop Solana programs in your local environment - -While the Solana Playground is enormously helpful, it's hard to beat the -flexibility of your own local development environment. 
As you build more complex -programs, you may end up integrating them with one or more clients that are also -under development in your local environment. Testing between these programs and -clients is often simpler when you write, build, and deploy your programs -locally. - -### Create a new project - -To create a new Rust package to write a Solana program, you can use the -`cargo new --lib` command with the name of the new directory you'd like to -create. - -```bash -cargo new --lib -``` - -This command will create a new directory with the name you specified at the end -of the command. This new directory will contain a `Cargo.toml` manifest file -that describes the package. - -The manifest file contains metadata such as name, version, and dependencies -(crates). To write a Solana program, you’ll need to update the `Cargo.toml` file -to include `solana-program` as a dependency. You may also need to add the -`[lib]` and `crate-type` lines shown below. - -```rust -[package] -name = "" -version = "0.1.0" -edition = "2021" - -[features] -no-entrypoint = [] - -[dependencies] -solana-program = "~1.8.14" - -[lib] -crate-type = ["cdylib", "lib"] -``` - -At that point, you can start writing your program in the `src` folder. - -### Build and deploy - -When it comes time to build your Solana program, you can use the -`cargo build-bpf` command. - -```bash -cargo build-bpf -``` - -The output of this command will include instructions for a deploying your -program that look something like this: - -```text -To deploy this program: - $ solana program deploy /Users/James/Dev/Work/solana-hello-world-local/target/deploy/solana_hello_world_local.so -The program address will default to this keypair (override with --program-id): - /Users/James/Dev/Work/solana-hello-world-local/target/deploy/solana_hello_world_local-keypair.json -``` - -When you are ready to deploy the program, use the `solana program deploy` -command output from `cargo build-bpf`. This will deploy your program to the -cluster specified in your CLI configuration. - -```rust -solana program deploy -``` - -# Demo - -Let's practice by building and deploying the "Hello World!" program that we -created in -the [Hello World lesson](https://github.com/Unboxed-Software/solana-course/pull/content/hello-world-program.md). - -We'll do this all locally, including deploying to a local test validator. Before -we begin, make sure you've installed Rust and the Solana CLI. You can refer to -the instructions in the overview to get set up if you haven't already. - -### 1. Create a new Rust project - -Let's start by creating a new Rust project. Run the `cargo new --lib` command -below. Feel free to replace the directory name with your own. - -```bash -cargo new --lib solana-hello-world-local -``` - -Remember to update the `cargo.toml` file to include `solana-program` as a -dependency and the `crate-type` if isn't there already. - -```bash -[package] -name = "solana-hello-world-local" -version = "0.1.0" -edition = "2021" - -[dependencies] -solana-program = "~1.8.14" - -[lib] -crate-type = ["cdylib", "lib"] -``` - -### 2. Write your program - -Next, update `lib.rs` with the “Hello World!” program below. This program simply -prints “Hello, world!” to the program log when the program is invoked. 
- -```rust -use solana_program::{ - account_info::AccountInfo, - entrypoint, - entrypoint::ProgramResult, - pubkey::Pubkey, - msg -}; - -entrypoint!(process_instruction); - -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8] -) -> ProgramResult{ - msg!("Hello, world!"); - - Ok(()) -} -``` - -### 3. Run a local test validator - -With your program written, let's make sure our Solana CLI configuration points -to localhost by using the `solana config set --url` command. - -```bash -solana config set --url localhost -``` - -Next, check that the Solana CLI configuration has updated using the -`solana config get` command. - -```bash -solana config get -``` - -Finally, run a local test validator. In a separate terminal window, run the -`solana-test-validator` command. This is only necessary when our `RPC URL` is -set to localhost. - -```bash -solana-test-validator -``` - -### 4. Build and deploy - -We're now ready to build and deploy our program. Build the program by running -the `cargo build-bpf` command. - -```bash -cargo build-bpf -``` - -Now let's deploy our program. Run the `solana program deploy` command output -from `cargo build-bpf`. - -```bash -solana program deploy -``` - -The `solana program deploy` will output the `Program ID` for your program. You -can now look up deployed program on -[Solana Explorer](https://explorer.solana.com/?cluster=custom) (for localhost, -select “Custom RPC URL” as the cluster). - -### 5. View program logs - -Before we invoke our program, open a separate terminal and run the `solana logs` -command. This will allow use to view the program logs in the terminal. - -```bash -solana logs -``` - -With the test validator still running, try invoking your program using the -client-side script -[here](https://github.com/Unboxed-Software/solana-hello-world-client). - -Replace the program ID in `index.ts` with the one from the program you just -deployed, then run `npm install` followed by `npm start`. This will return a -Solana Explorer URL. Copy the URL into the browser to look up the transaction on -Solana Explorer and check that “Hello, world!” was printed to the program log. -Alternatively, you can view the program logs in the terminal where you ran the -`solana logs` command. - -And that's it! You've just created and deployed your first program from a local -development environment. - -# Challenge - -Now it’s your turn to build something independently. Try to create a new program -to print your own message to the program logs. This time deploy your program to -Devnet instead of localhost. - -Remember to update your `RPC URL` to Devnet using the `solana config set --url` -command. - -You can invoke the program using the same client-side script from the demo as -long as you update the `connection` and Solana Explorer URL to both point to -Devnet instead of localhost. - -```tsx -let connection = new web3.Connection(web3.clusterApiUrl("devnet")); -``` - -```tsx -console.log( - `Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, -); -``` - -You can also open a separate command line window and use the -`solana logs | grep " invoke" -A `. When -using `solana logs` on Devnet you must specify the program ID. Otherwise, the -`solana logs` command will return a constant stream of logs from Devnet. 
For -example, you would do the following to monitor invocations to the Token Program -and show the first 5 lines of logs for each invocation: - -```bash -solana logs | grep "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA invoke" -A 5 -``` diff --git a/content/courses/solana-course/content/nfts-with-metaplex.md b/content/courses/solana-course/content/nfts-with-metaplex.md deleted file mode 100644 index e29718a60..000000000 --- a/content/courses/solana-course/content/nfts-with-metaplex.md +++ /dev/null @@ -1,762 +0,0 @@ ---- -title: Create Solana NFTs With Metaplex -objectives: - - Explain NFTs and how they're represented on the Solana network - - Explain the role of Metaplex in the Solana NFT ecosystem - - Create and update NFTs using the Metaplex SDK - - Explain the basic functionality of the Token Metadata program, Candy Machine - program, and Sugar CLI as tools that assist in creating and distributing - NFTs on Solana ---- - -# TL;DR - -- **Non-Fungible Tokens (NFTs)** are represented on Solana as SPL Tokens with an - associated metadata account, 0 decimals, and a maximum supply of 1 -- **Metaplex** offers a collection of tools that simplify the creation and - distribution of NFTs on the Solana blockchain -- The **Token Metadata** program standardizes the process of attaching metadata - to SPL Tokens -- The **Metaplex SDK** is a tool that offers user-friendly APIs to assist - developers in utilizing the on-chain tools provided by Metaplex -- The **Candy Machine** program is an NFT distribution tool used to create and - mint NFTs from a collection -- **Sugar CLI** is a tool that simplifies the process of uploading - media/metadata files and creating a Candy Machine for a collection - -# Overview - -Solana Non-Fungible Tokens (NFTs) are SPL tokens created using the Token -program. These tokens, however, also have an additional metadata account -associated with each token mint. This allows for a wide variety of use cases for -tokens. You can effectively tokenize anything, from game inventory to art. - -In this lesson, we'll cover the basics of how NFTs are represented on Solana, -how to create and update them using the Metaplex SDK, and provide a brief -introduction to tools that can assist you in creating and distributing NFTs on -Solana at scale. - -## NFTs on Solana - -A Solana NFT is a non-divisible token with associated metadata. Further, the -token's mint has a maximum supply of 1. - -In other words, an NFT is a standard token from the Token Program but differs -from what you might think of as "standard tokens" in that it: - -1. Has 0 decimals so that it cannot be divided into parts -2. Comes from a token mint with supply of 1 so that only 1 of these tokens - exists -3. Comes from a token mint whose authority is set to `null` (to ensure that the - supply never changes) -4. Has an associated account that stores metadata - -While the first three points are features that can be achieved with the SPL -Token Program, the associated metadata requires some additional functionality. - -Typically, an NFT’s metadata has both an on-chain and off-chain component. The -on-chain metadata is stored in an account associated with the token mint. One of -its fields is URI that typically points to an off-chain JSON file (see -[this link](https://lsc6xffbdvalb5dvymf5gwjpeou7rr2btkoltutn5ij5irlpg3wa.arweave.net/XIXrlKEdQLD0dcML01kvI6n4x0GanLnSbeoT1EVvNuw) -as an example). The off-chain component stores additional data and a link to the -image. 
Permanent data storage systems such as Arweave are often used to store -the off-chain component of NFT metadata. - -Below is an example of the relationship between on-chain and off-chain metadata. -The on-chain metadata contains a URI field that points to an off-chain `.json` -file that stores the link to the image of the NFT and additional metadata. - -![Screenshot of Metadata](../assets/solana-nft-metaplex-metadata.png) - -## **Metaplex** - -[Metaplex](https://www.metaplex.com/) is an organization that provides a suite -of tools, like the [Metaplex SDK](https://docs.metaplex.com/sdks/js/), that -simplify the creation and distribution of NFTs on the Solana blockchain. These -tools cater to a wide range of use cases and allow you to easily manage the -entire NFT process of creating and minting an NFT collection. - -More specifically, the Metaplex SDK is designed to assist developers in -utilizing the on-chain tools offered by Metaplex. It offers a user-friendly API -that focuses on popular use cases and allows for easy integration with -third-party plugins. To learn more about the capabilities of the Metaplex SDK, -you can refer to the [README](https://github.com/metaplex-foundation/js#readme). - -One of the essential programs offered by Metaplex is the Token Metadata program. -The Token Metadata program standardizes the process of attaching metadata to SPL -Tokens. When creating an NFT with Metaplex, the Token Metadata program creates a -metadata account using a Program Derived Address (PDA) with the token mint as a -seed. This allows the metadata account for any NFT to be located -deterministically using the address of the token mint. To learn more about the -Token Metadata program, you can refer to the Metaplex -[documentation](https://docs.metaplex.com/programs/token-metadata/). - -In the following sections, we'll cover the basics of using the Metaplex SDK to -prepare assets, create NFTs, update NFTs, and associate an NFT with a broader -collection. - -### Metaplex instance - -A `Metaplex` instance serves as the entry point for accessing the Metaplex SDK -APIs. This instance accepts a connection used to communicate with the cluster. -Additionally, developers can customize the SDK's interactions by specifying an -"Identity Driver" and a "Storage Driver". - -The Identity Driver is effectively a keypair that can be used to sign -transactions, a requirement when creating an NFT. The Storage Driver is used to -specify the storage service you want to use for uploading assets. The -`bundlrStorage` driver is the default option and it uploads assets to Arweave, a -permanent and decentralized storage service. - -Below is an example of how you can set up the `Metaplex` instance for devnet. - -```tsx -import { - Metaplex, - keypairIdentity, - bundlrStorage, -} from "@metaplex-foundation/js"; -import { Connection, clusterApiUrl, Keypair } from "@solana/web3.js"; - -const connection = new Connection(clusterApiUrl("devnet")); -const wallet = Keypair.generate(); - -const metaplex = Metaplex.make(connection) - .use(keypairIdentity(wallet)) - .use( - bundlrStorage({ - address: "https://devnet.bundlr.network", - providerUrl: "https://api.devnet.solana.com", - timeout: 60000, - }), - ); -``` - -### Upload assets - -Before you can create an NFT, you need to prepare and upload any assets you plan -to associate with the NFT. While this doesn't have to be an image, most NFTs -have an image associated with them. 
- -Preparing and uploading an image involves converting the image to a buffer, -converting it to the Metaplex format using the `toMetaplexFile` function,, and -finally uploading it to the designated Storage Driver. - -The Metaplex SDK supports the creation of a new Metaplex file from either files -present on your local computer or those uploaded by a user through a browser. -You can do the former by using `fs.readFileSync` to read the image file, then -convert it into a Metaplex file using `toMetaplexFile`. Finally, use your -`Metaplex` instance to call `storage().upload(file)` to upload the file. The -function's return value will be the URI where the image was stored. - -```tsx -const buffer = fs.readFileSync("/path/to/image.png"); -const file = toMetaplexFile(buffer, "image.png"); - -const imageUri = await metaplex.storage().upload(file); -``` - -### Upload metadata - -After uploading an image, it's time to upload the off-chain JSON metadata using -the `nfts().uploadMetadata` function. This will return a URI where the JSON -metadata is stored. - -Remember, the off-chain portion of the metadata includes things like the image -URI as well as additional information like the name and description of the NFT. -While you can technically include anything you'd like in this JSON object, in -most cases you should follow the -[NFT standard](https://docs.metaplex.com/programs/token-metadata/token-standard#the-non-fungible-standard) -to ensure compatibility with wallets, programs, and applications. - -To create the metadata, use the `uploadMetadata` method provided by the SDK. -This method accepts a metadata object and returns a URI that points to the -uploaded metadata. - -```tsx -const { uri } = await metaplex.nfts().uploadMetadata({ - name: "My NFT", - description: "My description", - image: imageUri, -}); -``` - -### Create NFT - -After uploading the NFT's metadata, you can finally create the NFT on the -network. The Metaplex SDK's `create` method allows you to create a new NFT with -minimal configuration. This method will handle the creation of the mint account, -token account, metadata account, and the master edition account for you. The -data provided to this method will represent the on-chain portion of the NFT -metadata. You can explore the SDK to see all the other input that can be -optionally provided to this method. - -```tsx -const { nft } = await metaplex.nfts().create( - { - uri: uri, - name: "My NFT", - sellerFeeBasisPoints: 0, - }, - { commitment: "finalized" }, -); -``` - -This method returns an object containing information about the newly created -NFT. By default, the SDK sets the `isMutable` property to true, allowing for -updates to be made to the NFT's metadata. However, you can choose to set -`isMutable` to false, making the NFT's metadata immutable. - -### Update NFT - -If you've left `isMutable` as true, you may end up having a reason to update -your NFT's metadata. The SDK's `update` method allows you to update both the -on-chain and off-chain portions of the NFT's metadata. To update the off-chain -metadata, you'll need to repeat the steps of uploading a new image and metadata -URI as outlined in the previous steps, then provide the new metadata URI to this -method. This will change the URI that the on-chain metadata points to, -effectively updating the off-chain metadata as well. 
- -```tsx -const nft = await metaplex.nfts().findByMint({ mintAddress }); - -const { response } = await metaplex.nfts().update( - { - nftOrSft: nft, - name: "Updated Name", - uri: uri, - sellerFeeBasisPoints: 100, - }, - { commitment: "finalized" }, -); -``` - -Note that any fields you don't include in the call to `update` will stay the -same, by design. - -### Add NFT to Collection - -A -[Certified Collection](https://docs.metaplex.com/programs/token-metadata/certified-collections#introduction) -is a NFT that individual NFT's can belong to. Think of a large NFT collection -like Solana Monkey Business. If you look at an individual NFT's -[Metadata](https://explorer.solana.com/address/C18YQWbfwjpCMeCm2MPGTgfcxGeEDPvNaGpVjwYv33q1/metadata) -you will see a `collection` field with a `key` that point's to the -`Certified Collection` -[NFT](https://explorer.solana.com/address/SMBH3wF6baUj6JWtzYvqcKuj2XCKWDqQxzspY12xPND/). -Simply put, NFTs that are part of a collection are associated with another NFT -that represents the collection itself. - -In order to add an NFT to a collection, first the Collection NFT has to be -created. The process is the same as before, except you'll include one additional -field on our NFT Metadata: `isCollection`. This field tells the token program -that this NFT is a Collection NFT. - -```tsx -const { collectionNft } = await metaplex.nfts().create( - { - uri: uri, - name: "My NFT Collection", - sellerFeeBasisPoints: 0, - isCollection: true, - }, - { commitment: "finalized" }, -); -``` - -You then list the collection's Mint Address as the reference for the -`collection` field in our new Nft. - -```tsx -const { nft } = await metaplex.nfts().create( - { - uri: uri, - name: "My NFT", - sellerFeeBasisPoints: 0, - collection: collectionNft.mintAddress, - }, - { commitment: "finalized" }, -); -``` - -When you checkout the metadata on your newly created NFT, you should now see a -`collection` field like so: - -```JSON -"collection":{ - "verified": false, - "key": "SMBH3wF6baUj6JWtzYvqcKuj2XCKWDqQxzspY12xPND" -} -``` - -The last thing you need to do is verify the NFT. This effectively just flips the -`verified` field above to true, but it's incredibly important. This is what lets -consuming programs and apps know that your NFT is in fact part of the -collection. You can do this using the `verifyCollection` function: - -```tsx -await metaplex.nfts().verifyCollection({ - mintAddress: nft.address, - collectionMintAddress: collectionNft.address, - isSizedCollection: true, -}); -``` - -### Candy Machine - -When creating and distributing a bulk supply of NFT's, Metaplex makes it easy -with their -[Candy Machine](https://docs.metaplex.com/programs/candy-machine/overview) -program and [Sugar CLI](https://docs.metaplex.com/developer-tools/sugar/). - -Candy Machine is effectively a minting and distribution program to help launch -NFT collections. Sugar is a command line interface that helps you create a candy -machine, prepare assets, and create NFTs at scale. The steps covered above for -creating an NFT would be incredibly tedious to execute for thousands of NFTs in -one go. Candy Machine and Sugar solve this and help ensure a fair launch by -offering a number of safeguards. - -We won't cover these tools in-depth, but definitely check out how they work -together -[here](https://docs.metaplex.com/developer-tools/sugar/overview/introduction). 
- -To explore the full range of tools offered by Metaplex, you can view the -[Metaplex repository](https://github.com/metaplex-foundation/metaplex) on -GitHub. - -# Demo - -In this demo, we'll go through the steps to create an NFT using the Metaplex -SDK, update the NFT's metadata after the fact, then associate the NFT with a -collection. By the end, you will have a basic understanding of how to use the -Metaplex SDK interact with NFTs on Solana. - -### 1. Starter - -To begin, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-metaplex/tree/starter). - -The project contains two images in the `src` directory that we will be using for -the NFTs. - -Additionally, in the `index.ts` file, you will find the following code snippet -which includes sample data for the NFT we’ll be creating and updating. - -```tsx -interface NftData { - name: string; - symbol: string; - description: string; - sellerFeeBasisPoints: number; - imageFile: string; -} - -interface CollectionNftData { - name: string; - symbol: string; - description: string; - sellerFeeBasisPoints: number; - imageFile: string; - isCollection: boolean; - collectionAuthority: Signer; -} - -// example data for a new NFT -const nftData = { - name: "Name", - symbol: "SYMBOL", - description: "Description", - sellerFeeBasisPoints: 0, - imageFile: "solana.png", -}; - -// example data for updating an existing NFT -const updateNftData = { - name: "Update", - symbol: "UPDATE", - description: "Update Description", - sellerFeeBasisPoints: 100, - imageFile: "success.png", -}; - -async function main() { - // create a new connection to the cluster's API - const connection = new Connection(clusterApiUrl("devnet")); - - // initialize a keypair for the user - const user = await initializeKeypair(connection); - - console.log("PublicKey:", user.publicKey.toBase58()); -} -``` - -To install the necessary dependencies, run `npm install` in the command line. - -Next, execute the code by running `npm start`. This will create a new keypair, -write it to the `.env` file, and airdrop devnet SOL to the keypair. - -``` -Current balance is 0 -Airdropping 1 SOL... -New balance is 1 -PublicKey: GdLEz23xEonLtbmXdoWGStMst6C9o3kBhb7nf7A1Fp6F -Finished successfully -``` - -### 2. Set up Metaplex - -Before we start creating and updating NFTs, we need to set up the Metaplex -instance. Update the `main()` function with the following: - -```tsx -async function main() { - // create a new connection to the cluster's API - const connection = new Connection(clusterApiUrl("devnet")); - - // initialize a keypair for the user - const user = await initializeKeypair(connection); - - console.log("PublicKey:", user.publicKey.toBase58()); - - // metaplex set up - const metaplex = Metaplex.make(connection) - .use(keypairIdentity(user)) - .use( - bundlrStorage({ - address: "https://devnet.bundlr.network", - providerUrl: "https://api.devnet.solana.com", - timeout: 60000, - }), - ); -} -``` - -### 3. `uploadMetadata` helper function - -Next, lets create a helper function to handle the process of uploading an image -and metadata, and returning the metadata URI. This function will take in the -Metaplex instance and NFT data as input, and return the metadata URI as output. 
- -```tsx -// helper function to upload image and metadata -async function uploadMetadata( - metaplex: Metaplex, - nftData: NftData, -): Promise { - // file to buffer - const buffer = fs.readFileSync("src/" + nftData.imageFile); - - // buffer to metaplex file - const file = toMetaplexFile(buffer, nftData.imageFile); - - // upload image and get image uri - const imageUri = await metaplex.storage().upload(file); - console.log("image uri:", imageUri); - - // upload metadata and get metadata uri (off chain metadata) - const { uri } = await metaplex.nfts().uploadMetadata({ - name: nftData.name, - symbol: nftData.symbol, - description: nftData.description, - image: imageUri, - }); - - console.log("metadata uri:", uri); - return uri; -} -``` - -This function will read an image file, convert it to a buffer, then upload it to -get an image URI. It will then upload the NFT metadata, which includes the name, -symbol, description, and image URI, and get a metadata URI. This URI is the -off-chain metadata. This function will also log the image URI and metadata URI -for reference. - -### 5. `createNft` helper function - -Next, let's create a helper function to handle creating the NFT. This function -takes in the Metaplex instance, metadata URI and NFT data as inputs. It uses the -`create` method of the SDK to create the NFT, passing in the metadata URI, name, -seller fee, and symbol as parameters. - -```tsx -// helper function create NFT -async function createNft( - metaplex: Metaplex, - uri: string, - nftData: NftData, -): Promise { - const { nft } = await metaplex.nfts().create( - { - uri: uri, // metadata URI - name: nftData.name, - sellerFeeBasisPoints: nftData.sellerFeeBasisPoints, - symbol: nftData.symbol, - }, - { commitment: "finalized" }, - ); - - console.log( - `Token Mint: https://explorer.solana.com/address/${nft.address.toString()}?cluster=devnet`, - ); - - return nft; -} -``` - -The function `createNft` logs the token mint URL and returns the an `nft` object -containing information about the newly created NFT. The NFT will be minted to -the public key corresponding to the `user` used as the Identity Driver when -setting up the Metaplex instance. - -### 6. Create NFT - -Now that we have set up the Metaplex instance and created helper functions for -uploading metadata and creating NFTs, we can test these functions by creating an -NFT. In the `main()` function, call the `uploadMetadata` function to upload the -NFT data and get the URI for the metadata. Then, use the `createNft` function -and metadata URI to create an NFT. - -```tsx -async function main() { - ... - - // upload the NFT data and get the URI for the metadata - const uri = await uploadMetadata(metaplex, nftData) - - // create an NFT using the helper function and the URI from the metadata - const nft = await createNft(metaplex, uri, nftData) -} -``` - -Run `npm start` in the command line to execute the `main` function. You should -see output similar to the following: - -```tsx -Current balance is 1.770520342 -PublicKey: GdLEz23xEonLtbmXdoWGStMst6C9o3kBhb7nf7A1Fp6F -image uri: https://arweave.net/j5HcSX8qttSgJ_ZDLmbuKA7VGUo7ZLX-xODFU4LFYew -metadata uri: https://arweave.net/ac5fwNfRckuVMXiQW_EAHc-xKFCv_9zXJ-1caY08GFE -Token Mint: https://explorer.solana.com/address/QdK4oCUZ1zMroCd4vqndnTH7aPAsr8ApFkVeGYbvsFj?cluster=devnet -Finished successfully -``` - -Feel free to inspect the generated URIs for the image and metadata, as well as -view the NFT on the Solana explorer by visiting the URL provided in the output. - -### 7. 
`updateNftUri` helper function - -Next, let's create a helper function to handle updating an existing NFT's URI. -This function will take in the Metaplex instance, metadata URI, and mint address -of the NFT. It uses the `findByMint` method of the SDK to fetch the existing NFT -data using the mint address, and then uses the `update` method to update the -metadata with the new URI. Finally, it will log the token mint URL and -transaction signature for reference. - -```tsx -// helper function update NFT -async function updateNftUri( - metaplex: Metaplex, - uri: string, - mintAddress: PublicKey, -) { - // fetch NFT data using mint address - const nft = await metaplex.nfts().findByMint({ mintAddress }); - - // update the NFT metadata - const { response } = await metaplex.nfts().update( - { - nftOrSft: nft, - uri: uri, - }, - { commitment: "finalized" }, - ); - - console.log( - `Token Mint: https://explorer.solana.com/address/${nft.address.toString()}?cluster=devnet`, - ); - - console.log( - `Transaction: https://explorer.solana.com/tx/${response.signature}?cluster=devnet`, - ); -} -``` - -### 8. Update NFT - -To update an existing NFT, we first need to upload new metadata for the NFT and -get the new URI. In the `main()` function, call the `uploadMetadata` function -again to upload the updated NFT data and get the new URI for the metadata. Then, -we can use the `updateNftUri` helper function, passing in the Metaplex instance, -the new URI from the metadata, and the mint address of the NFT. The -`nft.address` is from the output of the `createNft` function. - -```tsx -async function main() { - ... - - // upload updated NFT data and get the new URI for the metadata - const updatedUri = await uploadMetadata(metaplex, updateNftData) - - // update the NFT using the helper function and the new URI from the metadata - await updateNftUri(metaplex, updatedUri, nft.address) -} -``` - -Run `npm start` in the command line to execute the `main` function. You should -see additional output similar to the following: - -```tsx -... -Token Mint: https://explorer.solana.com/address/6R9egtNxbzHr5ksnGqGNHXzKuKSgeXAbcrdRUsR1fkRM?cluster=devnet -Transaction: https://explorer.solana.com/tx/5VkG47iGmECrqD11zbF7psaVqFkA4tz3iZar21cWWbeySd66fTkKg7ni7jiFkLqmeiBM6GzhL1LvNbLh4Jh6ozpU?cluster=devnet -Finished successfully -``` - -You can also view the NFTs in Phantom wallet by importing the `PRIVATE_KEY` from -the .env file. - -### 9. Create an NFT collection - -Awesome, you now know how to create a single NFT and update it on the Solana -blockchain! But, how do you add it to a collection? - -First, let's create a helper function called `createCollectionNft`. Note that -it's very similar to `createNft`, but ensures that `isCollection` is set to true -and that the data matches the requirements for a collection. - -```tsx -async function createCollectionNft( - metaplex: Metaplex, - uri: string, - data: CollectionNftData, -): Promise { - const { nft } = await metaplex.nfts().create( - { - uri: uri, - name: data.name, - sellerFeeBasisPoints: data.sellerFeeBasisPoints, - symbol: data.symbol, - isCollection: true, - }, - { commitment: "finalized" }, - ); - - console.log( - `Collection Mint: https://explorer.solana.com/address/${nft.address.toString()}?cluster=devnet`, - ); - - return nft; -} -``` - -Next, we need to create the off-chain data for the collection. 
In `main` -_before_ the existing calls to `createNft`, add the following -`collectionNftData`: - -```tsx -const collectionNftData = { - name: "TestCollectionNFT", - symbol: "TEST", - description: "Test Description Collection", - sellerFeeBasisPoints: 100, - imageFile: "success.png", - isCollection: true, - collectionAuthority: user, -}; -``` - -Now, let's call `uploadMetadata` with the `collectionNftData` and then call -`createCollectionNft`. Again, do this _before_ the code that creates an NFT. - -```tsx -async function main() { - ... - - // upload data for the collection NFT and get the URI for the metadata - const collectionUri = await uploadMetadata(metaplex, collectionNftData) - - // create a collection NFT using the helper function and the URI from the metadata - const collectionNft = await createCollectionNft( - metaplex, - collectionUri, - collectionNftData - ) -} -``` - -This will return our collection's mint address so we can use it to assign NFTs -to the collection. - -### 10. Assign an NFT to a collection - -Now that we have a collection, let's change our existing code so that newly -created NFTs get added to the collection. First, let's modify our `createNft` -function so that the call to `nfts().create` includes the `collection` field. -Then, add code that calls `verifyCollection` to make it so the `verified` field -in the on-chain metadata is set to true. This is how consuming programs and apps -can know for sure that the NFT in fact belongs to the collection. - -```tsx -async function createNft( - metaplex: Metaplex, - uri: string, - nftData: NftData, -): Promise { - const { nft } = await metaplex.nfts().create( - { - uri: uri, // metadata URI - name: nftData.name, - sellerFeeBasisPoints: nftData.sellerFeeBasisPoints, - symbol: nftData.symbol, - }, - { commitment: "finalized" }, - ); - - console.log( - `Token Mint: https://explorer.solana.com/address/${nft.address.toString()}? cluster=devnet`, - ); - - //this is what verifies our collection as a Certified Collection - await metaplex.nfts().verifyCollection({ - mintAddress: nft.mint.address, - collectionMintAddress: collectionMint, - isSizedCollection: true, - }); - - return nft; -} -``` - -Now, run `npm start` and voila! If you follow the new nft link and look at the -Metadata tab you will see a `collection` field with your collection's mint -address listed. - -Congratulations! You've successfully learned how to use the Metaplex SDK to -create, update, and verify NFTs as part of a collection. That's everything you -need to build out your own collection for just about any use case. You could -build a TicketMaster competitor, revamp Costco's Membership Program, or even -digitize your school's Student ID system. The possibilities are endless! - -If you want to take a look at the final solution code you can find it on the -solution branch of the same -[repository](https://github.com/Unboxed-Software/solana-metaplex/tree/solution). - -# Challenge - -To deepen your understanding of the Metaplex tools, dive into the Metaplex -documentation and familiarize yourself with the various programs and tools -offered by Metaplex. For instance, you can delve into learning about the Candy -Machine program to understand its functionality. - -Once you have an understanding of how the the Candy Machine program works, put -your knowledge to the test by using the Sugar CLI to create a Candy Machine for -your own collection. 
This hands-on experience will not only reinforce your -understanding of the tools, but also boost your confidence in your ability to -use them effectively in the future. - -Have some fun with this! This will be your first independently created NFT -collection! With this, you'll complete Module 2. Hope you're feeling the -process! Feel free to share some quick feedback -[here](https://airtable.com/shrOsyopqYlzvmXSC?prefill_Module=Module%202) so that -we can continue to improve the course! diff --git a/content/courses/solana-course/content/owner-checks.md b/content/courses/solana-course/content/owner-checks.md deleted file mode 100644 index c310a0403..000000000 --- a/content/courses/solana-course/content/owner-checks.md +++ /dev/null @@ -1,680 +0,0 @@ ---- -title: Owner Checks -objectives: - - Explain the security risks associated with not performing appropriate owner - checks - - Implement owner checks using long-form Rust - - Use Anchor’s `Account<'info, T>` wrapper and an account type to automate - owner checks - - Use Anchor’s `#[account(owner = )]` constraint to explicitly define an - external program that should own an account ---- - -# TL;DR - -- Use **Owner Checks** to verify that accounts are owned by the expected - program. Without appropriate owner checks, accounts owned by unexpected - programs could be used in an instruction. -- To implement an owner check in Rust, simply check that an account’s owner - matches an expected program ID - -```rust -if ctx.accounts.account.owner != ctx.program_id { - return Err(ProgramError::IncorrectProgramId.into()); -} -``` - -- Anchor program account types implement the `Owner` trait which allows the - `Account<'info, T>` wrapper to automatically verify program ownership -- Anchor gives you the option to explicitly define the owner of an account if it - should be anything other than the currently executing program - -# Overview - -Owner checks are used to verify that an account passed into an instruction is -owned by an expected program. This prevents accounts owned by an unexpected -program from being used in an instruction. - -As a refresher, the `AccountInfo` struct contains the following fields. An owner -check refers to checking that the `owner` field in the `AccountInfo` matches an -expected program ID. - -```jsx -/// Account information -#[derive(Clone)] -pub struct AccountInfo<'a> { - /// Public key of the account - pub key: &'a Pubkey, - /// Was the transaction signed by this account's public key? - pub is_signer: bool, - /// Is the account writable? - pub is_writable: bool, - /// The lamports in the account. Modifiable by programs. - pub lamports: Rc>, - /// The data held in this account. Modifiable by programs. - pub data: Rc>, - /// Program that owns this account - pub owner: &'a Pubkey, - /// This account's data contains a loaded program (and is now read-only) - pub executable: bool, - /// The epoch at which this account will next owe rent - pub rent_epoch: Epoch, -} -``` - -### Missing owner check - -The example below shows an `admin_instruction` intended to be accessible only by -an `admin` account stored on an `admin_config` account. - -Although the instruction checks the `admin` account signed the transaction and -matches the `admin` field stored on the `admin_config` account, there is no -owner check to verify the `admin_config` account passed into the instruction is -owned by the executing program. 
- -Since the `admin_config` is unchecked as indicated by the `AccountInfo` type, a -fake `admin_config` account owned by a different program could be used in the -`admin_instruction`. This means that an attacker could create a program with an -`admin_config` whose data structure matches the `admin_config` of your program, -set their public key as the `admin` and pass their `admin_config` account into -your program. This would let them effectively spoof your program into thinking -that they are the authorized admin for your program. - -This simplified example only prints the `admin` to the program logs. However, -you can imagine how a missing owner check could allow fake accounts to exploit -an instruction. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); - -#[program] -pub mod owner_check { - use super::*; - ... - - pub fn admin_instruction(ctx: Context) -> Result<()> { - let account_data = ctx.accounts.admin_config.try_borrow_data()?; - let mut account_data_slice: &[u8] = &account_data; - let account_state = AdminConfig::try_deserialize(&mut account_data_slice)?; - - if account_state.admin != ctx.accounts.admin.key() { - return Err(ProgramError::InvalidArgument.into()); - } - msg!("Admin: {}", account_state.admin.to_string()); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Unchecked<'info> { - admin_config: AccountInfo<'info>, - admin: Signer<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -### Add owner check - -In vanilla Rust, you could solve this problem by comparing the `owner` field on -the account to the program ID. If they do not match, you would return an -`IncorrectProgramId` error. - -```rust -if ctx.accounts.admin_config.owner != ctx.program_id { - return Err(ProgramError::IncorrectProgramId.into()); -} -``` - -Adding an owner check prevents accounts owned by an unexpected program to be -passed in as the `admin_config` account. If a fake `admin_config` account was -used in the `admin_instruction`, then the transaction would fail. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); - -#[program] -pub mod owner_check { - use super::*; - ... - pub fn admin_instruction(ctx: Context) -> Result<()> { - if ctx.accounts.admin_config.owner != ctx.program_id { - return Err(ProgramError::IncorrectProgramId.into()); - } - - let account_data = ctx.accounts.admin_config.try_borrow_data()?; - let mut account_data_slice: &[u8] = &account_data; - let account_state = AdminConfig::try_deserialize(&mut account_data_slice)?; - - if account_state.admin != ctx.accounts.admin.key() { - return Err(ProgramError::InvalidArgument.into()); - } - msg!("Admin: {}", account_state.admin.to_string()); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Unchecked<'info> { - admin_config: AccountInfo<'info>, - admin: Signer<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -### Use Anchor’s `Account<'info, T>` - -Anchor can make this simpler with the `Account` type. - -`Account<'info, T>` is a wrapper around `AccountInfo` that verifies program -ownership and deserializes underlying data into the specified account type `T`. -This in turn allows you to use `Account<'info, T>` to easily validate ownership. - -For context, the `#[account]` attribute implements various traits for a data -structure representing an account. One of these is the `Owner` trait which -defines an address expected to own an account. 
The owner is set as the program -ID specified in the `declare_id!` macro. - -In the example below, `Account<'info, AdminConfig>` is used to validate the -`admin_config`. This will automatically perform the owner check and deserialize -the account data. Additionally, the `has_one` constraint is used to check that -the `admin` account matches the `admin` field stored on the `admin_config` -account. - -This way, you don’t need to clutter your instruction logic with owner checks. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); - -#[program] -pub mod owner_check { - use super::*; - ... - pub fn admin_instruction(ctx: Context) -> Result<()> { - msg!("Admin: {}", ctx.accounts.admin_config.admin.to_string()); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Checked<'info> { - #[account( - has_one = admin, - )] - admin_config: Account<'info, AdminConfig>, - admin: Signer<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -### Use Anchor’s `#[account(owner = )]` constraint - -In addition to the `Account` type, you can use an `owner` constraint. The -`owner` constraint allows you to define the program that should own an account -if it’s different from the currently executing one. This comes in handy if, for -example, you are writing an instruction that expects an account to be a PDA -derived from a different program. You can use the `seeds` and `bump` constraints -and define the `owner` to properly derive and verify the address of the account -passed in. - -To use the `owner` constraint, you’ll have to have access to the public key of -the program you expect to own an account. You can either pass the program in as -an additional account or hard-code the public key somewhere in your program. - -```rust -use anchor_lang::prelude::*; - -declare_id!("Cft4eTTrt4sJU4Ar35rUQHx6PSXfJju3dixmvApzhWws"); - -#[program] -pub mod owner_check { - use super::*; - ... - pub fn admin_instruction(ctx: Context) -> Result<()> { - msg!("Admin: {}", ctx.accounts.admin_config.admin.to_string()); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Checked<'info> { - #[account( - has_one = admin, - )] - admin_config: Account<'info, AdminConfig>, - admin: Signer<'info>, - #[account( - seeds = b"test-seed", - bump, - owner = token_program.key() - )] - pda_derived_from_another_program: AccountInfo<'info>, - token_program: Program<'info, Token> -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} -``` - -# Demo - -In this demo we’ll use two programs to demonstrate how a missing owner check -could allow a fake account to drain the tokens from a simplified token “vault” -account (note that this is very similar to the demo from the Signer -Authorization lesson). - -To help illustrate this, one program will be missing an account owner check on -the vault account it withdraws tokens to. - -The second program will be a direct clone of the first program created by a -malicious user to create an account identical to the first program’s vault -account. - -Without the owner check, this malicious user will be able to pass in the vault -account owned by their “faked” program and the original program will still -execute. - -### 1. Starter - -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-owner-checks/tree/starter). -The starter code includes two programs `clone` and `owner_check` and the -boilerplate setup for the test file. 
- -The `owner_check` program includes two instructions: - -- `initialize_vault` initializes a simplified vault account that stores the - addresses of a token account and an authority account -- `insecure_withdraw` withdraws tokens from the token account, but is missing an - owner check for the vault account - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Mint, Token, TokenAccount}; - -declare_id!("HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB"); - -#[program] -pub mod owner_check { - use super::*; - - pub fn initialize_vault(ctx: Context) -> Result<()> { - ctx.accounts.vault.token_account = ctx.accounts.token_account.key(); - ctx.accounts.vault.authority = ctx.accounts.authority.key(); - Ok(()) - } - - pub fn insecure_withdraw(ctx: Context) -> Result<()> { - let account_data = ctx.accounts.vault.try_borrow_data()?; - let mut account_data_slice: &[u8] = &account_data; - let account_state = Vault::try_deserialize(&mut account_data_slice)?; - - if account_state.authority != ctx.accounts.authority.key() { - return Err(ProgramError::InvalidArgument.into()); - } - - let amount = ctx.accounts.token_account.amount; - - let seeds = &[ - b"token".as_ref(), - &[*ctx.bumps.get("token_account").unwrap()], - ]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.token_account.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InitializeVault<'info> { - #[account( - init, - payer = authority, - space = 8 + 32 + 32, - )] - pub vault: Account<'info, Vault>, - #[account( - init, - payer = authority, - token::mint = mint, - token::authority = token_account, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - pub mint: Account<'info, Mint>, - #[account(mut)] - pub authority: Signer<'info>, - pub token_program: Program<'info, Token>, - pub system_program: Program<'info, System>, - pub rent: Sysvar<'info, Rent>, -} - -#[derive(Accounts)] -pub struct InsecureWithdraw<'info> { - /// CHECK: - pub vault: UncheckedAccount<'info>, - #[account( - mut, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - pub authority: Signer<'info>, -} - -#[account] -pub struct Vault { - token_account: Pubkey, - authority: Pubkey, -} -``` - -The `clone` program includes a single instruction: - -- `initialize_vault` initializes a “vault” account that mimics the vault account - of the `owner_check` program. It stores the address of the real vault’s token - account, but allows the malicious user to put their own authority account. 
- -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::TokenAccount; - -declare_id!("DUN7nniuatsMC7ReCh5eJRQExnutppN1tAfjfXFmGDq3"); - -#[program] -pub mod clone { - use super::*; - - pub fn initialize_vault(ctx: Context) -> Result<()> { - ctx.accounts.vault.token_account = ctx.accounts.token_account.key(); - ctx.accounts.vault.authority = ctx.accounts.authority.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InitializeVault<'info> { - #[account( - init, - payer = authority, - space = 8 + 32 + 32, - )] - pub vault: Account<'info, Vault>, - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub authority: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[account] -pub struct Vault { - token_account: Pubkey, - authority: Pubkey, -} -``` - -### 2. Test `insecure_withdraw` instruction - -The test file includes a test to invoke the `initialize_vault` instruction on -the `owner_check` program using the provider wallet as the `authority` and then -mints 100 tokens to the token account. - -The test file also includes a test to invoke the `initialize_vault` instruction -on the `clone` program to initialize a fake `vault` account storing the same -`tokenPDA` account, but a different `authority`. Note that no new tokens are -minted here. - -Let’s add a test to invoke the `insecure_withdraw` instruction. This test should -pass in the cloned vault and the fake authority. Since there is no owner check -to verify the `vaultClone` account is owned by the `owner_check` program, the -instruction’s data validation check will pass and show `walletFake` as a valid -authority. The tokens from the `tokenPDA` account will then be withdrawn to the -`withdrawDestinationFake` account. - -```tsx -describe("owner-check", () => { - ... - it("Insecure withdraw", async () => { - const tx = await program.methods - .insecureWithdraw() - .accounts({ - vault: vaultClone.publicKey, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) - -}) -``` - -Run `anchor test` to see that the `insecure_withdraw` completes successfully. - -```bash -owner-check - ✔ Initialize Vault (808ms) - ✔ Initialize Fake Vault (404ms) - ✔ Insecure withdraw (409ms) -``` - -Note that `vaultClone` deserializes successfully even though Anchor -automatically initializes new accounts with a unique 8 byte discriminator and -checks the discriminator when deserializing an account. This is because the -discriminator is a hash of the account type name. - -```rust -#[account] -pub struct Vault { - token_account: Pubkey, - authority: Pubkey, -} -``` - -Since both programs initialize identical accounts and both structs are named -`Vault`, the accounts have the same discriminator even though they are owned by -different programs. - -### 3. Add `secure_withdraw` instruction - -Let’s close up this security loophole. - -In the `lib.rs` file of the `owner_check` program add a `secure_withdraw` -instruction and a `SecureWithdraw` accounts struct. - -In the `SecureWithdraw` struct, let’s use `Account<'info, Vault>` to ensure that -an owner check is performed on the `vault` account. We’ll also use the `has_one` -constraint to check that the `token_account` and `authority` passed into the -instruction match the values stored on the `vault` account. 
- -```rust -#[program] -pub mod owner_check { - use super::*; - ... - - pub fn secure_withdraw(ctx: Context) -> Result<()> { - let amount = ctx.accounts.token_account.amount; - - let seeds = &[ - b"token".as_ref(), - &[*ctx.bumps.get("token_account").unwrap()], - ]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.token_account.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} -... - -#[derive(Accounts)] -pub struct SecureWithdraw<'info> { - #[account( - has_one = token_account, - has_one = authority - )] - pub vault: Account<'info, Vault>, - #[account( - mut, - seeds = [b"token"], - bump, - )] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - pub authority: Signer<'info>, -} -``` - -### 4. Test `secure_withdraw` instruction - -To test the `secure_withdraw` instruction, we’ll invoke the instruction twice. -First, we’ll invoke the instruction using the `vaultClone` account, which we -expect to fail. Then, we’ll invoke the instruction using the correct `vault` -account to check that the instruction works as intended. - -```tsx -describe("owner-check", () => { - ... - it("Secure withdraw, expect error", async () => { - try { - const tx = await program.methods - .secureWithdraw() - .accounts({ - vault: vaultClone.publicKey, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestinationFake, - authority: walletFake.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) - } - }) - - it("Secure withdraw", async () => { - await spl.mintTo( - connection, - wallet.payer, - mint, - tokenPDA, - wallet.payer, - 100 - ) - - await program.methods - .secureWithdraw() - .accounts({ - vault: vault.publicKey, - tokenAccount: tokenPDA, - withdrawDestination: withdrawDestination, - authority: wallet.publicKey, - }) - .rpc() - - const balance = await connection.getTokenAccountBalance(tokenPDA) - expect(balance.value.uiAmount).to.eq(0) - }) -}) -``` - -Run `anchor test` to see that the transaction using the `vaultClone` account -will now return an Anchor Error while the transaction using the `vault` account -completes successfully. - -```bash -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB invoke [1]', -'Program log: Instruction: SecureWithdraw', -'Program log: AnchorError caused by account: vault. Error Code: AccountOwnedByWrongProgram. Error Number: 3007. Error Message: The given account is owned by a different program than expected.', -'Program log: Left:', -'Program log: DUN7nniuatsMC7ReCh5eJRQExnutppN1tAfjfXFmGDq3', -'Program log: Right:', -'Program log: HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB', -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB consumed 5554 of 200000 compute units', -'Program HQYNznB3XTqxzuEqqKMAD9XkYE5BGrnv8xmkoDNcqHYB failed: custom program error: 0xbbf' -``` - -Here we see how using Anchor’s `Account<'info, T>` type can simplify the account -validation process to automate the ownership check. Additionally, note that -Anchor Errors can specify the account that causes the error (e.g. the third line -of the logs above say `AnchorError caused by account: vault`). 
This can be very -helpful when debugging. - -```bash -✔ Secure withdraw, expect error (78ms) -✔ Secure withdraw (10063ms) -``` - -That’s all you need to ensure you check the owner on an account! Like some other -exploits, it’s fairly simple to avoid but very important. Be sure to always -think through which accounts should be owned by which programs and ensure that -you add appropriate validation. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-owner-checks/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that proper owner -checks are performed on the accounts passed into each instruction. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. diff --git a/content/courses/solana-course/content/paging-ordering-filtering-data.md b/content/courses/solana-course/content/paging-ordering-filtering-data.md deleted file mode 100644 index 4b7efcf02..000000000 --- a/content/courses/solana-course/content/paging-ordering-filtering-data.md +++ /dev/null @@ -1,536 +0,0 @@ ---- -title: Page, Order, and Filter Custom Account Data -objectives: - - Page, order, and filter accounts - - Prefetch accounts without data - - Determine where in an account’s buffer layout specific data is stored - - Prefetch accounts with a subset of data that can be used to order accounts - - Fetch only accounts whose data matches specific criteria - - Fetch a subset of total accounts using `getMultipleAccounts` ---- - -# TL;DR - -- This lesson delves into some functionality of the RPC calls that we used in - the deserializing account data lesson -- To save on compute time, you can fetch a large number of accounts without - their data by filtering them to return just an array of public keys -- Once you have a filtered list of public keys, you can order them and fetch the - account data they belong to - -# Overview - -You may have noticed in the last lesson that while we could fetch and display a -list of account data, we didn’t have any granular control over how many accounts -to fetch or their order. In this lesson, we’ll learn about some configuration -options for the `getProgramAccounts` function that will enable things like -paging, ordering accounts, and filtering. - -## Use `dataSlice` to only fetch data you need - -Imagine the Movie Review app we worked on in past lessons having four million -movie reviews and that the average review is 500 bytes. That would make the -total download for all review accounts over 2GB. Definitely not something you -want to have your frontend download every time the page refreshes. - -Fortunately, the `getProgramAccounts` function that you use to get all of the -accounts takes a configuration object as argument. One of the configuration -options is `dataSlice` which lets you provide two things: - -- `offset` - the offset from the beginning of the data buffer to start the slice -- `length` - the number of bytes to return, starting from the provided offset - -When you include a `dataSlice` in the configuration object, the function will -only return the subset of the data buffer that you specified. - -### Paging Accounts - -One area this becomes helpful is with paging. 
If you want to have a list that -displays all accounts but there are so many accounts that you don’t want to pull -all the data at once, you can fetch all of the accounts with no data. You can -then map the result to a list of account keys whose data you can fetch only when -needed. - -```tsx -const accountsWithoutData = await connection.getProgramAccounts(programId, { - dataSlice: { offset: 0, length: 0 }, -}); - -const accountKeys = accountsWithoutData.map(account => account.pubkey); -``` - -With this list of keys, you can then fetch account data in “pages” using the -`getMultipleAccountsInfo` method: - -```tsx -const paginatedKeys = accountKeys.slice(0, 10); -const accountInfos = await connection.getMultipleAccountsInfo(paginatedKeys); -const deserializedObjects = accountInfos.map(accountInfo => { - // put logic to deserialize accountInfo.data here -}); -``` - -### Ordering Accounts - -The `dataSlice` option is also helpful when you need to order a list of accounts -while paging. You still don’t want to fetch all the data at once, but you do -need all of the keys and a way to order them up front. In this case, you need to -understand the layout of the account data and configure the data slice to only -be the data you need to use for ordering. - -For example, you might have an account that stores contact information like so: - -- `initialized` as a boolean -- `phoneNumber` as an unsigned, 64-bit integer -- `firstName` as a string -- `secondName` as a string - -If you want to order all of the account keys alphabetically based on the user’s -first name, you need to find out the offset where the name starts. The first -field, `initialized`, takes the first byte, then `phoneNumber` takes another 8, -so the `firstName` field starts at offset `1 + 8 = 9`. However, dynamic data -fields in borsh use the first 4 bytes to record the length of the data, so we -can skip an additional 4 bytes, making the offset 13. - -You then need to determine the length to make the data slice. Since the length -is variable, we can’t know for sure before fetching the data. But you can choose -a length that is large enough to cover most cases and short enough to not be too -much of a burden to fetch. 15 bytes is plenty for most first names, but would -result in a small enough download even with a million users. - -Once you’ve fetched accounts with the given data slice, you can use the `sort` -method to sort the array before mapping it to an array of public keys. - -```tsx -const accounts = await connection.getProgramAccounts(programId, { - dataSlice: { offset: 13, length: 15 }, -}); - -accounts.sort((a, b) => { - const lengthA = a.account.data.readUInt32LE(0); - const lengthB = b.account.data.readUInt32LE(0); - const dataA = a.account.data.slice(4, 4 + lengthA); - const dataB = b.account.data.slice(4, 4 + lengthB); - return dataA.compare(dataB); -}); - -const accountKeys = accounts.map(account => account.pubkey); -``` - -Note that in the snippet above we don’t compare the data as given. This is -because for dynamically sized types like strings, Borsh places an unsigned, -32-bit integer at the start to indicate the length of the data representing that -field. So to compare the first names directly, we need to get the length for -each, then create a data slice with a 4 byte offset and the proper length. - -## Use `filters` to only retrieve specific accounts - -Limiting the data received per account is great, but what if you only want to -return accounts that match a specific criteria rather than all of them? 
That’s -where the `filters` configuration option comes in. This option is an array that -can have objects matching the following: - -- `memcmp` - compares a provided series of bytes with program account data at a - particular offset. Fields: - - `offset` - the number to offset into program account data before comparing - data - - `bytes` - a base-58 encoded string representing the data to match; limited - to less than 129 bytes -- `dataSize` - compares the program account data length with the provided data - size - -These let you filter based on matching data and/or total data size. - -For example, you could search through a list of contacts by including a `memcmp` -filter: - -```tsx -async function fetchMatchingContactAccounts( - connection: web3.Connection, - search: string, -): Promise<(web3.AccountInfo | null)[]> { - const accounts = await connection.getProgramAccounts(programId, { - dataSlice: { offset: 0, length: 0 }, - filters: [ - { - memcmp: { - offset: 13, - bytes: bs58.encode(Buffer.from(search)), - }, - }, - ], - }); -} -``` - -Two things to note in the example above: - -1. We’re setting the offset to 13 because we determined previously that the - offset for `firstName` in the data layout is 9 and we want to additionally - skip the first 4 bytes indicating the length of the string. -2. We’re using a third party library `bs58` to perform base-58 encoding on the - search term. You can install it using `npm install bs58`. - -# Demo - -Remember that Movie Review app we worked on in the last two lessons? We’re going -to spice it up a little by paging the review list, ordering the reviews so they -aren’t so random, and adding some basic search functionality. No worries if -you’re just jumping into this lesson without having looked at the previous -ones - as long as you have the prerequisite knowledge, you should be able to -follow the demo without having worked in this specific project yet. - -![Screenshot of movie review frontend](../assets/movie-reviews-frontend.png) - -### **1. Download the starter code** - -If you didn’t complete the demo from the last lesson or just want to make sure -that you didn’t miss anything, you can download the -[starter code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-deserialize-account-data). - -The project is a fairly simple Next.js application. It includes the -`WalletContextProvider` we created in the Wallets lesson, a `Card` component for -displaying a movie review, a `MovieList` component that displays reviews in a -list, a `Form` component for submitting a new review, and a `Movie.ts` file that -contains a class definition for a `Movie` object. - -### 2. Add paging to the reviews - -First things first, let’s create a space to encapsulate the code for fetching -account data. Create a new file `MovieCoordinator.ts` and declare a -`MovieCoordinator` class. Then let’s move the `MOVIE_REVIEW_PROGRAM_ID` constant -from `MovieList` into this new file since we’ll be moving all references to it - -```tsx -const MOVIE_REVIEW_PROGRAM_ID = "CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN"; - -export class MovieCoordinator {} -``` - -Now we can use `MovieCoordinator` to create a paging implementation. A quick -note before we dive in: this will be as simple a paging implementation as -possible so that we can focus on the complex part of interacting with Solana -accounts. You can, and should, do better for a production application. 
- -With that out of the way, let’s create a static property `accounts` of type -`web3.PublicKey[]`, a static function -`prefetchAccounts(connection: web3.Connection)`, and a static function -`fetchPage(connection: web3.Connection, page: number, perPage: number): Promise`. -You’ll also need to import `@solana/web3.js` and `Movie`. - -```tsx -import * as web3 from "@solana/web3.js"; -import { Movie } from "../models/Movie"; - -const MOVIE_REVIEW_PROGRAM_ID = "CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN"; - -export class MovieCoordinator { - static accounts: web3.PublicKey[] = []; - - static async prefetchAccounts(connection: web3.Connection) {} - - static async fetchPage( - connection: web3.Connection, - page: number, - perPage: number, - ): Promise {} -} -``` - -The key to paging is to prefetch all the accounts without data. Let’s fill in -the body of `prefetchAccounts` to do this and set the retrieved public keys to -the static `accounts` property. - -```tsx -static async prefetchAccounts(connection: web3.Connection) { - const accounts = await connection.getProgramAccounts( - new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), - { - dataSlice: { offset: 0, length: 0 }, - } - ) - - this.accounts = accounts.map(account => account.pubkey) -} -``` - -Now, let’s fill in the `fetchPage` method. First, if the accounts haven’t been -prefetched yet, we’ll need to do that. Then, we can get the account public keys -that correspond to the requested page and call -`connection.getMultipleAccountsInfo`. Finally, we deserialize the account data -and return the corresponding `Movie` objects. - -```tsx -static async fetchPage(connection: web3.Connection, page: number, perPage: number): Promise { - if (this.accounts.length === 0) { - await this.prefetchAccounts(connection) - } - - const paginatedPublicKeys = this.accounts.slice( - (page - 1) * perPage, - page * perPage, - ) - - if (paginatedPublicKeys.length === 0) { - return [] - } - - const accounts = await connection.getMultipleAccountsInfo(paginatedPublicKeys) - - const movies = accounts.reduce((accum: Movie[], account) => { - const movie = Movie.deserialize(account?.data) - if (!movie) { - return accum - } - - return [...accum, movie] - }, []) - - return movies -} -``` - -With that done, we can reconfigure `MovieList` to use these methods. In -`MovieList.tsx`, add `const [page, setPage] = useState(1)` near the existing -`useState` calls. Then, update `useEffect` to call `MovieCoordinator.fetchPage` -instead of fetching the accounts inline. - -```tsx -const connection = new web3.Connection(web3.clusterApiUrl("devnet")); -const [movies, setMovies] = useState([]); -const [page, setPage] = useState(1); - -useEffect(() => { - MovieCoordinator.fetchPage(connection, page, 10).then(setMovies); -}, [page, search]); -``` - -Lastly, we need to add buttons to the bottom of the list for navigating to -different pages: - -```tsx -return ( -
-  <div>
-    {movies.map((movie, i) => (
-      <Card key={i} movie={movie} />
-    ))}
-    <Center>
-      <HStack mt={2} mb={8}>
-        {page > 1 && (
-          <Button onClick={() => setPage(page - 1)}>Previous</Button>
-        )}
-        {MovieCoordinator.accounts.length > page * 2 && (
-          <Button onClick={() => setPage(page + 1)}>Next</Button>
-        )}
-      </HStack>
-    </Center>
-  </div>
-); -``` - -At this point, you should be able to run the project and click between pages! - -### 3. Order reviews alphabetically by title - -If you look at the reviews, you might notice they aren’t in any specific order. -We can fix this by adding back just enough data into our data slice to help us -do some sorting. The various properties in the movie review data buffer are laid -out as follows - -- `initialized` - unsigned 8-bit integer; 1 byte -- `rating` - unsigned 8-bit integer; 1 byte -- `title` - string; unknown number of bytes -- `description` - string; unknown number of bytes - -Based on this, the offset we need to provide to the data slice to access `title` -is 2. The length, however, is indeterminate, so we can just provide what seems -to be a reasonable length. I’ll stick with 18 as that will cover the length of -most titles without fetching too much data every time. - -Once we’ve modified the data slice in `getProgramAccounts`, we then need to -actually sort the returned array. To do this, we need to compare the part of the -data buffer that actually corresponds to `title`. The first 4 bytes of a dynamic -field in Borsh are used to store the length of the field in bytes. So in any -given buffer `data` that is sliced the way we discussed above, the string -portion is `data.slice(4, 4 + data[0])`. - -Now that we’ve thought through this, let’s modify the implementation of -`prefetchAccounts` in `MovieCoordinator`: - -```tsx -static async prefetchAccounts(connection: web3.Connection, filters: AccountFilter[]) { - const accounts = await connection.getProgramAccounts( - new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), - { - dataSlice: { offset: 2, length: 18 }, - } - ) - - accounts.sort( (a, b) => { - const lengthA = a.account.data.readUInt32LE(0) - const lengthB = b.account.data.readUInt32LE(0) - const dataA = a.account.data.slice(4, 4 + lengthA) - const dataB = b.account.data.slice(4, 4 + lengthB) - return dataA.compare(dataB) - }) - - this.accounts = accounts.map(account => account.pubkey) -} -``` - -And just like that, you should be able to run the app and see the list of movie -reviews ordered alphabetically. - -### 4. Add search - -The last thing we’ll do to improve this app is to add some basic search -capability. Let’s add a `search` parameter to `prefetchAccounts` and reconfigure -the body of the function to use it. - -We can use the `filters` property of the `config` parameter of -`getProgramAccounts` to filter accounts by specific data. The offset to the -`title` fields is 2, but the first 4 bytes are the length of the title so the -actual offset to the string itself is 6. Remember that the bytes need to be base -58 encoded, so let’s install and import `bs58`. - -```tsx -import bs58 from 'bs58' - -... - -static async prefetchAccounts(connection: web3.Connection, search: string) { - const accounts = await connection.getProgramAccounts( - new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), - { - dataSlice: { offset: 2, length: 18 }, - filters: search === '' ? 
[] : [ - { - memcmp: - { - offset: 6, - bytes: bs58.encode(Buffer.from(search)) - } - } - ] - } - ) - - accounts.sort( (a, b) => { - const lengthA = a.account.data.readUInt32LE(0) - const lengthB = b.account.data.readUInt32LE(0) - const dataA = a.account.data.slice(4, 4 + lengthA) - const dataB = b.account.data.slice(4, 4 + lengthB) - return dataA.compare(dataB) - }) - - this.accounts = accounts.map(account => account.pubkey) -} -``` - -Now, add a `search` parameter to `fetchPage` and update its call to -`prefetchAccounts` to pass it along. We’ll also need to add a `reload` boolean -parameter to `fetchPage` so that we can force a refresh of the account -prefetching every time the search value changes. - -```tsx -static async fetchPage(connection: web3.Connection, page: number, perPage: number, search: string, reload: boolean = false): Promise { - if (this.accounts.length === 0 || reload) { - await this.prefetchAccounts(connection, search) - } - - const paginatedPublicKeys = this.accounts.slice( - (page - 1) * perPage, - page * perPage, - ) - - if (paginatedPublicKeys.length === 0) { - return [] - } - - const accounts = await connection.getMultipleAccountsInfo(paginatedPublicKeys) - - const movies = accounts.reduce((accum: Movie[], account) => { - const movie = Movie.deserialize(account?.data) - if (!movie) { - return accum - } - - return [...accum, movie] - }, []) - - return movies -} -``` - -With that in place, let’s update the code in `MovieList` to call this properly. - -First, add `const [search, setSearch] = useState('')` near the other `useState` -calls. Then update the call to `MovieCoordinator.fetchPage` in the `useEffect` -to pass the `search` parameter and to reload when `search !== ''`. - -```tsx -const connection = new web3.Connection(web3.clusterApiUrl("devnet")); -const [movies, setMovies] = useState([]); -const [page, setPage] = useState(1); -const [search, setSearch] = useState(""); - -useEffect(() => { - MovieCoordinator.fetchPage(connection, page, 2, search, search !== "").then( - setMovies, - ); -}, [page, search]); -``` - -Finally, add a search bar that will set the value of `search`: - -```tsx -return ( -
-  <div>
-    <Center>
-      <Input
-        onChange={event => setSearch(event.currentTarget.value)}
-        placeholder="Search"
-        w="97%"
-        mt={2}
-        mb={2}
-      />
-    </Center>
-    ...
-  </div>
-); -``` - -And that’s it! The app now has ordered reviews, paging, and search. - -That was a lot to digest, but you made it through. If you need to spend some -more time with the concepts, feel free to reread the sections that were most -challenging for you and/or have a look at the -[solution code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-paging-account-data). - -# Challenge - -Now it’s your turn to try and do this on your own. Using the Student Intros app -from last lesson, add paging, ordering alphabetically by name, and searching by -name. - -![Screenshot of Student Intros frontend](../assets/student-intros-frontend.png) - -1. You can build this from scratch or you can download the - [starter code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-deserialize-account-data) -2. Add paging to the project by prefetching accounts without data, then only - fetching the account data for each account when it’s needed. -3. Order the accounts displayed in the app alphabetically by name. -4. Add the ability to search through introductions by a student’s name. - -This is challenging. If you get stuck, feel free to reference the -[solution code](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-paging-account-data). -With this you complete Module 1! How was your experience? Feel free to share -some quick feedback -[here](https://airtable.com/shrOsyopqYlzvmXSC?prefill_Module=Module%201), so -that we can continue to improve the course! - -As always, get creative with these challenges and take them beyond the -instructions if you want! diff --git a/content/courses/solana-course/content/pda-sharing.md b/content/courses/solana-course/content/pda-sharing.md deleted file mode 100644 index 8bf7d5948..000000000 --- a/content/courses/solana-course/content/pda-sharing.md +++ /dev/null @@ -1,560 +0,0 @@ ---- -title: PDA Sharing -objectives: - - Explain the security risks associated with PDA sharing - - Derive PDAs that have discrete authority domains - - Use Anchor’s `seeds` and `bump` constraints to validate PDA accounts ---- - -# TL;DR - -- Using the same PDA for multiple authority domains opens your program up to the - possibility of users accessing data and funds that don't belong to them -- Prevent the same PDA from being used for multiple accounts by using seeds that - are user and/or domain-specific -- Use Anchor’s `seeds` and `bump` constraints to validate that a PDA is derived - using the expected seeds and bump - -# Overview - -PDA sharing refers to using the same PDA as a signer across multiple users or -domains. Especially when using PDAs for signing, it may seem appropriate to use -a global PDA to represent the program. However, this opens up the possibility of -account validation passing but a user being able to access funds, transfers, or -data not belonging to them. - -## Insecure global PDA - -In the example below, the `authority` of the `vault` account is a PDA derived -using the `mint` address stored on the `pool` account. This PDA is passed into -the instruction as the `authority` account to sign for the transfer tokens from -the `vault` to the `withdraw_destination`. - -Using the `mint` address as a seed to derive the PDA to sign for the `vault` is -insecure because multiple `pool` accounts could be created for the same `vault` -token account, but a different `withdraw_destination`. 
By using the `mint` as a -seed derive the PDA to sign for token transfers, any `pool` account could sign -for the transfer of tokens from a `vault` token account to an arbitrary -`withdraw_destination`. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod pda_sharing_insecure { - use super::*; - - pub fn withdraw_tokens(ctx: Context) -> Result<()> { - let amount = ctx.accounts.vault.amount; - let seeds = &[ctx.accounts.pool.mint.as_ref(), &[ctx.accounts.pool.bump]]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) - } -} - -#[derive(Accounts)] -pub struct WithdrawTokens<'info> { - #[account(has_one = vault, has_one = withdraw_destination)] - pool: Account<'info, TokenPool>, - vault: Account<'info, TokenAccount>, - withdraw_destination: Account<'info, TokenAccount>, - authority: AccountInfo<'info>, - token_program: Program<'info, Token>, -} - -impl<'info> WithdrawTokens<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.authority.to_account_info(), - }; - CpiContext::new(program, accounts) - } -} - -#[account] -pub struct TokenPool { - vault: Pubkey, - mint: Pubkey, - withdraw_destination: Pubkey, - bump: u8, -} -``` - -## Secure account specific PDA - -One approach to create an account specific PDA is to use the -`withdraw_destination` as a seed to derive the PDA used as the authority of the -`vault` token account. This ensures the PDA signing for the CPI in the -`withdraw_tokens` instruction is derived using the intended -`withdraw_destination` token account. In other words, tokens from a `vault` -token account can only be withdrawn to the `withdraw_destination` that was -originally initialized with the `pool` account. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod pda_sharing_secure { - use super::*; - - pub fn withdraw_tokens(ctx: Context) -> Result<()> { - let amount = ctx.accounts.vault.amount; - let seeds = &[ - ctx.accounts.pool.withdraw_destination.as_ref(), - &[ctx.accounts.pool.bump], - ]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) - } -} - -#[derive(Accounts)] -pub struct WithdrawTokens<'info> { - #[account(has_one = vault, has_one = withdraw_destination)] - pool: Account<'info, TokenPool>, - vault: Account<'info, TokenAccount>, - withdraw_destination: Account<'info, TokenAccount>, - authority: AccountInfo<'info>, - token_program: Program<'info, Token>, -} - -impl<'info> WithdrawTokens<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.authority.to_account_info(), - }; - CpiContext::new(program, accounts) - } -} - -#[account] -pub struct TokenPool { - vault: Pubkey, - mint: Pubkey, - withdraw_destination: Pubkey, - bump: u8, -} -``` - -## Anchor’s `seeds` and `bump` constraints - -PDAs can be used as both the address of an account and allow programs to sign -for the PDAs they own. 
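-
-On the client side (for example in tests), the same PDA can be derived with
-web3.js so you know which address to pass in. This is only a sketch; it reuses
-the placeholder program ID from the snippets above, and `withdrawDestination`
-stands in for whichever token account is used as the seed:
-
-```tsx
-import { PublicKey } from "@solana/web3.js";
-
-// placeholder program ID from the examples above
-const PROGRAM_ID = new PublicKey(
-  "Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS",
-);
-
-// derive the PDA the program signs with for a given withdraw destination
-function deriveAuthority(withdrawDestination: PublicKey): [PublicKey, number] {
-  return PublicKey.findProgramAddressSync(
-    [withdrawDestination.toBuffer()],
-    PROGRAM_ID,
-  );
-}
-```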
- -The example below uses a PDA derived using the `withdraw_destination` as both -the address of the `pool` account and owner of the `vault` token account. This -means that only the `pool` account associated with correct `vault` and -`withdraw_destination` can be used in the `withdraw_tokens` instruction. - -You can use Anchor’s `seeds` and `bump` constraints with the `#[account(...)]` -attribute to validate the `pool` account PDA. Anchor derives a PDA using the -`seeds` and `bump` specified and compare against the account passed into the -instruction as the `pool` account. The `has_one` constraint is used to further -ensure that only the correct accounts stored on the `pool` account are passed -into the instruction. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod pda_sharing_recommended { - use super::*; - - pub fn withdraw_tokens(ctx: Context) -> Result<()> { - let amount = ctx.accounts.vault.amount; - let seeds = &[ - ctx.accounts.pool.withdraw_destination.as_ref(), - &[ctx.accounts.pool.bump], - ]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) - } -} - -#[derive(Accounts)] -pub struct WithdrawTokens<'info> { - #[account( - has_one = vault, - has_one = withdraw_destination, - seeds = [withdraw_destination.key().as_ref()], - bump = pool.bump, - )] - pool: Account<'info, TokenPool>, - vault: Account<'info, TokenAccount>, - withdraw_destination: Account<'info, TokenAccount>, - token_program: Program<'info, Token>, -} - -impl<'info> WithdrawTokens<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.pool.to_account_info(), - }; - CpiContext::new(program, accounts) - } -} - -#[account] -pub struct TokenPool { - vault: Pubkey, - mint: Pubkey, - withdraw_destination: Pubkey, - bump: u8, -} -``` - -# Demo - -Let’s practice by creating a simple program to demonstrate how a PDA sharing can -allow an attacker to withdraw tokens that don’t belong to them. This demo -expands on the examples above by including the instructions to initialize the -required program accounts. - -### 1. Starter - -To get started, download the starter code on the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-pda-sharing/tree/starter). -The starter code includes a program with two instructions and the boilerplate -setup for the test file. - -The `initialize_pool` instruction initializes a new `TokenPool` that stores a -`vault`, `mint`, `withdraw_destination`, and `bump`. The `vault` is a token -account where the authority is set as a PDA derived using the `mint` address. - -The `withdraw_insecure` instruction will transfer tokens in the `vault` token -account to a `withdraw_destination` token account. - -However, as written the seeds used for signing are not specific to the vault's -withdraw destination, thus opening up the program to security exploits. Take a -minute to familiarize yourself with the code before continuing on. - -### 2. Test `withdraw_insecure` instruction - -The test file includes the code to invoke the `initialize_pool` instruction and -then mint 100 tokens to the `vault` token account. 
It also includes a test to -invoke the `withdraw_insecure` instruction using the intended `withdraw_destination`. This -shows that the instructions can be used as intended. - -After that, there are two more tests to show how the instructions are vulnerable -to exploitation. - -The first test invokes the `initialize_pool` instruction to create a "fake" -`pool` account using the same `vault` token account, but a different -`withdraw_destination`. - -The second test withdraws from this pool, effectively stealing funds from the -vault. - -```tsx -it("Insecure initialize allows pool to be initialized with wrong vault", async () => { - await program.methods - .initializePool(authInsecureBump) - .accounts({ - pool: poolInsecureFake.publicKey, - mint: mint, - vault: vaultInsecure.address, - withdrawDestination: withdrawDestinationFake, - payer: walletFake.publicKey, - }) - .signers([walletFake, poolInsecureFake]) - .rpc(); - - await new Promise(x => setTimeout(x, 1000)); - - await spl.mintTo( - connection, - wallet.payer, - mint, - vaultInsecure.address, - wallet.payer, - 100, - ); - - const account = await spl.getAccount(connection, vaultInsecure.address); - expect(Number(account.amount)).to.equal(100); -}); - -it("Insecure withdraw allows stealing from vault", async () => { - await program.methods - .withdrawInsecure() - .accounts({ - pool: poolInsecureFake.publicKey, - vault: vaultInsecure.address, - withdrawDestination: withdrawDestinationFake, - authority: authInsecure, - signer: walletFake.publicKey, - }) - .signers([walletFake]) - .rpc(); - - const account = await spl.getAccount(connection, vaultInsecure.address); - expect(Number(account.amount)).to.equal(0); -}); -``` - -Run `anchor test` to see that the transactions complete successfully and the -`withdraw_insecure` instruction allows the `vault` token account to be drained -to a fake withdraw destination stored on the fake `pool` account. - -### 3. Add `initialize_pool_secure` instruction - -Now let's add a new instruction to the program for securely initializing a pool. - -This new `initialize_pool_secure` instruction will initialize a `pool` account -as a PDA derived using the `withdraw_destination`. It will also initialize a -`vault` token account with the authority set as the `pool` PDA. - -```rust -pub fn initialize_pool_secure(ctx: Context<InitializePoolSecure>) -> Result<()> { - ctx.accounts.pool.vault = ctx.accounts.vault.key(); - ctx.accounts.pool.mint = ctx.accounts.mint.key(); - ctx.accounts.pool.withdraw_destination = ctx.accounts.withdraw_destination.key(); - ctx.accounts.pool.bump = *ctx.bumps.get("pool").unwrap(); - Ok(()) -} - -... - -#[derive(Accounts)] -pub struct InitializePoolSecure<'info> { - #[account( - init, - payer = payer, - space = 8 + 32 + 32 + 32 + 1, - seeds = [withdraw_destination.key().as_ref()], - bump - )] - pub pool: Account<'info, TokenPool>, - pub mint: Account<'info, Mint>, - #[account( - init, - payer = payer, - token::mint = mint, - token::authority = pool, - )] - pub vault: Account<'info, TokenAccount>, - pub withdraw_destination: Account<'info, TokenAccount>, - #[account(mut)] - pub payer: Signer<'info>, - pub system_program: Program<'info, System>, - pub token_program: Program<'info, Token>, - pub rent: Sysvar<'info, Rent>, -} -``` - -### 4. Add `withdraw_secure` instruction - -Next, add a `withdraw_secure` instruction. This instruction will withdraw tokens -from the `vault` token account to the `withdraw_destination`.
The `pool` account -is validated using the `seeds` and `bump` constraints to ensure the correct PDA -account is provided. The `has_one` constraints check that the correct `vault` -and `withdraw_destination` token accounts are provided. - -```rust -pub fn withdraw_secure(ctx: Context) -> Result<()> { - let amount = ctx.accounts.vault.amount; - let seeds = &[ - ctx.accounts.pool.withdraw_destination.as_ref(), - &[ctx.accounts.pool.bump], - ]; - token::transfer(ctx.accounts.transfer_ctx().with_signer(&[seeds]), amount) -} - -... - -#[derive(Accounts)] -pub struct WithdrawTokensSecure<'info> { - #[account( - has_one = vault, - has_one = withdraw_destination, - seeds = [withdraw_destination.key().as_ref()], - bump = pool.bump, - )] - pool: Account<'info, TokenPool>, - #[account(mut)] - vault: Account<'info, TokenAccount>, - #[account(mut)] - withdraw_destination: Account<'info, TokenAccount>, - token_program: Program<'info, Token>, -} - -impl<'info> WithdrawTokensSecure<'info> { - pub fn transfer_ctx(&self) -> CpiContext<'_, '_, '_, 'info, token::Transfer<'info>> { - let program = self.token_program.to_account_info(); - let accounts = token::Transfer { - from: self.vault.to_account_info(), - to: self.withdraw_destination.to_account_info(), - authority: self.pool.to_account_info(), - }; - CpiContext::new(program, accounts) - } -} -``` - -### 5. Test `withdraw_secure` instruction - -Finally, return to the test file to test the `withdraw_secure` instruction and -show that by narrowing the scope of our PDA signing authority, we've removed the -vulnerability. - -Before we write a test showing the vulnerability has been patched let's write a -test that simply shows that the initialization and withdraw instructions work as -expected: - -```typescript -it("Secure pool initialization and withdraw works", async () => { - const withdrawDestinationAccount = await getAccount( - provider.connection, - withdrawDestination, - ); - - await program.methods - .initializePoolSecure() - .accounts({ - pool: authSecure, - mint: mint, - vault: vaultRecommended.publicKey, - withdrawDestination: withdrawDestination, - }) - .signers([vaultRecommended]) - .rpc(); - - await new Promise(x => setTimeout(x, 1000)); - - await spl.mintTo( - connection, - wallet.payer, - mint, - vaultRecommended.publicKey, - wallet.payer, - 100, - ); - - await program.methods - .withdrawSecure() - .accounts({ - pool: authSecure, - vault: vaultRecommended.publicKey, - withdrawDestination: withdrawDestination, - }) - .rpc(); - - const afterAccount = await getAccount( - provider.connection, - withdrawDestination, - ); - - expect( - Number(afterAccount.amount) - Number(withdrawDestinationAccount.amount), - ).to.equal(100); -}); -``` - -Now, we'll test that the exploit no longer works. Since the `vault` authority is -the `pool` PDA derived using the intended `withdraw_destination` token account, -there should no longer be a way to withdraw to an account other than the -intended `withdraw_destination`. - -Add a test that shows you can't call `withdraw_secure` with the wrong withdrawal -destination. It can use the pool and vault created in the previous test. 
- -```typescript -it("Secure withdraw doesn't allow withdraw to wrong destination", async () => { - try { - await program.methods - .withdrawSecure() - .accounts({ - pool: authSecure, - vault: vaultRecommended.publicKey, - withdrawDestination: withdrawDestinationFake, - }) - .signers([walletFake]) - .rpc(); - - assert.fail("expected error"); - } catch (error) { - console.log(error.message); - expect(error); - } -}); -``` - -Lastly, since the `pool` account is a PDA derived using the -`withdraw_destination` token account, we can’t create a fake `pool` account -using the same PDA. Add one more test showing that the new -`initialize_pool_secure` instruction won't let an attacker put in the wrong -vault. - -```typescript -it("Secure pool initialization doesn't allow wrong vault", async () => { - try { - await program.methods - .initializePoolSecure() - .accounts({ - pool: authSecure, - mint: mint, - vault: vaultInsecure.address, - withdrawDestination: withdrawDestination, - }) - .signers([vaultRecommended]) - .rpc(); - - assert.fail("expected error"); - } catch (error) { - console.log(error.message); - expect(error); - } -}); -``` - -Run `anchor test` and to see that the new instructions don't allow an attacker -to withdraw from a vault that isn't theirs. - -``` - pda-sharing - ✔ Initialize Pool Insecure (981ms) - ✔ Withdraw (470ms) - ✔ Insecure initialize allows pool to be initialized with wrong vault (10983ms) - ✔ Insecure withdraw allows stealing from vault (492ms) - ✔ Secure pool initialization and withdraw works (2502ms) -unknown signer: ARjxAsEPj6YsAPKaBfd1AzUHbNPtAeUsqusAmBchQTfV - ✔ Secure withdraw doesn't allow withdraw to wrong destination -unknown signer: GJcHJLot3whbY1aC9PtCsBYk5jWoZnZRJPy5uUwzktAY - ✔ Secure pool initialization doesn't allow wrong vault -``` - -And that's it! Unlike some of the other security vulnerabilities we've -discussed, this one is more conceptual and can't be fixed by simply using a -particular Anchor type. You'll need to think through the architecture of your -program and ensure that you aren't sharing PDAs across different domains. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the same repository](https://github.com/Unboxed-Software/solana-pda-sharing/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and look for potential -vulnerabilities in its PDA structure. PDAs used for signing should be narrow and -focused on a single domain as much as possible. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. 
diff --git a/content/courses/solana-course/content/pda.md b/content/courses/solana-course/content/pda.md deleted file mode 100644 index 04dbdcba2..000000000 --- a/content/courses/solana-course/content/pda.md +++ /dev/null @@ -1,884 +0,0 @@ ---- -title: PDAs -objectives: - - Explain Program Derived Addresses (PDAs) - - Explain various use cases of PDAs - - Describe how PDAs are derived - - Use PDA derivations to locate and retrieve data ---- - -# TL;DR - -- A **Program Derived Address** (PDA) is derived from a **program ID** and an - optional list of **seeds** -- PDAs are owned and controlled by the program they are derived from -- PDA derivation provides a deterministic way to find data based on the seeds - used for the derivation -- Seeds can be used to map to the data stored in a separate PDA account -- A program can sign instructions on behalf of the PDAs derived from its ID - -# Overview - -## What is a Program Derived Address? - -Program Derived Addresses (PDAs) are account addresses designed to be signed for -by a program rather than a secret key. As the name suggests, PDAs are derived -using a program ID. Optionally, these derived accounts can also be found using -the ID along with a set of "seeds." More on this later, but these seeds will -play an important role in how we use PDAs for data storage and retrieval. - -PDAs serve two main functions: - -1. Provide a deterministic way to find the address of a program-owned account -2. Authorize the program from which a PDA was derived to sign on its behalf in - the same way a user may sign with their secret key - -In this lesson we'll focus on using PDAs to find and store data. We'll discuss -signing with a PDA more thoroughly in a future lesson where we cover Cross -Program Invocations (CPIs). - -## Finding PDAs - -PDAs are not technically created. Rather, they are _found_ or _derived_ based on -a program ID and one or more input seeds. - -Solana keypairs can be found on what is called the Ed25519 Elliptic Curve -(Ed25519). Ed25519 is a deterministic signature scheme that Solana uses to -generate corresponding public and secret keys. Together, we call these keypairs. - -Alternatively, PDAs are addresses that lie _off_ the Ed25519 curve. This -effectively means they are public keys _without_ a corresponding secret key. -This property of PDAs is essential for programs to be able to sign on their -behalf, but we'll cover that in a future lesson. - -To find a PDA within a Solana program, we'll use the `find_program_address` -function. This function takes an optional list of “seeds” and a program ID as -inputs, and then returns the PDA and a bump seed. - -```rust -let (pda, bump_seed) = Pubkey::find_program_address(&[user.key.as_ref(), user_input.as_bytes().as_ref(), "SEED".as_bytes()], program_id) -``` - -### Seeds - -“Seeds” are optional inputs used in the `find_program_address` function to -derive a PDA. For example, seeds can be any combination of public keys, inputs -provided by a user, or hardcoded values. A PDA can also be derived using only -the program ID and no additional seeds. Using seeds to find our PDAs, however, -allows us to create an arbitrary number of accounts that our program can own. - -While you, the developer, determine the seeds to pass into the -`find_program_address` function, the function itself provides an additional seed -called a "bump seed." The cryptographic function for deriving a PDA results in a -key that lies _on_ the Ed25519 curve about 50% of the time. 
In order to ensure -that the result _is not_ on the Ed25519 curve and therefore does not have a -secret key, the `find_program_address` function adds a numeric seed called a -bump seed. - -The function starts by using the value `255` as the bump seed, then checks to -see if the output is a valid PDA. If the result is not a valid PDA, the function -decreases the bump seed by 1 and tries again (`255`, `254`, `253`, et cetera). -Once a valid PDA is found, the function returns both the PDA and the bump that -was used to derive the PDA. - -### Under the hood of `find_program_address` - -Let's take a look at the source code for `find_program_address`. - -```rust - pub fn find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> (Pubkey, u8) { - Self::try_find_program_address(seeds, program_id) - .unwrap_or_else(|| panic!("Unable to find a viable program address bump seed")) -} -``` - -Under the hood, the `find_program_address` function passes the input `seeds` and -`program_id` to the `try_find_program_address` function. - -The `try_find_program_address` function then introduces the `bump_seed`. The -`bump_seed` is a `u8` variable with a value ranging between 0 to 255. Iterating -over a descending range starting from 255, a `bump_seed` is appended to the -optional input seeds which are then passed to the `create_program_address` -function. If the output of `create_program_address` is not a valid PDA, then the -`bump_seed` is decreased by 1 and the loop continues until a valid PDA is found. - -```rust -pub fn try_find_program_address(seeds: &[&[u8]], program_id: &Pubkey) -> Option<(Pubkey, u8)> { - - let mut bump_seed = [std::u8::MAX]; - for _ in 0..std::u8::MAX { - { - let mut seeds_with_bump = seeds.to_vec(); - seeds_with_bump.push(&bump_seed); - match Self::create_program_address(&seeds_with_bump, program_id) { - Ok(address) => return Some((address, bump_seed[0])), - Err(PubkeyError::InvalidSeeds) => (), - _ => break, - } - } - bump_seed[0] -= 1; - } - None - -} -``` - -The `create_program_address` function performs a set of hash operations over the -seeds and `program_id`. These operations compute a key, then verify if the -computed key lies on the Ed25519 elliptic curve or not. If a valid PDA is found -(i.e. an address that is _off_ the curve), then the PDA is returned. Otherwise, -an error is returned. - -```rust -pub fn create_program_address( - seeds: &[&[u8]], - program_id: &Pubkey, -) -> Result { - - let mut hasher = crate::hash::Hasher::default(); - for seed in seeds.iter() { - hasher.hash(seed); - } - hasher.hashv(&[program_id.as_ref(), PDA_MARKER]); - let hash = hasher.result(); - - if bytes_are_curve_point(hash) { - return Err(PubkeyError::InvalidSeeds); - } - - Ok(Pubkey::new(hash.as_ref())) - -} -``` - -In summary, the `find_program_address` function passes our input seeds and -`program_id` to the `try_find_program_address` function. The -`try_find_program_address` function adds a `bump_seed` (starting from 255) to -our input seeds, then calls the `create_program_address` function until a valid -PDA is found. Once found, both the PDA and the `bump_seed` are returned. - -Note that for the same input seeds, different valid bumps will generate -different valid PDAs. The `bump_seed` returned by `find_program_address` will -always be the first valid PDA found. Because the function starts with a -`bump_seed` value of 255 and iterates downwards to zero, the `bump_seed` that -ultimately gets returned will always be the largest valid 8-bit value possible. 
-This `bump_seed` is commonly referred to as the "_canonical bump_". To avoid -confusion, it's recommended to only use the canonical bump, and to _always -validate every PDA passed into your program._ - -One point to emphasize is that the `find_program_address` function only returns -a Program Derived Address and the bump seed used to derive it. The -`find_program_address` function does _not_ initialize a new account, nor is any -PDA returned by the function necessarily associated with an account that stores -data. - -## Use PDA accounts to store data - -Since programs themselves are stateless, program state is managed through -external accounts. Given that you can use seeds for mapping and that programs -can sign on their behalf, using PDA accounts to store data related to the -program is an extremely common design choice. While programs can invoke the -System Program to create non-PDA accounts and use those to store data as well, -PDAs tend to be the way to go. - -If you need a refresher on how to store data in PDAs, have a look at the -[Create a Basic Program, Part 2 - State Management lesson](./program-state-management.md). - -## Map to data stored in PDA accounts - -Storing data in PDA accounts is only half of the equation. You also need a way -to retrieve that data. We'll talk about two approaches: - -1. Creating a PDA "map" account that stores the addresses of various accounts - where data is stored -2. Strategically using seeds to locate the appropriate PDA accounts and retrieve - the necessary data - -### Map to data using PDA "map" accounts - -One approach to organizing data storage is to store clusters of relevant data in -their own PDAs and then to have a separate PDA account that stores a mapping of -where all of the data is. - -For example, you might have a note-taking app whose backing program uses random -seeds to generate PDA accounts and stores one note in each account. The program -would also have a single global PDA "map" account that stores a mapping of -users' public keys to the list of PDAs where their notes are stored. This map -account would be derived using a static seed, e.g. "GLOBAL_MAPPING". - -When it comes time to retrieve a user's notes, you could then look at the map -account, see the list of addresses associated with a user's public key, then -retrieve the account for each of those addresses. - -While such a solution is perhaps more approachable for traditional web -developers, it does come with some drawbacks that are particular to web3 -development. Since the size of the mapping stored in the map account will grow -over time, you'll either need to allocate more size than necessary to the -account when you first create it, or you'll need to reallocate space for it -every time a new note is created. On top of that, you'll eventually reach the -account size limit of 10 megabytes. - -You could mitigate this issue to some degree by creating a separate map account -for each user. For example, rather than having a single PDA map account for the -entire program, you would construct a PDA map account per user. Each of these -map accounts could be derived with the user's public key. The addresses for each -note could then be stored inside the corresponding user's map account. - -This approach reduces the size required for each map account, but ultimately -still adds an unnecessary requirement to the process: having to read the -information on the map account _before_ being able to find the accounts with the -relevant note data. 
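To make the trade-off concrete, here is a minimal sketch of what such a per-user map account could look like. The struct name, seed string, and field names below are hypothetical and only illustrate the idea described above; the important part is that the list of note addresses grows with every note, which is what forces the up-front over-allocation or later reallocation.

```rust
use borsh::{BorshDeserialize, BorshSerialize};
use solana_program::pubkey::Pubkey;

// Hypothetical per-user "map" account for the note-taking example.
// The account itself would live at a PDA derived from the user's public key, e.g.
// Pubkey::find_program_address(&[user.key.as_ref(), b"note_map"], program_id).
#[derive(BorshSerialize, BorshDeserialize)]
pub struct UserNoteMap {
    pub is_initialized: bool,
    // Addresses of the PDA accounts that hold this user's notes.
    // Every new note appends an entry, so the account either needs extra
    // space allocated up front or must be reallocated as notes are added.
    pub note_addresses: Vec<Pubkey>,
}
```

Reading a user's notes then becomes a two-step process: fetch and deserialize this map account first, then fetch each address in `note_addresses`, which is exactly the extra round trip the derivation-based approach below avoids.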
- -There may be times where using this approach makes sense for your application, -but we don't recommend it as your "go to" strategy. - -### Map to data using PDA derivation - -If you're strategic about the seeds you use to derive PDAs, you can embed the -required mappings into the seeds themselves. This is the natural evolution of -the note-taking app example we just discussed. If you start to use the note -creator's public key as a seed to create one map account per user, then why not -use both the creator's public key and some other known piece of information to -derive a PDA for the note itself? - -Now, without talking about it explicitly, we’ve been mapping seeds to accounts -this entire course. Think about the Movie Review program we've been built in -previous lessons. This program uses a review creator's public key and the title -of the movie they're reviewing to find the address that _should_ be used to -store the review. This approach lets the program create a unique address for -every new review while also making it easy to locate a review when needed. When -you want to find a user's review of "Spiderman," you know that it is stored at -the PDA account whose address can be derived using the user's public key and the -text "Spiderman" as seeds. - -```rust -let (pda, bump_seed) = Pubkey::find_program_address(&[ - initializer.key.as_ref(), - title.as_bytes().as_ref() - ], - program_id) -``` - -### Associated token account addresses - -Another practical example of this type of mapping is how associated token -account (ATA) addresses are determined. Tokens are often held in an ATA whose -address was derived using a wallet address and the mint address of a specific -token. The address for an ATA is found using the `get_associated_token_address` -function which takes a `wallet_address` and `token_mint_address` as inputs. - -```rust -let associated_token_address = get_associated_token_address(&wallet_address, &token_mint_address); -``` - -Under the hood, the associated token address is a PDA found using the -`wallet_address`, `token_program_id`, and `token_mint_address` as seeds. This -provides a deterministic way to find a token account associated with any wallet -address for a specific token mint. - -```rust -fn get_associated_token_address_and_bump_seed_internal( - wallet_address: &Pubkey, - token_mint_address: &Pubkey, - program_id: &Pubkey, - token_program_id: &Pubkey, -) -> (Pubkey, u8) { - Pubkey::find_program_address( - &[ - &wallet_address.to_bytes(), - &token_program_id.to_bytes(), - &token_mint_address.to_bytes(), - ], - program_id, - ) -} -``` - -The mappings between seeds and PDA accounts that you use will be highly -dependent on your specific program. While this isn't a lesson on system design -or architecture, it's worth calling out a few guidelines: - -- Use seeds that will be known at the time of PDA derivation -- Be thoughtful about what data is grouped together into a single account -- Be thoughtful about the data structure used within each account -- Simpler is usually better - -# Demo - -Let’s practice together with the Movie Review program we've worked on in -previous lessons. No worries if you’re just jumping into this lesson without -having done the previous lesson - it should be possible to follow along either -way. - -As a refresher, the Movie Review program lets users create movie reviews. These -reviews are stored in an account using a PDA derived with the initializer’s -public key and the title of the movie they are reviewing. 
- -Previously, we finished implementing the ability to update a movie review in a -secure manner. In this demo, we'll add the ability for users to comment on a -movie review. We'll use building this feature as an opportunity to work through -how to structure the comment storage using PDA accounts. - -### 1. Get the starter code - -To begin, you can find the starter -code [here](https://github.com/Unboxed-Software/solana-movie-program/tree/starter) -on the `starter` branch. - -If you've been following along with the Movie Review demos, you'll notice that -this is the program we’ve built out so far. Previously, we -used [Solana Playground](https://beta.solpg.io/) to write, build, and deploy our -code. In this lesson, we’ll build and deploy the program locally. - -Open the folder, then run `cargo-build-bpf` to build the program. The -`cargo-build-bpf` command will output instruction to deploy the program. - -```sh -cargo-build-bpf -``` - -Deploy the program by copying the output of `cargo-build-bpf` and running the -`solana program deploy` command. - -```sh -solana program deploy -``` - -You can test the program by using the movie review -[frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-update-reviews) -and updating the program ID with the one you’ve just deployed. Make sure you use -the `solution-update-reviews` branch. - -### 2. Plan out the account structure - -Adding comments means we need to make a few decisions about how to store the -data associated with each comment. The criteria for a good structure here are: - -- Not overly complicated -- Data is easily retrievable -- Each comment has something to link it to the review it's associated with - -To do this, we'll create two new account types: - -- Comment counter account -- Comment account - -There will be one comment counter account per review and one comment account per -comment. The comment counter account will be linked to a given review by using a -review's address as a seed for finding the comment counter PDA. It will also use -the static string "comment" as a seed. - -The comment account will be linked to a review in the same way. However, it will -not include the "comment" string as a seed and will instead use the _actual -comment count_ as a seed. That way the client can easily retrieve comments for a -given review by doing the following: - -1. Read the data on the comment counter account to determine the number of - comments on a review. -2. Where `n` is the total number of comments on the review, loop `n` times. Each - iteration of the loop will derive a PDA using the review address and the - current number as seeds. The result is `n` number of PDAs, each of which is - the address of an account that stores a comment. -3. Fetch the accounts for each of the `n` PDAs and read the data stored in each. - -This ensures that every one of our accounts can be deterministically retrieved -using data that is already known ahead of time. - -In order to implement these changes, we'll need to do the following: - -- Define structs to represent the comment counter and comment accounts -- Update the existing `MovieAccountState` to contain a discriminator (more on - this later) -- Add an instruction variant to represent the `add_comment` instruction -- Update the existing `add_movie_review` instruction processing function to - include creating the comment counter account -- Create a new `add_comment` instruction processing function - -### 3. 
Define `MovieCommentCounter` and `MovieComment` structs - -Recall that the `state.rs` file defines the structs our program uses to populate -the data field of a new account. - -We’ll need to define two new structs to enable commenting. - -1. `MovieCommentCounter` - to store a counter for the number of comments - associated with a review -2. `MovieComment` - to store data associated with each comment - -To start, let’s define the structs we’ll be using for our program. Note that we -are adding a `discriminator` field to each struct, including the existing -`MovieAccountState`. Since we now have multiple account types, we need a way to -only fetch the account type we need from the client. This discriminator is a -string that can be used to filter through accounts when we fetch our program -accounts. - -```rust -#[derive(BorshSerialize, BorshDeserialize)] -pub struct MovieAccountState { - pub discriminator: String, - pub is_initialized: bool, - pub reviewer: Pubkey, - pub rating: u8, - pub title: String, - pub description: String, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct MovieCommentCounter { - pub discriminator: String, - pub is_initialized: bool, - pub counter: u64 -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct MovieComment { - pub discriminator: String, - pub is_initialized: bool, - pub review: Pubkey, - pub commenter: Pubkey, - pub comment: String, - pub count: u64 -} - -impl Sealed for MovieAccountState {} - -impl IsInitialized for MovieAccountState { - fn is_initialized(&self) -> bool { - self.is_initialized - } -} - -impl IsInitialized for MovieCommentCounter { - fn is_initialized(&self) -> bool { - self.is_initialized - } -} - -impl IsInitialized for MovieComment { - fn is_initialized(&self) -> bool { - self.is_initialized - } -} -``` - -Since we've added a new `discriminator` field to our existing struct, the -account size calculation needs to change. Let's use this as an opportunity to -clean up some of our code a bit. We'll add an implementation for each of the -three structs above that adds a constant `DISCRIMINATOR` and either a constant -`SIZE` or function `get_account_size` so we can quickly get the size needed when -initializing an account. - -```rust -impl MovieAccountState { - pub const DISCRIMINATOR: &'static str = "review"; - - pub fn get_account_size(title: String, description: String) -> usize { - return (4 + MovieAccountState::DISCRIMINATOR.len()) - + 1 - + 1 - + (4 + title.len()) - + (4 + description.len()); - } -} - -impl MovieCommentCounter { - pub const DISCRIMINATOR: &'static str = "counter"; - pub const SIZE: usize = (4 + MovieCommentCounter::DISCRIMINATOR.len()) + 1 + 8; -} - -impl MovieComment { - pub const DISCRIMINATOR: &'static str = "comment"; - - pub fn get_account_size(comment: String) -> usize { - return (4 + MovieComment::DISCRIMINATOR.len()) + 1 + 32 + 32 + (4 + comment.len()) + 8; - } -} -``` - -Now everywhere we need the discriminator or account size we can use this -implementation and not risk unintentional typos. - -### 4. Create `AddComment` instruction - -Recall that the `instruction.rs` file defines the instructions our program will -accept and how to deserialize the data for each. We need to add a new -instruction variant for adding comments. Let’s start by adding a new variant -`AddComment` to the `MovieInstruction` enum. 
- -```rust -pub enum MovieInstruction { - AddMovieReview { - title: String, - rating: u8, - description: String - }, - UpdateMovieReview { - title: String, - rating: u8, - description: String - }, - AddComment { - comment: String - } -} -``` - -Next, let's create a `CommentPayload` struct to represent the instruction data -associated with this new instruction. Most of the data we'll include in the -account are public keys associated with accounts passed into the program, so the -only thing we actually need here is a single field to represent the comment -text. - -```rust -#[derive(BorshDeserialize)] -struct CommentPayload { - comment: String -} -``` - -Now let’s update how we unpack the instruction data. Notice that we’ve moved the -deserialization of instruction data into each matching case using the associated -payload struct for each instruction. - -```rust -impl MovieInstruction { - pub fn unpack(input: &[u8]) -> Result { - let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?; - Ok(match variant { - 0 => { - let payload = MovieReviewPayload::try_from_slice(rest).unwrap(); - Self::AddMovieReview { - title: payload.title, - rating: payload.rating, - description: payload.description } - }, - 1 => { - let payload = MovieReviewPayload::try_from_slice(rest).unwrap(); - Self::UpdateMovieReview { - title: payload.title, - rating: payload.rating, - description: payload.description - } - }, - 2 => { - let payload = CommentPayload::try_from_slice(rest).unwrap(); - Self::AddComment { - comment: payload.comment - } - } - _ => return Err(ProgramError::InvalidInstructionData) - }) - } -} -``` - -Lastly, let's update the `process_instruction` function in `processor.rs` to use -the new instruction variant we've created. - -In `processor.rs`, bring into scope the new structs from `state.rs`. - -```rust -use crate::state::{MovieAccountState, MovieCommentCounter, MovieComment}; -``` - -Then in `process_instruction` let’s match our deserialized `AddComment` -instruction data to the `add_comment` function we’ll be implementing shortly. - -```rust -pub fn process_instruction( - program_id: &Pubkey, - accounts: &[AccountInfo], - instruction_data: &[u8] -) -> ProgramResult { - let instruction = MovieInstruction::unpack(instruction_data)?; - match instruction { - MovieInstruction::AddMovieReview { title, rating, description } => { - add_movie_review(program_id, accounts, title, rating, description) - }, - MovieInstruction::UpdateMovieReview { title, rating, description } => { - update_movie_review(program_id, accounts, title, rating, description) - }, - - MovieInstruction::AddComment { comment } => { - add_comment(program_id, accounts, comment) - } - } -} -``` - -### 5. Update `add_movie_review` to create comment counter account - -Before we implement the `add_comment` function, we need to update the -`add_movie_review` function to create the review's comment counter account. - -Remember that this account will keep track of the total number of comments that -exist for an associated review. It's address will be a PDA derived using the -movie review address and the word “comment” as seeds. Note that how we store the -counter is simply a design choice. We could also add a “counter” field to the -original movie review account. - -Within the `add_movie_review` function, let’s add a `pda_counter` to represent -the new counter account we’ll be initializing along with the movie review -account. 
This means we now expect four accounts to be passed into -the `add_movie_review` function through the `accounts` argument. - -```rust -let account_info_iter = &mut accounts.iter(); - -let initializer = next_account_info(account_info_iter)?; -let pda_account = next_account_info(account_info_iter)?; -let pda_counter = next_account_info(account_info_iter)?; -let system_program = next_account_info(account_info_iter)?; -``` - -Next, there's a check to make sure `total_len` is less than 1000 bytes, but -`total_len` is no longer accurate since we added the discriminator. Let's -replace `total_len` with a call to `MovieAccountState::get_account_size`: - -```rust -let account_len: usize = 1000; - -if MovieAccountState::get_account_size(title.clone(), description.clone()) > account_len { - msg!("Data length is larger than 1000 bytes"); - return Err(ReviewError::InvalidDataLength.into()); -} -``` - -Note that this also needs to be updated in the `update_movie_review` function -for that instruction to work properly. - -Once we’ve initialized the review account, we’ll also need to update the -`account_data` with the new fields we specified in the `MovieAccountState` -struct. - -```rust -account_data.discriminator = MovieAccountState::DISCRIMINATOR.to_string(); -account_data.reviewer = *initializer.key; -account_data.title = title; -account_data.rating = rating; -account_data.description = description; -account_data.is_initialized = true; -``` - -Finally, let’s add the logic to initialize the counter account within the -`add_movie_review` function. This means: - -1. Calculating the rent exemption amount for the counter account -2. Deriving the counter PDA using the review address and the string "comment" as - seeds -3. Invoking the system program to create the account -4. Setting the starting counter value -5. Serializing the account data and returning from the function - -All of this should be added to the end of the `add_movie_review` function before -the `Ok(())`. - -```rust -msg!("create comment counter"); -let rent = Rent::get()?; -let counter_rent_lamports = rent.minimum_balance(MovieCommentCounter::SIZE); - -let (counter, counter_bump) = - Pubkey::find_program_address(&[pda.as_ref(), "comment".as_ref()], program_id); -if counter != *pda_counter.key { - msg!("Invalid seeds for PDA"); - return Err(ProgramError::InvalidArgument); -} - -invoke_signed( - &system_instruction::create_account( - initializer.key, - pda_counter.key, - counter_rent_lamports, - MovieCommentCounter::SIZE.try_into().unwrap(), - program_id, - ), - &[ - initializer.clone(), - pda_counter.clone(), - system_program.clone(), - ], - &[&[pda.as_ref(), "comment".as_ref(), &[counter_bump]]], -)?; -msg!("comment counter created"); - -let mut counter_data = - try_from_slice_unchecked::<MovieCommentCounter>(&pda_counter.data.borrow()).unwrap(); - -msg!("checking if counter account is already initialized"); -if counter_data.is_initialized() { - msg!("Account already initialized"); - return Err(ProgramError::AccountAlreadyInitialized); -} - -counter_data.discriminator = MovieCommentCounter::DISCRIMINATOR.to_string(); -counter_data.counter = 0; -counter_data.is_initialized = true; -msg!("comment count: {}", counter_data.counter); -counter_data.serialize(&mut &mut pda_counter.data.borrow_mut()[..])?; -``` - -Now when a new review is created, two accounts are initialized: - -1. The first is the review account that stores the contents of the review. This - is unchanged from the version of the program we started with. -2.
The second account stores the counter for comments - -### 6. Implement `add_comment` - -Finally, let’s implement our `add_comment` function to create new comment -accounts. - -When a new comment is created for a review, we will increment the count on the -comment counter PDA account and derive the PDA for the comment account using the -review address and current count. - -Like in other instruction processing functions, we'll start by iterating through -accounts passed into the program. Then before we do anything else we need to -deserialize the counter account so we have access to the current comment count: - -```rust -pub fn add_comment( - program_id: &Pubkey, - accounts: &[AccountInfo], - comment: String -) -> ProgramResult { - msg!("Adding Comment..."); - msg!("Comment: {}", comment); - - let account_info_iter = &mut accounts.iter(); - - let commenter = next_account_info(account_info_iter)?; - let pda_review = next_account_info(account_info_iter)?; - let pda_counter = next_account_info(account_info_iter)?; - let pda_comment = next_account_info(account_info_iter)?; - let system_program = next_account_info(account_info_iter)?; - - let mut counter_data = try_from_slice_unchecked::(&pda_counter.data.borrow()).unwrap(); - - Ok(()) -} -``` - -Now that we have access to the counter data, we can continue with the remaining -steps: - -1. Calculate the rent exempt amount for the new comment account -2. Derive the PDA for the comment account using the review address and the - current comment count as seeds -3. Invoke the System Program to create the new comment account -4. Set the appropriate values to the newly created account -5. Serialize the account data and return from the function - -```rust -pub fn add_comment( - program_id: &Pubkey, - accounts: &[AccountInfo], - comment: String -) -> ProgramResult { - msg!("Adding Comment..."); - msg!("Comment: {}", comment); - - let account_info_iter = &mut accounts.iter(); - - let commenter = next_account_info(account_info_iter)?; - let pda_review = next_account_info(account_info_iter)?; - let pda_counter = next_account_info(account_info_iter)?; - let pda_comment = next_account_info(account_info_iter)?; - let system_program = next_account_info(account_info_iter)?; - - let mut counter_data = try_from_slice_unchecked::(&pda_counter.data.borrow()).unwrap(); - - let account_len = MovieComment::get_account_size(comment.clone()); - - let rent = Rent::get()?; - let rent_lamports = rent.minimum_balance(account_len); - - let (pda, bump_seed) = Pubkey::find_program_address(&[pda_review.key.as_ref(), counter_data.counter.to_be_bytes().as_ref(),], program_id); - if pda != *pda_comment.key { - msg!("Invalid seeds for PDA"); - return Err(ReviewError::InvalidPDA.into()) - } - - invoke_signed( - &system_instruction::create_account( - commenter.key, - pda_comment.key, - rent_lamports, - account_len.try_into().unwrap(), - program_id, - ), - &[commenter.clone(), pda_comment.clone(), system_program.clone()], - &[&[pda_review.key.as_ref(), counter_data.counter.to_be_bytes().as_ref(), &[bump_seed]]], - )?; - - msg!("Created Comment Account"); - - let mut comment_data = try_from_slice_unchecked::(&pda_comment.data.borrow()).unwrap(); - - msg!("checking if comment account is already initialized"); - if comment_data.is_initialized() { - msg!("Account already initialized"); - return Err(ProgramError::AccountAlreadyInitialized); - } - - comment_data.discriminator = MovieComment::DISCRIMINATOR.to_string(); - comment_data.review = *pda_review.key; - comment_data.commenter = 
*commenter.key; - comment_data.comment = comment; - comment_data.is_initialized = true; - comment_data.serialize(&mut &mut pda_comment.data.borrow_mut()[..])?; - - msg!("Comment Count: {}", counter_data.counter); - counter_data.counter += 1; - counter_data.serialize(&mut &mut pda_counter.data.borrow_mut()[..])?; - - Ok(()) -} -``` - -### 7. Build and deploy - -We're ready to build and deploy our program! - -Build the updated program by running `cargo-build-bpf`. Then deploy the program -by running the `solana program deploy` command printed to the console. - -You can test your program by submitting a transaction with the right instruction -data. You can create your own script or feel free to use -[this frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-add-comments). -Be sure to use the `solution-add-comments` branch and replace the -`MOVIE_REVIEW_PROGRAM_ID` in `utils/constants.ts` with your program's ID or the -frontend won't work with your program. - -Keep in mind that we made breaking changes to the review accounts (i.e. adding a -discriminator). If you were to use the same program ID that you've used -previously when deploying this program, none of the reviews you created -previously will show on this frontend due to a data mismatch. - -If you need more time with this project to feel comfortable with these concepts, -have a look at -the [solution code](https://github.com/Unboxed-Software/solana-movie-program/tree/solution-add-comments) -before continuing. Note that the solution code is on the `solution-add-comments` -branch of the linked repository. - -# Challenge - -Now it’s your turn to build something independently! Go ahead and work with the -Student Intro program that we've used in past lessons. The Student Intro program -is a Solana program that lets students introduce themselves. This program takes -a user's name and a short message as the `instruction_data` and creates an -account to store the data on-chain. For this challenge you should: - -1. Add an instruction allowing other users to reply to an intro -2. Build and deploy the program locally - -If you haven't been following along with past lessons or haven't saved your work -from before, feel free to use the starter code on the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-student-intro-program/tree/starter). - -Try to do this independently if you can! If you get stuck though, feel free to -reference the -[solution code](https://github.com/Unboxed-Software/solana-student-intro-program/tree/solution-add-replies). -Note that the solution code is on the `solution-add-replies` branch and that -your code may look slightly different. diff --git a/content/courses/solana-course/content/program-security.md b/content/courses/solana-course/content/program-security.md deleted file mode 100644 index 0d153eb71..000000000 --- a/content/courses/solana-course/content/program-security.md +++ /dev/null @@ -1,936 +0,0 @@ ---- -title: Create a Basic Program, Part 3 - Basic Security and Validation -objectives: - - Explain the importance of "thinking like an attacker" - - Understand basic security practices - - Perform owner checks - - Perform signer checks - - Validate accounts passed into the program - - Perform basic data validation ---- - -# TL;DR - -- **Thinking like an attacker** means asking "How do I break this?" -- Perform **owner checks** to ensure that the provided account is owned by the - public key you expect, e.g. 
ensuring that an account you expect to be a PDA is - owned by `program_id` -- Perform **signer checks** to ensure that any account modification has been - signed by the right party or parties -- **Account validation** entails ensuring that provided accounts are the - accounts you expect them to be, e.g. deriving PDAs with the expected seeds to - make sure the address matches the provided account -- **Data validation** entails ensuring that any provided data meets the criteria - required by the program - -# Overview - -In the last two lessons we worked through building a Movie Review program -together. The end result is pretty cool! It's exciting to get something working -in a new development environment. - -Proper program development, however, doesn't end at "get it working." It's -important to think through the possible failure points in your code in order to -mitigate them. Failure points are where undesirable behavior in your code could -potentially occur. Whether the undesirable behavior happens due to users -interacting with your program in unexpected ways or bad actors intentionally -trying to exploit your program, anticipating failure points is essential to -secure program development. - -Remember, **you have no control over the transactions that will be sent to your -program once it’s deployed**. You can only control how your program handles -them. While this lesson is far from a comprehensive overview of program -security, we'll cover some of the basic pitfalls to look out for. - -## Think like an attacker - -[Neodyme](https://workshop.neodyme.io/) gave a presentation at Breakpoint 2021 -entitled "Think Like An Attacker: Bringing Smart Contracts to Their Break(ing) -Point." If there's one thing you take away from this lesson, it's that you -should think like an attacker. - -In this lesson, of course, we cannot cover everything that could possibly go -wrong with your programs. Ultimately, every program will have different security -risks associated with it. While understanding common pitfalls is _essential_ to -engineering good programs, it is _insufficient_ for deploying secure ones. In -order to have the broadest security coverage possible, you have to approach your -code with the right mindset. - -As Neodyme mentioned in their presentation, the right mindset requires moving -from the question "Is this broken?" to "How do I break this?" This is the first -and most essential step in understanding what your code _actually does_ as -opposed to what you wrote it to do. - -### All programs can be broken - -It's not a question of "if." - -Rather, it's a question of "how much effort and dedication would it take." - -Our job as developers is to close as many holes as possible and increase the -effort and dedication required to break our code. For example, in the Movie -Review program we built together over the last two lessons, we wrote code to -create new accounts to store movie reviews. If we take a closer look at the -code, however, we'll notice how the program also facilitates a lot of -unintentional behavior we could easily catch by asking "How do I break this?" -We'll dig into some of these problems and how to fix them in this lesson, but -remember that memorizing a few pitfalls isn't sufficient. It's up to you to -change your mindset toward security. - -## Error handling - -Before we dive into some of the common security pitfalls and how to avoid them, -it's important to know how to use errors in your program. 
While your code can -handle some issues gracefully, other issues will require that your program stop -execution and return a program error. - -### How to create errors - -While the `solana_program` crate provides a `ProgramError` enum with a list of -generic errors we can use, it will often be useful to create your own. Your -custom errors will be able to provide more context and detail while you're -debugging your code. - -We can define our own errors by creating an enum type listing the errors we want -to use. For example, the `NoteError` contains variants `Forbidden` and -`InvalidLength`. The enum is made into a Rust `Error` type by using the `derive` -attribute macro to implement the `Error` trait from the `thiserror` library. -Each error type also has its own `#[error("...")]` notation. This lets you -provide an error message for each particular error type. - -```rust -use solana_program::{program_error::ProgramError}; -use thiserror::Error; - -#[derive(Error)] -pub enum NoteError { - #[error("Wrong note owner")] - Forbidden, - - #[error("Text is too long")] - InvalidLength, -} -``` - -### How to return errors - -The compiler expects errors returned by the program to be of type `ProgramError` -from the `solana_program` crate. That means we won't be able to return our -custom error unless we have a way to convert it into this type. The following -implementation handles conversion between our custom error and the -`ProgramError` type. - -```rust -impl From for ProgramError { - fn from(e: NoteError) -> Self { - ProgramError::Custom(e as u32) - } -} -``` - -To return the custom error from the program, simply use the `into()` method to -convert the error into an instance of `ProgramError`. - -```rust -if pda != *note_pda.key { - return Err(NoteError::Forbidden.into()); -} -``` - -## Basic security checks - -While these won't comprehensively secure your program, there are a few security -checks you can keep in mind to fill in some of the larger gaps in your code: - -- Ownership checks - used to verify that an account is owned by the program -- Signer checks - used to verify that an account has signed a transaction -- General Account Validation - used to verify that an account is the expected - account -- Data Validation - used to verify the inputs provided by a user - -### Ownership checks - -An ownership check verifies that an account is owned by the expected public key. -Let's use the note-taking app example that we've referenced in previous lessons. -In this app, users can create, update, and delete notes that are stored by the -program in PDA accounts. - -When a user invokes the `update` instruction, they also provide a `pda_account`. -We presume the provided `pda_account` is for the particular note they want to -update, but the user can input any instruction data they want. They could even -potentially send data which matches the data format of a note account but was -not also created by the note-taking program. This security vulnerability is one -potential way to introduce malicious code. - -The simplest way to avoid this problem is to always check that the owner of an -account is the public key you expect it to be. In this case, we expect the note -account to be a PDA account owned by the program itself. When this is not the -case, we can report it as an error accordingly. 
- -```rust -if note_pda.owner != program_id { - return Err(ProgramError::InvalidNoteAccount); -} -``` - -As a side note, using PDAs whenever possible is more secure than trusting -externally-owned accounts, even if they are owned by the transaction signer. The -only accounts that the program has complete control over are PDA accounts, -making them the most secure. - -### Signer checks - -A signer check simply verifies that the right parties have signed a transaction. -In the note-taking app, for example, we would want to verify that the note -creator signed the transaction before we process the `update` instruction. -Otherwise, anyone can update another user's notes by simply passing in the -user's public key as the initializer. - -```rust -if !initializer.is_signer { - msg!("Missing required signature"); - return Err(ProgramError::MissingRequiredSignature) -} -``` - -### General account validation - -In addition to checking the signers and owners of accounts, it's important to -ensure that the provided accounts are what your code expects them to be. For -example, you would want to validate that a provided PDA account's address can be -derived with the expected seeds. This ensures that it is the account you expect -it to be. - -In the note-taking app example, that would mean ensuring that you can derive a -matching PDA using the note creator's public key and the ID as seeds (that's -what we're assuming was used when creating the note). That way a user couldn't -accidentally pass in a PDA account for the wrong note or, more importantly, that -the user isn't passing in a PDA account that represents somebody else's note -entirely. - -```rust -let (pda, bump_seed) = Pubkey::find_program_address(&[note_creator.key.as_ref(), id.as_bytes().as_ref(),], program_id); - -if pda != *note_pda.key { - msg!("Invalid seeds for PDA"); - return Err(ProgramError::InvalidArgument) -} -``` - -## Data validation - -Similar to validating accounts, you should also validate any data provided by -the client. - -For example, you may have a game program where a user can allocate character -attribute points to various categories. You may have a maximum limit in each -category of 100, in which case you would want to verify that the existing -allocation of points plus the new allocation doesn't exceed the maximum. - -```rust -if character.agility + new_agility > 100 { - msg!("Attribute points cannot exceed 100"); - return Err(AttributeError::TooHigh.into()) -} -``` - -Or, the character may have an allowance of attribute points they can allocate -and you want to make sure they don't exceed that allowance. - -```rust -if attribute_allowance < new_agility { - msg!("Trying to allocate more points than allowed"); - return Err(AttributeError::ExceedsAllowance.into()) -} -``` - -Without these checks, program behavior would differ from what you expect. In -some cases, however, it's more than just an issue of undefined behavior. -Sometimes failure to validate data can result in security loopholes that are -financially devastating. - -For example, imagine that the character referenced in these examples is an NFT. -Further, imagine that the program allows the NFT to be staked to earn token -rewards proportional to the NFTs number of attribute points. Failure to -implement these data validation checks would allow a bad actor to assign an -obscenely high number of attribute points and quickly drain your treasury of all -the rewards that were meant to be spread more evenly amongst a larger pool of -stakers. 
- -### Integer overflow and underflow - -Rust integers have fixed sizes. This means they can only support a specific -range of numbers. An arithmetic operation that results in a higher or lower -value than what is supported by the range will cause the resulting value to wrap -around. For example, a `u8` only supports numbers 0-255, so the result of -addition that would be 256 would actually be 0, 257 would be 1, etc. - -This is always important to keep in mind, but especially so when dealing with -any code that represents true value, such as depositing and withdrawing tokens. - -To avoid integer overflow and underflow, either: - -1. Have logic in place that ensures overflow or underflow _cannot_ happen or -2. Use checked math like `checked_add` instead of `+` - ```rust - let first_int: u8 = 5; - let second_int: u8 = 255; - let sum = first_int.checked_add(second_int); - ``` - -# Demo - -Let’s practice together with the Movie Review program we've worked on in -previous lessons. No worries if you’re just jumping into this lesson without -having done the previous lesson - it should be possible to follow along either -way. - -As a refresher, the Movie Review program lets users store movie reviews in PDA -accounts. Last lesson, we finished implementing the basic functionality of -adding a movie review. Now, we'll add some security checks to the functionality -we've already created and add the ability to update a movie review in a secure -manner. - -Just as before, we'll be using [Solana Playground](https://beta.solpg.io/) to -write, build, and deploy our code. - -## 1. Get the starter code - -To begin, you can find the starter code -[here](https://beta.solpg.io/62b552f3f6273245aca4f5c9). If you've been following -along with the Movie Review demos, you'll notice that we've refactored our -program. - -The refactored starter code is almost the same as what it was before. Since -`lib.rs` was getting rather large and unwieldy, we've separated its code into 3 -files: `lib.rs`, `entrypoint.rs`, and `processor.rs`. `lib.rs` now _only_ -registers the code's modules, `entrypoint.rs` _only_ defines and sets the -program's entrypoint, and `processor.rs` handles the program logic for -processing instructions. We've also added an `error.rs` file where we'll be -defining custom errors. The complete file structure is as follows: - -- **lib.rs** - register modules -- **entrypoint.rs -** entry point to the program -- **instruction.rs -** serialize and deserialize instruction data -- **processor.rs -** program logic to process instructions -- **state.rs -** serialize and deserialize state -- **error.rs -** custom program errors - -In addition to some changes to file structure, we've updated a small amount of -code that will let this demo be more focused on security without having you -write unnecessary boiler plate. - -Since we'll be allowing updates to movie reviews, we also changed `account_len` -in the `add_movie_review` function (now in `processor.rs`). Instead of -calculating the size of the review and setting the account length to only as -large as it needs to be, we're simply going to allocate 1000 bytes to each -review account. This way, we don’t have to worry about reallocating size or -re-calculating rent when a user updates their movie review. 
-
-We went from this:
-
-```rust
-let account_len: usize = 1 + 1 + (4 + title.len()) + (4 + description.len());
-```
-
-To this:
-
-```rust
-let account_len: usize = 1000;
-```
-
-The [realloc](https://docs.rs/solana-sdk/latest/solana_sdk/account_info/struct.AccountInfo.html#method.realloc) method,
-recently enabled by Solana Labs, lets you dynamically change the size of your
-accounts. We will not be using this method in this demo, but it's something to
-be aware of.
-
-Finally, we've also implemented some additional functionality for our
-`MovieAccountState` struct in `state.rs` using the `impl` keyword.
-
-For our movie reviews, we want the ability to check whether an account has
-already been initialized. To do this, we create an `is_initialized` function
-that checks the `is_initialized` field on the `MovieAccountState` struct.
-
-`Sealed` is Solana's version of Rust's `Sized` trait. It simply specifies that
-`MovieAccountState` has a known size, which enables some compiler
-optimizations.
-
-```rust
-// inside state.rs
-impl Sealed for MovieAccountState {}
-
-impl IsInitialized for MovieAccountState {
-    fn is_initialized(&self) -> bool {
-        self.is_initialized
-    }
-}
-```
-
-Before moving on, make sure you have a solid grasp on the current state of the
-program. Look through the code and spend some time thinking through any spots
-that are confusing to you. It may be helpful to compare the starter code to the
-[solution code from the previous lesson](https://beta.solpg.io/62b23597f6273245aca4f5b4).
-
-## 2. Custom Errors
-
-Let's begin by writing our custom program errors. We'll need errors that we can
-use in the following situations:
-
-- The update instruction has been invoked on an account that hasn't been
-  initialized yet
-- The provided PDA doesn't match the expected or derived PDA
-- The input data is larger than the program allows
-- The rating provided does not fall in the 1-5 range
-
-The starter code includes an empty `error.rs` file. Open that file and add
-errors for each of the above cases.
-
-```rust
-// inside error.rs
-use solana_program::program_error::ProgramError;
-use thiserror::Error;
-
-#[derive(Debug, Error)]
-pub enum ReviewError {
-    // Error 0
-    #[error("Account not initialized yet")]
-    UninitializedAccount,
-    // Error 1
-    #[error("PDA derived does not equal PDA passed in")]
-    InvalidPDA,
-    // Error 2
-    #[error("Input data exceeds max length")]
-    InvalidDataLength,
-    // Error 3
-    #[error("Rating greater than 5 or less than 1")]
-    InvalidRating,
-}
-
-impl From<ReviewError> for ProgramError {
-    fn from(e: ReviewError) -> Self {
-        ProgramError::Custom(e as u32)
-    }
-}
-```
-
-Note that in addition to adding the error cases, we also added the
-implementation that lets us convert our error into a `ProgramError` type as
-needed.
-
-Before moving on, let's bring `ReviewError` into scope in `processor.rs`. We
-will be using these errors shortly when we add our security checks.
-
-```rust
-// inside processor.rs
-use crate::error::ReviewError;
-```
-
-## 3. Add security checks to `add_movie_review`
-
-Now that we have errors to use, let's add some security checks to our
-`add_movie_review` function.
-
-### Signer check
-
-The first thing we should do is ensure that the `initializer` of a review is
-also a signer on the transaction. This ensures that you can't submit movie
-reviews impersonating somebody else. We'll put this check right after iterating
-through the accounts.
-
-```rust
-let account_info_iter = &mut accounts.iter();
-
-let initializer = next_account_info(account_info_iter)?;
-let pda_account = next_account_info(account_info_iter)?;
-let system_program = next_account_info(account_info_iter)?;
-
-if !initializer.is_signer {
-    msg!("Missing required signature");
-    return Err(ProgramError::MissingRequiredSignature)
-}
-```
-
-### Account validation
-
-Next, let's make sure the `pda_account` passed in by the user is the `pda` we
-expect. Recall that we derived the `pda` for a movie review using the
-`initializer` and `title` as seeds. Within our instruction we'll derive the
-`pda` again and then check whether it matches the `pda_account`. If the
-addresses do not match, we'll return our custom `InvalidPDA` error.
-
-```rust
-// Derive PDA and check that it matches client
-let (pda, _bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), title.as_bytes().as_ref(),], program_id);
-
-if pda != *pda_account.key {
-    msg!("Invalid seeds for PDA");
-    return Err(ReviewError::InvalidPDA.into())
-}
-```
-
-### Data validation
-
-Now let's perform some data validation.
-
-We'll start by making sure `rating` falls within the 1 to 5 scale. If the
-rating provided by the user is outside of this range, we'll return our custom
-`InvalidRating` error.
-
-```rust
-if rating > 5 || rating < 1 {
-    msg!("Rating cannot be higher than 5");
-    return Err(ReviewError::InvalidRating.into())
-}
-```
-
-Next, let's check that the content of the review does not exceed the 1000 bytes
-we've allocated for the account. If the size exceeds 1000 bytes, we'll return
-our custom `InvalidDataLength` error.
-
-```rust
-let total_len: usize = 1 + 1 + (4 + title.len()) + (4 + description.len());
-if total_len > 1000 {
-    msg!("Data length is larger than 1000 bytes");
-    return Err(ReviewError::InvalidDataLength.into())
-}
-```
-
-Lastly, let's check whether the account has already been initialized by calling
-the `is_initialized` function we implemented for our `MovieAccountState`. If
-the account already exists, we will return an error.
- -```rust -if account_data.is_initialized() { - msg!("Account already initialized"); - return Err(ProgramError::AccountAlreadyInitialized); -} -``` - -All together, the `add_movie_review` function should look something like this: - -```rust -pub fn add_movie_review( - program_id: &Pubkey, - accounts: &[AccountInfo], - title: String, - rating: u8, - description: String -) -> ProgramResult { - msg!("Adding movie review..."); - msg!("Title: {}", title); - msg!("Rating: {}", rating); - msg!("Description: {}", description); - - let account_info_iter = &mut accounts.iter(); - - let initializer = next_account_info(account_info_iter)?; - let pda_account = next_account_info(account_info_iter)?; - let system_program = next_account_info(account_info_iter)?; - - if !initializer.is_signer { - msg!("Missing required signature"); - return Err(ProgramError::MissingRequiredSignature) - } - - let (pda, bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), title.as_bytes().as_ref(),], program_id); - if pda != *pda_account.key { - msg!("Invalid seeds for PDA"); - return Err(ProgramError::InvalidArgument) - } - - if rating > 5 || rating < 1 { - msg!("Rating cannot be higher than 5"); - return Err(ReviewError::InvalidRating.into()) - } - - let total_len: usize = 1 + 1 + (4 + title.len()) + (4 + description.len()); - if total_len > 1000 { - msg!("Data length is larger than 1000 bytes"); - return Err(ReviewError::InvalidDataLength.into()) - } - - let account_len: usize = 1000; - - let rent = Rent::get()?; - let rent_lamports = rent.minimum_balance(account_len); - - invoke_signed( - &system_instruction::create_account( - initializer.key, - pda_account.key, - rent_lamports, - account_len.try_into().unwrap(), - program_id, - ), - &[initializer.clone(), pda_account.clone(), system_program.clone()], - &[&[initializer.key.as_ref(), title.as_bytes().as_ref(), &[bump_seed]]], - )?; - - msg!("PDA created: {}", pda); - - msg!("unpacking state account"); - let mut account_data = try_from_slice_unchecked::(&pda_account.data.borrow()).unwrap(); - msg!("borrowed account data"); - - msg!("checking if movie account is already initialized"); - if account_data.is_initialized() { - msg!("Account already initialized"); - return Err(ProgramError::AccountAlreadyInitialized); - } - - account_data.title = title; - account_data.rating = rating; - account_data.description = description; - account_data.is_initialized = true; - - msg!("serializing account"); - account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?; - msg!("state account serialized"); - - Ok(()) -} -``` - -## 4. Support movie review updates in `MovieInstruction` - -Now that `add_movie_review` is more secure, let's turn our attention to -supporting the ability to update a movie review. - -Let’s begin by updating `instruction.rs`. We’ll start by adding an -`UpdateMovieReview` variant to `MovieInstruction` that includes embedded data -for the new title, rating, and description. - -```rust -// inside instruction.rs -pub enum MovieInstruction { - AddMovieReview { - title: String, - rating: u8, - description: String - }, - UpdateMovieReview { - title: String, - rating: u8, - description: String - } -} -``` - -The payload struct can stay the same since aside from the variant type, the -instruction data is the same as what we used for `AddMovieReview`. - -Lastly, in the `unpack` function we need to add `UpdateMovieReview` to the match -statement. 
-
-```rust
-// inside instruction.rs
-impl MovieInstruction {
-    pub fn unpack(input: &[u8]) -> Result<Self, ProgramError> {
-        let (&variant, rest) = input.split_first().ok_or(ProgramError::InvalidInstructionData)?;
-        let payload = MovieReviewPayload::try_from_slice(rest).unwrap();
-        Ok(match variant {
-            0 => Self::AddMovieReview {
-                title: payload.title,
-                rating: payload.rating,
-                description: payload.description },
-            1 => Self::UpdateMovieReview {
-                title: payload.title,
-                rating: payload.rating,
-                description: payload.description },
-            _ => return Err(ProgramError::InvalidInstructionData)
-        })
-    }
-}
-```
-
-## 5. Define `update_movie_review` function
-
-Now that we can unpack our `instruction_data` and determine which instruction of
-the program to run, we can add `UpdateMovieReview` to the match statement in
-the `process_instruction` function in the `processor.rs` file.
-
-```rust
-// inside processor.rs
-pub fn process_instruction(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    instruction_data: &[u8]
-) -> ProgramResult {
-    // unpack instruction data
-    let instruction = MovieInstruction::unpack(instruction_data)?;
-    match instruction {
-        MovieInstruction::AddMovieReview { title, rating, description } => {
-            add_movie_review(program_id, accounts, title, rating, description)
-        },
-        // add UpdateMovieReview to match against our new data structure
-        MovieInstruction::UpdateMovieReview { title, rating, description } => {
-            // make call to update function that we'll define next
-            update_movie_review(program_id, accounts, title, rating, description)
-        }
-    }
-}
-```
-
-Next, we can define the new `update_movie_review` function. The definition
-should have the same parameters as the definition of `add_movie_review`.
-
-```rust
-pub fn update_movie_review(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    title: String,
-    rating: u8,
-    description: String
-) -> ProgramResult {
-
-}
-```
-
-## 6. Implement `update_movie_review` function
-
-All that's left now is to fill in the logic for updating a movie review. This
-time, though, let's make it secure from the start.
-
-Just like the `add_movie_review` function, let's start by iterating through the
-accounts. The only accounts we'll need are the first two: `initializer` and
-`pda_account`.
-
-```rust
-pub fn update_movie_review(
-    program_id: &Pubkey,
-    accounts: &[AccountInfo],
-    title: String,
-    rating: u8,
-    description: String
-) -> ProgramResult {
-    msg!("Updating movie review...");
-
-    // Get Account iterator
-    let account_info_iter = &mut accounts.iter();
-
-    // Get accounts
-    let initializer = next_account_info(account_info_iter)?;
-    let pda_account = next_account_info(account_info_iter)?;
-
-}
-```
-
-### Ownership Check
-
-Before we continue, let's implement some basic security checks. We'll start with
-an ownership check on `pda_account` to verify that it is owned by our program.
-If it isn't, we'll return the `IllegalOwner` error.
-
-```rust
-if pda_account.owner != program_id {
-    return Err(ProgramError::IllegalOwner)
-}
-```
-
-### Signer Check
-
-Next, let's perform a signer check to verify that the `initializer` of the
-update instruction has also signed the transaction. Since we are updating the
-data for a movie review, we want to ensure that the original `initializer` of
-the review has approved the changes by signing the transaction. If the
-`initializer` did not sign the transaction, we'll return an error.
- -```rust -if !initializer.is_signer { - msg!("Missing required signature"); - return Err(ProgramError::MissingRequiredSignature) -} -``` - -### Account Validation - -Next, let’s check that the `pda_account` passed in by the user is the PDA we -expect by deriving the PDA using `initializer` and `title` as seeds. If the -addresses do not match, we’ll return our custom `InvalidPDA` error. We'll -implement this the same way we did in the `add_movie_review` function. - -```rust -// Derive PDA and check that it matches client -let (pda, _bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), account_data.title.as_bytes().as_ref(),], program_id); - -if pda != *pda_account.key { - msg!("Invalid seeds for PDA"); - return Err(ReviewError::InvalidPDA.into()) -} -``` - -### Unpack `pda_account` and perform data validation - -Now that our code ensures we can trust the passed in accounts, let's unpack the -`pda_account` and perform some data validation. We'll start by unpacking -`pda_account` and assigning it to a mutable variable `account_data`. - -```rust -msg!("unpacking state account"); -let mut account_data = try_from_slice_unchecked::(&pda_account.data.borrow()).unwrap(); -msg!("borrowed account data"); -``` - -Now that we have access to the account and its fields, the first thing we need -to do is verify that the account has already been initialized. An uninitialized -account can't be updated so the program should return our custom -`UninitializedAccount` error. - -```rust -if !account_data.is_initialized() { - msg!("Account is not initialized"); - return Err(ReviewError::UninitializedAccount.into()); -} -``` - -Next, we need to validate the `rating`, `title`, and `description` data just -like in the `add_movie_review` function. We want to limit the `rating` to a -scale of 1 to 5 and limit the overall size of the review to be fewer than 1000 -bytes. If the rating provided by the user outside of this range, then we’ll -return our custom `InvalidRating` error. If the review is too long, then we'll -return our custom `InvalidDataLength` error. - -```rust -if rating > 5 || rating < 1 { - msg!("Rating cannot be higher than 5"); - return Err(ReviewError::InvalidRating.into()) -} - -let total_len: usize = 1 + 1 + (4 + account_data.title.len()) + (4 + description.len()); -if total_len > 1000 { - msg!("Data length is larger than 1000 bytes"); - return Err(ReviewError::InvalidDataLength.into()) -} -``` - -### Update the movie review account - -Now that we've implemented all of the security checks, we can finally update the -movie review account by updating `account_data` and re-serializing it. At that -point, we can return `Ok` from our program. - -```rust -account_data.rating = rating; -account_data.description = description; - -account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?; - -Ok(()) -``` - -All together, the `update_movie_review` function should look something like the -code snippet below. We've included some additional logging for clarity in -debugging. 
- -```rust -pub fn update_movie_review( - program_id: &Pubkey, - accounts: &[AccountInfo], - title: String, - rating: u8, - description: String -) -> ProgramResult { - msg!("Updating movie review..."); - - let account_info_iter = &mut accounts.iter(); - - let initializer = next_account_info(account_info_iter)?; - let pda_account = next_account_info(account_info_iter)?; - - if pda_account.owner != program_id { - return Err(ProgramError::IllegalOwner) - } - - if !initializer.is_signer { - msg!("Missing required signature"); - return Err(ProgramError::MissingRequiredSignature) - } - - msg!("unpacking state account"); - let mut account_data = try_from_slice_unchecked::(&pda_account.data.borrow()).unwrap(); - msg!("review title: {}", account_data.title); - - let (pda, _bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), account_data.title.as_bytes().as_ref(),], program_id); - if pda != *pda_account.key { - msg!("Invalid seeds for PDA"); - return Err(ReviewError::InvalidPDA.into()) - } - - msg!("checking if movie account is initialized"); - if !account_data.is_initialized() { - msg!("Account is not initialized"); - return Err(ReviewError::UninitializedAccount.into()); - } - - if rating > 5 || rating < 1 { - msg!("Invalid Rating"); - return Err(ReviewError::InvalidRating.into()) - } - - let update_len: usize = 1 + 1 + (4 + description.len()) + account_data.title.len(); - if update_len > 1000 { - msg!("Data length is larger than 1000 bytes"); - return Err(ReviewError::InvalidDataLength.into()) - } - - msg!("Review before update:"); - msg!("Title: {}", account_data.title); - msg!("Rating: {}", account_data.rating); - msg!("Description: {}", account_data.description); - - account_data.rating = rating; - account_data.description = description; - - msg!("Review after update:"); - msg!("Title: {}", account_data.title); - msg!("Rating: {}", account_data.rating); - msg!("Description: {}", account_data.description); - - msg!("serializing account"); - account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?; - msg!("state account serialized"); - - Ok(()) -} -``` - -## 7. Build and upgrade - -We're ready to build and upgrade our program! You can test your program by -submitting a transaction with the right instruction data. For that, feel free to -use this -[frontend](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-update-reviews). -Remember, to make sure you're testing the right program you'll need to replace -`MOVIE_REVIEW_PROGRAM_ID` with your program ID in `Form.tsx` and -`MovieCoordinator.ts`. - -If you need more time with this project to feel comfortable with these concepts, -have a look at the -[solution code](https://beta.solpg.io/62c8c6dbf6273245aca4f5e7) before -continuing. - -# Challenge - -Now it’s your turn to build something independently by building on top of the -Student Intro program that you've used in previous lessons. If you haven't been -following along or haven't saved your code from before, feel free to use -[this starter code](https://beta.solpg.io/62b11ce4f6273245aca4f5b2). - -The Student Intro program is a Solana Program that lets students introduce -themselves. The program takes a user's name and a short message as the -instruction_data and creates an account to store the data on-chain. - -Using what you've learned in this lesson, try applying what you've learned to -the Student Intro Program. The program should: - -1. Add an instruction allowing students to update their message -2. 
Implement the basic security checks we've learned in this lesson - -Try to do this independently if you can! But if you get stuck, feel free to -reference the [solution code](https://beta.solpg.io/62c9120df6273245aca4f5e8). -Note that your code may look slightly different than the solution code depending -on the checks you implement and the errors you write. Once you complete Module -3, we'd love to know more about your experience! Feel free to share some quick -feedback -[here](https://airtable.com/shrOsyopqYlzvmXSC?prefill_Module=Module%203), so -that we can continue to improve the course. diff --git a/content/courses/solana-course/content/program-state-management.md b/content/courses/solana-course/content/program-state-management.md deleted file mode 100644 index a75a8971f..000000000 --- a/content/courses/solana-course/content/program-state-management.md +++ /dev/null @@ -1,597 +0,0 @@ ---- -title: Create a Basic Program, Part 2 - State Management -objectives: - - Describe the process of creating a new account using a Program Derived - Address (PDA) - - Use seeds to derive a PDA - - Use the space required by an account to calculate the amount of rent (in - lamports) a user must allocate - - Use a Cross Program Invocation (CPI) to initialize an account with a PDA as - the address of the new account - - Explain how to update the data stored on a new account ---- - -# TL;DR - -- Program state is stored in other accounts rather than in the program itself -- A Program Derived Address (PDA) is derived from a program ID and an optional - list of seeds. Once derived, PDAs are subsequently used as the address for a - storage account. -- Creating an account requires that we calculate the space required and the - corresponding rent to allocate for the new account -- Creating a new account requires a Cross Program Invocation (CPI) to the - `create_account` instruction on the System Program -- Updating the data field on an account requires that we serialize (convert to - byte array) the data into the account - -# Overview - -Solana maintains speed, efficiency, and extensibility in part by making programs -stateless. Rather than having state stored on the program itself, programs use -Solana's account model to read state from and write state to separate PDA -accounts. - -While this is an extremely flexible model, it's also a paradigm that can be -difficult to work in if its unfamiliar. But don't worry! We'll start simple in -this lesson and work up to more complex programs in the next module. - -In this lesson we'll learn the basics of state management for a Solana program, -including representing state as a Rust type, creating accounts using Program -Derived Addresses, and serializing account data. - -## Program state - -All Solana accounts have a `data` field that holds a byte array. This makes -accounts as flexible as files on a computer. You can store literally anything in -an account (so long as the account has the storage space for it). - -Just as files in a traditional filesystem conform to specific data formats like -PDF or MP3, the data stored in a Solana account needs to follow some kind of -pattern so that the data can be retrieved and deserialized into something -usable. - -### Represent state as a Rust type - -When writing a program in Rust, we typically create this "format" by defining a -Rust data type. If you went through the -[first part of this lesson](basic-program-pt-1.md), this is very similar to what -we did when we created an enum to represent discrete instructions. 
- -While this type should reflect the structure of your data, for most use cases a -simple struct is sufficient. For example, a note-taking program that stores -notes in separate accounts would likely have data for a title, body, and maybe -an ID of some kind. We could create a struct to represent that as follows: - -```rust -struct NoteState { - title: String, - body: String, - id: u64 -} -``` - -### Using Borsh for serialization and deserialization - -Just as with instruction data, we need a mechanism for converting from our Rust -data type to a byte array, and vice versa. **Serialization** is the process of -converting an object into a byte array. **Deserialization** is the process of -reconstructing an object from a byte array. - -We'll continue to use Borsh for serialization and deserialization. In Rust, we -can use the `borsh` crate to get access to the `BorshSerialize` and -`BorshDeserialize` traits. We can then apply those traits using the `derive` -attribute macro. - -```rust -use borsh::{BorshSerialize, BorshDeserialize}; - -#[derive(BorshSerialize, BorshDeserialize)] -struct NoteState { - title: String, - body: String, - id: u64 -} -``` - -These traits will provide methods on `NoteState` that we can use to serialize -and deserialize the data as needed. - -## Creating accounts - -Before we can update the data field of an account, we have to first create that -account. - -To create a new account within our program we must: - -1. Calculate the space and rent required for the account -2. Have an address to assign the new account -3. Invoke the system program to create the new account - -### Space and rent - -Recall that storing data on the Solana network requires users to allocate rent -in the form of lamports. The amount of rent required by a new account depends on -the amount of space you would like allocated to that account. That means we need -to know before creating the account how much space to allocate. - -Note that rent is more like a deposit. All the lamports allocated for rent can -be fully refunded when an account is closed. Additionally, all new accounts are -now required to be -[rent-exempt](https://twitter.com/jacobvcreech/status/1524790032938287105), -meaning lamports are not deducted from the account over time. An account is -considered rent-exempt if it holds at least 2 years worth of rent. In other -words, accounts are stored on-chain permanently until the owner closes the -account and withdraws the rent. - -In our note-taking app example, the `NoteState` struct specifies three fields -that need to be stored in an account: `title`, `body`, and `id`. To calculate -the size the account needs to be, you would simply add up the size required to -store the data in each field. - -For dynamic data, like strings, Borsh adds an additional 4 bytes at the -beginning to store the length of that particular field. That means `title` and -`body` are each 4 bytes plus their respective sizes. The `id` field is a 64-bit -integer, or 8 bytes. - -You can add up those lengths and then calculate the rent required for that -amount of space using the `minimum_balance` function from the `rent` module of -the `solana_program` crate. 
- -```rust -// Calculate account size required for struct NoteState -let account_len: usize = (4 + title.len()) + (4 + body.len()) + 8; - -// Calculate rent required -let rent = Rent::get()?; -let rent_lamports = rent.minimum_balance(account_len); -``` - -### Program Derived Addresses (PDA) - -Before creating an account, we also need to have an address to assign the -account. For program owned accounts, this will be a program derived address -(PDA) found using the `find_program_address` function. - -As the name implies, PDAs are derived using the program ID (address of the -program creating the account) and an optional list of “seeds”. Optional seeds -are additional inputs used in the `find_program_address` function to derive the -PDA. The function used to derive PDAs will return the same address every time -when given the same inputs. This gives us the ability to create any number of -PDA accounts and a deterministic way to find each account. - -In addition to the seeds you provide for deriving a PDA, the -`find_program_address` function will provide one additional "bump seed." What -makes PDAs unique from other Solana account addresses is that they do not have a -corresponding secret key. This ensures that only the program that owns the -address can sign on behalf of the PDA. When the `find_program_address` function -attempts to derive a PDA using the provided seeds, it passes in the number 255 -as the "bump seed." If the resulting address is invalid (i.e. has a -corresponding secret key), then the function decreases the bump seed by 1 and -derives a new PDA with that bump seed. Once a valid PDA is found, the function -returns both the PDA and the bump that was used to derive the PDA. - -For our note-taking program, we will use the note creator's public key and the -ID as the optional seeds to derive the PDA. Deriving the PDA this way allows us -to deterministically find the account for each note. - -```rust -let (note_pda_account, bump_seed) = Pubkey::find_program_address(&[note_creator.key.as_ref(), id.as_bytes().as_ref(),], program_id); -``` - -### Cross Program Invocation (CPI) - -Once we’ve calculated the rent required for our account and found a valid PDA to -assign as the address of the new account, we are finally ready to create the -account. Creating a new account within our program requires a Cross Program -Invocation (CPI). A CPI is when one program invokes an instruction on another -program. To create a new account within our program, we will invoke the -`create_account` instruction on the system program. - -CPIs can be done using either `invoke` or `invoke_signed`. - -```rust -pub fn invoke( - instruction: &Instruction, - account_infos: &[AccountInfo<'_>] -) -> ProgramResult -``` - -```rust -pub fn invoke_signed( - instruction: &Instruction, - account_infos: &[AccountInfo<'_>], - signers_seeds: &[&[&[u8]]] -) -> ProgramResult -``` - -For this lesson we will use `invoke_signed`. Unlike a regular signature where a -secret key is used to sign, `invoke_signed` uses the optional seeds, bump seed, -and program ID to derive a PDA and sign an instruction. This is done by -comparing the derived PDA against all accounts passed into the instruction. If -any of the accounts match the PDA, then the signer field for that account is set -to true. - -A program can securely sign transactions this way because `invoke_signed` -generates the PDA used for signing with the program ID of the program invoking -the instruction. 
Therefore, it is not possible for one program to generate a -matching PDA to sign for an account with a PDA derived using another program ID. - -```rust -invoke_signed( - // instruction - &system_instruction::create_account( - note_creator.key, - note_pda_account.key, - rent_lamports, - account_len.try_into().unwrap(), - program_id, - ), - // account_infos - &[note_creator.clone(), note_pda_account.clone(), system_program.clone()], - // signers_seeds - &[&[note_creator.key.as_ref(), note_id.as_bytes().as_ref(), &[bump_seed]]], -)?; -``` - -## Serializing and deserializing account data - -Once we've created a new account, we need to access and update the account's -data field. This means deserializing its byte array into an instance of the type -we created, updating the fields on that instance, then serializing that instance -back into a byte array. - -### Deserialize account data - -The first step to updating an account's data is to deserialize its `data` byte -array into its Rust type. You can do this by first borrowing the data field on -the account. This allows you to access the data without taking ownership. - -You can then use the `try_from_slice_unchecked` function to deserialize the data -field of the borrowed account using the format of the type you created to -represent the data. This gives you an instance of your Rust type so you can -easily update fields using dot notation. If we were to do this with the -note-taking app example we've been using, it would look like this: - -```rust -let mut account_data = try_from_slice_unchecked::(note_pda_account.data.borrow()).unwrap(); - -account_data.title = title; -account_data.body = rating; -account_data.id = id; -``` - -### Serialize account data - -Once the Rust instance representing the account's data has been updated with the -appropriate values, you can "save" the data on the account. - -This is done with the `serialize` function on the instance of the Rust type you -created. You'll need to pass in a mutable reference to the account data. The -syntax here is tricky, so don't worry if you don't understand it completely. -Borrowing and references are two of the toughest concepts in Rust. - -```rust -account_data.serialize(&mut &mut note_pda_account.data.borrow_mut()[..])?; -``` - -The above example converts the `account_data` object to a byte array and sets it -to the `data` property on `note_pda_account`. This effectively saves the updated -`account_data` variable to the data field of the new account. Now when a user -fetches the `note_pda_account` and deserializes the data, it will display the -updated data we’ve serialized into the account. - -## Iterators - -You may have noticed in the previous examples that we referenced `note_creator` -and didn't show where that came from. - -To get access to this and other accounts, we use an -[Iterator](https://doc.rust-lang.org/std/iter/trait.Iterator.html). An iterator -is a Rust trait used to give sequential access to each element in a collection -of values. Iterators are used in Solana programs to safely iterate over the list -of accounts passed into the program entry point through the `accounts` argument. - -### Rust iterator - -The iterator pattern allows you to perform some task on a sequence of items. The -`iter()` method creates an iterator object that references a collection. An -iterator is responsible for the logic of iterating over each item and -determining when the sequence has finished. 
In Rust, iterators are lazy, meaning -they have no effect until you call methods that consume the iterator to use it -up. Once you've created an iterator, you must call the `next()` function on it -to get the next item. - -```rust -let v1 = vec![1, 2, 3]; - -// create the iterator over the vec -let v1_iter = v1.iter(); - -// use the iterator to get the first item -let first_item = v1_iter.next(); - -// use the iterator to get the second item -let second_item = v1_iter.next(); -``` - -### Solana accounts iterator - -Recall that the `AccountInfo` for all accounts required by an instruction are -passing through a single `accounts` argument. In order to parse through the -accounts and use them within our instruction, we will need to create an iterator -with a mutable reference to the `accounts`. - -At that point, instead of using the iterator directly, we pass it to the -`next_account_info` function from the `account_info` module provided by the -`solana_program` crate. - -For example, the instruction to create a new note in a note-taking program would -at minimum require the accounts for the user creating the note, a PDA to store -the note, and the `system_program` to initialize a new account. All three -accounts would be passed into the program entry point through the `accounts` -argument. An iterator of `accounts` is then used to separate out the -`AccountInfo` associated with each account to process the instruction. - -Note that `&mut` means a mutable reference to the `accounts` argument. You can -read more about references in Rust -[here](https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html) and -the `mut` keyword [here](https://doc.rust-lang.org/std/keyword.mut.html). - -```rust -// Get Account iterator -let account_info_iter = &mut accounts.iter(); - -// Get accounts -let note_creator = next_account_info(account_info_iter)?; -let note_pda_account = next_account_info(account_info_iter)?; -let system_program = next_account_info(account_info_iter)?; -``` - -# Demo - -This overview covered a lot of new concepts. Let’s practice them together by -continuing to work on the Movie Review program from the last lesson. No worries -if you’re just jumping into this lesson without having done the previous -lesson - it should be possible to follow along either way. We'll be using the -[Solana Playground](https://beta.solpg.io) to write, build, and deploy our code. - -As a refresher, we are building a Solana program which lets users review movies. -Last lesson, we deserialized the instruction data passed in by the user but we -have not yet stored this data in an account. Let’s now update our program to -create new accounts to store the user’s movie review. - -### 1. Get the starter code - -If you didn’t complete the demo from the last lesson or just want to make sure -that you didn’t miss anything, you can reference the starter code -[here](https://beta.solpg.io/6295b25b0e6ab1eb92d947f7). - -Our program currently includes the `instruction.rs` file we use to deserialize -the `instruction_data` passed into the program entry point. We have also -completed `lib.rs` file to the point where we can print our deserialized -instruction data to the program log using the `msg!` macro. - -### 2. Create struct to represent account data - -Let’s begin by creating a new file named `state.rs`. - -This file will: - -1. Define the struct our program uses to populate the data field of a new - account -2. 
Add `BorshSerialize` and `BorshDeserialize` traits to this struct - -First, let’s bring into scope everything we’ll need from the `borsh` crate. - -```rust -use borsh::{BorshSerialize, BorshDeserialize}; -``` - -Next, let’s create our `MovieAccountState` struct. This struct will define the -parameters that each new movie review account will store in its data field. Our -`MovieAccountState` struct will require the following parameters: - -- `is_initialized` - shows whether or not the account has been initialized -- `rating` - user’s rating of the movie -- `description` - user’s description of the movie -- `title` - title of the movie the user is reviewing - -```rust -#[derive(BorshSerialize, BorshDeserialize)] -pub struct MovieAccountState { - pub is_initialized: bool, - pub rating: u8, - pub title: String, - pub description: String -} -``` - -### 3. Update `lib.rs` - -Next, let’s update our `lib.rs` file. First, we’ll bring into scope everything -we will need to complete our Movie Review program. You can read more about the -details each item we are using from the `solana_program` crate -[here](https://docs.rs/solana-program/latest/solana_program/). - -```rust -use solana_program::{ - entrypoint, - entrypoint::ProgramResult, - pubkey::Pubkey, - msg, - account_info::{next_account_info, AccountInfo}, - system_instruction, - program_error::ProgramError, - sysvar::{rent::Rent, Sysvar}, - program::{invoke_signed}, - borsh::try_from_slice_unchecked, -}; -use std::convert::TryInto; -pub mod instruction; -pub mod state; -use instruction::MovieInstruction; -use state::MovieAccountState; -use borsh::BorshSerialize; -``` - -### 4. Iterate through `accounts` - -Next, let’s continue building out our `add_movie_review` function. Recall that -an array of accounts is passed into the `add_movie_review` function through a -single `accounts` argument. To process our instruction, we will need to iterate -through `accounts` and assign the `AccountInfo` for each account to its own -variable. - -```rust -// Get Account iterator -let account_info_iter = &mut accounts.iter(); - -// Get accounts -let initializer = next_account_info(account_info_iter)?; -let pda_account = next_account_info(account_info_iter)?; -let system_program = next_account_info(account_info_iter)?; -``` - -### 5. Derive PDA - -Next, within our `add_movie_review` function, let’s independently derive the PDA -we expect the user to have passed in. We'll need to provide the bump seed for -the derivation later, so even though `pda_account` should reference the same -account, we still need to call `find_program_address`. - -Note that we derive the PDA for each new account using the initializer’s public -key and the movie title as optional seeds. Setting up the PDA this way restricts -each user to only one review for any one movie title. However, it still allows -the same user to review movies with different titles and different users to -review movies with the same title. - -```rust -// Derive PDA and check that it matches client -let (pda, bump_seed) = Pubkey::find_program_address(&[initializer.key.as_ref(), title.as_bytes().as_ref(),], program_id); -``` - -### 6. Calculate space and rent - -Next, let’s calculate the rent that our new account will need. Recall that rent -is the amount of lamports a user must allocate to an account for storing data on -the Solana network. To calculate rent, we must first calculate the amount of -space our new account requires. - -The `MovieAccountState` struct has four fields. 
We will allocate 1 byte each for -`rating` and `is_initialized`. For both `title` and `description` we will -allocate space equal to 4 bytes plus the length of the string. - -```rust -// Calculate account size required -let account_len: usize = 1 + 1 + (4 + title.len()) + (4 + description.len()); - -// Calculate rent required -let rent = Rent::get()?; -let rent_lamports = rent.minimum_balance(account_len); -``` - -### 7. Create new account - -Once we’ve calculated the rent and verified the PDA, we are ready to create our -new account. In order to create a new account, we must call the `create_account` -instruction from the system program. We do this with a Cross Program Invocation -(CPI) using the `invoke_signed` function. We use `invoke_signed` because we are -creating the account using a PDA and need the Movie Review program to “sign” the -instruction. - -```rust -// Create the account -invoke_signed( - &system_instruction::create_account( - initializer.key, - pda_account.key, - rent_lamports, - account_len.try_into().unwrap(), - program_id, - ), - &[initializer.clone(), pda_account.clone(), system_program.clone()], - &[&[initializer.key.as_ref(), title.as_bytes().as_ref(), &[bump_seed]]], -)?; - -msg!("PDA created: {}", pda); -``` - -### 8. Update account data - -Now that we’ve created a new account, we are ready to update the data field of -the new account using the format of the `MovieAccountState` struct from our -`state.rs` file. We first deserialize the account data from `pda_account` using -`try_from_slice_unchecked`, then set the values of each field. - -```rust -msg!("unpacking state account"); -let mut account_data = try_from_slice_unchecked::(&pda_account.data.borrow()).unwrap(); -msg!("borrowed account data"); - -account_data.title = title; -account_data.rating = rating; -account_data.description = description; -account_data.is_initialized = true; -``` - -Lastly, we serialize the updated `account_data` into the data field of our -`pda_account`. - -```rust -msg!("serializing account"); -account_data.serialize(&mut &mut pda_account.data.borrow_mut()[..])?; -msg!("state account serialized"); -``` - -### 9. Build and deploy - -We're ready to build and deploy our program! - -![Gif Build and Deploy Program](../assets/movie-review-pt2-build-deploy.gif) - -You can test your program by submitting a transaction with the right instruction -data. For that, feel free to use -[this script](https://github.com/Unboxed-Software/solana-movie-client) or -[the frontend](https://github.com/Unboxed-Software/solana-movie-frontend) we -built in the -[Deserialize Custom Instruction Data lesson](deserialize-custom-data.md). In -both cases, make sure you copy and paste the program ID for your program into -the appropriate area of the source code to make sure you're testing the right -program. - -If you use the frontend, simply replace the `MOVIE_REVIEW_PROGRAM_ID` in both -the `MovieList.tsx` and `Form.tsx` components with the address of the program -you’ve deployed. Then run the frontend, submit a view, and refresh the browser -to see the review. - -If you need more time with this project to feel comfortable with these concepts, -have a look at the -[solution code](https://beta.solpg.io/62b23597f6273245aca4f5b4) before -continuing. - -# Challenge - -Now it’s your turn to build something independently. Equipped with the concepts -intoduced in this lesson, you now know everything you'll need to recreate the -entirety of the Student Intro program from Module 1. 
- -The Student Intro program is a Solana Program that lets students introduce -themselves. The program takes a user's name and a short message as the -`instruction_data` and creates an account to store the data on-chain. - -Using what you've learned in this lesson, build out this program. In addition to -taking a name a short message as instruction data, the program should: - -1. Create a separate account for each student -2. Store `is_initialized` as a boolean, `name` as a string, and `msg` as a - string in each account - -You can test your program by building the -[frontend](https://github.com/Unboxed-Software/solana-student-intros-frontend) -we created in the -[Page, Order, and Filter Custom Account Data lesson](./paging-ordering-filtering-data.md). -Remember to replace the program ID in the frontend code with the one you've -deployed. - -Try to do this independently if you can! But if you get stuck, feel free to -reference the [solution code](https://beta.solpg.io/62b11ce4f6273245aca4f5b2). diff --git a/content/courses/solana-course/content/reinitialization-attacks.md b/content/courses/solana-course/content/reinitialization-attacks.md deleted file mode 100644 index a340fd735..000000000 --- a/content/courses/solana-course/content/reinitialization-attacks.md +++ /dev/null @@ -1,471 +0,0 @@ ---- -title: Reinitialization Attacks -objectives: - - Explain security risks associated with a reinitialization vulnerability - - Use long-form Rust check if an account has already been initialized - - Using Anchor’s `init` constraint to initialize accounts, which automatically - sets an account discriminator that is checked to prevent the - reinitialization of an account ---- - -# TL;DR - -- Use an account discriminator or initialization flag to check whether an - account has already been initialized to prevent an account from being - reinitialized and overriding existing account data. -- To prevent account reinitialization in plain Rust, initialize accounts with an - `is_initialized` flag and check if it has already been set to true when - initializing an account - ```rust - if account.is_initialized { - return Err(ProgramError::AccountAlreadyInitialized.into()); - } - ``` -- To simplify this, use Anchor’s `init` constraint to create an account via a - CPI to the system program and sets its discriminator - -# Overview - -Initialization refers to setting the data of a new account for the first time. -When initializing a new account, you should implement a way to check if the -account has already been initialized. Without an appropriate check, an existing -account could be reinitialized and have existing data overwritten. - -Note that initializing an account and creating an account are two separate -instructions. Creating an account requires invoking the `create_account` -instruction on the System Program which specifies the space required for the -account, the rent in lamports allocated to the account, and the program owner of -the account. Initialization is an instruction that sets the data of a newly -created account. Creating and initializing an account can be combined into a -single transaction. - -### Missing Initialization Check - -In the example below, there are no checks on the `user` account. The -`initialize` instruction deserializes the data of the `user` account as a `User` -account type, sets the `authority` field, and serializes the updated account -data to the `user` account. 
- -Without checks on the `user` account, the same account could be passed into the -`initialize` instruction a second time by another party to overwrite the -existing `authority` stored on the account data. - -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod initialization_insecure { - use super::*; - - pub fn initialize(ctx: Context) -> Result<()> { - let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap(); - user.authority = ctx.accounts.authority.key(); - user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account(mut)] - user: AccountInfo<'info>, - #[account(mut)] - authority: Signer<'info>, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct User { - authority: Pubkey, -} -``` - -### Add `is_initialized` check - -One approach to fix this is to add an additional `is_initialized` field to the -`User` account type and use it as a flag to check if an account has already been -initialized. - -```jsx -if user.is_initialized { - return Err(ProgramError::AccountAlreadyInitialized.into()); -} -``` - -By including a check within the `initialize` instruction, the `user` account -would only be initialized if the `is_initialized` field has not yet been set to -true. If the `is_initialized` field was already set, the transaction would fail, -thereby avoiding the scenario where an attacker could replace the account -authority with their own public key. - -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod initialization_secure { - use super::*; - - pub fn initialize(ctx: Context) -> Result<()> { - let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap(); - if user.is_initialized { - return Err(ProgramError::AccountAlreadyInitialized.into()); - } - - user.authority = ctx.accounts.authority.key(); - user.is_initialized = true; - - user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account(mut)] - user: AccountInfo<'info>, - #[account(mut)] - authority: Signer<'info>, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct User { - is_initialized: bool, - authority: Pubkey, -} -``` - -### Use Anchor’s `init` constraint - -Anchor provides an `init` constraint that can be used with the `#[account(...)]` -attribute to initialize an account. The `init` constraint creates the account -via a CPI to the system program and sets the account discriminator. - -The `init` constraint must be used in combination with the `payer` and `space` -constraints. The `payer` specifies the account paying for the initialization of -the new account. The `space` specifies the amount of space the new account -requires, which determines the amount of lamports that must be allocated to the -account. The first 8 bytes of data is set as a discriminator that Anchor -automatically adds to identify the account type. - -Most importantly for this lesson, the `init` constraint ensures that this -instruction can only be called once per account, so you can set the initial -state of the account in the instruction logic and not have to worry about an -attacker trying to reinitialize the account. 
- -```rust -use anchor_lang::prelude::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod initialization_recommended { - use super::*; - - pub fn initialize(_ctx: Context) -> Result<()> { - msg!("GM"); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Initialize<'info> { - #[account(init, payer = authority, space = 8+32)] - user: Account<'info, User>, - #[account(mut)] - authority: Signer<'info>, - system_program: Program<'info, System>, -} - -#[account] -pub struct User { - authority: Pubkey, -} -``` - -### Anchor’s `init_if_needed` constraint - -It’s worth noting that Anchor has an `init_if_needed` constraint. This -constraint should be used very cautiously. In fact, it is blocked behind a -feature flag so that you are forced to be intentional about using it. - -The `init_if_needed` constraint does the same thing as the `init` constraint, -only if the account has already been initialized the instruction will still run. - -Given this, it’s \***\*\*\*\***extremely\***\*\*\*\*** important that when you -use this constraint you include checks to avoid resetting the account to its -initial state. - -For example, if the account stores an `authority` field that gets set in the -instruction using the `init_if_needed` constraint, you need checks that ensure -that no attacker could call the instruction after it has already been -initialized and have the `authority` field set again. - -In most cases, it’s safer to have a separate instruction for initializing -account data. - -# Demo - -For this demo we’ll create a simple program that does nothing but initialize -accounts. We’ll include two instructions: - -- `insecure_initialization` - initializes an account that can be reinitialized -- `recommended_initialization` - initialize an account using Anchor’s `init` - constraint - -### 1. Starter - -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-reinitialization-attacks/tree/starter). -The starter code includes a program with one instruction and the boilerplate -setup for the test file. - -The `insecure_initialization` instruction initializes a new `user` account that -stores the public key of an `authority`. In this instruction, the account is -expected to be allocated client-side, then passed into the program instruction. -Once passed into the program, there are no checks to see if the `user` account's -initial state has already been set. This means the same account can be passed in -a second time to override the `authority` stored on an existing `user` account. - -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod initialization { - use super::*; - - pub fn insecure_initialization(ctx: Context) -> Result<()> { - let mut user = User::try_from_slice(&ctx.accounts.user.data.borrow()).unwrap(); - user.authority = ctx.accounts.authority.key(); - user.serialize(&mut *ctx.accounts.user.data.borrow_mut())?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Unchecked<'info> { - #[account(mut)] - /// CHECK: - user: UncheckedAccount<'info>, - authority: Signer<'info>, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct User { - authority: Pubkey, -} -``` - -### 2. 
Test `insecure_initialization` instruction - -The test file includes the setup to create an account by invoking the system -program and then invokes the `insecure_initialization` instruction twice using -the same account. - -Since there are no checks the verify that the account data has not already been -initialized, the `insecure_initialization` instruction will complete -successfully both times, despite the second invocation providing a _different_ -authority account. - -```tsx -import * as anchor from "@project-serum/anchor"; -import { Program } from "@project-serum/anchor"; -import { expect } from "chai"; -import { Initialization } from "../target/types/initialization"; - -describe("initialization", () => { - const provider = anchor.AnchorProvider.env(); - anchor.setProvider(provider); - - const program = anchor.workspace.Initialization as Program; - - const wallet = anchor.workspace.Initialization.provider.wallet; - const walletTwo = anchor.web3.Keypair.generate(); - - const userInsecure = anchor.web3.Keypair.generate(); - const userRecommended = anchor.web3.Keypair.generate(); - - before(async () => { - const tx = new anchor.web3.Transaction().add( - anchor.web3.SystemProgram.createAccount({ - fromPubkey: wallet.publicKey, - newAccountPubkey: userInsecure.publicKey, - space: 32, - lamports: await provider.connection.getMinimumBalanceForRentExemption( - 32, - ), - programId: program.programId, - }), - ); - - await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [ - wallet.payer, - userInsecure, - ]); - - await provider.connection.confirmTransaction( - await provider.connection.requestAirdrop( - walletTwo.publicKey, - 1 * anchor.web3.LAMPORTS_PER_SOL, - ), - "confirmed", - ); - }); - - it("Insecure init", async () => { - await program.methods - .insecureInitialization() - .accounts({ - user: userInsecure.publicKey, - }) - .rpc(); - }); - - it("Re-invoke insecure init with different auth", async () => { - const tx = await program.methods - .insecureInitialization() - .accounts({ - user: userInsecure.publicKey, - authority: walletTwo.publicKey, - }) - .transaction(); - await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [ - walletTwo, - ]); - }); -}); -``` - -Run `anchor test` to see that both transactions will complete successfully. - -```bash -initialization - ✔ Insecure init (478ms) - ✔ Re-invoke insecure init with different auth (464ms) -``` - -### 3. Add `recommended_initialization` instruction - -Let's create a new instruction called `recommended_initialization` that fixes -this problem. Unlike the previous insecure instruction, this instruction should -handle both the creation and initialization of the user's account using Anchor's -`init` constraint. - -This constraint instructs the program to create the account via a CPI to the -system program, so the account no longer needs to be created client-side. The -constraint also sets the account discriminator. Your instruction logic can then -set the account's initial state. - -By doing this, you ensure that any subsequent invocation of the same instruction -with the same user account will fail rather than reset the account's initial -state. - -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod initialization { - use super::*; - ... 
- pub fn recommended_initialization(ctx: Context) -> Result<()> { - ctx.accounts.user.authority = ctx.accounts.authority.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct Checked<'info> { - #[account(init, payer = authority, space = 8+32)] - user: Account<'info, User>, - #[account(mut)] - authority: Signer<'info>, - system_program: Program<'info, System>, -} -``` - -### 4. Test `recommended_initialization` instruction - -To test the `recommended_initialization` instruction, we’ll invoke the -instruction twice just like before. This time, we expect the transaction to fail -when we try to initialize the same account a second time. - -```tsx -describe("initialization", () => { - ... - it("Recommended init", async () => { - await program.methods - .recommendedInitialization() - .accounts({ - user: userRecommended.publicKey, - }) - .signers([userRecommended]) - .rpc() - }) - - it("Re-invoke recommended init with different auth, expect error", async () => { - try { - // Add your test here. - const tx = await program.methods - .recommendedInitialization() - .accounts({ - user: userRecommended.publicKey, - authority: walletTwo.publicKey, - }) - .transaction() - await anchor.web3.sendAndConfirmTransaction(provider.connection, tx, [ - walletTwo, - userRecommended, - ]) - } catch (err) { - expect(err) - console.log(err) - } - }) -}) -``` - -Run `anchor test` and to see that the second transaction which tries to -initialize the same account twice will now return an error stating the account -address is already in use. - -```bash -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t invoke [1]', -'Program log: Instruction: RecommendedInitialization', -'Program 11111111111111111111111111111111 invoke [2]', -'Allocate: account Address { address: EMvbwzrs4VTR7G1sNUJuQtvRX1EuvLhqs4PFqrtDcCGV, base: None } already in use', -'Program 11111111111111111111111111111111 failed: custom program error: 0x0', -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t consumed 4018 of 200000 compute units', -'Program CpozUgSwe9FPLy9BLNhY2LTGqLUk1nirUkMMA5RmDw6t failed: custom program error: 0x0' -``` - -If you use Anchor's `init` constraint, that's usually all you need to protect -against reinitialization attacks! Remember, just because the fix for these -security exploits is simple doesn't mean it isn't important. Every time your -initialize an account, make sure you're either using the `init` constraint or -have some other check in place to avoid resetting an existing account's initial -state. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[this repository](https://github.com/Unboxed-Software/solana-reinitialization-attacks/tree/solution). - -# Challenge - -Just as with other lessons in this module, your opportunity to practice avoiding -this security exploit lies in auditing your own or other programs. - -Take some time to review at least one program and ensure that instructions are -properly protected against reinitialization attacks. - -Remember, if you find a bug or exploit in somebody else's program, please alert -them! If you find one in your own program, be sure to patch it right away. 
diff --git a/content/courses/solana-course/content/rust-macros.md b/content/courses/solana-course/content/rust-macros.md deleted file mode 100644 index f49c3a680..000000000 --- a/content/courses/solana-course/content/rust-macros.md +++ /dev/null @@ -1,1093 +0,0 @@ ---- -title: Rust Procedural Macros -objectives: - - Create and use **Procedural Macros** in Rust - - Explain and work with a Rust Abstract Syntax Tree (AST) - - Describe how procedural macros are used in the Anchor framework ---- - -# TL;DR - -- **Procedural macros** are a special kind of Rust macro that allow the - programmer to generate code at compile time based on custom input. -- In the Anchor framework, procedural macros are used to generate code that - reduces the amount of boilerplate required when writing Solana programs. -- An **Abstract Syntax Tree (AST)** is a representation of the syntax and - structure of the input code that is passed to a procedural macro. When - creating a macro, you use elements of the AST like tokens and items to - generate the appropriate code. -- A **Token** is the smallest unit of source code that can be parsed by the - compiler in Rust. -- An **Item** is a declaration that defines something that can be used in a Rust - program, such as a struct, an enum, a trait, a function, or a method. -- A **TokenStream** is a sequence of tokens that represents a piece of source - code, and can be passed to a procedural macro to allow it to access and - manipulate the individual tokens in the code. - -# Overview - -In Rust, a macro is a piece of code that you can write once and then "expand" to -generate code at compile time. This can be useful when you need to generate code -that is repetitive or complex, or when you want to use the same code in multiple -places in your program. - -There are two different types of macros: declarative macros and procedural -macros. - -- Declarative macros are defined using the `macro_rules!` macro, which allows - you to match against patterns of code and generate code based on the matching - pattern. -- Procedural macros in Rust are defined using Rust code and operate on the - abstract syntax tree (AST) of the input TokenStream, which allows them to - manipulate and generate code at a finer level of detail. - -In this lesson, we'll focus on procedural macros, which are commonly used in the -Anchor framework. - -## Rust concepts - -Before we dig into macros, specifically, let's talk about some of the important -terminology, concepts, and tools we'll be using throughout the lesson. - -### Token - -In the context of Rust programming, a -[token](https://doc.rust-lang.org/reference/tokens.html) is a basic element of -the language syntax like an identifier or literal value. Tokens represent the -smallest unit of source code that are recognized by the Rust compiler, and they -are used to build up more complex expressions and statements in a program. - -Examples of Rust tokens include: - -- [Keywords](https://doc.rust-lang.org/reference/keywords.html), such as `fn`, - `let`, and `match`, are reserved words in the Rust language that have special - meanings. -- [Identifiers](https://doc.rust-lang.org/reference/identifiers.html), such as - variable and function names, are used to refer to values and functions. -- [Punctuation](https://doc.rust-lang.org/reference/tokens.html#punctuation) - marks, such as `{`, `}`, and `;`, are used to structure and delimit blocks of - code. 
-- [Literals](https://doc.rust-lang.org/reference/tokens.html#literals), such as
-  numbers and strings, represent constant values in a Rust program.
-
-You can read more about Rust tokens
-[here](https://doc.rust-lang.org/reference/tokens.html).
-
-### Item
-
-Items are named, self-contained pieces of code in Rust. They provide a way to
-group related code together and give it a name by which the group can be
-referenced. This allows you to reuse and organize your code in a modular way.
-
-There are several different kinds of items, such as:
-
-- Functions
-- Structs
-- Enums
-- Traits
-- Modules
-- Macros
-
-You can read more about Rust items
-[here](https://doc.rust-lang.org/reference/items.html).
-
-### Token Streams
-
-The `TokenStream` type represents a sequence of tokens. This type is defined
-in the `proc_macro` crate and is surfaced as a way for you to write macros
-based on other code in the codebase.
-
-When defining a procedural macro, the macro input is passed to the macro as a
-`TokenStream`, which can then be parsed and transformed as needed. The
-resulting `TokenStream` can then be expanded into the final code output by the
-macro.
-
-```rust
-use proc_macro::TokenStream;
-
-#[proc_macro]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    ...
-}
-```
-
-### Abstract syntax tree
-
-In the context of a Rust procedural macro, an abstract syntax tree (AST) is a
-data structure that represents the hierarchical structure of the input tokens
-and their meaning in the Rust language. It's typically used as an intermediate
-representation of the input that can be easily processed and transformed by
-the procedural macro.
-
-The macro can use the AST to analyze the input code and make changes to it,
-such as adding or removing tokens, or transforming the meaning of the code in
-some way. It can then use this transformed AST to generate new code, which can
-be returned as the output of the proc macro.
-
-### The `syn` crate
-
-The `syn` crate is available to help parse a token stream into an AST that
-macro code can traverse and manipulate. When a procedural macro is invoked in
-a Rust program, the macro function is called with a token stream as the input.
-Parsing this input is the first step of virtually any macro.
-
-Take as an example a proc macro that you invoke using `my_macro!` as follows:
-
-```rust
-my_macro!("hello, world");
-```
-
-When the above code is executed, the Rust compiler passes the input tokens
-(`"hello, world"`) as a `TokenStream` to the `my_macro` proc macro.
-
-```rust
-use proc_macro::TokenStream;
-use syn::parse_macro_input;
-
-#[proc_macro]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    let ast = parse_macro_input!(input as syn::LitStr);
-    eprintln! {"{:#?}", ast};
-    ...
-}
-```
-
-Inside the proc macro, the code uses the `parse_macro_input!` macro from the
-`syn` crate to parse the input `TokenStream` into an abstract syntax tree
-(AST). Specifically, this example parses it as an instance of `LitStr` that
-represents a string literal in Rust. The `eprintln!` macro is then used to
-print the `LitStr` AST for debugging purposes.
-
-```rust
-LitStr {
-    token: Literal {
-        kind: Str,
-        symbol: "hello, world",
-        suffix: None,
-        span: #0 bytes(172..186),
-    },
-}
-```
-
-The output of the `eprintln!` macro shows the structure of the `LitStr` AST
-that was generated from the input tokens. It shows the string literal value
-(`"hello, world"`) and other metadata about the token, such as its kind
-(`Str`), suffix (`None`), and span.
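-
-As a small extension of the example above (a sketch that isn't part of the
-original `my_macro` snippet), the macro can also inspect the parsed `LitStr`
-before generating anything, for instance by reading its contents with
-`LitStr::value()` and turning bad input into a compile-time error:
-
-```rust
-use proc_macro::TokenStream;
-use syn::parse_macro_input;
-
-#[proc_macro]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    // Parse the input tokens into a `LitStr` AST node, as before.
-    let ast = parse_macro_input!(input as syn::LitStr);
-
-    // `LitStr::value()` returns the contents of the string literal, which the
-    // macro can inspect before deciding what code to generate.
-    if ast.value().is_empty() {
-        // Report a compile-time error at the literal's location instead of
-        // generating any code.
-        return syn::Error::new(ast.span(), "expected a non-empty string")
-            .to_compile_error()
-            .into();
-    }
-
-    // Code generation is covered in the next section; return nothing for now.
-    TokenStream::new()
-}
-```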
- -### The `quote` crate - -Another important crate is the `quote` crate. This crate is pivotal in the code -generation portion of the macro. - -Once a proc macro has finished analyzing and transforming the AST, it can use -the `quote` crate or a similar code generation library to convert the AST back -into a token stream. After that, it returns the `TokenStream`, which the Rust -compiler uses to replace the original stream in the source code. - -Take the below example of `my_macro`: - -```rust -use proc_macro::TokenStream; -use syn::parse_macro_input; -use quote::quote; - -#[proc_macro] -pub fn my_macro(input: TokenStream) -> TokenStream { - let ast = parse_macro_input!(input as syn::LitStr); - eprintln! {"{:#?}", ast}; - let expanded = { - quote! {println!("The input is: {}", #ast)} - }; - expanded.into() -} -``` - -This example uses the `quote!` macro to generate a new `TokenStream` consisting -of a `println!` macro call with the `LitStr` AST as its argument. - -Note that the `quote!` macro generates a `TokenStream` of type -`proc_macro2::TokenStream`. To return this `TokenStream` to the Rust compiler, -you need to use the `.into()` method to convert it to `proc_macro::TokenStream`. -The Rust compiler will then use this `TokenStream` to replace the original proc -macro call in the source code. - -```text -The input is: hello, world -``` - -This allows you to create procedural macros that perform powerful code -generation and metaprogramming tasks. - -## Procedural Macro - -Procedural macros in Rust are a powerful way to extend the language and create -custom syntax. These macros are written in Rust and are compiled along with the -rest of the code. There are three types of procedural macros: - -- Function-like macros - `custom!(...)` -- Derive macros - `#[derive(CustomDerive)]` -- Attribute macros - `#[CustomAttribute]` - -This section will discuss the three types of procedural macros and provide an -example implementation of one. The process of writing a procedural macro is -consistent across all three types, so the example provided can be adapted to the -other types. - -### Function-like macros - -Function-like procedural macros are the simplest of the three types of -procedural macros. These macros are defined using a function preceded by the -`#[proc_macro]` attribute. The function must take a `TokenStream` as input and -return a new `TokenStream` as output to replace the original code. - -```rust -#[proc_macro] -pub fn my_macro(input: TokenStream) -> TokenStream { - ... -} -``` - -These macros are invoked using the name of the function followed by the `!` -operator. They can be used in various places in a Rust program, such as in -expressions, statements, and function definitions. - -```rust -my_macro!(input); -``` - -Function-like procedural macros are best suited for simple code generation tasks -that require only a single input and output stream. They are easy to understand -and use, and they provide a straightforward way to generate code at compile -time. - -### Attribute macros - -Attribute macros define new attributes that are attached to items in a Rust -program such as functions and structs. - -```rust -#[my_macro] -fn my_function() { - ... -} -``` - -Attribute macros are defined with a function preceded by the -`#[proc_macro_attribute]` attribute. The function requires two token streams as -input and returns a single `TokenStream` as output that replaces the original -item with an arbitrary number of new items. 
-
-```rust
-#[proc_macro_attribute]
-pub fn my_macro(attr: TokenStream, input: TokenStream) -> TokenStream {
-    ...
-}
-```
-
-The first token stream input represents attribute arguments. The second token
-stream is the rest of the item that the attribute is attached to, including
-any other attributes that may be present.
-
-```rust
-#[my_macro(arg1, arg2)]
-fn my_function() {
-    ...
-}
-```
-
-For example, an attribute macro could process the arguments passed to the
-attribute to enable or disable certain features, and then use the second token
-stream to modify the original item in some way. By having access to both token
-streams, attribute macros can provide greater flexibility and functionality
-compared to using only a single token stream.
-
-### Derive macros
-
-Derive macros are invoked using the `#[derive]` attribute on a struct, enum,
-or union and are typically used to automatically implement traits for the
-input types.
-
-```rust
-#[derive(MyMacro)]
-struct Input {
-    field: String
-}
-```
-
-Derive macros are defined with a function preceded by the
-`#[proc_macro_derive]` attribute. They're limited to generating code for
-structs, enums, and unions. They take a single token stream as input and
-return a single token stream as output.
-
-Unlike the other procedural macros, the returned token stream doesn't replace
-the original code. Rather, the returned token stream gets appended to the
-module or block that the original item belongs to. This allows developers to
-extend the functionality of the original item without modifying the original
-code.
-
-```rust
-#[proc_macro_derive(MyMacro)]
-pub fn my_macro(input: TokenStream) -> TokenStream {
-    ...
-}
-```
-
-In addition to implementing traits, derive macros can define helper
-attributes. Helper attributes can be used in the scope of the item that the
-derive macro is applied to and customize the code generation process.
-
-```rust
-#[proc_macro_derive(MyMacro, attributes(helper))]
-pub fn my_macro(body: TokenStream) -> TokenStream {
-    ...
-}
-```
-
-Helper attributes are inert, which means they do not have any effect on their
-own. Their only purpose is to be used as input to the derive macro that
-defined them.
-
-```rust
-#[derive(MyMacro)]
-struct Input {
-    #[helper]
-    field: String
-}
-```
-
-For example, a derive macro could define a helper attribute to perform
-additional operations depending on the presence of the attribute. This allows
-developers to further extend the functionality of derive macros and customize
-the code they generate in a more flexible way.
-
-### Example of a procedural macro
-
-This example shows how to use a derive procedural macro to automatically
-generate an implementation of a `describe()` method for a struct.
-
-```rust
-use example_macro::Describe;
-
-#[derive(Describe)]
-struct MyStruct {
-    my_string: String,
-    my_number: u64,
-}
-
-fn main() {
-    MyStruct::describe();
-}
-```
-
-The `describe()` method will print a description of the struct's fields to the
-console.
-
-```text
-MyStruct is a struct with these named fields: my_string, my_number.
-```
-
-The first step is to define the procedural macro using the
-`#[proc_macro_derive]` attribute. The input `TokenStream` is parsed using the
-`parse_macro_input!()` macro to extract the struct's identifier and data.
- -```rust -use proc_macro::{self, TokenStream}; -use quote::quote; -use syn::{parse_macro_input, DeriveInput, FieldsNamed}; - -#[proc_macro_derive(Describe)] -pub fn describe_struct(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - ... -} -``` - -The next step is to use the `match` keyword to perform pattern matching on the -`data` value to extract the names of the fields in the struct. - -The first `match` has two arms: one for the `syn::Data::Struct` variant, and one -for the "catch-all" `_` arm that handles all other variants of `syn::Data`. - -The second `match` has two arms as well: one for the `syn::Fields::Named` -variant, and one for the "catch-all" `_` arm that handles all other variants of -`syn::Fields`. - -The `#(#idents), *` syntax specifies that the `idents` iterator will be -"expanded" to create a comma-separated list of the elements in the iterator. - -```rust -use proc_macro::{self, TokenStream}; -use quote::quote; -use syn::{parse_macro_input, DeriveInput, FieldsNamed}; - -#[proc_macro_derive(Describe)] -pub fn describe_struct(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - let field_names = match data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(FieldsNamed { named, .. }) => { - let idents = named.iter().map(|f| &f.ident); - format!( - "a struct with these named fields: {}", - quote! {#(#idents), *}, - ) - } - _ => panic!("The syn::Fields variant is not supported"), - }, - _ => panic!("The syn::Data variant is not supported"), - }; - ... -} -``` - -The last step is to implement a `describe()` method for a struct. The `expanded` -variable is defined using the `quote!` macro and the `impl` keyword to create an -implementation for the struct name stored in the `#ident` variable. - -This implementation defines the `describe()` method that uses the `println!` -macro to print the name of the struct and its field names. - -Finally, the `expanded` variable is converted into a `TokenStream` using the -`into()` method. - -```rust -use proc_macro::{self, TokenStream}; -use quote::quote; -use syn::{parse_macro_input, DeriveInput, FieldsNamed}; - -#[proc_macro_derive(Describe)] -pub fn describe(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - let field_names = match data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(FieldsNamed { named, .. }) => { - let idents = named.iter().map(|f| &f.ident); - format!( - "a struct with these named fields: {}", - quote! {#(#idents), *}, - ) - } - _ => panic!("The syn::Fields variant is not supported"), - }, - _ => panic!("The syn::Data variant is not supported"), - }; - - let expanded = quote! { - impl #ident { - fn describe() { - println!("{} is {}.", stringify!(#ident), #field_names); - } - } - }; - - expanded.into() -} -``` - -Now, when the `#[derive(Describe)]` attribute is added to a struct, the Rust -compiler automatically generates an implementation of the `describe()` method -that can be called to print the name of the struct and the names of its fields. - -```rust -#[derive(Describe)] -struct MyStruct { - my_string: String, - my_number: u64, -} -``` - -The `cargo expand` command from the `cargo-expand` crate can be used to expand -Rust code that uses procedural macros. 
For example, the code for the `MyStruct`
-struct generated using the `#[derive(Describe)]` attribute looks like this:
-
-```rust
-struct MyStruct {
-    my_string: String,
-    my_number: u64,
-}
-impl MyStruct {
-    fn describe() {
-        {
-            ::std::io::_print(
-                ::core::fmt::Arguments::new_v1(
-                    &["", " is ", ".\n"],
-                    &[
-                        ::core::fmt::ArgumentV1::new_display(&"MyStruct"),
-                        ::core::fmt::ArgumentV1::new_display(
-                            &"a struct with these named fields: my_string, my_number",
-                        ),
-                    ],
-                ),
-            );
-        };
-    }
-}
-```
-
-## Anchor procedural macros
-
-Procedural macros are the magic behind the Anchor library that is commonly
-used in Solana development. Anchor macros allow for more succinct code, common
-security checks, and more. Let's go through a few examples of how Anchor uses
-procedural macros.
-
-### Function-like macro
-
-The `declare_id` macro shows how function-like macros are used in Anchor. This
-macro takes in a string of characters representing a program's ID as input and
-converts it into a `Pubkey` type that can be used in the Anchor program.
-
-```rust
-declare_id!("G839pmstFmKKGEVXRGnauXxFgzucvELrzuyk6gHTiK7a");
-```
-
-The `declare_id` macro is defined using the `#[proc_macro]` attribute,
-indicating that it's a function-like proc macro.
-
-```rust
-#[proc_macro]
-pub fn declare_id(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
-    let id = parse_macro_input!(input as id::Id);
-    proc_macro::TokenStream::from(quote! {#id})
-}
-```
-
-### Derive macro
-
-The `#[derive(Accounts)]` macro is just one example of the many derive macros
-used in Anchor.
-
-The `#[derive(Accounts)]` macro generates code that implements the `Accounts`
-trait for the given struct. This trait does a number of things, including
-validating and deserializing the accounts passed into an instruction. This
-allows the struct to be used as a list of accounts required by an instruction
-in an Anchor program.
-
-Any constraints specified on fields by the `#[account(..)]` attribute are
-applied during deserialization. The `#[instruction(..)]` attribute can also be
-added to specify the instruction's arguments and make them accessible to the
-macro.
-
-```rust
-#[derive(Accounts)]
-#[instruction(input: String)]
-pub struct Initialize<'info> {
-    #[account(init, payer = payer, space = 8 + input.len())]
-    pub data_account: Account<'info, MyData>,
-    #[account(mut)]
-    pub payer: Signer<'info>,
-    pub system_program: Program<'info, System>,
-}
-```
-
-This macro is defined using the `proc_macro_derive` attribute, which allows it
-to be used as a derive macro that can be applied to a struct. The line
-`#[proc_macro_derive(Accounts, attributes(account, instruction))]` indicates
-that this is a derive macro that processes `account` and `instruction` helper
-attributes.
-
-```rust
-#[proc_macro_derive(Accounts, attributes(account, instruction))]
-pub fn derive_anchor_deserialize(item: TokenStream) -> TokenStream {
-    parse_macro_input!(item as anchor_syn::AccountsStruct)
-        .to_token_stream()
-        .into()
-}
-```
-
-### Attribute macro `#[program]`
-
-The `#[program]` attribute macro is an example of an attribute macro used in
-Anchor to define the module containing instruction handlers for a Solana
-program.
-
-```rust
-#[program]
-pub mod my_program {
-    use super::*;
-
-    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
-        ...
-    }
-}
-```
-
-In this case, the `#[program]` attribute is applied to a module, and it is
-used to specify that the module contains instruction handlers for a Solana
-program.
- -```rust -#[proc_macro_attribute] -pub fn program( - _args: proc_macro::TokenStream, - input: proc_macro::TokenStream, -) -> proc_macro::TokenStream { - parse_macro_input!(input as anchor_syn::Program) - .to_token_stream() - .into() -} -``` - -Overall, the use of proc macros in Anchor greatly reduces the amount of -repetitive code that Solana developers have to write. By reducing the amount of -boilerplate code, developers can focus on their program's core functionality and -avoid mistakes caused by manual repetition. This ultimately results in a faster -and more efficient development process. - -# Demo - -Let's practice this by creating a new derive macro! Our new macro will let us -automatically generate instruction logic for updating each field on an account -in an Anchor program. - -### 1. Starter - -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/anchor-custom-macro/tree/starter). - -The starter code includes a simple Anchor program that allows you to initialize -and update a `Config` account. This is similar to what we did with the -[Environment Variables lesson](./env-variables.md). - -The account in question is structured as follows: - -```rust -use anchor_lang::prelude::*; - -#[account] -pub struct Config { - pub auth: Pubkey, - pub bool: bool, - pub first_number: u8, - pub second_number: u64, -} - -impl Config { - pub const LEN: usize = 8 + 32 + 1 + 1 + 8; -} -``` - -The `programs/admin/src/lib.rs` file contains the program entrypoint with the -definitions of the program's instructions. Currently, the program has -instructions to initialize this account and then one instruction per account -field for updating the field. - -The `programs/admin/src/admin_config` directory contains the program's -instruction logic and state. Take a look through each of these files. You'll -notice that instruction logic for each field is effectively duplicated for each -instruction. - -The goal of this demo is to implement a procedural macro that will allow us to -replace all of the instruction logic functions and automatically generate -functions for each instruction. - -### 2. Set up the custom macro declaration - -Let's get started by creating a separate crate for our custom macro. In the -project's root directory, run `cargo new custom-macro`. This will create a new -`custom-macro` directory with its own `Cargo.toml`. Update the new `Cargo.toml` -file to be the following: - -```text -[package] -name = "custom-macro" -version = "0.1.0" -edition = "2021" - -[lib] -proc-macro = true - -[dependencies] -syn = "1.0.105" -quote = "1.0.21" -proc-macro2 = "0.4" -anchor-lang = "0.25.0" -``` - -The `proc-macro = true` line defines this crate as containing a procedural -macro. The dependencies are all crates we'll be using to create our derive -macro. - -Next, change `src/main.rs` to `src/lib.rs`. - -Next, update the project root's `Cargo.toml` file's `members` field to include -`"custom-macro"`: - -```text -[workspace] -members = [ - "programs/*", - "custom-macro" -] -``` - -Now our crate is set up and ready to go. But before we move on, let's create one -more crate at the root level that we can use to test out our macro as we create -it. Use `cargo new custom-macro-test` at the project root. 
Then update the newly -created `Cargo.toml` to add `anchor-lang` and the `custom-macro` crates as -dependencies: - -```text -[package] -name = "custom-macro-test" -version = "0.1.0" -edition = "2021" - -[dependencies] -anchor-lang = "0.25.0" -custom-macro = { path = "../custom-macro" } -``` - -Next, update the root project's `Cargo.toml` to include the new -`custom-macro-test` crate as before: - -```text -[workspace] -members = [ - "programs/*", - "custom-macro", - "custom-macro-test" -] -``` - -Finally, replace the code in `custom-macro-test/src/main.rs` with the following -code. We'll use this later for testing: - -```rust -use anchor_lang::prelude::*; -use custom_macro::InstructionBuilder; - -#[derive(InstructionBuilder)] -pub struct Config { - pub auth: Pubkey, - pub bool: bool, - pub first_number: u8, - pub second_number: u64, -} -``` - -### 3. Define the custom macro - -Now, in the `custom-macro/src/lib.rs` file, let's add our new macro's -declaration. In this file, we’ll use the `parse_macro_input!` macro to parse the -input `TokenStream` and extract the `ident` and `data` fields from a -`DeriveInput` struct. Then, we’ll use the `eprintln!` macro to print the values -of `ident` and `data`. For now, we will use `TokenStream::new()` to return an -empty `TokenStream`. - -```rust -use proc_macro::TokenStream; -use quote::*; -use syn::*; - -#[proc_macro_derive(InstructionBuilder)] -pub fn instruction_builder(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - eprintln! {"{:#?}", ident}; - eprintln! {"{:#?}", data}; - - TokenStream::new() -} -``` - -Let's test what this prints. To do this, you first need to install the -`cargo-expand` command by running `cargo install cargo-expand`. You'll also need -to install the nightly version of Rust by running `rustup install nightly`. - -Once you've done this, you can see the output of the code described above by -navigating to the `custom-macro-test` directory and running `cargo expand`. - -This command expands macros in the crate. Since the `main.rs` file uses the -newly created `InstructionBuilder` macro, this will print the syntax tree for -the `ident` and `data` of the struct to the console. Once you have confirmed -that the input `TokenStream` is parsing correctly, feel free to remove the -`eprintln!` statements. - -### 4. Get the struct's fields - -Next, let’s use `match` statements to get the named fields from the `data` of -the struct. Then we'll use the `eprintln!` macro to print the values of the -fields. - -```rust -use proc_macro::TokenStream; -use quote::*; -use syn::*; - -#[proc_macro_derive(InstructionBuilder)] -pub fn instruction_builder(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - let fields = match data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(n) => n.named, - _ => panic!("The syn::Fields variant is not supported: {:#?}", s.fields), - }, - _ => panic!("The syn::Data variant is not supported: {:#?}", data), - }; - - eprintln! {"{:#?}", fields}; - - TokenStream::new() -} -``` - -Once again, use `cargo expand` in the terminal to see the output of this code. -Once you have confirmed that the fields are being extracted and printed -correctly, you can remove the `eprintln!` statement. - -### 5. Build update instructions - -Next, let’s iterate over the fields of the struct and generate an update -instruction for each field. 
The instruction will be generated using the `quote!` -macro and will include the field's name and type, as well as a new function name -for the update instruction. - -```rust -use proc_macro::TokenStream; -use quote::*; -use syn::*; - -#[proc_macro_derive(InstructionBuilder)] -pub fn instruction_builder(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - let fields = match data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(n) => n.named, - _ => panic!("The syn::Fields variant is not supported: {:#?}", s.fields), - }, - _ => panic!("The syn::Data variant is not supported: {:#?}", data), - }; - - let update_instruction = fields.into_iter().map(|f| { - let name = &f.ident; - let ty = &f.ty; - let fname = format_ident!("update_{}", name.clone().unwrap()); - - quote! { - pub fn #fname(ctx: Context, new_value: #ty) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.#name = new_value; - Ok(()) - } - } - }); - - TokenStream::new() -} -``` - -### 6. Return new `TokenStream` - -Lastly, let’s use the `quote!` macro to generate an implementation for the -struct with the name specified by the `ident` variable. The implementation -includes the update instructions that were generated for each field in the -struct. The generated code is then converted to a `TokenStream` using the -`into()` method and returned as the result of the macro. - -```rust -use proc_macro::TokenStream; -use quote::*; -use syn::*; - -#[proc_macro_derive(InstructionBuilder)] -pub fn instruction_builder(input: TokenStream) -> TokenStream { - let DeriveInput { ident, data, .. } = parse_macro_input!(input); - - let fields = match data { - syn::Data::Struct(s) => match s.fields { - syn::Fields::Named(n) => n.named, - _ => panic!("The syn::Fields variant is not supported: {:#?}", s.fields), - }, - _ => panic!("The syn::Data variant is not supported: {:#?}", data), - }; - - let update_instruction = fields.into_iter().map(|f| { - let name = &f.ident; - let ty = &f.ty; - let fname = format_ident!("update_{}", name.clone().unwrap()); - - quote! { - pub fn #fname(ctx: Context, new_value: #ty) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.#name = new_value; - Ok(()) - } - } - }); - - let expanded = quote! { - impl #ident { - #(#update_instruction)* - } - }; - expanded.into() -} -``` - -To verify that the macro is generating the correct code, use the `cargo expand` -command to see the expanded form of the macro. 
The output of this look like the -following: - -```rust -use anchor_lang::prelude::*; -use custom_macro::InstructionBuilder; -pub struct Config { - pub auth: Pubkey, - pub bool: bool, - pub first_number: u8, - pub second_number: u64, -} -impl Config { - pub fn update_auth( - ctx: Context, - new_value: Pubkey, - ) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.auth = new_value; - Ok(()) - } - pub fn update_bool(ctx: Context, new_value: bool) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.bool = new_value; - Ok(()) - } - pub fn update_first_number( - ctx: Context, - new_value: u8, - ) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.first_number = new_value; - Ok(()) - } - pub fn update_second_number( - ctx: Context, - new_value: u64, - ) -> Result<()> { - let admin_account = &mut ctx.accounts.admin_account; - admin_account.second_number = new_value; - Ok(()) - } -} -``` - -### 7. Update the program to use your new macro - -To use the new macro to generate update instructions for the `Config` struct, -first add the `custom-macro` crate as a dependency to the program in its -`Cargo.toml`: - -```text -[dependencies] -anchor-lang = "0.25.0" -custom-macro = { path = "../../custom-macro" } -``` - -Then, navigate to the `state.rs` file in the Anchor program and update it with -the following code: - -```rust -use crate::admin_update::UpdateAdminAccount; -use anchor_lang::prelude::*; -use custom_macro::InstructionBuilder; - -#[derive(InstructionBuilder)] -#[account] -pub struct Config { - pub auth: Pubkey, - pub bool: bool, - pub first_number: u8, - pub second_number: u64, -} - -impl Config { - pub const LEN: usize = 8 + 32 + 1 + 1 + 8; -} -``` - -Next, navigate to the `admin_update.rs` file and delete the existing update -instructions. This should leave only the `UpdateAdminAccount` context struct in -the file. - -```rust -use crate::state::Config; -use anchor_lang::prelude::*; - -#[derive(Accounts)] -pub struct UpdateAdminAccount<'info> { - pub auth: Signer<'info>, - #[account( - mut, - has_one = auth, - )] - pub admin_account: Account<'info, Config>, -} -``` - -Next, update `lib.rs` in the Anchor program to use the update instructions -generated by the `InstructionBuilder` macro. - -```rust -use anchor_lang::prelude::*; -mod admin_config; -use admin_config::*; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod admin { - use super::*; - - pub fn initialize(ctx: Context) -> Result<()> { - Initialize::initialize(ctx) - } - - pub fn update_auth(ctx: Context, new_value: Pubkey) -> Result<()> { - Config::update_auth(ctx, new_value) - } - - pub fn update_bool(ctx: Context, new_value: bool) -> Result<()> { - Config::update_bool(ctx, new_value) - } - - pub fn update_first_number(ctx: Context, new_value: u8) -> Result<()> { - Config::update_first_number(ctx, new_value) - } - - pub fn update_second_number(ctx: Context, new_value: u64) -> Result<()> { - Config::update_second_number(ctx, new_value) - } -} -``` - -Lastly, navigate to the `admin` directory and run `anchor test` to verify that -the update instructions generated by the `InstructionBuilder` macro are working -correctly. - -``` - admin - ✔ Is initialized! (160ms) - ✔ Update bool! (409ms) - ✔ Update u8! (403ms) - ✔ Update u64! (406ms) - ✔ Update Admin! (405ms) - - - 5 passing (2s) -``` - -Nice work! At this point, you can create procedural macros to help in your -development process. 
We encourage you to make the most of the Rust language and
-use macros where they make sense. But even if you don't, knowing how they
-work helps you understand what's happening with Anchor under the hood.
-
-If you need to spend more time with the solution code, feel free to reference
-the `solution` branch of
-[the repository](https://github.com/Unboxed-Software/anchor-custom-macro/tree/solution).
-
-# Challenge
-
-To solidify what you've learned, go ahead and create another procedural macro
-on your own. Think about code you've written that could be reduced or improved
-by a macro and try it out! Since this is still practice, it's okay if it
-doesn't work out the way you want or expect. Just jump in and experiment!
diff --git a/content/courses/solana-course/content/security-intro.md b/content/courses/solana-course/content/security-intro.md
deleted file mode 100644
index 98584702e..000000000
--- a/content/courses/solana-course/content/security-intro.md
+++ /dev/null
@@ -1,41 +0,0 @@
----
-title: How to approach the Program Security module
-objectives:
-  - understand how to approach the Program Security Module
----
-
-The goal of this module is to expose you to a wide variety of common security
-exploits that are unique to Solana development. We’ve heavily modeled this
-module off a public GitHub repository called Sealevel Attacks, created by the
-great Armani Ferrante.
-
-You might be thinking: “didn’t we have a security lesson in module 3?” Yes, we
-most certainly did. We wanted to make sure that anyone deploying programs to
-Mainnet right out of the gates had at least a basic understanding of security.
-And if that’s you, then hopefully the fundamental principles you learned in
-that lesson have led to you avoiding some common Solana exploits on your own.
-
-This module is meant to build on top of that lesson with two goals in mind:
-
-1. To expand your awareness of the Solana programming model and the areas
-   where you need to focus to close up security loopholes in your programs
-2. To show you the array of tools provided by Anchor to help you keep your
-   programs secure
-
-If you went through the Basic Security lesson, the first few lessons should
-seem familiar. They largely cover topics we discussed in that lesson. After
-that, some of the attacks may seem new. We encourage you to go through all of
-them.
-
-The last thing to call out is that there are a lot more lessons in this module
-than in prior modules. And the lessons aren't dependent on each other in the
-same ways, so you can bounce around a bit more if you'd like.
-
-Originally, we were going to have more, shorter lessons in this module. And
-while they might be shorter than average, they aren't much shorter. It turns
-out that even though each of the security vulnerabilities is "simple," there's
-a lot to discuss. So each lesson may have a little bit less prose and more code
-snippets, making it easy for readers to choose how in-depth to go. But,
-ultimately, each lesson is still as fully-fledged as those before it, so that
-you can really get a solid grasp on each of the discussed security risks.
-
-As always, we appreciate feedback. Good luck digging in!
diff --git a/content/courses/solana-course/content/serialize-instruction-data.md b/content/courses/solana-course/content/serialize-instruction-data.md deleted file mode 100644 index b75bd1920..000000000 --- a/content/courses/solana-course/content/serialize-instruction-data.md +++ /dev/null @@ -1,568 +0,0 @@ ---- -title: Serialize Custom Instruction Data -objectives: - - Explain the contents of a transaction - - Explain transaction instructions - - Explain the basics of Solana's runtime optimizations - - Explain Borsh - - Use Borsh to serialize custom instruction data ---- - -# TL;DR - -- Transactions are made up of an array of instructions, a single transaction can - have one or more instructions in it, each targeting its own program. When a - transaction is submitted, the Solana runtime will process all the - transaction's instructions in order and atomically, meaning that if any of the - instructions fail for any reason, the entire transaction will fail to be - processed. -- Every _instruction_ is made up of 3 components: the intended program's ID, an - array of all accounts involved, and a byte buffer of instruction data. -- Every _transaction_ contains: an array of all accounts it intends to read from - or write to, one or more instructions, a recent blockhash, and one or more - signatures. -- In order to pass instruction data from a client, it must be serialized into a - byte buffer. To facilitate this process of serialization, we will be using - [Borsh](https://borsh.io/). -- Transactions can fail to be processed by the blockchain for any number of - reasons, we’ll discuss some of the most common ones here. - -# Overview - -## Transactions - -Transactions are how we send information to the blockchain in order to be -processed. So far, we’ve learned how to create very basic transactions with -limited functionality. But transactions, and the programs they are sent to, can -be designed to be far more flexible and handle far more complexity than we’ve -dealt with up to now. - -### Transaction Contents - -Every transaction contains: - -- An array that includes every account it intends to read from or write to -- One or more instructions -- A recent blockhash -- One or more signatures - -`@solana/web3.js` simplifies this process for you so that all you really need to -focus on is adding instructions and signatures. The library builds the array of -accounts based on that information and handles the logic for including a recent -blockhash. - -## Instructions - -Every instruction contains: - -- The program ID (public key) of the intended program -- An array listing every account that will be read from or written to during - execution -- A byte buffer of instruction data - -Identifying the program by its public key ensures that the instruction is -carried out by the correct program. - -Including an array of every account that will be read from or written to allows -the network to perform a number of optimizations which allow for high -transaction load and quicker execution. - -The byte buffer lets you pass external data to a program. - -You can include multiple instructions in a single transaction. The Solana -runtime will process these instructions in order and atomically. In other words, -if every instruction succeeds then the transaction as a whole will be -successful, but if a single instruction fails then the entire transaction will -fail immediately with no side-effects. - -A note on the account array and optimization: - -It is not just an array of the accounts’ public keys. 
Each object in the array -includes the account’s public key, whether or not it is a signer on the -transaction, and whether or not it is writable. Including whether or not an -account is writable during the execution of an instruction allows the runtime to -facilitate parallel processing of on-chain programs. Because you must define -which accounts are read-only and which you will write to, the runtime can -determine which transactions are non-overlapping or read-only and allow them to -execute concurrently. To learn more about the Solana’s runtime, check out this -[blog post](https://solana.com/news/sealevel---parallel-processing-thousands-of-smart-contracts). - -### Instruction Data - -The ability to add arbitrary data to an instruction ensures that programs can be -dynamic and flexible enough for broad use cases in the same way that the body of -an HTTP request lets you build dynamic and flexible REST APIs. - -Just as the structure of the body of an HTTP request is dependent on the -endpoint you intend to call, the structure of the byte buffer used as -instruction data is entirely dependent on the recipient program. If you’re -building a full-stack dApp on your own, then you’ll need to copy the same -structure that you used when building the program over to the client-side code. -If you’re working with another developer who is handling the program -development, you can coordinate to ensure matching buffer layouts. - -Let’s think about a concrete example. Imagine working on a Web3 game and being -responsible for writing client-side code that interacts with a player inventory -program. The program was designed to allow the client to: - -- Add inventory based on a player’s game-play results -- Transfer inventory from one player to another -- Equip a player with selected inventory items - -This program would have been structured such that each of these is encapsulated -in its own function. - -Each program, however, only has one entry point. You would instruct the program -on which of these functions to run through the instruction data. - -You would also include in the instruction data any information the function -needs in order to execute properly, e.g. an inventory item’s ID, a player to -transfer inventory to, etc. - -Exactly _how_ this data would be structured would depend on how the program was -written, but it’s common to have the first field in instruction data be a number -that the program can map to a function, after which additional fields act as -function arguments. - -## Serialization - -In addition to knowing what information to include in an instruction data -buffer, you also need to serialize it properly. The most common serializer used -in Solana is [Borsh](https://borsh.io). Per the website: - -> Borsh stands for Binary Object Representation Serializer for Hashing. It is -> meant to be used in security-critical projects as it prioritizes consistency, -> safety, speed; and comes with a strict specification. - -Borsh maintains a [JS library](https://github.com/near/borsh-js) that handles -serializing common types into a buffer. There are also other packages built on -top of borsh that try to make this process even easier. We’ll be using the -`@project-serum/borsh` library which can be installed using `npm`. - -Building off of the previous game inventory example, let’s look at a -hypothetical scenario where we are instructing the program to equip a player -with a given item. 
Assume the program is designed to accept a buffer that -represents a struct with the following properties: - -1. `variant` as an unsigned, 8-bit integer that instructs the program which - instruction, or function, to execute. -2. `playerId` as an unsigned, 16-bit integer that represents the player ID of - the player who is to be equipped with the given item. -3. `itemId` as an unsigned, 256-bit integer that represents the item ID of the - item that will be equipped to the given player. - -All of this will be passed as a byte buffer that will be read in order, so -ensuring proper buffer layout order is crucial. You would create the buffer -layout schema or template for the above as follows: - -```tsx -import * as borsh from "@project-serum/borsh"; - -const equipPlayerSchema = borsh.struct([ - borsh.u8("variant"), - borsh.u16("playerId"), - borsh.u256("itemId"), -]); -``` - -You can then encode data using this schema with the `encode` method. This method -accepts as arguments an object representing the data to be serialized and a -buffer. In the below example, we allocate a new buffer that’s much larger than -needed, then encode the data into that buffer and slice it down into a new -buffer that’s only as large as needed. - -```tsx -import * as borsh from "@project-serum/borsh"; - -const equipPlayerSchema = borsh.struct([ - borsh.u8("variant"), - borsh.u16("playerId"), - borsh.u256("itemId"), -]); - -const buffer = Buffer.alloc(1000); -equipPlayerSchema.encode( - { variant: 2, playerId: 1435, itemId: 737498 }, - buffer, -); - -const instructionBuffer = buffer.slice(0, equipPlayerSchema.getSpan(buffer)); -``` - -Once a buffer is properly created and the data serialized, all that’s left is -building the transaction. This is similar to what you’ve done in previous -lessons. The example below assumes that: - -- `player`, `playerInfoAccount`, and `PROGRAM_ID` are already defined somewhere - outside the code snippet -- `player` is a user’s public key -- `playerInfoAccount` is the public key of the account where inventory changes - will be written -- `SystemProgram` will be used in the process of executing the instruction. - -```tsx -import * as borsh from "@project-serum/borsh"; -import * as web3 from "@solana/web3.js"; - -const equipPlayerSchema = borsh.struct([ - borsh.u8("variant"), - borsh.u16("playerId"), - borsh.u256("itemId"), -]); - -const buffer = Buffer.alloc(1000); -equipPlayerSchema.encode( - { variant: 2, playerId: 1435, itemId: 737498 }, - buffer, -); - -const instructionBuffer = buffer.slice(0, equipPlayerSchema.getSpan(buffer)); - -const endpoint = web3.clusterApiUrl("devnet"); -const connection = new web3.Connection(endpoint); - -const transaction = new web3.Transaction(); -const instruction = new web3.TransactionInstruction({ - keys: [ - { - pubkey: player.publicKey, - isSigner: true, - isWritable: false, - }, - { - pubkey: playerInfoAccount, - isSigner: false, - isWritable: true, - }, - { - pubkey: web3.SystemProgram.programId, - isSigner: false, - isWritable: false, - }, - ], - data: instructionBuffer, - programId: PROGRAM_ID, -}); - -transaction.add(instruction); - -web3.sendAndConfirmTransaction(connection, transaction, [player]).then(txid => { - console.log( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); -}); -``` - -# Demo - -Let’s practice this together by building a Movie Review app that lets users -submit a movie review and have it stored on Solana’s network. 
We’ll build this -app a little bit at a time over the next few lessons, adding new functionality -each lesson. - -![Screenshot of movie review frontend](../assets/movie-reviews-frontend.png) - -The public key of the Solana program we’ll use for this application is -`CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN`. - -### 1. Download the starter code - -Before we get started, go ahead and download the -[starter code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/starter). - -The project is a fairly simple Next.js application. It includes the -`WalletContextProvider` we created in the Wallets lesson, a `Card` component for -displaying a movie review, a `MovieList` component that displays reviews in a -list, a `Form` component for submitting a new review, and a `Movie.ts` file that -contains a class definition for a `Movie` object. - -Note that for now, the movies displayed on the page when you run `npm run dev` -are mocks. In this lesson, we’ll focus on adding a new review but we won’t -actually be able to see that review displayed. Next lesson, we’ll focus on -deserializing custom data from on-chain accounts. - -### 2. Create the buffer layout - -Remember that to properly interact with a Solana program, you need to know how -it expects data to be structured. Our Movie Review program is expecting -instruction data to contain: - -1. `variant` as an unsigned, 8-bit integer representing which instruction should - be executed (in other words which function on the program should be called). -2. `title` as a string representing the title of the movie that you are - reviewing. -3. `rating` as an unsigned, 8-bit integer representing the rating out of 5 that - you are giving to the movie you are reviewing. -4. `description` as a string representing the written portion of the review you - are leaving for the movie. - -Let’s configure a `borsh` layout in the `Movie` class. Start by importing -`@project-serum/borsh`. Next, create a `borshInstructionSchema` property and set -it to the appropriate `borsh` struct containing the properties listed above. - -```tsx -import * as borsh from '@project-serum/borsh' - -export class Movie { - title: string; - rating: number; - description: string; - - ... - - borshInstructionSchema = borsh.struct([ - borsh.u8('variant'), - borsh.str('title'), - borsh.u8('rating'), - borsh.str('description'), - ]) -} -``` - -Keep in mind that _order matters_. If the order of properties here differs from -how the program is structured, the transaction will fail. - -### 3. Create a method to serialize data - -Now that we have the buffer layout set up, let’s create a method in `Movie` -called `serialize()` that will return a `Buffer` with a `Movie` object’s -properties encoded into the appropriate layout. - -```tsx -import * as borsh from '@project-serum/borsh' - -export class Movie { - title: string; - rating: number; - description: string; - - ... - - borshInstructionSchema = borsh.struct([ - borsh.u8('variant'), - borsh.str('title'), - borsh.u8('rating'), - borsh.str('description'), - ]) - - serialize(): Buffer { - const buffer = Buffer.alloc(1000) - this.borshInstructionSchema.encode({ ...this, variant: 0 }, buffer) - return buffer.slice(0, this.borshInstructionSchema.getSpan(buffer)) - } -} -``` - -The method shown above first creates a large enough buffer for our object, then -encodes `{ ...this, variant: 0 }` into the buffer. 
Because the `Movie` class -definition contains 3 of the 4 properties required by the buffer layout and uses -the same naming, we can use it directly with the spread operator and just add -the `variant` property. Finally, the method returns a new buffer that leaves off -the unused portion of the original. - -### 4. Send transaction when user submits form - -Now that we have the building blocks for the instruction data, we can create and -send the transaction when a user submits the form. Open `Form.tsx` and locate -the `handleTransactionSubmit` function. This gets called by `handleSubmit` each -time a user submits the Movie Review form. - -Inside this function, we’ll be creating and sending the transaction that -contains the data submitted through the form. - -Start by importing `@solana/web3.js` and importing `useConnection` and -`useWallet` from `@solana/wallet-adapter-react`. - -```tsx -import { FC } from "react"; -import { Movie } from "../models/Movie"; -import { useState } from "react"; -import { - Box, - Button, - FormControl, - FormLabel, - Input, - NumberDecrementStepper, - NumberIncrementStepper, - NumberInput, - NumberInputField, - NumberInputStepper, - Textarea, -} from "@chakra-ui/react"; -import * as web3 from "@solana/web3.js"; -import { useConnection, useWallet } from "@solana/wallet-adapter-react"; -``` - -Next, before the `handleSubmit` function, call `useConnection()` to get a -`connection` object and call `useWallet()` to get `publicKey` and -`sendTransaction`. - -```tsx -import { FC } from 'react' -import { Movie } from '../models/Movie' -import { useState } from 'react' -import { Box, Button, FormControl, FormLabel, Input, NumberDecrementStepper, NumberIncrementStepper, NumberInput, NumberInputField, NumberInputStepper, Textarea } from '@chakra-ui/react' -import * as web3 from '@solana/web3.js' -import { useConnection, useWallet } from '@solana/wallet-adapter-react' - -const MOVIE_REVIEW_PROGRAM_ID = 'CenYq6bDRB7p73EjsPEpiYN7uveyPUTdXkDkgUduboaN' - -export const Form: FC = () => { - const [title, setTitle] = useState('') - const [rating, setRating] = useState(0) - const [message, setMessage] = useState('') - - const { connection } = useConnection(); - const { publicKey, sendTransaction } = useWallet(); - - const handleSubmit = (event: any) => { - event.preventDefault() - const movie = new Movie(title, rating, description) - handleTransactionSubmit(movie) - } - - ... -} -``` - -Before we implement `handleTransactionSubmit`, let’s talk about what needs to be -done. We need to: - -1. Check that `publicKey` exists to ensure that the user has connected their - wallet. -2. Call `serialize()` on `movie` to get a buffer representing the instruction - data. -3. Create a new `Transaction` object. -4. Get all of the accounts that the transaction will read from or write to. -5. Create a new `Instruction` object that includes all of these accounts in the - `keys` argument, includes the buffer in the `data` argument, and includes the - program’s public key in the `programId` argument. -6. Add the instruction from the last step to the transaction. -7. Call `sendTransaction`, passing in the assembled transaction. - -That’s quite a lot to process! But don’t worry, it gets easier the more you do -it. 
Let’s start with the first 3 steps from above: - -```tsx -const handleTransactionSubmit = async (movie: Movie) => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const buffer = movie.serialize(); - const transaction = new web3.Transaction(); -}; -``` - -The next step is to get all of the accounts that the transaction will read from -or write to. In past lessons, the account where data will be stored has been -given to you. This time, the account’s address is more dynamic, so it needs to -be computed. We’ll cover this in depth in the next lesson, but for now you can -use the following, where `pda` is the address to the account where data will be -stored: - -```tsx -const [pda] = await web3.PublicKey.findProgramAddress( - [publicKey.toBuffer(), Buffer.from(movie.title)], - new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), -); -``` - -In addition to this account, the program will also need to read from -`SystemProgram`, so our array needs to include `web3.SystemProgram.programId` as -well. - -With that, we can finish the remaining steps: - -```tsx -const handleTransactionSubmit = async (movie: Movie) => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const buffer = movie.serialize(); - const transaction = new web3.Transaction(); - - const [pda] = await web3.PublicKey.findProgramAddress( - [publicKey.toBuffer(), new TextEncoder().encode(movie.title)], - new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), - ); - - const instruction = new web3.TransactionInstruction({ - keys: [ - { - pubkey: publicKey, - isSigner: true, - isWritable: false, - }, - { - pubkey: pda, - isSigner: false, - isWritable: true, - }, - { - pubkey: web3.SystemProgram.programId, - isSigner: false, - isWritable: false, - }, - ], - data: buffer, - programId: new web3.PublicKey(MOVIE_REVIEW_PROGRAM_ID), - }); - - transaction.add(instruction); - - try { - let txid = await sendTransaction(transaction, connection); - console.log( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - } catch (e) { - alert(JSON.stringify(e)); - } -}; -``` - -And that’s it! You should now be able to use the form on the site to submit a -movie review. While you won’t see the UI update to reflect the new review, you -can look at the transaction’s program logs on Solana Explorer to see that it was -successful. - -If you need a bit more time with this project to feel comfortable, have a look -at the complete -[solution code](https://github.com/Unboxed-Software/solana-movie-frontend/tree/solution-serialize-instruction-data). - -# Challenge - -Now it’s your turn to build something independently. Create an application that -lets students of this course introduce themselves! The Solana program that -supports this is at `HdE95RSVsdb315jfJtaykXhXY478h53X6okDupVfY9yf`. - -![Screenshot of Student Intros frontend](../assets/student-intros-frontend.png) - -1. You can build this from scratch or you can download the starter code - [here](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/starter). -2. Create the instruction buffer layout in `StudentIntro.ts`. The program - expects instruction data to contain: - 1. `variant` as an unsigned, 8-bit integer representing the instruction to - run (should be 0). - 2. `name` as a string representing the student's name. - 3. `message` as a string representing the message the student is sharing - about their Solana journey. -3. 
Create a method in `StudentIntro.ts` that will use the buffer layout to - serialize a `StudentIntro` object. -4. In the `Form` component, implement the `handleTransactionSubmit` function so - that it serializes a `StudentIntro`, builds the appropriate transaction and - transaction instructions, and submits the transaction to the user's wallet. -5. You should now be able to submit introductions and have the information - stored on chain! Be sure to log the transaction ID and look at it in Solana - Explorer to verify that it worked. - -If you get really stumped, you can check out the solution code -[here](https://github.com/Unboxed-Software/solana-student-intros-frontend/tree/solution-serialize-instruction-data). - -Feel free to get creative with these challenges and take them even further. The -instructions aren't here to hold you back! diff --git a/content/courses/solana-course/content/signer-auth.md b/content/courses/solana-course/content/signer-auth.md deleted file mode 100644 index 85ae5a422..000000000 --- a/content/courses/solana-course/content/signer-auth.md +++ /dev/null @@ -1,561 +0,0 @@ ---- -title: Signer Authorization -objectives: - - Explain the security risks associated with not performing appropriate signer - checks - - Implement signer checks using long-form Rust - - Implement signer checks using Anchor’s `Signer` type - - Implement signer checks using Anchor’s `#[account(signer)]` constraint ---- - -# TL;DR - -- Use **Signer Checks** to verify that specific accounts have signed a - transaction. Without appropriate signer checks, accounts may be able to - execute instructions they shouldn’t be authorized to perform. -- To implement a signer check in Rust, simply check that an account’s - `is_signer` property is `true` - ```rust - if !ctx.accounts.authority.is_signer { - return Err(ProgramError::MissingRequiredSignature.into()); - } - ``` -- In Anchor, you can use the **`Signer`** account type in your account - validation struct to have Anchor automatically perform a signer check on a - given account -- Anchor also has an account constraint that will automatically verify that a - given account has signed a transaction - -# Overview - -Signer checks are used to verify that a given account’s owner has authorized a -transaction. Without a signer check, operations whose execution should be -limited to only specific accounts can potentially be performed by any account. -In the worst case scenario, this could result in wallets being completely -drained by attackers passing in whatever account they want to an instruction. - -### Missing Signer Check - -The example below shows an oversimplified version of an instruction that updates -the `authority` field stored on a program account. - -Notice that the `authority` field on the `UpdateAuthority` account validation -struct is of type `AccountInfo`. In Anchor, the `AccountInfo` account type -indicates that no checks are performed on the account prior to instruction -execution. - -Although the `has_one` constraint is used to validate the `authority` account -passed into the instruction matches the `authority` field stored on the `vault` -account, there is no check to verify the `authority` account authorized the -transaction. - -This means an attacker can simply pass in the public key of the `authority` -account and their own public key as the `new_authority` account to reassign -themselves as the new authority of the `vault` account. At that point, they can -interact with the program as the new authority. 
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod insecure_update {
-    use super::*;
-    ...
-    pub fn update_authority(ctx: Context<UpdateAuthority>) -> Result<()> {
-        ctx.accounts.vault.authority = ctx.accounts.new_authority.key();
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct UpdateAuthority<'info> {
-    #[account(
-        mut,
-        has_one = authority
-    )]
-    pub vault: Account<'info, Vault>,
-    pub new_authority: AccountInfo<'info>,
-    pub authority: AccountInfo<'info>,
-}
-
-#[account]
-pub struct Vault {
-    token_account: Pubkey,
-    authority: Pubkey,
-}
-```
-
-### Add signer authorization checks
-
-All you need to do to validate that the `authority` account signed is to add a
-signer check within the instruction. That simply means checking that
-`authority.is_signer` is `true`, and returning a `MissingRequiredSignature`
-error if it is `false`.
-
-```rust
-if !ctx.accounts.authority.is_signer {
-    return Err(ProgramError::MissingRequiredSignature.into());
-}
-```
-
-By adding a signer check, the instruction only processes if the account passed
-in as the `authority` account also signed the transaction. If the transaction
-is not signed by the account passed in as the `authority` account, the
-transaction fails.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod secure_update {
-    use super::*;
-    ...
-    pub fn update_authority(ctx: Context<UpdateAuthority>) -> Result<()> {
-        if !ctx.accounts.authority.is_signer {
-            return Err(ProgramError::MissingRequiredSignature.into());
-        }
-
-        ctx.accounts.vault.authority = ctx.accounts.new_authority.key();
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct UpdateAuthority<'info> {
-    #[account(
-        mut,
-        has_one = authority
-    )]
-    pub vault: Account<'info, Vault>,
-    pub new_authority: AccountInfo<'info>,
-    pub authority: AccountInfo<'info>,
-}
-
-#[account]
-pub struct Vault {
-    token_account: Pubkey,
-    authority: Pubkey,
-}
-```
-
-### Use Anchor’s `Signer` account type
-
-However, putting this check into the instruction function muddles the
-separation between account validation and instruction logic.
-
-Fortunately, Anchor makes it easy to perform signer checks by providing the
-`Signer` account type. Simply change the `authority` account’s type in the
-account validation struct to be of type `Signer`, and Anchor will check at
-runtime that the specified account is a signer on the transaction. This is the
-approach we generally recommend since it allows you to separate the signer
-check from instruction logic.
-
-In the example below, if the `authority` account does not sign the transaction,
-then the transaction will fail before even reaching the instruction logic.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod secure_update {
-    use super::*;
-    ...
-    pub fn update_authority(ctx: Context<UpdateAuthority>) -> Result<()> {
-        ctx.accounts.vault.authority = ctx.accounts.new_authority.key();
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct UpdateAuthority<'info> {
-    #[account(
-        mut,
-        has_one = authority
-    )]
-    pub vault: Account<'info, Vault>,
-    pub new_authority: AccountInfo<'info>,
-    pub authority: Signer<'info>,
-}
-
-#[account]
-pub struct Vault {
-    token_account: Pubkey,
-    authority: Pubkey,
-}
-```
-
-Note that when you use the `Signer` type, no other ownership or type checks are
-performed.
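-
-From the client side, that check surfaces before your instruction logic ever
-runs. As a rough sketch (assuming an Anchor `program` client plus `vaultPda`,
-`newAuthority`, and `vaultAuthority` values from a hypothetical test setup, none
-of which come from the example above), calling the instruction without the
-authority's signature fails signature verification:
-
-```tsx
-try {
-  // The provider wallet signs and pays, but `vaultAuthority` never signs.
-  // Because `authority` is declared as `Signer`, the transaction is rejected
-  // at signature verification, before the program's logic executes.
-  await program.methods
-    .updateAuthority()
-    .accounts({
-      vault: vaultPda,
-      newAuthority: newAuthority.publicKey,
-      authority: vaultAuthority.publicKey,
-    })
-    .rpc();
-} catch (err) {
-  console.log("Expected failure:", err);
-}
-```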
-
-### Use Anchor’s `#[account(signer)]` constraint
-
-While in most cases the `Signer` account type will suffice to ensure an account
-has signed a transaction, the fact that no other ownership or type checks are
-performed means that this account can’t really be used for anything else in the
-instruction.
-
-This is where the `signer` _constraint_ comes in handy. The `#[account(signer)]`
-constraint allows you to verify the account signed the transaction, while also
-getting the benefits of using the `Account` type if you want access to its
-underlying data as well.
-
-As an example of when this would be useful, imagine writing an instruction that
-you expect to be invoked via CPI and that expects one of the passed-in accounts
-to be both a **signer** on the transaction and a **data source**. Using the
-`Signer` account type here removes the automatic deserialization and type
-checking you would get with the `Account` type. This is both inconvenient, as
-you need to manually deserialize the account data in the instruction logic, and
-may make your program vulnerable by not getting the ownership and type checking
-performed by the `Account` type.
-
-In the example below, you can safely write logic to interact with the data
-stored in the `authority` account while also verifying that it signed the
-transaction.
-
-```rust
-use anchor_lang::prelude::*;
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod secure_update {
-    use super::*;
-    ...
-    pub fn update_authority(ctx: Context<UpdateAuthority>) -> Result<()> {
-        ctx.accounts.vault.authority = ctx.accounts.new_authority.key();
-
-        // access the data stored in authority
-        msg!("Total number of depositors: {}", ctx.accounts.authority.num_depositors);
-        Ok(())
-    }
-}
-
-#[derive(Accounts)]
-pub struct UpdateAuthority<'info> {
-    #[account(
-        mut,
-        has_one = authority
-    )]
-    pub vault: Account<'info, Vault>,
-    pub new_authority: AccountInfo<'info>,
-    #[account(signer)]
-    pub authority: Account<'info, AuthState>
-}
-
-#[account]
-pub struct Vault {
-    token_account: Pubkey,
-    authority: Pubkey,
-}
-
-#[account]
-pub struct AuthState {
-    amount: u64,
-    num_depositors: u64,
-    num_vaults: u64
-}
-```
-
-# Demo
-
-Let’s practice by creating a simple program to demonstrate how a missing signer
-check can allow an attacker to withdraw tokens that don’t belong to them.
-
-This program initializes a simplified token “vault” account and demonstrates how
-a missing signer check could allow the vault to be drained.
-
-### 1. Starter
-
-To get started, download the starter code from the `starter` branch of
-[this repository](https://github.com/Unboxed-Software/solana-signer-auth/tree/starter).
-The starter code includes a program with two instructions and the boilerplate
-setup for the test file.
-
-The `initialize_vault` instruction initializes two new accounts: `Vault` and
-`TokenAccount`. The `Vault` account will be initialized using a Program Derived
-Address (PDA) and store the address of a token account and the authority of the
-vault. The authority of the token account will be the `vault` PDA, which enables
-the program to sign for the transfer of tokens.
-
-The `insecure_withdraw` instruction will transfer tokens in the `vault`
-account’s token account to a `withdraw_destination` token account. However, the
-`authority` account in the `InsecureWithdraw` struct has a type of
-`UncheckedAccount`. This is a wrapper around `AccountInfo` to explicitly
-indicate the account is unchecked.
- -Without a signer check, anyone can simply provide the public key of the -`authority` account that matches `authority` stored on the `vault` account and -the `insecure_withdraw` instruction would continue to process. - -While this is somewhat contrived in that any DeFi program with a vault would be -more sophisticated than this, it will show how the lack of a signer check can -result in tokens being withdrawn by the wrong party. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Mint, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod signer_authorization { - use super::*; - - pub fn initialize_vault(ctx: Context) -> Result<()> { - ctx.accounts.vault.token_account = ctx.accounts.token_account.key(); - ctx.accounts.vault.authority = ctx.accounts.authority.key(); - Ok(()) - } - - pub fn insecure_withdraw(ctx: Context) -> Result<()> { - let amount = ctx.accounts.token_account.amount; - - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.vault.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InitializeVault<'info> { - #[account( - init, - payer = authority, - space = 8 + 32 + 32, - seeds = [b"vault"], - bump - )] - pub vault: Account<'info, Vault>, - #[account( - init, - payer = authority, - token::mint = mint, - token::authority = vault, - )] - pub token_account: Account<'info, TokenAccount>, - pub mint: Account<'info, Mint>, - #[account(mut)] - pub authority: Signer<'info>, - pub token_program: Program<'info, Token>, - pub system_program: Program<'info, System>, - pub rent: Sysvar<'info, Rent>, -} - -#[derive(Accounts)] -pub struct InsecureWithdraw<'info> { - #[account( - seeds = [b"vault"], - bump, - has_one = token_account, - has_one = authority - )] - pub vault: Account<'info, Vault>, - #[account(mut)] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - /// CHECK: demo missing signer check - pub authority: UncheckedAccount<'info>, -} - -#[account] -pub struct Vault { - token_account: Pubkey, - authority: Pubkey, -} -``` - -### 2. Test `insecure_withdraw` instruction - -The test file includes the code to invoke the `initialize_vault` instruction -using `wallet` as the `authority` on the vault. The code then mints 100 tokens -to the `vault` token account. Theoretically, the `wallet` key should be the only -one that can withdraw the 100 tokens from the vault. - -Now, let’s add a test to invoke `insecure_withdraw` on the program to show that -the current version of the program allows a third party to in fact withdraw -those 100 tokens. - -In the test, we’ll still use the public key of `wallet` as the `authority` -account, but we’ll use a different keypair to sign and send the transaction. - -```tsx -describe("signer-authorization", () => { - ... 
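-
-  // Note: `walletFake` below is assumed (from the boilerplate test setup) to be
-  // a separate Keypair with no authority over the vault. It signs and pays for
-  // the transaction while `wallet.publicKey` is still passed as `authority`.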
- it("Insecure withdraw", async () => { - const tx = await program.methods - .insecureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenAccount.publicKey, - withdrawDestination: withdrawDestinationFake, - authority: wallet.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - - const balance = await connection.getTokenAccountBalance( - tokenAccount.publicKey - ) - expect(balance.value.uiAmount).to.eq(0) - }) -}) -``` - -Run `anchor test` to see that both transactions will complete successfully. - -```bash -signer-authorization - ✔ Initialize Vault (810ms) - ✔ Insecure withdraw (405ms) -``` - -Since there is no signer check for the `authority` account, the -`insecure_withdraw` instruction will transfer tokens from the `vault` token -account to the `withdrawDestinationFake` token account as long as the public key -of the`authority` account matches the public key stored on the authority field -of the `vault` account. Clearly, the `insecure_withdraw` instruction is as -insecure as the name suggests. - -### 3. Add `secure_withdraw` instruction - -Let’s fix the problem in a new instruction called `secure_withdraw`. This -instruction will be identical to the `insecure_withdraw` instruction, except -we’ll use the `Signer` type in the Accounts struct to validate the `authority` -account in the `SecureWithdraw` struct. If the `authority` account is not a -signer on the transaction, then we expect the transaction to fail and return an -error. - -```rust -use anchor_lang::prelude::*; -use anchor_spl::token::{self, Mint, Token, TokenAccount}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod signer_authorization { - use super::*; - ... - pub fn secure_withdraw(ctx: Context) -> Result<()> { - let amount = ctx.accounts.token_account.amount; - - let seeds = &[b"vault".as_ref(), &[*ctx.bumps.get("vault").unwrap()]]; - let signer = [&seeds[..]]; - - let cpi_ctx = CpiContext::new_with_signer( - ctx.accounts.token_program.to_account_info(), - token::Transfer { - from: ctx.accounts.token_account.to_account_info(), - authority: ctx.accounts.vault.to_account_info(), - to: ctx.accounts.withdraw_destination.to_account_info(), - }, - &signer, - ); - - token::transfer(cpi_ctx, amount)?; - Ok(()) - } -} - -#[derive(Accounts)] -pub struct SecureWithdraw<'info> { - #[account( - seeds = [b"vault"], - bump, - has_one = token_account, - has_one = authority - )] - pub vault: Account<'info, Vault>, - #[account(mut)] - pub token_account: Account<'info, TokenAccount>, - #[account(mut)] - pub withdraw_destination: Account<'info, TokenAccount>, - pub token_program: Program<'info, Token>, - pub authority: Signer<'info>, -} -``` - -### 4. Test `secure_withdraw` instruction - -With the instruction in place, return to the test file to test the -`secure_withdraw` instruction. Invoke the `secure_withdraw` instruction, again -using the public key of `wallet` as the `authority` account and the -`withdrawDestinationFake` keypair as the signer and withdraw destination. Since -the `authority` account is validated using the `Signer` type, we expect the -transaction to fail the signer check and return an error. - -```tsx -describe("signer-authorization", () => { - ... 
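-
-  // Unlike the insecure test above, we expect sendAndConfirmTransaction to
-  // throw here: `authority` is declared as a `Signer` in `SecureWithdraw`, and
-  // `walletFake` cannot produce a signature for `wallet.publicKey`.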
- it("Secure withdraw", async () => { - try { - const tx = await program.methods - .secureWithdraw() - .accounts({ - vault: vaultPDA, - tokenAccount: tokenAccount.publicKey, - withdrawDestination: withdrawDestinationFake, - authority: wallet.publicKey, - }) - .transaction() - - await anchor.web3.sendAndConfirmTransaction(connection, tx, [walletFake]) - } catch (err) { - expect(err) - console.log(err) - } - }) -}) -``` - -Run `anchor test` to see that the transaction will now return a signature -verification error. - -```bash -Error: Signature verification failed -``` - -That’s it! This is a fairly simple thing to avoid, but incredibly important. -Make sure to always think through who should who should be authorizing -instructions and make sure that each is a signer on the transaction. - -If you want to take a look at the final solution code you can find it on the -`solution` branch of -[the repository](https://github.com/Unboxed-Software/solana-signer-auth/tree/solution). - -# Challenge - -At this point in the course, we hope you've started to work on programs and -projects outside the Demos and Challenges provided in these lessons. For this -and the remainder of the lessons on security vulnerabilities, the Challenge for -each lesson will be to audit your own code for the security vulnerability -discussed in the lesson. - -Alternatively, you can find open source programs to audit. There are plenty of -programs you can look at. A good start if you don't mind diving into native Rust -would be the -[SPL programs](https://github.com/solana-labs/solana-program-library). - -So for this lesson, take a look at a program (whether yours or one you've found -online) and audit it for signer checks. If you find a bug in somebody else's -program, please alert them! If you find a bug in your own program, be sure to -patch it right away. diff --git a/content/courses/solana-course/content/solana-pay.md b/content/courses/solana-course/content/solana-pay.md deleted file mode 100644 index bef7979bf..000000000 --- a/content/courses/solana-course/content/solana-pay.md +++ /dev/null @@ -1,852 +0,0 @@ ---- -title: Solana Pay -objectives: - - Use the Solana Pay specification to build payment requests and initiate - transactions using URLs encoded as QR codes - - Use the `@solana/pay` library to help with the creation of Solana Pay - transaction requests - - Partially sign transactions and implement transaction gating based on - certain conditions ---- - -# TL;DR - -- **Solana Pay** is a specification for encoding Solana transaction requests - within URLs, enabling standardized transaction requests across different - Solana apps and wallets -- **Partial signing** of transactions allows for the creation of transactions - that require multiple signatures before they are submitted to the network -- **Transaction gating** involves implementing rules that determine whether - certain transactions are allowed to be processed or not, based on certain - conditions or the presence of specific data in the transaction - -# Overview - -The Solana community is continually improving and expanding the network's -functionality. But that doesn't always mean developing brand new technology. -Sometimes it means leveraging the network's existing features in new and -interesting ways. - -Solana Pay is a great example of this. 
Rather than add new functionality to the
-network, Solana Pay uses the network's existing signing features in a unique way
-to enable merchants and applications to request transactions and build gating
-mechanisms for specific transaction types.
-
-Throughout this lesson, you'll learn how to use Solana Pay to create transfer
-and transaction requests, encode these requests as a QR code, partially sign
-transactions, and gate transactions based on conditions you choose. Rather than
-leaving it at that, we hope you'll see this as an example of leveraging existing
-features in new and interesting ways, using it as a launching pad for your own
-unique client-side network interactions.
-
-## Solana Pay
-
-The [Solana Pay specification](https://docs.solanapay.com/spec) is a set of
-standards that allow users to request payments and initiate transactions using
-URLs in a uniform way across various Solana apps and wallets.
-
-Request URLs are prefixed with `solana:` so that platforms can direct the link
-to the appropriate application. For example, on mobile, a URL that starts with
-`solana:` will be directed to wallet applications that support the Solana Pay
-specification. From there, the wallet can use the remainder of the URL to
-appropriately handle the request.
-
-There are two types of requests defined by the Solana Pay specification:
-
-1. Transfer Request: used for simple SOL or SPL Token transfers
-2. Transaction Request: used to request any type of Solana transaction
-
-### Transfer requests
-
-The transfer request specification describes a non-interactive request for SOL
-or SPL token transfer. Transfer request URLs take the following format:
-`solana:<recipient>?<optional-query-params>`.
-
-The value of `recipient` is required and must be a base58-encoded public key of
-the account that will receive the transfer. Additionally, the following
-optional query parameters are supported:
-
-- `amount` - a non-negative integer or decimal value indicating the amount of
-  tokens to transfer
-- `spl-token` - a base58-encoded public key of an SPL Token mint account if the
-  transfer is of an SPL token and not SOL
-- `reference` - optional reference values as base58-encoded 32 byte arrays. This
-  can be used by a client for identifying the transaction on-chain since the
-  client will not have a transaction's signature.
-- `label` - a URL-encoded UTF-8 string that describes the source of the transfer
-  request
-- `message` - a URL-encoded UTF-8 string that describes the nature of the
-  transfer request
-- `memo` - a URL-encoded UTF-8 string that must be included in the SPL memo
-  instruction in the payment transaction
-
-By way of example, here is a URL describing a transfer request for 1 SOL:
-
-```text
-solana:mvines9iiHiQTysrwkJjGf2gb9Ex9jXJX8ns3qwf2kN?amount=1&label=Michael&message=Thanks%20for%20all%20the%20fish&memo=OrderId12345
-```
-
-And here is a URL describing a transfer request for 0.01 USDC:
-
-```text
-solana:mvines9iiHiQTysrwkJjGf2gb9Ex9jXJX8ns3qwf2kN?amount=0.01&spl-token=EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v
-```
-
-### Transaction requests
-
-The Solana Pay transaction request is similar to a transfer request in that it
-is simply a URL that can be consumed by a supporting wallet. However, this
-request is interactive and the format is more open-ended:
-
-```text
-solana:<link>
-```
-
-The value of `link` should be a URL to which the consuming wallet can make an
-HTTP request.
Rather than containing all the information needed for a -transaction, a transaction request uses this URL to fetch the transaction that -should be presented to the user. - -When a wallet receives a transaction Request URL, four things happen: - -1. The wallet sends a GET request to the application at the provided `link` URL - to retrieve a label and icon image to display to the user. -2. The wallet then sends a POST request with the public key of the end user. -3. Using the public key of the end user (and any additional information provided - in `link`), the application then builds the transaction and responds with a - base64-encoded serialized transaction. -4. The wallet decodes and deserializes the transaction, then lets the user sign - and send the transaction. - -Given that transaction requests are more involved than transfer requests, the -remainder of this lesson will focus on creating transaction requests. - -## Create a transaction request - -### Define the API endpoint - -The main thing you, the developer, need to do to make the transaction request -flow work is set up a REST API endpoint at the URL you plan to include in the -transaction request. In this lesson, we'll be using -[Next.js API Routes](https://nextjs.org/docs/api-routes/introduction) for our -endpoints, but you're welcome to use whatever stack and tools you're most -comfortable with. - -In Next.js, you do this by adding a file to the `pages/api` folder and exporting -a function that handles the request and response. - -```typescript -import { NextApiRequest, NextApiResponse } from "next"; - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse, -) { - // Handle the request -} -``` - -### Handle a GET request - -The wallet consuming your transaction request URL will first issue a GET request -to this endpoint. You'll want your endpoint to return a JSON object with two -fields: - -1. `label` - a string that describes the source of the transaction request -2. `icon`- a URL to an image that can be displayed to the user - -Building on the empty endpoint from before, that may look like this: - -```typescript -import { NextApiRequest, NextApiResponse } from "next"; - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse, -) { - if (req.method === "GET") { - return get(res); - } else { - return res.status(405).json({ error: "Method not allowed" }); - } -} - -function get(res: NextApiResponse) { - res.status(200).json({ - label: "Store Name", - icon: "https://solana.com/src/img/branding/solanaLogoMark.svg", - }); -} -``` - -When the wallet makes a GET request to the API endpoint, the `get` function is -called, returning a response with a status code of 200 and the JSON object -containing `label` and `icon`. - -### Handle a POST request and build the transaction - -After issuing a GET request, the wallet will issue a POST request to the same -URL. Your endpoint should expect the POST request's `body` to contain a JSON -object with an `account` field provided by the requesting wallet. The value of -`account` will be a string representing the end user's public key. - -With this information and any additional parameters provided, you can build the -transaction and return it to the wallet for signing by: - -1. Connecting to the Solana network and getting the latest `blockhash`. -2. Creating a new transaction using the `blockhash`. -3. Adding instructions to the transaction -4. 
Serializing the transaction and returning it in a `PostResponse` object along
-   with a message for the user.
-
-```typescript
-import { NextApiRequest, NextApiResponse } from "next";
-import {
-  clusterApiUrl,
-  Connection,
-  Keypair,
-  LAMPORTS_PER_SOL,
-  PublicKey,
-  SystemProgram,
-  Transaction,
-} from "@solana/web3.js";
-
-export default async function handler(
-  req: NextApiRequest,
-  res: NextApiResponse,
-) {
-  if (req.method === "GET") {
-    return get(res);
-  } else if (req.method === "POST") {
-    return post(req, res);
-  } else {
-    return res.status(405).json({ error: "Method not allowed" });
-  }
-}
-
-function get(res: NextApiResponse) {
-  res.status(200).json({
-    label: "Store Name",
-    icon: "https://solana.com/src/img/branding/solanaLogoMark.svg",
-  });
-}
-
-async function post(req: NextApiRequest, res: NextApiResponse) {
-  const account = new PublicKey(req.body.account);
-  const reference = new PublicKey(req.query.reference as string);
-
-  const connection = new Connection(clusterApiUrl("devnet"));
-
-  const { blockhash } = await connection.getLatestBlockhash();
-
-  const transaction = new Transaction({
-    recentBlockhash: blockhash,
-    feePayer: account,
-  });
-
-  const instruction = SystemProgram.transfer({
-    fromPubkey: account,
-    toPubkey: Keypair.generate().publicKey,
-    lamports: 0.001 * LAMPORTS_PER_SOL,
-  });
-
-  instruction.keys.push({
-    pubkey: reference,
-    isSigner: false,
-    isWritable: false,
-  });
-
-  transaction.add(instruction);
-
-  const serializedTransaction = transaction.serialize({
-    requireAllSignatures: false,
-  });
-  const base64 = serializedTransaction.toString("base64");
-
-  const message = "Simple transfer of 0.001 SOL";
-
-  res.status(200).json({
-    transaction: base64,
-    message,
-  });
-}
-```
-
-There is nothing too out of the ordinary here. It's the same transaction
-construction you would use in a standard client-side application. The only
-difference is that instead of signing and submitting to the network, you send
-the transaction as a base64-encoded string back in the HTTP response. The wallet
-that issued the request can then present the transaction to the user for
-signing.
-
-### Confirm transaction
-
-You may have noticed that the previous example assumed a `reference` was
-provided as a query parameter. While this is _not_ a value provided by the
-requesting wallet, it _is_ useful to set up your initial transaction request URL
-to contain this query parameter.
-
-Since your application isn't the one submitting the transaction to the network,
-your code won't have access to the transaction signature, which is typically how
-your app would locate a transaction on the network and check its status.
-
-To get around this, you can include a `reference` value as a query parameter for
-each transaction request. This value should be a base58-encoded 32 byte array
-that can be included as a non-signer key on the transaction. This allows your
-app to then use the `getSignaturesForAddress` RPC method to locate the
-transaction. Your app can then tailor its UI according to a transaction's
-status.
-
-If you use the `@solana/pay` library, you can use the `findReference` helper
-function instead of using `getSignaturesForAddress` directly.
-
-## Gated transactions
-
-We've mentioned before how Solana Pay is an example of being able to do cool new
-things with the network by getting creative with existing functionality. Another
-small example of doing this within the Solana Pay umbrella is to only make
-certain transactions available once certain conditions are met.
-
-Since you control the endpoint building the transaction, you can determine what
-criteria must be met before a transaction is built.
For example, you can use the -`account` field provided in the POST request to check if the end user holds an -NFT from a particular collection or if that public key is on a predetermined -list of accounts who can make this particular transaction. - -```typescript -// retrieve array of nfts owned by the given wallet -const nfts = await metaplex.nfts().findAllByOwner({ owner: account }).run(); - -// iterate over the nfts array -for (let i = 0; i < nfts.length; i++) { - // check if the current nft has a collection field with the desired value - if (nfts[i].collection?.address.toString() == collection.toString()) { - // build transaction - } else { - // return an error - } -} -``` - -### Partial Signing - -If you want certain transactions behind some kind of gating mechanism, that -functionality will have to be enforced on-chain as well. Returning an error from -your Solana Pay endpoint makes it more difficult for end users to do the -transaction, but they could still build it manually. - -What this means is that the instruction(s) being called should require some type -of "admin" signature that only your application can provide. In doing that, -however, you'll have made it so that our previous examples don't work. The -transaction is built and sent to the requesting wallet for the end user's -signature, but the submitted transaction will fail without the admin signature. - -Fortunately, Solana enables signature composability with partial signing. - -Partially signing a multi-signature transaction allows signers to add their -signature before the transaction is broadcast on the network. This can be useful -in a number of situations, including: - -- Approving transactions that require the signature of multiple parties, such as - a merchant and a buyer who need to confirm the details of a payment. -- Invoking custom programs that require the signatures of both a user and an - administrator. This can help to limit access to the program instructions and - ensure that only authorized parties can execute them. - -```typescript -const { blockhash, lastValidBlockHeight } = await connection.getLatestBlockhash() - -const transaction = new Transaction({ - feePayer: account, - blockhash, - lastValidBlockHeight, -}) - -... - -transaction.partialSign(adminKeypair) -``` - -The `partialSign` function is used to add a signature to a transaction without -overriding any previous signatures on the transaction. If you are building a -transaction with multiple signers, it is important to remember that if you don't -specify a transaction's `feePayer`, the first signer will be used as the fee -payer for the transaction. To avoid any confusion or unexpected behavior, make -sure to explicitly set the fee payer when necessary. - -In our example of only allowing a transaction request to go through when the end -user has a specific NFT, you would simply add your admin signature to the -transaction using `partialSign` before encoding the transaction as a -base64-encoded string and issuing the HTTP response. - -## Solana Pay QR codes - -One of the standout features of Solana Pay is its easy integration with QR -codes. Since transfer and transaction requests are simply URLs, you can embed -them into QR codes that you make available in your application or elsewhere. - -The `@solana/pay` library simplifies this with the provided `createQR` helper -function. This function needs you to provide the following: - -- `url` - the url of the transaction request. -- `size` (optional) - the width and height of the QR code in pixels. 
Defaults - to 512. -- `background` (optional) - the background color. Defaults to white. -- `color` (optional) - the foreground color. Defaults to black. - -```typescript -const qr = createQR(url, 400, "transparent"); -``` - -# Demo - -Now that you've got a conceptual grasp on Solana Pay, let's put it into -practice. We'll use Solana Pay to generate a series of QR codes for a scavenger -hunt. Participants must visit each scavenger hunt location in order. At each -location, they'll use the provided QR code to submit the appropriate transaction -to the scavenger hunt's on-chain program that keeps track of user progress. - -### 1. Starter - -To get started, download the starter code on the `starter` branch of this -[repository](https://github.com/Unboxed-Software/solana-scavenger-hunt-app/tree/starter). -The starter code is a Next.js app that displays a Solana Pay QR code. Notice -that the menu bar lets you switch between different QR codes. The default option -is a simple SOL transfer for illustrative purposes. Throughout We'll be adding -functionality to the location options in the menu bar. - -![Screenshot of scavenger hunt app](../assets/scavenger-hunt-screenshot.png) - -To do this, we'll be creating a new endpoint for a transaction request that -builds a transaction for invoking an Anchor program on Devnet. This program has -been made specifically for this "scavenger hunt" app and has two instructions: -`initialize` and `check_in`. The `initialize` instruction is used to set up the -user's state, while the `check_in` instruction is used to record a check-in at a -location in the scavenger hunt. We won't be making any changes to the program in -this demo, but feel free to check out the -[source code](https://github.com/Unboxed-Software/anchor-scavenger-hunt) if -you'd like to familiarize yourself with the program. - -Before moving on, make sure you get familiar with the starter code for the -Scavenger Hunt app. Looking at `pages/index.tsx`, -`utils/createQrCode/simpleTransfer`, and `/utils/checkTransaction` will let you -see how the transaction request for sending SOL is set up. We'll be following a -similar pattern for the transaction request for checking in at a location. - -### 2. Setup - -Before we move forward, let's make sure you can run the app locally. Start by -renaming the `.env.example` file in the frontend directory to `.env`. This file -contains a keypair that will be used in this demo to partially sign -transactions. - -Next, install dependencies with `yarn`, then use `yarn dev` and open your -browser `localhost:3000` (or the port indicated in the console if 3000 was -already in use). - -Now, if you try to scan the QR code shown on the page from your mobile device, -you'll get an error. That's because the QR code is set up to send you to your -computer's `localhost:3000`, which isn't an address your phone can get to. -Further, Solana Pay needs to use an HTTPS URL to work. - -To get around this, you can use [ngrok](https://ngrok.com/). You'll need to -install it if you haven't used it before. Once it's installed, run the following -command in your terminal, replacing `3000` with whichever port you're using for -this project: - -```bash -ngrok http 3000 -``` - -This will provide you with a unique URL that you can use to access your local -server remotely. 
The output will look something like this: - -```bash -Session Status online -Account your_email@gmail.com (Plan: Free) -Update update available (version 3.1.0, Ctrl-U to update) -Version 3.0.6 -Region United States (us) -Latency 45ms -Web Interface http://127.0.0.1:4040 -Forwarding https://7761-24-28-107-82.ngrok.io -> http://localhost:3000 -``` - -Now, open the HTTPS ngrok URL shown in your console in the browser (e.g. -https://7761-24-28-107-82.ngrok.io). This will allow you to scan QR codes from -your mobile device while testing locally. - -At the time of writing, this demo works best with Solflare. Some wallets will -display an incorrect warning message when scanning a Solana Pay QR code. -Regardless of the wallet you use, make sure you switch to devnet in the wallet. -Then scan the QR code on the home page labeled “SOL Transfer”. This QR code is a -reference implementation for a transaction request that performs a simple SOL -transfer. It also calls the `requestAirdrop` function to fund your mobile wallet -with Devnet SOL since most people don't have Devnet SOL available for testing. - -If you were able to successfully execute the transaction using the QR code, -you're good to move on! - -### 3. Create a check-in transaction request endpoint - -Now that you're up and running, it's time to create an endpoint that supports -transaction requests for location check-in using the Scavenger Hunt program. - -Start by opening the file at `pages/api/checkIn.ts`. Notice that it has a helper -function for initializing `eventOrganizer` from a secret key environment -variable. The first thing we'll do in this file is the following: - -1. Export a `handler` function to handle an arbitrary HTTP request -2. Add `get` and `post` functions for handling those HTTP methods -3. Add logic to the body of the `handler` function to either call `get`, `post`, - or return a 405 error based on the HTTP request method - -```typescript -import { NextApiRequest, NextApiResponse } from "next"; - -export default async function handler( - req: NextApiRequest, - res: NextApiResponse, -) { - if (req.method === "GET") { - return get(res); - } else if (req.method === "POST") { - return await post(req, res); - } else { - return res.status(405).json({ error: "Method not allowed" }); - } -} - -function get(res: NextApiResponse) {} - -async function post(req: NextApiRequest, res: NextApiResponse) {} -``` - -### 4. Update `get` function - -Remember, the first request from a wallet will be a GET request expecting the -endpoint to return a label and icon. Update the `get` function to send a -response with a "Scavenger Hunt!" label and a Solana logo icon. - -```jsx -function get(res: NextApiResponse) { - res.status(200).json({ - label: "Scavenger Hunt!", - icon: "https://solana.com/src/img/branding/solanaLogoMark.svg", - }); -} -``` - -### 5. Update `post` function - -After the GET request, a wallet will issue a POST request to the endpoint. The -request's `body` will contain a JSON object with an `account` field representing -the end user's public key. - -Additionally, the query parameters will contain whatever you encoded into the QR -code. If you take a look at `utils/createQrCode/checkIn.ts`, you'll notice that -this particular app includes parameters for `reference` and `id` as the -following: - -1. `reference` - a randomly generated public key used to identify the - transaction -2. `id` - the location id as an integer - -Go ahead and update the `post` function to extract `account`, `reference`, and -`id` from the request. 
You should respond with an error if any of these is -missing. - -Next, add a `try catch` statement where the `catch` block responds with an error -and the `try` block calls out to a new function `buildTransaction`. If -`buildTransaction` is successful, respond with a 200 and a JSON object with the -transaction and a message that the user has found the given location. Don't -worry about the logic for the `buildTransaction` function just yet - we'll do -that next. - -Note that you'll need to import `PublicKey` and `Transaction` from -`@solana/web3.js` here as well. - -```typescript -import { NextApiRequest, NextApiResponse } from "next" -import { PublicKey, Transaction } from "@solana/web3.js" -... - -async function post(req: NextApiRequest, res: NextApiResponse) { - const { account } = req.body - const { reference, id } = req.query - - if (!account || !reference || !id) { - res.status(400).json({ error: "Missing required parameter(s)" }) - return - } - - try { - const transaction = await buildTransaction( - new PublicKey(account), - new PublicKey(reference), - id.toString() - ) - - res.status(200).json({ - transaction: transaction, - message: `You've found location ${id}!`, - }) - } catch (err) { - console.log(err) - let error = err as any - if (error.message) { - res.status(200).json({ transaction: "", message: error.message }) - } else { - res.status(500).json({ error: "error creating transaction" }) - } - } -} - -async function buildTransaction( - account: PublicKey, - reference: PublicKey, - id: string -): Promise { - return new Transaction() -} -``` - -### 6. Implement the `buildTransaction` function - -Next, let’s implement the `buildTransaction` function. It should build, -partially sign, and return the check-in transaction. The sequence of items it -needs to perform is: - -1. Fetch the user state -2. Use the `locationAtIndex` helper function and the location id to get a - Location object -3. Verify that the user is at the correct location -4. Get the current blockhash and last valid block height from the connection -5. Create a new transaction object -6. Add an initialize instruction to the transaction if user state does not exist -7. Add a check-in instruction to the transaction -8. Add the `reference` public key to the check-in instruction -9. Partially sign the transaction with the event organizer's keypair -10. Serialize the transaction with base64 encoding and return the transaction - -While each of these steps is straightforward, it's a lot of steps. To simplify -the function, we're going to create empty helper functions that we'll fill in -later for steps 1, 3, 6, and 7-8. We'll call these `fetchUserState`, -`verifyCorrectLocation`, `createInitUserInstruction`, and -`createCheckInInstruction`, respectively. 
- -We'll also add the following imports: - -```typescript -import { NextApiRequest, NextApiResponse } from "next"; -import { - PublicKey, - Transaction, - TransactionInstruction, -} from "@solana/web3.js"; -import { locationAtIndex, Location, locations } from "../../utils/locations"; -import { connection, gameId, program } from "../../utils/programSetup"; -``` - -Using the empty helper functions and the new imports, we can fill in the -`buildTransaction` function: - -```typescript -async function buildTransaction( - account: PublicKey, - reference: PublicKey, - id: string, -): Promise { - const userState = await fetchUserState(account); - - const currentLocation = locationAtIndex(new Number(id).valueOf()); - - if (!currentLocation) { - throw { message: "Invalid location id" }; - } - - if (!verifyCorrectLocation(userState, currentLocation)) { - throw { message: "You must visit each location in order!" }; - } - - const { blockhash, lastValidBlockHeight } = - await connection.getLatestBlockhash(); - - const transaction = new Transaction({ - feePayer: account, - blockhash, - lastValidBlockHeight, - }); - - if (!userState) { - transaction.add(await createInitUserInstruction(account)); - } - - transaction.add( - await createCheckInInstruction(account, reference, currentLocation), - ); - - transaction.partialSign(eventOrganizer); - - const serializedTransaction = transaction.serialize({ - requireAllSignatures: false, - }); - - const base64 = serializedTransaction.toString("base64"); - - return base64; -} - -interface UserState { - user: PublicKey; - gameId: PublicKey; - lastLocation: PublicKey; -} - -async function fetchUserState(account: PublicKey): Promise { - return null; -} - -function verifyCorrectLocation( - userState: UserState | null, - currentLocation: Location, -): boolean { - return false; -} - -async function createInitUserInstruction( - account: PublicKey, -): Promise { - throw ""; -} - -async function createCheckInInstruction( - account: PublicKey, - reference: PublicKey, - location: Location, -): Promise { - throw ""; -} -``` - -### 7. Implement `fetchUserState` function - -With the `buildTransaction` function finished, we can start implementing the -empty helper functions we created, starting with `fetchUserState`. This function -uses the `gameId` and user's `account` to derive the user state PDA, then -fetches that account, returning null if it doesn't exist. - -```typescript -async function fetchUserState(account: PublicKey): Promise { - const userStatePDA = PublicKey.findProgramAddressSync( - [gameId.toBuffer(), account.toBuffer()], - program.programId, - )[0]; - - try { - return await program.account.userState.fetch(userStatePDA); - } catch { - return null; - } -} -``` - -### 8. Implement `verifyCorrectLocation` function - -Next, let’s implement the `verifyCorrectLocation` helper function. This function -is used to verify that a user is at the correct location in a scavenger hunt -game. - -If `userState` is `null`, that means the user should be visiting the first -location. Otherwise, the user should be visiting the location whose index is 1 -more than their last visited location. - -If these conditions are satisfied, the function will return true. Otherwise, -it'll return false. 
- -```typescript -function verifyCorrectLocation( - userState: UserState | null, - currentLocation: Location, -): boolean { - if (!userState) { - return currentLocation.index === 1; - } - - const lastLocation = locations.find( - location => location.key.toString() === userState.lastLocation.toString(), - ); - - if (!lastLocation || currentLocation.index !== lastLocation.index + 1) { - return false; - } else { - return true; - } -} -``` - -### 9. Implement the instruction creation functions - -Lastly, let's implement `createInitUserInstruction` and -`createCheckInInstruction`. These can use Anchor to generate and return the -corresponding instructions. The only catch is that `createCheckInInstruction` -needs to add `reference` to the instructions list of keys. - -```typescript -async function createInitUserInstruction( - account: PublicKey, -): Promise { - const initializeInstruction = await program.methods - .initialize(gameId) - .accounts({ user: account }) - .instruction(); - - return initializeInstruction; -} - -async function createCheckInInstruction( - account: PublicKey, - reference: PublicKey, - location: Location, -): Promise { - const checkInInstruction = await program.methods - .checkIn(gameId, location.key) - .accounts({ - user: account, - eventOrganizer: eventOrganizer.publicKey, - }) - .instruction(); - - checkInInstruction.keys.push({ - pubkey: reference, - isSigner: false, - isWritable: false, - }); - - return checkInInstruction; -} -``` - -### 10. Test the app - -At this point your app should be working! Go ahead and test it using your mobile -wallet. Start by scanning the QR code for `Location 1`. Remember to make sure -your frontend is running using the ngrok URL rather than `localhost`. - -After scanning the QR code, you should see a message indicating that you are at -location 1. From there, scan the QR code on the `Location 2` page. You may need -to wait a few seconds for the previous transaction to finalize before -continuing. - -Congratulations, you have successfully finished the scavenger hunt demo using -Solana Pay! Depending on your background, this may not feel intuitive or -straightforward. If that's the case, feel free to go through the demo again or -make something on your own. Solana Pay opens a lot of doors for bridging the gap -between real life and on-chain interaction. - -If you want to take a look at the final solution code you can find it on the -solution branch of -[the same repository](https://github.com/Unboxed-Software/solana-scavenger-hunt-app/tree/solution). - -# Challenge - -It's time to try this out on your own. Feel free to build out an idea of your -own using Solana Pay. Or, if you need some inspiration, you can use the prompt -below. - -Build out an app using Solana Pay (or modify the one from the demo) to mint an -NFT to users. To take it up a notch, only make the transaction possible if the -user meets one or more conditions (e.g. holds an NFT from a specific collection, -is already on a pre-determined list, etc.). - -Get creative with this! The Solana pay spec opens up a lot of doors for unique -use cases. 
diff --git a/content/courses/solana-course/content/token-program.md b/content/courses/solana-course/content/token-program.md deleted file mode 100644 index cedced616..000000000 --- a/content/courses/solana-course/content/token-program.md +++ /dev/null @@ -1,1234 +0,0 @@ ---- -title: Create Tokens With The Token Program -objectives: - - Create token mints - - Create token accounts - - Mint tokens - - Transfer tokens - - Burn tokens ---- - -# TL;DR - -- **SPL-Tokens** represent all non-native tokens on the Solana network. Both - fungible and non-fungible tokens (NFTs) on Solana are SPL-Tokens -- The **Token Program** contains instructions for creating and interacting with - SPL-Tokens -- **Token Mints** are accounts which hold data about a specific Token, but do - not hold Tokens -- **Token Accounts** are used to hold Tokens of a specific Token Mint -- Creating Token Mints and Token Accounts requires allocating **rent** in SOL. - The rent for a Token Account can be refunded when the account is closed, - however, Token Mints currently cannot be closed - -# Overview - -The Token Program is one of many programs made available by the Solana Program -Library (SPL). It contains instructions for creating and interacting with -SPL-Tokens. These tokens represent all non-native (i.e. not SOL) tokens on the -Solana network. - -This lesson will focus on the basics of creating and managing a new SPL-Token -using the Token Program: - -1. Creating a new Token Mint -2. Creating Token Accounts -3. Minting -4. Transferring tokens from one holder to another -5. Burning tokens - -We'll be approaching this from the client-side of the development process using -the `@solana/spl-token` Javascript library. - -## Token Mint - -To create a new SPL-Token you first have to create a Token Mint. A Token Mint is -the account that holds data about a specific token. - -As an example, let's look at -[USD Coin (USDC) on the Solana Explorer](https://explorer.solana.com/address/EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v). -USDC's Token Mint address is `EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v`. -With the explorer, we can see the particular details about USDC's Token Mint -such as the current supply of tokens, the addresses of the mint and freeze -authorities, and the decimal precision of the token: - -![Screenshot of USDC Token Mint](../assets/token-program-usdc-mint.png) - -To create a new Token Mint, you need to send the right transaction instructions -to the Token Program. To do this, we'll use the `createMint` function from -`@solana/spl-token`. - -```tsx -const tokenMint = await createMint( - connection, - payer, - mintAuthority, - freezeAuthority, - decimal, -); -``` - -The `createMint` function returns the `publicKey` of the new token mint. This -function requires the following arguments: - -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the public key of the payer for the transaction -- `mintAuthority` - the account which is authorized to do the actual minting of - tokens from the token mint. -- `freezeAuthority` - an account authorized to freeze the tokens in a token - account. If freezing is not a desired attribute, the parameter can be set to - null -- `decimals` - specifies the desired decimal precision of the token - -When creating a new mint from a script that has access to your secret key, you -can simply use the `createMint` function. 
However, if you were to build a -website to allow users to create a new token mint, you would need to do so with -the user's secret key without making them expose it to the browser. In that -case, you would want to build and submit a transaction with the right -instructions. - -Under the hood, the `createMint` function is simply creating a transaction that -contains two instructions: - -1. Create a new account -2. Initialize a new mint - -This would look as follows: - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildCreateMintTransaction( - connection: web3.Connection, - payer: web3.PublicKey, - decimals: number, -): Promise { - const lamports = await token.getMinimumBalanceForRentExemptMint(connection); - const accountKeypair = web3.Keypair.generate(); - const programId = token.TOKEN_PROGRAM_ID; - - const transaction = new web3.Transaction().add( - web3.SystemProgram.createAccount({ - fromPubkey: payer, - newAccountPubkey: accountKeypair.publicKey, - space: token.MINT_SIZE, - lamports, - programId, - }), - token.createInitializeMintInstruction( - accountKeypair.publicKey, - decimals, - payer, - payer, - programId, - ), - ); - - return transaction; -} -``` - -When manually building the instructions to create a new token mint, make sure -you add the instructions for creating the account and initializing the mint to -the _same transaction_. If you were to do each step in a separate transaction, -it's theoretically possible for somebody else to take the account you create and -initialize it for their own mint. - -### Rent and Rent Exemption - -Note that the first line in the function body of the previous code snippet -contains a call to `getMinimumBalanceForRentExemptMint`, the result of which is -passed into the `createAccount` function. This is part of account initialization -called rent exemption. - -Until recently, all accounts on Solana were required to do one of the following -to avoid being deallocated: - -1. Pay rent at specific intervals -2. Deposit enough SOL upon initialization to be considered rent-exempt - -Recently, the first option was done away with and it became a requirement to -deposit enough SOL for rent exemption when initializing a new account. - -In this case, we're creating a new account for a token mint so we use -`getMinimumBalanceForRentExemptMint` from the `@solana/spl-token` library. -However, this concept applies to all accounts and you can use the more generic -`getMinimumBalanceForRentExemption` method on `Connection` for other accounts -you may need to create. - -## Token Account - -Before you can mint tokens (issue new supply), you need a Token Account to hold -the newly issued tokens. - -A Token Account holds tokens of a specific "mint" and has a specified "owner" of -the account. Only the owner is authorized to decrease the Token Account balance -(transfer, burn, etc.) while anyone can send tokens to the Token Account to -increase its balance. - -You can use the `spl-token` library's `createAccount` function to create the new -Token Account: - -```tsx -const tokenAccount = await createAccount( - connection, - payer, - mint, - owner, - keypair, -); -``` - -The `createAccount` function returns the `publicKey` of the new token account. 
-This function requires the following arguments: - -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `mint` - the token mint that the new token account is associated with -- `owner` - the account of the owner of the new token account -- `keypair` - this is an optional parameter for specifying the new token account - address. If no keypair is provided, the `createAccount` function defaults to a - derivation from the associated `mint` and `owner` accounts. - -Please note that this `createAccount` function is different from the -`createAccount` function shown above when we looked under the hood of the -`createMint` function. Previously we used the `createAccount` function on -`SystemProgram` to return the instruction for creating all accounts. The -`createAccount` function here is a helper function in the `spl-token` library -that submits a transaction with two instructions. The first creates the account -and the second initializes the account as a Token Account. - -Like with creating a Token Mint, if we needed to build the transaction for -`createAccount` manually we could duplicate what the function is doing under the -hood: - -1. Use `getMint` to retrieve the data associated with the `mint` -2. Use `getAccountLenForMint` to calculate the space needed for the token - account -3. Use `getMinimumBalanceForRentExemption` to calculate the lamports needed for - rent exemption -4. Create a new transaction using `SystemProgram.createAccount` and - `createInitializeAccountInstruction`. Note that this `createAccount` is from - `@solana/web3.js` and used to create a generic new account. The - `createInitializeAccountInstruction` uses this new account to initialize the - new token account - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildCreateTokenAccountTransaction( - connection: web3.Connection, - payer: web3.PublicKey, - mint: web3.PublicKey, -): Promise { - const mintState = await token.getMint(connection, mint); - const accountKeypair = await web3.Keypair.generate(); - const space = token.getAccountLenForMint(mintState); - const lamports = await connection.getMinimumBalanceForRentExemption(space); - const programId = token.TOKEN_PROGRAM_ID; - - const transaction = new web3.Transaction().add( - web3.SystemProgram.createAccount({ - fromPubkey: payer, - newAccountPubkey: accountKeypair.publicKey, - space, - lamports, - programId, - }), - token.createInitializeAccountInstruction( - accountKeypair.publicKey, - mint, - payer, - programId, - ), - ); - - return transaction; -} -``` - -### Associated Token Account - -An Associated Token Account is a Token Account where the address of the Token -Account is derived using an owner's public key and a token mint. Associated -Token Accounts provide a deterministic way to find the Token Account owned by a -specific `publicKey` for a specific token mint. Most of the time you create a -Token Account, you'll want it to be an Associated Token Account. - -Similar to above, you can create an associated token account using the -`spl-token` library's `createAssociatedTokenAccount` function. 
- -```tsx -const associatedTokenAccount = await createAssociatedTokenAccount( - connection, - payer, - mint, - owner, -); -``` - -This function returns the `publicKey` of the new associated token account and -requires the following arguments: - -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `mint` - the token mint that the new token account is associated with -- `owner` - the account of the owner of the new token account - -You can also use `getOrCreateAssociatedTokenAccount` to get the Token Account -associated with a given address or create it if it doesn't exist. For example, -if you were writing code to airdrop tokens to a given user, you'd likely use -this function to ensure that the token account associated with the given user -gets created if it doesn't already exist. - -Under the hood, `createAssociatedTokenAccount` is doing two things: - -1. Using `getAssociatedTokenAddress` to derive the associated token account - address from the `mint` and `owner` -2. Building a transaction using instructions from - `createAssociatedTokenAccountInstruction` - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildCreateAssociatedTokenAccountTransaction( - payer: web3.PublicKey, - mint: web3.PublicKey, -): Promise { - const associatedTokenAddress = await token.getAssociatedTokenAddress( - mint, - payer, - false, - ); - - const transaction = new web3.Transaction().add( - token.createAssociatedTokenAccountInstruction( - payer, - associatedTokenAddress, - payer, - mint, - ), - ); - - return transaction; -} -``` - -## Mint Tokens - -Minting tokens is the process of issuing new tokens into circulation. When you -mint tokens, you increase the supply of the token mint and deposit the newly -minted tokens into a token account. Only the mint authority of a token mint is -allowed to mint new tokens. - -To mint tokens using the `spl-token` library, you can use the `mintTo` function. - -```tsx -const transactionSignature = await mintTo( - connection, - payer, - mint, - destination, - authority, - amount, -); -``` - -The `mintTo` function returns a `TransactionSignature` that can be viewed on the -Solana Explorer. The `mintTo` function requires the following arguments: - -- `connection` - the JSON-RPC connection to the cluster -- `payer` - the account of the payer for the transaction -- `mint` - the token mint that the new token account is associated with -- `destination` - the token account that tokens will be minted to -- `authority` - the account authorized to mint tokens -- `amount` - the raw amount of tokens to mint outside of decimals, e.g. if - Scrooge Coin mint's decimals property was set to 2 then to get 1 full Scrooge - Coin you would need to set this property to 100 - -It's not uncommon to update the mint authority on a token mint to null after the -tokens have been minted. This would set a maximum supply and ensure no tokens -can be minted in the future. Conversely, minting authority could be granted to a -program so tokens could be automatically minted at regular intervals or -according to programmable conditions. - -Under the hood, the `mintTo` function simply creates a transaction with the -instructions obtained from the `createMintToInstruction` function. 
- -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildMintToTransaction( - authority: web3.PublicKey, - mint: web3.PublicKey, - amount: number, - destination: web3.PublicKey, -): Promise { - const transaction = new web3.Transaction().add( - token.createMintToInstruction(mint, destination, authority, amount), - ); - - return transaction; -} -``` - -## Transfer Tokens - -SPL-Token transfers require both the sender and receiver to have token accounts -for the mint of the tokens being transferred. The tokens are transferred from -the sender’s token account to the receiver’s token account. - -You can use `getOrCreateAssociatedTokenAccount` when obtaining the receiver's -associated token account to ensure their token account exists before the -transfer. Just remember that if the account doesn't exist already, this function -will create it and the payer on the transaction will be debited the lamports -required for the account creation. - -Once you know the receiver's token account address, you transfer tokens using -the `spl-token` library's `transfer` function. - -```tsx -const transactionSignature = await transfer( - connection, - payer, - source, - destination, - owner, - amount, -); -``` - -The `transfer` function returns a `TransactionSignature` that can be viewed on -the Solana Explorer. The `transfer` function requires the following arguments: - -- `connection` the JSON-RPC connection to the cluster -- `payer` the account of the payer for the transaction -- `source` the token account sending tokens -- `destination` the token account receiving tokens -- `owner` the account of the owner of the `source` token account -- `amount` the amount of tokens to transfer - -Under the hood, the `transfer` function simply creates a transaction with the -instructions obtained from the `createTransferInstruction` function: - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildTransferTransaction( - source: web3.PublicKey, - destination: web3.PublicKey, - owner: web3.PublicKey, - amount: number, -): Promise { - const transaction = new web3.Transaction().add( - token.createTransferInstruction(source, destination, owner, amount), - ); - - return transaction; -} -``` - -## Burn Tokens - -Burning tokens is the process of decreasing the token supply of a given token -mint. Burning tokens removes them from the given token account and from broader -circulation. - -To burn tokens using the `spl-token` library, you use the `burn` function. - -```tsx -const transactionSignature = await burn( - connection, - payer, - account, - mint, - owner, - amount, -); -``` - -The `burn` function returns a `TransactionSignature` that can be viewed on -Solana Explorer. 
The `burn` function requires the following arguments: - -- `connection` the JSON-RPC connection to the cluster -- `payer` the account of the payer for the transaction -- `account` the token account to burn tokens from -- `mint` the token mint associated with the token account -- `owner` the account of the owner of the token account -- `amount` the amount of tokens to burn - -Under the hood, the `burn` function creates a transaction with instructions -obtained from the `createBurnInstruction` function: - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildBurnTransaction( - account: web3.PublicKey, - mint: web3.PublicKey, - owner: web3.PublicKey, - amount: number, -): Promise { - const transaction = new web3.Transaction().add( - token.createBurnInstruction(account, mint, owner, amount), - ); - - return transaction; -} -``` - -## Approve Delegate - -Approving a delegate is the process of authorizing another account to transfer -or burn tokens from a token account. When using a delegate, the authority over -the token account remains with the original owner. The maximum amount of tokens -a delegate may transfer or burn is specified at the time the owner of the token -account approves the delegate. Note that there can only be one delegate account -associated with a token account at any given time. - -To approve a delegate using the `spl-token` library, you use the `approve` -function. - -```tsx -const transactionSignature = await approve( - connection, - payer, - account, - delegate, - owner, - amount, -); -``` - -The `approve` function returns a `TransactionSignature` that can be viewed on -Solana Explorer. The `approve` function requires the following arguments: - -- `connection` the JSON-RPC connection to the cluster -- `payer` the account of the payer for the transaction -- `account` the token account to delegate tokens from -- `delegate` the account the owner is authorizing to transfer or burn tokens -- `owner` the account of the owner of the token account -- `amount` the maximum number of tokens the delegate may transfer or burn - -Under the hood, the `approve` function creates a transaction with instructions -obtained from the `createApproveInstruction` function: - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildApproveTransaction( - account: web3.PublicKey, - delegate: web3.PublicKey, - owner: web3.PublicKey, - amount: number, -): Promise { - const transaction = new web3.Transaction().add( - token.createApproveInstruction(account, delegate, owner, amount), - ); - - return transaction; -} -``` - -## Revoke Delegate - -A previously approved delegate for a token account can be later revoked. Once a -delegate is revoked, the delegate can no longer transfer tokens from the owner's -token account. Any remaining amount left untransferred from the previously -approved amount can no longer be transferred by the delegate. - -To revoke a delegate using the `spl-token` library, you use the `revoke` -function. - -```tsx -const transactionSignature = await revoke(connection, payer, account, owner); -``` - -The `revoke` function returns a `TransactionSignature` that can be viewed on -Solana Explorer. 
The `revoke` function requires the following arguments: - -- `connection` the JSON-RPC connection to the cluster -- `payer` the account of the payer for the transaction -- `account` the token account to revoke the delegate authority from -- `owner` the account of the owner of the token account - -Under the hood, the `revoke` function creates a transaction with instructions -obtained from the `createRevokeInstruction` function: - -```tsx -import * as web3 from "@solana/web3"; -import * as token from "@solana/spl-token"; - -async function buildRevokeTransaction( - account: web3.PublicKey, - owner: web3.PublicKey, -): Promise { - const transaction = new web3.Transaction().add( - token.createRevokeInstruction(account, owner), - ); - - return transaction; -} -``` - -# Demo - -We’re going to create a script that interacts with instructions on the Token -Program. We will create a Token Mint, create Token Accounts, mint tokens, -approve a delegate, transfer tokens, and burn tokens. - -### 1. Basic scaffolding - -Let’s start with some basic scaffolding. You’re welcome to set up your project -however feels most appropriate for you, but we’ll be using a simple Typescript -project with a dependency on the `@solana/web3.js` and `@solana/spl-token` -packages. - -You can use `npx create-solana-client [INSERT_NAME_HERE]` in the command line to -clone the template we'll be starting from. Or you can manually clone the -template [here](https://github.com/Unboxed-Software/solana-client-template). - -You'll then need to add a dependency on `@solana/spl-token`. From the command -line inside the newly created directory, use the command -`npm install @solana/spl-token`. - -### 2. Create Token Mint - -We'll be using the `@solana/spl-token` library, so let's start by importing it -at the top of the file. - -```tsx -import * as token from "@solana/spl-token"; -``` - -Next, declare a new function `createNewMint` with parameters `connection`, -`payer`, `mintAuthority`, `freezeAuthority`, and `decimals`. - -In the body of the function Import `createMint` from `@solana/spl-token` and -then create a function to call `createMint`: - -```tsx -async function createNewMint( - connection: web3.Connection, - payer: web3.Keypair, - mintAuthority: web3.PublicKey, - freezeAuthority: web3.PublicKey, - decimals: number, -): Promise { - const tokenMint = await token.createMint( - connection, - payer, - mintAuthority, - freezeAuthority, - decimals, - ); - - console.log( - `Token Mint: https://explorer.solana.com/address/${tokenMint}?cluster=devnet`, - ); - - return tokenMint; -} -``` - -With that function completed, call it from the body of `main`, setting `user` as -the `payer`, `mintAuthority`, and `freezeAuthority`. - -After creating the new mint, let's fetch the account data using the `getMint` -function and store it in a variable called `mintInfo`. We'll use this data later -to adjust input `amount` for the decimal precision of the mint. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); -} -``` - -### 3. Create Token Account - -Now that we've created the mint, lets create a new Token Account, specifying the -`user` as the `owner`. - -The `createAccount` function creates a new Token Account with the option to -specify the address of the Token Account. 
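For example, to create the token account at an explicitly chosen address, you can pass a newly generated `Keypair` as the optional last argument. This is just a sketch reusing the `connection`, `user`, and `mint` values from earlier in this demo:

```tsx
// Optional: choose the new token account's address yourself by supplying a Keypair.
const tokenAccountKeypair = web3.Keypair.generate();

const explicitTokenAccount = await token.createAccount(
  connection,
  user, // payer
  mint, // token mint
  user.publicKey, // owner
  tokenAccountKeypair, // the new token account will live at this address
);
```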
Recall that if no address is provided, -`createAccount` will default to using the associated token account derived using -the `mint` and `owner`. - -Alternatively, the function `createAssociatedTokenAccount` will also create an -associated token account with the same address derived from the `mint` and -`owner` public keys. - -For our demo we’ll use the`getOrCreateAssociatedTokenAccount` function to create -our token account. This function gets the address of a Token Account if it -already exists. If it doesn't, it will create a new Associated Token Account at -the appropriate address. - -```tsx -async function createTokenAccount( - connection: web3.Connection, - payer: web3.Keypair, - mint: web3.PublicKey, - owner: web3.PublicKey, -) { - const tokenAccount = await token.getOrCreateAssociatedTokenAccount( - connection, - payer, - mint, - owner, - ); - - console.log( - `Token Account: https://explorer.solana.com/address/${tokenAccount.address}?cluster=devnet`, - ); - - return tokenAccount; -} -``` - -Add a call the `createTokenAccount` in `main`, passing in the mint we created in -the previous step and setting the `user` as the `payer` and `owner`. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); -} -``` - -### 4. Mint Tokens - -Now that we have a token mint and a token account, lets mint tokens to the token -account. Note that only the `mintAuthority` can mint new tokens to a token -account. Recall that we set the `user` as the `mintAuthority` for the `mint` we -created. - -Create a function `mintTokens` that uses the `spl-token` function `mintTo` to -mint tokens: - -```tsx -async function mintTokens( - connection: web3.Connection, - payer: web3.Keypair, - mint: web3.PublicKey, - destination: web3.PublicKey, - authority: web3.Keypair, - amount: number, -) { - const transactionSignature = await token.mintTo( - connection, - payer, - mint, - destination, - authority, - amount, - ); - - console.log( - `Mint Token Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, - ); -} -``` - -Lets call the function in `main` using the `mint` and `tokenAccount` created -previously. - -Note that we have to adjust the input `amount` for the decimal precision of the -mint. Tokens from our `mint` have a decimal precision of 2. If we only specify -100 as the input `amount`, then only 1 token will be minted to our token -account. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); - - await mintTokens( - connection, - user, - mint, - tokenAccount.address, - user, - 100 * 10 ** mintInfo.decimals, - ); -} -``` - -### 5. Approve Delegate - -Now that we have a token mint and a token account, lets authorize a delegate to -transfer tokens on our behalf. 
- -Create a function `approveDelegate` that uses the `spl-token` function `approve` -to mint tokens: - -```tsx -async function approveDelegate( - connection: web3.Connection, - payer: web3.Keypair, - account: web3.PublicKey, - delegate: web3.PublicKey, - owner: web3.Signer | web3.PublicKey, - amount: number, -) { - const transactionSignature = await token.approve( - connection, - payer, - account, - delegate, - owner, - amount, - ); - - console.log( - `Approve Delegate Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, - ); -} -``` - -In `main`, lets generate a new `Keypair` to represent the delegate account. -Then, lets call our new `approveDelegate` function and authorize the delegate to -tranfer up to 50 tokens from the `user` token account. Remember to adjust the -`amount` for the decimal precision of the `mint`. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); - - await mintTokens( - connection, - user, - mint, - tokenAccount.address, - user, - 100 * 10 ** mintInfo.decimals, - ); - - const delegate = web3.Keypair.generate(); - - await approveDelegate( - connection, - user, - tokenAccount.address, - delegate.publicKey, - user.publicKey, - 50 * 10 ** mintInfo.decimals, - ); -} -``` - -### 6. Transfer Tokens - -Next, lets transfer some of the tokens we just minted using the `spl-token` -library's `transfer` function. - -```tsx -async function transferTokens( - connection: web3.Connection, - payer: web3.Keypair, - source: web3.PublicKey, - destination: web3.PublicKey, - owner: web3.Keypair, - amount: number, -) { - const transactionSignature = await token.transfer( - connection, - payer, - source, - destination, - owner, - amount, - ); - - console.log( - `Transfer Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, - ); -} -``` - -Before we can call this new function, we need to know the account into which -we'll transfer the tokens. - -In `main`, lets generate a new `Keypair` to be the receiver (but remember that -this is just to simulate having someone to send tokens to - in a real -application you'd need to know the wallet address of the person receiving the -tokens). - -Then, create a token account for the receiver. Finally, lets call our new -`transferTokens` function to transfer tokens from the `user` token account to -the `receiver` token account. We'll use the `delegate` we approved in the -previous step to perform the transfer on our behalf. 
- -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); - - const mintInfo = await token.getMint(connection, mint); - - await mintTokens( - connection, - user, - mint, - tokenAccount.address, - user, - 100 * 10 ** mintInfo.decimals, - ); - - const receiver = web3.Keypair.generate().publicKey; - const receiverTokenAccount = await createTokenAccount( - connection, - user, - mint, - receiver, - ); - - const delegate = web3.Keypair.generate(); - await approveDelegate( - connection, - user, - tokenAccount.address, - delegate.publicKey, - user.publicKey, - 50 * 10 ** mintInfo.decimals, - ); - - await transferTokens( - connection, - user, - tokenAccount.address, - receiverTokenAccount.address, - delegate, - 50 * 10 ** mintInfo.decimals, - ); -} -``` - -### 7. Revoke Delegate - -Now that we've finished transferring tokens, lets revoke the `delegate` using -the `spl-token` library's `revoke` function. - -```tsx -async function revokeDelegate( - connection: web3.Connection, - payer: web3.Keypair, - account: web3.PublicKey, - owner: web3.Signer | web3.PublicKey, -) { - const transactionSignature = await token.revoke( - connection, - payer, - account, - owner, - ); - - console.log( - `Revote Delegate Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, - ); -} -``` - -Revoke will set delegate for the token account to null and reset the delegated -amount to 0. All we will need for this function is the token account and user. -Lets call our new `revokeDelegate` function to revoke the delegate from the -`user` token account. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); - - await mintTokens( - connection, - user, - mint, - tokenAccount.address, - user, - 100 * 10 ** mintInfo.decimals, - ); - - const receiver = web3.Keypair.generate().publicKey; - const receiverTokenAccount = await createTokenAccount( - connection, - user, - mint, - receiver, - ); - - const delegate = web3.Keypair.generate(); - await approveDelegate( - connection, - user, - tokenAccount.address, - delegate.publicKey, - user.publicKey, - 50 * 10 ** mintInfo.decimals, - ); - - await transferTokens( - connection, - user, - tokenAccount.address, - receiverTokenAccount.address, - delegate, - 50 * 10 ** mintInfo.decimals, - ); - - await revokeDelegate(connection, user, tokenAccount.address, user.publicKey); -} -``` - -### 8. Burn Tokens - -Finally, let's remove some tokens from circulation by burning them. - -Create a `burnTokens` function that uses the `spl-token` library's `burn` -function to remove half of your tokens from circulation. 
- -```tsx -async function burnTokens( - connection: web3.Connection, - payer: web3.Keypair, - account: web3.PublicKey, - mint: web3.PublicKey, - owner: web3.Keypair, - amount: number, -) { - const transactionSignature = await token.burn( - connection, - payer, - account, - mint, - owner, - amount, - ); - - console.log( - `Burn Transaction: https://explorer.solana.com/tx/${transactionSignature}?cluster=devnet`, - ); -} -``` - -Now call this new function in `main` to burn 25 of the user's tokens. Remember -to adjust the `amount` for the decimal precision of the `mint`. - -```tsx -async function main() { - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - const user = await initializeKeypair(connection); - - const mint = await createNewMint( - connection, - user, - user.publicKey, - user.publicKey, - 2, - ); - - const mintInfo = await token.getMint(connection, mint); - - const tokenAccount = await createTokenAccount( - connection, - user, - mint, - user.publicKey, - ); - - await mintTokens( - connection, - user, - mint, - tokenAccount.address, - user, - 100 * 10 ** mintInfo.decimals, - ); - - const receiver = web3.Keypair.generate().publicKey; - const receiverTokenAccount = await createTokenAccount( - connection, - user, - mint, - receiver, - ); - - const delegate = web3.Keypair.generate(); - await approveDelegate( - connection, - user, - tokenAccount.address, - delegate.publicKey, - user.publicKey, - 50 * 10 ** mintInfo.decimals, - ); - - await transferTokens( - connection, - user, - tokenAccount.address, - receiverTokenAccount.address, - delegate, - 50 * 10 ** mintInfo.decimals, - ); - - await revokeDelegate(connection, user, tokenAccount.address, user.publicKey); - - await burnTokens( - connection, - user, - tokenAccount.address, - mint, - user, - 25 * 10 ** mintInfo.decimals, - ); -} -``` - -### 9. Test it all out - -With that, run `npm start`. You should see a series of Solana Explorer links -logged to the console. Click on them and see what happened each step of the way! -You created a new token mint, created a token account, minted 100 tokens, -approved a delegate, transferred 50 using a delegate, revoked the delegate, and -burned 25 more. You're well on your way to being a token expert. - -If you need a bit more time with this project to feel comfortable, have a look -at the complete -[solution code](https://github.com/Unboxed-Software/solana-token-client) - -# Challenge - -Now it’s your turn to build something independently. Create an application that -allows a users to create a new mint, create a token account, and mint tokens. - -Note that you will not be able to directly use the helper functions we went over -in the demo. In order to interact with the Token Program using the Phantom -wallet adapter, you will have to build each transaction manually and submit the -transaction to Phantom for approval. - -![Screenshot of Token Program Challenge Frontend](../assets/token-program-frontend.png) - -1. You can build this from scratch or you can download the starter code - [here](https://github.com/Unboxed-Software/solana-token-frontend/tree/starter). -2. Create a new Token Mint in the `CreateMint` component. If you need a - refresher on how to send transactions to a wallet for approval, have a look - at the [Wallets lesson](./interact-with-wallets.md). - - When creating a new mint, the newly generated `Keypair` will also have to - sign the transaction. 
When additional signers are required in addition to the - connected wallet, use the following format: - - ```tsx - sendTransaction(transaction, connection, { - signers: [Keypair], - }); - ``` - -3. Create a new Token Account in the `CreateTokenAccount` component. -4. Mint tokens in the `MintToForm` component. - -If you get stumped, feel free to reference the -[solution code](https://github.com/ZYJLiu/solana-token-frontend). - -And remember, get creative with these challenges and make them your own! diff --git a/content/courses/solana-course/content/token-swap.md b/content/courses/solana-course/content/token-swap.md deleted file mode 100644 index cc6c49758..000000000 --- a/content/courses/solana-course/content/token-swap.md +++ /dev/null @@ -1,1127 +0,0 @@ ---- -title: Swap Tokens With The Token Swap Program -objectives: - - Create a token swap pool - - Deposit liquidity - - Withdraw liquidity - - Swap tokens ---- - -# TL;DR - -- The **Token Swap Program** is an SPL contract deployed to Devnet available for - testing and experimentation by developers and protocols. For production use - cases, use your own deployment or one regularly maintained by a reputable - service. -- The program accepts six different **instructions**, all of which we will - explore in this lesson. -- Developers are able to create and use **liquidity pools** to swap between any - SPL token that they wish. -- The program uses a mathematical formula called "**curve**" to calculate the - price of all trades. Curves aim to mimic normal market dynamics: for example, - as traders buy a lot of one token type, the value of the other token type goes - up. - -# Overview - -## Swap Pools - -Before we get into how to create and interact with swap pools on Solana, it’s -important we understand the basics of what a swap pool is. A swap pool is an -aggregation of two different tokens with the purpose of providing liquidity to -facilitate exchange between each token. - -Users provide liquidity to these pools by depositing their own tokens into each -pool. These users are called liquidity providers. When a liquidity provider (or -LP) deposits some tokens to the swap pool, LP-tokens are minted that represent -the LP's fractional ownership in the pool. - -Most swap pools charge a trading fee for facilitating each swap. These fees are -then paid out to the LP’s in proportion to the amount of liquidity they are -providing in the pool. This provides incentive for LP's to provide liquidity to -the pool. - -When an LP is ready to withdraw their deposited liquidity, their LP-tokens are -burned and tokens from the pool (proportional to the amount of LP-tokens burned) -are sent to their wallet. - -The purpose of swap pools is to facilitate decentralized trade between users. In -traditional finance, users execute trades like this through a centralized -exchange on a central limit -[order book](https://www.investopedia.com/terms/o/order-book.asp). Generally, -this requires a trusted third-party intermediary. - -Due to the decentralized nature of cryptocurrency, however, we now have a new -way to facilitate trades. Many protocols decentralized exchanges have been built -to take advantage of this. [Project Serum](https://www.projectserum.com/) is an -example of such a decentralized central limit order book built on Solana. - -Since swap pools are completely decentralized, anybody can issue instructions to -the swap program to create a new swap pool between any SPL tokens they wish. -This is a massive lift beyond traditional finance. 
Swap pools and Automated -Market Makers (AMMs) are one of DeFi's most fascinating and complex topics. The -nitty-gritty details of how they work are outside the scope of this lesson, but -there is a ton of material out there available to you if you’re interested in -learning more. For example, the Solana Token Swap Program was heavily inspired -by [Uniswap](https://uniswap.org/) and [Balancer](https://balancer.fi/), each of -which provide excellent documentation that you can read through. - -## Token Swap Program and `@solana/spl-token-swap` - -Unlike the Token Program, there is no Solana-maintained deployment of the Token -Swap Program. Rather, Solana provides -[source code](https://github.com/solana-labs/solana-program-library/tree/master/token-swap/program) -for the Token Swap Program as a reference implementation that you can fork and -deploy yourself. You can also use a token swap program maintained by a third -party organization you trust. Throughout this lesson, we'll be using the -deployment maintained by Serum at address -`SwaPpA9LAaLfeLi3a68M4DjnLqgtticKg6CnyNwgAC8`. - -Solana also maintains the `@solana/spl-token-swap` JS library. This library -provides helper functions for interacting with a token swap program. Each helper -function takes an argument representing a token swap program ID. As long as the -program you use accepts the Token Swap instructions, you can use the -`@solana/spl-token-swap` library with it. - -## Creating a Swap Pool - -Creating swap pools with the SPL Token Swap Program really showcases the -account, instruction, and authorization models on Solana. This lesson will -combine and build on top of a lot of what we have learned so far in the course. -For operations specific to the Token Swap Program, we'll use the -`@solana/spl-token-swap` library. - -As we talk through creating a swap pool, we'll assume we're creating a swap pool -for two tokens named Token A and Token B. Creating the swap pool with the -`spl-token-swap` library is as simple as sending a transaction with an -instruction created with the `TokenSwap.createInitSwapInstruction` function. -However, there are a number of accounts you need to create or derive beforehand -that will be needed when creating that instruction: - -1. **Token swap state account** - holds information about the swap pool -2. **Swap pool authority** - the PDA used to sign transactions on behalf of the - swap program -3. **Token accounts for Token A and Token B** - token accounts that will hold - tokens A and B for the pool -4. **Pool token mint** - the mint for the swap pool's LP-token -5. **Pool token account** - the token account for the initial minting of the - pool token mint when the swap account is created -6. **Pool token fee account** - the account that gets paid the swap pool's - trading fees - -### Token Swap State Account - -Before you can create a swap pool, you'll need to create a token swap state -account. This account will be used to hold information about the swap pool -itself. - -To create the token swap state account, you use the `SystemProgram` instruction -`createAccount`. 
- -```tsx -import * as web3 from "@solana/web3"; -import { - TokenSwap, - TOKEN_SWAP_PROGRAM_ID, - TokenSwapLayout, -} from "@solana/spl-token-swap"; - -const transaction = new Web3.Transaction(); -const tokenSwapStateAccount = Web3.Keypair.generate(); -const rent = TokenSwap.getMinBalanceRentForExemptTokenSwap(connection); -const tokenSwapStateAccountInstruction = await Web3.SystemProgram.createAccount( - { - newAccountPubkey: tokenSwapStateAccount.publicKey, - fromPubkey: wallet.publicKey, - lamports: rent, - space: TokenSwapLayout.span, - programId: TOKEN_SWAP_PROGRAM_ID, - }, -); -transaction.add(tokenSwapStateAccountInstruction); -``` - -A few items to note from this example: - -1. You can get the number of lamports required for rent exemption using - `TokenSwap.getMinBalanceRentForExemptTokenSwap` from the `spl-token-swap` - library. -2. Similarly, you can use `TokenSwapLayout.span` for the space required on the - account. -3. `programId` must be set to `TOKEN_SWAP_PROGRAM_ID`. This sets the owner of - the new account to be the Token Swap Program itself. The Token Swap Program - will need to write data to the new account and so must be set as the owner. - -### Swap Pool Authority - -The swap pool authority is the account used to sign for transactions on behalf -of the swap program. This account is a Program Derived Address (PDA) derived -from the Token Swap Program and the token swap state account. - -PDAs can only be created by their owning program, so you don't need to create -this account directly. You do, however, need to know its public key. You can -discover it using the `@solana/web3` library's `PublicKey.findProgramAddress` -function. - -```tsx -const [swapAuthority, bump] = await Web3.PublicKey.findProgramAddress( - [tokenSwapStateAccount.publicKey.toBuffer()], - TOKEN_SWAP_PROGRAM_ID, -); -``` - -The resulting public key will be used as the authority on a number of the -accounts that follow. - -### Token accounts for Token A and Token B - -Token A and Token B accounts are associated token accounts used for the actual -swap pool. These accounts must contain some number of A/B tokens respectively -and the swap authority PDA must be marked as the owner of each so that the Token -Swap Program can sign for transactions and transfer tokens from each account. - -```tsx -let tokenAAccountAddress = await token.getAssociatedTokenAddress( - tokenAMint, // mint - swapAuthority, // owner - true, // allow owner off curve -); - -const tokenAAccountInstruction = - await token.createAssociatedTokenAccountInstruction( - wallet.publicKey, // payer - tokenAAccountAddress, // ata - swapAuthority, // owner - tokenAMint, // mint - ); - -transaction.add(tokenAAccountInstruction); -``` - -If you need a refresher on creating token accounts, have a look at the -[Token Program lesson](./token-program.md). - -### Pool Token Mint - -The pool token mint is the mint of the LP-tokens that represent an LP’s -ownership in the pool. You create this mint the way you learned in the -[Token Program lesson](./token-program.md). For the swap pool to work, the mint -authority must be the swap authority account. - -```tsx -const poolTokenMint = await token.createMint( - connection, - wallet, - swapAuthority, - null, - 2, -); -``` - -### Pool Token Account - -The pool token account is the account that the initial liquidity pool tokens get -minted to when the swap account is first created. Subsequent minting of -LP-tokens will be minted directly to the account of the user adding liquidity to -the pool. 
Liquidity pool tokens represent ownership in the deposited liquidity -in the pool. - -```tsx -const tokenAccountPool = Web3.Keypair.generate(); -const rent = await token.getMinimumBalanceForRentExemptAccount(connection); -const createTokenAccountPoolInstruction = Web3.SystemProgram.createAccount({ - fromPubkey: wallet.publicKey, - newAccountPubkey: tokenAccountPool.publicKey, - space: token.ACCOUNT_SIZE, - lamports: rent, - programId: token.TOKEN_PROGRAM_ID, -}); -const initializeTokenAccountPoolInstruction = - token.createInitializeAccountInstruction( - tokenAccountPool.publicKey, - poolTokenMint, - wallet.publicKey, - ); - -transaction.add(createTokenAccountPoolInstruction); -transaction.add(initializeTokenAccountPoolInstruction); -``` - -### Pool Token Fee Account - -The pool token fee account is the token account that the fees for the token -swaps are paid to. For the Serum deployment of the Token Swap Program that we -are using, this account must be owned by a specific account defined in the swap -program: -[HfoTxFR1Tm6kGmWgYWD6J7YHVy1UwqSULUGVLXkJqaKN](https://explorer.solana.com/address/HfoTxFR1Tm6kGmWgYWD6J7YHVy1UwqSULUGVLXkJqaKN?cluster=devnet). - -```tsx -const feeOwner = new web3.PublicKey( - "HfoTxFR1Tm6kGmWgYWD6J7YHVy1UwqSULUGVLXkJqaKN", -); - -let tokenFeeAccountAddress = await token.getAssociatedTokenAddress( - poolTokenMint, // mint - feeOwner, // owner - true, // allow owner off curve -); - -const tokenFeeAccountInstruction = - await token.createAssociatedTokenAccountInstruction( - wallet.publicKey, // payer - tokenFeeAccountAddress, // ata - feeOwner, // owner - poolTokenMint, // mint - ); - -transaction.add(tokenFeeAccountInstruction); -``` - -### Create the swap pool - -With all of the prerequisite accounts created, you can create the swap pool -initialization instruction using `TokenSwap.createInitSwapInstruction` from the -`spl-token-swap` library. - -This function takes _a lot_ of arguments. Let's talk through them. - -The first 7 arguments are the prerequisite token accounts we just discussed. - -After that comes the constant representing the Token Program ID followed by the -constant representing the Token Swap Program ID. - -Next, there are 4 pairs of number arguments representing numerators and -denominators for the trade fee, owner trade fee, owner withdraw fee, and host -fee. The instruction uses the numerator and denominator for each to calculate -the percentage of the fee. Lets explain each of the fees: - -1. **Trade fee** - fees that are retained by the swap pool token accounts during - a trade and increase the redeemable value of LP-tokens. This fee rewards - users for providing liquidity to the swap pool. -2. **Owner trade fee** - fees that are retained by the swap pool token accounts - during a trade, with the equivalent in LP-tokens minted to the owner of the - program -3. **Owner withdraw fee** - extra LP-tokens that are sent to the owner on every - withdrawal -4. **Host fee** - a proportion of the owner trade fees, sent to an extra host - token account provided during the trade. This fee incentives external parties - (such as a decentralized exchange) to provide frontends for the swap pool and - rewards them with a portion. - -When using a swap program deployed and maintained by a third party, these fees -may or may not be fixed such that you _must_ input the correct arguments. You'll -need to check the implementation of the backing program. - -Lastly, there's the curve type, which we'll discuss further later in the lesson. 
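Before looking at the full instruction, it can help to see how each numerator/denominator pair translates into a percentage. The snippet below is plain arithmetic for illustration (not a library call), using the same owner trade fee values as the example that follows:

```tsx
// Each fee is expressed as numerator / denominator.
const ownerTradeFeeNumerator = 5;
const ownerTradeFeeDenominator = 10000;

// 5 / 10000 = 0.0005, i.e. 0.05% of every trade
const ownerTradeFeePercent =
  (ownerTradeFeeNumerator / ownerTradeFeeDenominator) * 100;

console.log(`Owner trade fee: ${ownerTradeFeePercent}%`); // "Owner trade fee: 0.05%"
```

With those fee values in mind, the full initialization instruction for this example pool looks like this: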
- -```tsx -const createSwapInstruction = TokenSwap.createInitSwapInstruction( - tokenSwapStateAccount, // Token swap state account - swapAuthority, // Swap pool authority - poolTokenA, // Token A token account - poolTokenB, // Token B token account - poolTokenMint, // Swap pool token mint - tokenFeeAccountAddress, // Token fee account - tokenAccountPool.publicKey, // Swap pool token account - token.TOKEN_PROGRAM_ID, // Token Program ID - TOKEN_SWAP_PROGRAM_ID, // Token Swap Program ID - 0, // Trade fee numerator - 10000, // Trade fee denominator - 5, // Owner trade fee numerator - 10000, // Owner trade fee denominator - 0, // Owner withdraw fee numerator - 0, // Owner withdraw fee denominator - 20, // Host fee numerator - 100, // Host fee denominator - CurveType.ConstantProduct, // Curve type -); - -transaction.add(createSwapInstruction); -``` - -When a transaction with these instructions successfully executes, the swap pool -is created and ready to be used. - -## Interacting with Swap Pools - -Once the swap pool is initialized, the Token Swap Program has a few different -instructions for using a swap pool. These include: - -1. Executing a swap -2. Depositing liquidity -3. Withdrawing liquidity - -### Execute a swap - -Users can immediately begin trading on a swap pool using the swap instruction. -The swap instruction transfers funds from a user's token account into the swap -pool's token account. The swap pool then mints LP-tokens to the user's LP-token -account. - -Since Solana programs require all accounts to be declared in the instruction, -users need to gather all account information from the token swap state account: -the token A and B accounts, pool token mint, and fee account. - -We swap tokens using the `TokenSwap.swapInstruction` helper function which -requires the following arguments: - -1. `tokenSwap` - the token swap state account -2. `authority` - the swap pool authority -3. `userTransferAuthority` - the delegate over the user token account -4. `userSource` - user token account to transfer tokens into the swap -5. `poolSource` - swap pool token account to receive tokens transferred from the - user -6. `poolDestination` - swap pool token account to send tokens to the user -7. `userDestination` - user token account to receive tokens sent from the swap - pool -8. `poolMint` - the LP-token mint address -9. `feeAccount` - the token account which receives the owner trade fees -10. `hostFeeAccount` - the token account which receives the host trade fees - (optional parameter), set to null if none is provided -11. `swapProgramId` - the address of the Token Swap Program -12. `tokenProgramId` - the address of the Token Program -13. `amountIn` - amount of tokens the user wants to transfer to the swap pool -14. `minimumAmountOut` - minimum amount of tokens send to the user token - account. This parameter is used to account for slippage. Slippage is the - difference between the value of a token when you submit the transaction - versus when the order is fulfilled. In this case, the lower the number, the - more slippage can possible occur without the transaction failing. Throughout - this lesson we'll use 0 for swaps as calculating slippage is outside the - scope of this lesson. In a production app, however, it's important to let - users specify the amount of slippage they're comfortable with. 
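If you do want to account for slippage, one common approach is to derive `minimumAmountOut` from an expected output amount and a user-supplied tolerance. The sketch below is illustrative only: `expectedAmountOut` and `slippageTolerancePercent` are hypothetical values you would obtain from your own quote logic and UI, not from the `spl-token-swap` library.

```tsx
// Hypothetical inputs: an expected output (from your own price quote) and the
// slippage tolerance the user selected in the UI.
const expectedAmountOut = 1_000_000; // in the destination token's smallest units
const slippageTolerancePercent = 1; // accept up to 1% less than expected

// The swap fails if the pool would return less than this amount.
const minimumAmountOut = Math.floor(
  expectedAmountOut * (1 - slippageTolerancePercent / 100),
);
```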
- -The instruction for swapping token A for token B will look like this: - -```tsx -const swapInstruction = TokenSwap.swapInstruction( - tokenSwapStateAccount, - swapAuthority, - userPublicKey, - userTokenA, - poolTokenA, - poolTokenB, - userTokenB, - poolMint, - feeAccount, - null, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - amount * 10 ** MintInfoTokenA.decimals, - 0, -); - -transaction.add(swapInstruction); -``` - -### Deposit liquidity - -The Token Swap Program has two variations of deposit instructions. One allows -users to only deposit tokens to one side of the swap pool at a time. The other -allows for users to deposit to both sides of the swap pool at the same time. - -In order to deposit liquidity to both sides of the swap pool, a user’s wallet -must have a sufficient amount of each token. When depositing both tokens, -instead of providing the amount of each token to deposit, the user specifies the -amount of LP-tokens they would like to receive. The Token Swap Program then -calculates the amount of each token that a depositor will receive given the -pool's curve and current liquidity. - -We can deposit both tokens at the same time using the -`TokenSwap.depositAllTokenTypesInstruction` helper function which requires the -following arguments: - -1. `tokenSwap` - the token swap state account -2. `authority` - the swap pool authority -3. `userTransferAuthority` - the authority over the user token accounts -4. `sourceA` - user token A account to transfer tokens into the swap pool token - A account -5. `sourceB` - user token B account to transfer tokens into the swap pool token - B account -6. `intoA` - swap pool token account A to receive user's token A -7. `intoB` - swap pool token account B to receive user's token B -8. `poolToken` - the LP-token mint address -9. `poolAccount` - user LP-token account the swap pool mints LP-token to -10. `swapProgramId` - the address of the Token Swap Program -11. `tokenProgramId` - the address of the Token Program -12. `poolTokenAmount` - amount of LP-token the depositor expects to receive -13. `maximumTokenA` - maximum amount of token A allowed to deposit -14. `maximumTokenB` - maximum amount of token A allowed to deposit - -The `maximumTokenA` and `maximumTokenB` arguments are used to prevent slippage. -The higher the number, the more slippage can possibly occur without a -transaction failure. For simplicity, we'll use a very large number for these -arguments. - -The instruction for depositing both token A and token B will look like this: - -```tsx -const instruction = TokenSwap.depositAllTokenTypesInstruction( - tokenSwapStateAccount, - swapAuthority, - userPublicKey, - userTokenA, - userTokenB, - poolTokenA, - poolTokenB, - poolMint, - userPoolToken, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - poolTokenAmount * 10 ** MintInfoPoolToken.decimals, - 100e9, - 100e9, -); - -transaction.add(instruction); -``` - -We can deposit tokens to only one side of the swap pool in a similar way using -the `TokenSwap.depositSingleTokenTypeExactAmountInInstruction`. The main -difference is that the last argument in the instruction is -`minimumPoolTokenAmount`. When depositing to only one side of the swap pool, the -user specifies exactly how many tokens to deposit. In turn, the Token Swap -Program calculates the amount of LP-tokens to mint the user for their deposit. 
-An instruction depositing only Token A will look like this: - -```tsx -const instruction = TokenSwap.depositSingleTokenTypeExactAmountInInstruction( - tokenSwapStateAccount, - swapAuthority, - userPublicKey, - userTokenA, - poolTokenA, - poolMint, - userPoolToken, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - DepositAmountTokenA * 10 ** MintInfoTokenA.decimals, - 0, -); - -transaction.add(instruction); -``` - -### Withdraw liquidity - -In exchange for providing liquidity, depositors receive LP-tokens representing -their fractional ownership of all A and B tokens in the pool. At any time, -liquidity providers may redeem their LP-token in exchange for tokens A and B at -the current "fair" exchange rate as determined by the curve. When liquidity is -withdrawn, tokens A and/or B are transferred into the user's token accounts and -the user's LP-token are burned. - -The Token Swap Program has two variations of withdraw instructions. One allows -users to only withdraw tokens from one side of the swap pool at a time. The -other allows for withdraws from both sides of the swap pool at the same time. - -We can withdraw both tokens at the same time using the -`TokenSwap.withdrawAllTokenTypesInstruction` helper function which requires the -following arguements: - -1. `tokenSwap` - the token swap state account -2. `authority` - the swap pool authority -3. `userTransferAuthority` - the authority over the user token accounts -4. `poolMint` - the LP-token mint address -5. `feeAccount` - the token account which receives the owner withdraw fees -6. `sourcePoolAccount` - user LP-token account to burn pool tokens LP-token from -7. `fromA` - swap pool token A account to withdraw from -8. `fromB` - swap pool token B account to withdraw from -9. `userAccountA` - user token A account to receive tokens withdrawn from swap - pool token A account -10. `userAccountB` - user token B account to receive tokens withdrawn from swap - pool token B account -11. `swapProgramId` - the address of the Token Swap Program -12. `tokenProgramId` - the address of the Token Program -13. `poolTokenAmount` - amount of LP-tokens the user expects to burn on withdraw -14. `minimumTokenA` - minimum amount of token A to withdraw -15. `minimumTokenB` - minimum amount of token B to withdraw - -The `minimumTokenA` and `minimumTokenB` arguments are used to prevent slippage. -The lower the number, the more slippage can possibly occur. For simplicity, we -will use 0 for these arguments. - -The instruction for depositing both token A and token B will look like this: - -```tsx -const instruction = TokenSwap.withdrawAllTokenTypesInstruction( - tokenSwapStateAccount, - swapAuthority, - userPublicKey, - poolMint, - feeAccount, - userPoolToken, - poolTokenA, - poolTokenB, - userTokenA, - userTokenB, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - poolTokenAmount * 10 ** MintInfoPoolToken.decimals, - 0, - 0, -); - -transaction.add(instruction); -``` - -We can withdraw tokens from only one side of the swap pool in a similar way -using the `TokenSwap.withdrawSingleTokenTypeExactAmountOut`. The main difference -is that the last argument in the instruction is `maximumPoolTokenAmount`. When -withdrawing only one side of the swap pool, the user specifies exact how many -tokens to withdraw. In turn, the Token Swap Program calculates the amount of -LP-tokens to mint the user must burn. 
An instruction withdrawing only Token B -will look like this: - -```tsx -const instruction = TokenSwap.depositSingleTokenTypeExactAmountInInstruction( - tokenSwapStateAccount, - swapAuthority, - userPublicKey, - poolMint, - feeAccount, - poolTokenB, - userTokenB, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - WithdrawAmountTokenB * 10 ** MintInfoTokenB.decimals, - 100e9, -); - -transaction.add(instruction); -``` - -## Curves - -Trading curves are at the core of how swap pools and AMMs (Automated Market -Makers) operate. The trading curve is the function that the Token Swap Program -uses to calculate how much of a destination token will be provided given an -amount of source token. The curve effectively sets the market price of the -tokens in the pool. - -The pool we’ll be interacting with in this lesson employs a -[Constant Product](https://spl.solana.com/token-swap#curves) Curve Function. The -constant product curve is the well-known Uniswap and Balancer style curve that -preserves an invariant on all swaps. This invariant can be expressed as the -product of the quantity of token A and token B in the swap pool. - -```tsx -A_total * B_total = invariant -``` - -If we have 100 token A and 5,000 token B, our invariant is 500,000. - -Now, if a trader wishes to put in a specific amount token A for some amount of -token B, the calculation becomes a matter of resolving "B_out" where: - -```tsx -(A_total + A_in) * (B_total - B_out) = invariant -``` - -Putting in the 10 token A along with our invariant of half a million, we would -need to solve for "B_out" like so: - -```tsx -(100 + 10) * (5,000 - B_out) = 500,000 -5,000 - B_out = 500,000 / 110 -5,000 - (500,000 / 110) = B_out -B_out = 454.5454... -``` - -The product of the amount of token A and token B must always equal a constant, -hence the name ‘Constant Product’. More information can be found on the -[Uniswap whitepaper](https://uniswap.org/whitepaper.pdf) and the -[Balancer whitepaper](https://balancer.fi/whitepaper.pdf). - -If curves don't make a whole lot of sense, don't worry! While learning more -about how they work doesn't hurt, you don't need to understand the entirety of -the mathematics to be able to implement the common curves. - -# Demo - -For this demo, a token pool of two brand new tokens has been created and is live -on Devnet. We'll walk through building out a frontend UI to interact with this -swap pool! Since the pool is already made, we don't have to worry about -initiating the pool and funding it with tokens. Instead, we'll focus on building -out the instructions for - -- depositing liquidity to the pool -- withdrawing your deposited liquidity -- swapping from one token to the other - -![Screenshot of Token Swap Demo](../assets/token-swap-frontend.png) - -### 1. Download the starter code - -Before we get started, go ahead and download the -[starter code](https://github.com/Unboxed-Software/solana-token-swap-frontend/tree/starter). - -The project is a fairly simple Next.js application re-using a lot of what was -previously built out for the demo in the -[Token Program lesson](./token-program.md). As you can see from the image above, -there are a few different text inputs and buttons - all of which will submit -transactions to the blockchain on the user's behalf. Our focus in this demo will -be creating the instructions that the last three buttons will submit. - -The airdrop buttons are already implemented and should work out of the box. 
They -utilize an airdrop program that's deployed on Devnet at address -[CPEV4ibq2VUv7UnNpkzUGL82VRzotbv2dy8vGwRfh3H3](https://explorer.solana.com/address/CPEV4ibq2VUv7UnNpkzUGL82VRzotbv2dy8vGwRfh3H3?cluster=devnet). -You can mint as many tokens as you'd like to your wallet to interact with the -pool. - -### 2. Create the Deposit Instruction - -Of the two variations of deposit instructions on the Token Swap Program, we'll -be using the variation that provides liquidity to both sides of the swap pool at -once: `TokenSwap.depositAllTokenTypesInstruction`. - -The deposit instruction should be added inside the `/components/Deposit.tsx` -file inside the `handleTransactionSubmit` function. This function is called when -the user clicks the Deposit button. - -We’ll start by deriving three associated token account addresses: - -1. The associated token account corresponding to the user's wallet address and - Krypt Coin -2. The associated token account corresponding to the user's wallet address and - Scrooge Coin -3. The associated token account corresponding to the user's wallet address and - the swap pools LP token - -There are a number of ways to do this, but we'll use the helper function -`getAssociatedTokenAddress` from the `spl-token` library. - -We'll also need the data associated with the pool token mint to adjust the user -input for the decimals of the pool token. To access a token mint's data, we'll -use the helper function `getMint` from the `spl-token` library. - -```tsx -const handleTransactionSubmit = async (deposit: DepositAllSchema) => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - // these are the accounts that hold the tokens - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - pool_mint, - publicKey, - ); - - // poolMintInfo holds data we've fetched for the pool token mint - const poolMintInfo = await token.getMint(connection, poolMint); -}; -``` - -Next, we need to check if the `tokenAccountPool` address we just derived has -been created. We'll use the `getAccountInfo` function from the `@solana/web3` -library to get the account info associated with `tokenAccountPool`. This -function will return an `AccountInfo` struct if the account exists or `null` -otherwise. If `null` is returned, we'll need to create the account. - -Since the `handleTransactionSubmit` function is already going to be submitting a -transaction, we'll simply add the instruction for creating an associated account -to the same transaction rather than submit multiple transactions. 
- -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - pool_mint, - publicKey, - ); - - const poolMintInfo = await token.getMint(connection, poolMint); - - const transaction = new Web3.Transaction(); - - let account = await connection.getAccountInfo(tokenAccountPool); - - if (account == null) { - const createATAInstruction = token.createAssociatedTokenAccountInstruction( - publicKey, - tokenAccountPool, - publicKey, - pool_mint, - ); - transaction.add(createATAInstruction); - } -}; -``` - -Finally, we can create the deposit instruction using the `spl-token-swap` -library's `TokenSwap.depositAllTokenTypesInstruction` helper function. We then -add the instruction and submit the transaction. - -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - - const tokenAccountPool = await token.getAssociatedTokenAddress( - poolMint, - publicKey, - ); - - const poolMintInfo = await token.getMint(connection, poolMint); - - const transaction = new Web3.Transaction(); - - let account = await connection.getAccountInfo(tokenAccountPool); - - if (account == null) { - const createATAInstruction = token.createAssociatedTokenAccountInstruction( - publicKey, - tokenAccountPool, - publicKey, - poolMint, - ); - transaction.add(createATAInstruction); - } - - const instruction = TokenSwap.depositAllTokenTypesInstruction( - tokenSwapStateAccount, - swapAuthority, - publicKey, - kryptATA, - scroogeATA, - poolKryptAccount, - poolScroogeAccount, - poolMint, - tokenAccountPool, - TOKEN_SWAP_PROGRAM_ID, - token.TOKEN_PROGRAM_ID, - poolTokenAmount * 10 ** poolMintInfo.decimals, - 100e9, - 100e9, - ); - - transaction.add(instruction); - - try { - let txid = await sendTransaction(transaction, connection); - alert( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - console.log( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - } catch (e) { - console.log(JSON.stringify(e)); - alert(JSON.stringify(e)); - } -}; -``` - -With the exception of the user’s `publickey` and their derived associated token -accounts (for Krypt Coin, Scrooge Coin, and the pool's LP-token), notice that -all the accounts are constants for this swap pool and are defined in the -`const.ts` file. - -At this point, you should be able to airdrop yourself some tokens and then -deposit them into the swap pool! - -### 3. Create the Withdrawal Instruction - -The withdrawal instruction is very similar to the deposit instruction, but there -are some subtle differences. Like deposits, the Token Swap Program accepts two -variations of the withdrawal instruction. You can either withdraw liquidity from -a single side of the swap pool, or you can withdraw your deposited liquidity -from both sides at the same time. 
- -Of the two variations of withdraw instructions on the Token Swap Program, we'll -be using the variation that removes liquidity from both sides of the swap pool -at once: `TokenSwap.withdrawAllTokenTypesInstruction`. - -The withdraw instruction should be added inside the `/components/Withdraw.tsx` -file inside the `handleTransactionSubmit` function. This function is called when -the user clicks the Withdraw button. - -We’ll start by deriving the three associated token account addresses, fetching -the pool token mint data, and checking the `tokenAccountPool` address the same -way we did for the deposit instruction. - -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - pool_mint, - publicKey, - ); - - const poolMintInfo = await token.getMint(connection, poolMint); - - const transaction = new Web3.Transaction(); - - let account = await connection.getAccountInfo(tokenAccountPool); - - if (account == null) { - const createATAInstruction = token.createAssociatedTokenAccountInstruction( - publicKey, - tokenAccountPool, - publicKey, - pool_mint, - ); - transaction.add(createATAInstruction); - } -}; -``` - -Next, we create the withdraw instruction using the `spl-token-swap` library's -`TokenSwap.withdrawAllTokenTypesInstruction` helper function. We then add the -instruction and submit the transaction. - -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - poolMint, - publicKey, - ); - - const poolMintInfo = await token.getMint(connection, poolMint); - - const transaction = new Web3.Transaction(); - - let account = await connection.getAccountInfo(tokenAccountPool); - - if (account == null) { - const createATAInstruction = token.createAssociatedTokenAccountInstruction( - publicKey, - tokenAccountPool, - publicKey, - poolMint, - ); - transaction.add(createATAInstruction); - } - - const instruction = TokenSwap.withdrawAllTokenTypesInstruction( - tokenSwapStateAccount, - swapAuthority, - publicKey, - poolMint, - feeAccount, - tokenAccountPool, - poolKryptAccount, - poolScroogeAccount, - kryptATA, - scroogeATA, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - poolTokenAmount * 10 ** poolMintInfo.decimals, - 0, - 0, - ); - - transaction.add(instruction); - try { - let txid = await sendTransaction(transaction, connection); - alert( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - console.log( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - } catch (e) { - console.log(JSON.stringify(e)); - alert(JSON.stringify(e)); - } -}; -``` - -Notice the ordering of accounts is different for the withdraw transaction and -there is an additional `feeAccount` provided this time. This `feeAccount` is the -destination for the fee that must be paid by the user for withdrawing liquidity -from the pools. - -### 4. 
Create the Swap Instruction - -Now it's time to implement the actual purpose of this program - the swap -instruction! - -Note that our UI has a dropdown to allow users to select which token they would -like to swap _from_, so we will have to create our instruction differently based -on what the user selects. - -We’ll do this inside the `handleTransactionSubmit` function of the -`/components/Swap.tsx` file. Once again, we will have to derive the user’s -`Associated Token Addresses` for each token mint (Krypt Coin, Scrooge Coin, and -Pool Token) and create the `tokenAccountPool` if it does not already exist. -Additionally, we'll fetch the data for both the Krypt Coin and Scrooge Coin to -account for the decimal precision of the tokens. - -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptMintInfo = await token.getMint(connection, kryptMint); - const ScroogeCoinMintInfo = await token.getMint(connection, ScroogeCoinMint); - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - poolMint, - publicKey, - ); -}; -``` - -From here, the user’s input will determine our path of execution. The user's -choice is saved to the `mint` property, so we'll use this to branch between each -possible instruction. - -```tsx -const handleTransactionSubmit = async () => { - if (!publicKey) { - alert("Please connect your wallet!"); - return; - } - - const kryptMintInfo = await token.getMint(connection, kryptMint); - const ScroogeCoinMintInfo = await token.getMint(connection, ScroogeCoinMint); - - const kryptATA = await token.getAssociatedTokenAddress(kryptMint, publicKey); - const scroogeATA = await token.getAssociatedTokenAddress( - ScroogeCoinMint, - publicKey, - ); - const tokenAccountPool = await token.getAssociatedTokenAddress( - poolMint, - publicKey, - ); - - const transaction = new Web3.Transaction(); - - let account = await connection.getAccountInfo(tokenAccountPool); - - if (account == null) { - const createATAInstruction = token.createAssociatedTokenAccountInstruction( - publicKey, - tokenAccountPool, - publicKey, - poolMint, - ); - transaction.add(createATAInstruction); - } - - // check which direction to swap - if (mint == "option1") { - const instruction = TokenSwap.swapInstruction( - tokenSwapStateAccount, - swapAuthority, - publicKey, - kryptATA, - poolKryptAccount, - poolScroogeAccount, - scroogeATA, - poolMint, - feeAccount, - null, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - amount * 10 ** kryptMintInfo.decimals, - 0, - ); - - transaction.add(instruction); - } else if (mint == "option2") { - const instruction = TokenSwap.swapInstruction( - tokenSwapStateAccount, - swapAuthority, - publicKey, - scroogeATA, - poolScroogeAccount, - poolKryptAccount, - kryptATA, - poolMint, - feeAccount, - null, - TOKEN_SWAP_PROGRAM_ID, - TOKEN_PROGRAM_ID, - amount * 10 ** ScroogeCoinMintInfo.decimals, - 0, - ); - - transaction.add(instruction); - } - - try { - let txid = await sendTransaction(transaction, connection); - alert( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - console.log( - `Transaction submitted: https://explorer.solana.com/tx/${txid}?cluster=devnet`, - ); - } catch (e) { - console.log(JSON.stringify(e)); - alert(JSON.stringify(e)); - } -}; -``` - -And that’s it! 
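-
-One detail worth flagging before moving on: both branches above pass `0` as the
-swap's minimum amount out, so the transaction accepts any execution price and
-has no slippage protection. Below is a rough sketch of how you might estimate a
-safer minimum from the current pool balances using the constant product
-formula. It ignores the pool's trading fees and assumes the `poolKryptAccount`
-and `poolScroogeAccount` constants from `const.ts`, so treat it as a starting
-point rather than production-ready code.
-
-```tsx
-// Sketch: estimate a minimum amount out for a Krypt -> Scrooge swap instead of
-// passing 0. It ignores the pool's trading fee, so the real output will be
-// slightly lower than this constant product estimate.
-async function estimateMinimumOut(
-  connection: Web3.Connection,
-  amountIn: number, // amount of Krypt Coin in base units (already scaled by decimals)
-  slippageTolerance = 0.01, // allow 1% slippage
-): Promise<number> {
-  const poolIn = await connection.getTokenAccountBalance(poolKryptAccount);
-  const poolOut = await connection.getTokenAccountBalance(poolScroogeAccount);
-  const x = Number(poolIn.value.amount); // pool's Krypt Coin reserves
-  const y = Number(poolOut.value.amount); // pool's Scrooge Coin reserves
-
-  // Constant product: (x + amountIn) * (y - out) = x * y
-  const expectedOut = y - (x * y) / (x + amountIn);
-  return Math.floor(expectedOut * (1 - slippageTolerance));
-}
-```
-
-You could then pass that estimate as the final argument of
-`TokenSwap.swapInstruction` in place of `0`.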
Once you have the swap instruction implemented, the UI should be -fully functional and you can airdrop yourself tokens, deposit liquidity, -withdraw your liquidity, and swap from token to token! - -Please take your time with this code and the concepts in this lesson. Swap pools -can get a lot more complicated than the one we have implemented today so it's -important to understand the basics. If you need some more time with the demo, -take it! And if you need, have a look at the -[solution code here](https://github.com/Unboxed-Software/solana-token-swap-frontend). - -# Challenge - -Now that we've worked through the demo together, try and take it a step further -with your own tokens! - -In the [Token Program lesson](./token-program.md) you created some tokens. Now -make a swap pool for those tokens and modify the code from this lesson's demo to -use your tokens and newly created swap pool. There is no solution code for this -since it's specific to your tokens, so go slow and take it one step at a time. -You've got this! diff --git a/content/courses/solana-course/content/type-cosplay.md b/content/courses/solana-course/content/type-cosplay.md deleted file mode 100644 index e38bf9ced..000000000 --- a/content/courses/solana-course/content/type-cosplay.md +++ /dev/null @@ -1,531 +0,0 @@ ---- -title: Type Cosplay -objectives: - - Explain the security risks associated with not checking account types - - Implement an account type discriminator using long-form Rust - - Use Anchor's `init` constraint to initialize accounts - - Use Anchor's `Account` type for account validation ---- - -# TL;DR - -- Use discriminators to distinguish between different account types -- To implement a discriminator in Rust, include a field in the account struct to - represent the account type - - ```rust - #[derive(BorshSerialize, BorshDeserialize)] - pub struct User { - discriminant: AccountDiscriminant, - user: Pubkey, - } - - #[derive(BorshSerialize, BorshDeserialize, PartialEq)] - pub enum AccountDiscriminant { - User, - Admin, - } - ``` - -- To implement a discriminator check in Rust, verify that the discriminator of - the deserialized account data matches the expected value - - ```rust - if user.discriminant != AccountDiscriminant::User { - return Err(ProgramError::InvalidAccountData.into()); - } - ``` - -- In Anchor, program account types automatically implement the `Discriminator` - trait which creates an 8 byte unique identifier for a type -- Use Anchor’s `Account<'info, T>` type to automatically check the discriminator - of the account when deserializing the account data - -# Overview - -“Type cosplay” refers to an unexpected account type being used in place of an -expected account type. Under the hood, account data is simply stored as an array -of bytes that a program deserializes into a custom account type. Without -implementing a way to explicitly distinguish between account types, account data -from an unexpected account could result in an instruction being used in -unintended ways. - -### Unchecked account - -In the example below, both the `AdminConfig` and `UserConfig` account types -store a single public key. The `admin_instruction` instruction deserializes the -`admin_config` account as an `AdminConfig` type and then performs a owner check -and data validation check. - -However, the `AdminConfig` and `UserConfig` account types have the same data -structure. This means a `UserConfig` account type could be passed in as the -`admin_config` account. 
As long as the public key stored on the account data -matches the `admin` signing the transaction, the `admin_instruction` instruction -would continue to process, even if the signer isn't actually an admin. - -Note that the names of the fields stored on the account types (`admin` and -`user`) make no difference when deserializing account data. The data is -serialized and deserialized based on the order of fields rather than their -names. - -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod type_cosplay_insecure { - use super::*; - - pub fn admin_instruction(ctx: Context) -> Result<()> { - let account_data = - AdminConfig::try_from_slice(&ctx.accounts.admin_config.data.borrow()).unwrap(); - if ctx.accounts.admin_config.owner != ctx.program_id { - return Err(ProgramError::IllegalOwner.into()); - } - if account_data.admin != ctx.accounts.admin.key() { - return Err(ProgramError::InvalidAccountData.into()); - } - msg!("Admin {}", account_data.admin); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct AdminInstruction<'info> { - admin_config: UncheckedAccount<'info>, - admin: Signer<'info>, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct AdminConfig { - admin: Pubkey, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct UserConfig { - user: Pubkey, -} -``` - -### Add account discriminator - -To solve this, you can add a discriminant field for each account type and set -the discriminant when initializing an account. - -The example below updates the `AdminConfig` and `UserConfig` account types with -a `discriminant` field. The `admin_instruction` instruction includes an -additional data validation check for the `discriminant` field. - -```rust -if account_data.discriminant != AccountDiscriminant::Admin { - return Err(ProgramError::InvalidAccountData.into()); -} -``` - -If the `discriminant` field of the account passed into the instruction as the -`admin_config` account does not match the expected `AccountDiscriminant`, then -the transaction will fail. Simply make sure to set the appropriate value for -`discriminant` when you initialize each account (not shown in the example), and -then you can include these discriminant checks in every subsequent instruction. 
- -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod type_cosplay_secure { - use super::*; - - pub fn admin_instruction(ctx: Context) -> Result<()> { - let account_data = - AdminConfig::try_from_slice(&ctx.accounts.admin_config.data.borrow()).unwrap(); - if ctx.accounts.admin_config.owner != ctx.program_id { - return Err(ProgramError::IllegalOwner.into()); - } - if account_data.admin != ctx.accounts.admin.key() { - return Err(ProgramError::InvalidAccountData.into()); - } - if account_data.discriminant != AccountDiscriminant::Admin { - return Err(ProgramError::InvalidAccountData.into()); - } - msg!("Admin {}", account_data.admin); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct AdminInstruction<'info> { - admin_config: UncheckedAccount<'info>, - admin: Signer<'info>, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct AdminConfig { - discriminant: AccountDiscriminant, - admin: Pubkey, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct UserConfig { - discriminant: AccountDiscriminant, - user: Pubkey, -} - -#[derive(BorshSerialize, BorshDeserialize, PartialEq)] -pub enum AccountDiscriminant { - Admin, - User, -} -``` - -### Use Anchor’s `Account` wrapper - -Implementing these checks for every account needed for every instruction can be -tedious. Fortunately, Anchor provides a `#[account]` attribute macro for -automatically implementing traits that every account should have. - -Structs marked with `#[account]` can then be used with `Account` to validate -that the passed in account is indeed the type you expect it to be. When -initializing an account whose struct representation has the `#[account]` -attribute, the first 8 bytes are automatically reserved for a discriminator -unique to the account type. When deserializing the account data, Anchor will -automatically check if the discriminator on the account matches the expected -account type and throw and error if it does not match. - -In the example below, `Account<'info, AdminConfig>` specifies that the -`admin_config` account should be of type `AdminConfig`. Anchor then -automatically checks that the first 8 bytes of account data match the -discriminator of the `AdminConfig` type. - -The data validation check for the `admin` field is also moved from the -instruction logic to the account validation struct using the `has_one` -constraint. `#[account(has_one = admin)]` specifies that the `admin_config` -account’s `admin` field must match the `admin` account passed into the -instruction. Note that for the `has_one` constraint to work, the naming of the -account in the struct must match the naming of field on the account you are -validating. 
- -```rust -use anchor_lang::prelude::*; -use borsh::{BorshDeserialize, BorshSerialize}; - -declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"); - -#[program] -pub mod type_cosplay_recommended { - use super::*; - - pub fn admin_instruction(ctx: Context) -> Result<()> { - msg!("Admin {}", ctx.accounts.admin_config.admin); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct AdminInstruction<'info> { - #[account(has_one = admin)] - admin_config: Account<'info, AdminConfig>, - admin: Signer<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} - -#[account] -pub struct UserConfig { - user: Pubkey, -} -``` - -It’s important to note that this is a vulnerability you don’t really have to -worry about when using Anchor - that’s the whole point of it in the first place! -After going through how this can be exploited if not handled properly in a -native rust program, hopefully you have a much better understanding of what the -purpose of the account discriminator is in an Anchor account. The fact that -Anchor sets and checks this discriminator automatically means that developers -can spend more time focusing on their product, but it’s still very important to -understand what Anchor is doing behind the scenes to develop robust Solana -programs. - -# Demo - -For this demo we’ll create two programs to demonstrate a type cosplay -vulnerability. - -- The first program will initialize program accounts without a discriminator -- The second program will initialize program accounts using Anchor’s `init` - constraint which automatically sets an account discriminator - -### 1. Starter - -To get started, download the starter code from the `starter` branch of -[this repository](https://github.com/Unboxed-Software/solana-type-cosplay/tree/starter). The -starter code includes a program with three instructions and some tests. - -The three instructions are: - -1. `initialize_admin` - initializes an admin account and sets the admin - authority of the program -2. `initialize_user` - intializes a standard user account -3. `update_admin` - allows the existing admin to update the admin authority of - the program - -Take a look at these three instructions in the `lib.rs` file. The last -instruction should only be callable by the account matching the `admin` field on -the admin account initialized using the `initialize_admin` instruction. - -### 2. Test insecure `update_admin` instruction - -However, both accounts have the same fields and field types: - -```rust -#[derive(BorshSerialize, BorshDeserialize)] -pub struct AdminConfig { - admin: Pubkey, -} - -#[derive(BorshSerialize, BorshDeserialize)] -pub struct User { - user: Pubkey, -} -``` - -Because of this, it's possible to pass in a `User` account in place of the -`admin` account in the `update_admin` instruction, thereby bypassing the -requirement that one be an admin to call this instruction. - -Take a look at the `solana-type-cosplay.ts` file in the `tests` directory. It -contains some basic setup and two tests. One test initializes a user account, -and the other invokes `update_admin` and passes in the user account in place of -an admin account. - -Run `anchor test` to see that invoking `update_admin` will complete -successfully. - -```bash - type-cosplay - ✔ Initialize User Account (233ms) - ✔ Invoke update admin instruction with user account (487ms) -``` - -### 3. Create `type-checked` program - -Now we'll create a new program called `type-checked` by running -`anchor new type-checked` from the root of the existing anchor program. 
- -Now in your `programs` folder you will have two programs. Run `anchor keys list` -and you should see the program ID for the new program. Add it to the `lib.rs` -file of the `type-checked` program and to the `type_checked` program in the -`Anchor.toml` file. - -Next, update the test file's setup to include the new program and two new -keypairs for the accounts we'll be initializing for the new program. - -```tsx -import * as anchor from "@project-serum/anchor"; -import { Program } from "@project-serum/anchor"; -import { TypeCosplay } from "../target/types/type_cosplay"; -import { TypeChecked } from "../target/types/type_checked"; -import { expect } from "chai"; - -describe("type-cosplay", () => { - const provider = anchor.AnchorProvider.env(); - anchor.setProvider(provider); - - const program = anchor.workspace.TypeCosplay as Program; - const programChecked = anchor.workspace.TypeChecked as Program; - - const userAccount = anchor.web3.Keypair.generate(); - const newAdmin = anchor.web3.Keypair.generate(); - - const userAccountChecked = anchor.web3.Keypair.generate(); - const adminAccountChecked = anchor.web3.Keypair.generate(); -}); -``` - -### 4. Implement the `type-checked` program - -In the `type_checked` program, add two instructions using the `init` constraint -to initialize an `AdminConfig` account and a `User` account. When using the -`init` constraint to initialize new program accounts, Anchor will automatically -set the first 8 bytes of account data as a unique discriminator for the account -type. - -We’ll also add an `update_admin` instruction that validates the `admin_config` -account as a `AdminConfig` account type using Anchor’s `Account` wrapper. For -any account passed in as the `admin_config` account, Anchor will automatically -check that the account discriminator matches the expected account type. - -```rust -use anchor_lang::prelude::*; - -declare_id!("FZLRa6vX64QL6Vj2JkqY1Uzyzjgi2PYjCABcDabMo8U7"); - -#[program] -pub mod type_checked { - use super::*; - - pub fn initialize_admin(ctx: Context) -> Result<()> { - ctx.accounts.admin_config.admin = ctx.accounts.admin.key(); - Ok(()) - } - - pub fn initialize_user(ctx: Context) -> Result<()> { - ctx.accounts.user_account.user = ctx.accounts.user.key(); - Ok(()) - } - - pub fn update_admin(ctx: Context) -> Result<()> { - ctx.accounts.admin_config.admin = ctx.accounts.admin.key(); - Ok(()) - } -} - -#[derive(Accounts)] -pub struct InitializeAdmin<'info> { - #[account( - init, - payer = admin, - space = 8 + 32 - )] - pub admin_config: Account<'info, AdminConfig>, - #[account(mut)] - pub admin: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[derive(Accounts)] -pub struct InitializeUser<'info> { - #[account( - init, - payer = user, - space = 8 + 32 - )] - pub user_account: Account<'info, User>, - #[account(mut)] - pub user: Signer<'info>, - pub system_program: Program<'info, System>, -} - -#[derive(Accounts)] -pub struct UpdateAdmin<'info> { - #[account( - mut, - has_one = admin - )] - pub admin_config: Account<'info, AdminConfig>, - pub new_admin: SystemAccount<'info>, - #[account(mut)] - pub admin: Signer<'info>, -} - -#[account] -pub struct AdminConfig { - admin: Pubkey, -} - -#[account] -pub struct User { - user: Pubkey, -} -``` - -### 5. Test secure `update_admin` instruction - -In the test file, we’ll initialize an `AdminConfig` account and a `User` account -from the `type_checked` program. Then we’ll invoke the `updateAdmin` instruction -twice passing in the newly created accounts. 
-
-```tsx
-describe("type-cosplay", () => {
-  ...
-
-  it("Initialize type checked AdminConfig Account", async () => {
-    await programChecked.methods
-      .initializeAdmin()
-      .accounts({
-        adminConfig: adminAccountChecked.publicKey,
-      })
-      .signers([adminAccountChecked])
-      .rpc()
-  })
-
-  it("Initialize type checked User Account", async () => {
-    await programChecked.methods
-      .initializeUser()
-      .accounts({
-        userAccount: userAccountChecked.publicKey,
-        user: provider.wallet.publicKey,
-      })
-      .signers([userAccountChecked])
-      .rpc()
-  })
-
-  it("Invoke update instruction using User Account", async () => {
-    try {
-      await programChecked.methods
-        .updateAdmin()
-        .accounts({
-          adminConfig: userAccountChecked.publicKey,
-          newAdmin: newAdmin.publicKey,
-          admin: provider.wallet.publicKey,
-        })
-        .rpc()
-    } catch (err) {
-      expect(err)
-      console.log(err)
-    }
-  })
-
-  it("Invoke update instruction using AdminConfig Account", async () => {
-    await programChecked.methods
-      .updateAdmin()
-      .accounts({
-        adminConfig: adminAccountChecked.publicKey,
-        newAdmin: newAdmin.publicKey,
-        admin: provider.wallet.publicKey,
-      })
-      .rpc()
-  })
-})
-```
-
-Run `anchor test`. For the transaction where we pass in the `User` account type,
-we expect the instruction to fail and return an Anchor Error because the account
-is not of type `AdminConfig`.
-
-```bash
-'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 invoke [1]',
-'Program log: Instruction: UpdateAdmin',
-'Program log: AnchorError caused by account: admin_config. Error Code: AccountDiscriminatorMismatch. Error Number: 3002. Error Message: 8 byte discriminator did not match what was expected.',
-'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 consumed 4765 of 200000 compute units',
-'Program EU66XDppFCf2Bg7QQr59nyykj9ejWaoW93TSkk1ufXh3 failed: custom program error: 0xbba'
-```
-
-Following Anchor best practices and using Anchor types will ensure that your
-programs avoid this vulnerability. Always use the `#[account]` attribute when
-creating account structs, use the `init` constraint when initializing accounts,
-and use the `Account` type in your account validation structs.
-
-If you want to take a look at the final solution code you can find it on the
-`solution` branch of
-[the repository](https://github.com/Unboxed-Software/solana-type-cosplay/tree/solution).
-
-# Challenge
-
-Just as with other lessons in this module, your opportunity to practice avoiding
-this security exploit lies in auditing your own or other programs.
-
-Take some time to review at least one program and ensure that account types have
-a discriminator and that those are checked for each account and instruction.
-Since standard Anchor types handle this check automatically, you're more likely
-to find a vulnerability in a native program.
-
-Remember, if you find a bug or exploit in somebody else's program, please alert
-them! If you find one in your own program, be sure to patch it right away.
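-
-While auditing native or Anchor programs, it can also help to inspect account
-data by hand and confirm it starts with the discriminator you expect. The
-sketch below assumes Anchor's default discriminator scheme (the first 8 bytes
-of the SHA-256 hash of `account:<StructName>`) and a Node.js environment; adapt
-it to however the program you're reviewing defines its discriminators.
-
-```tsx
-import { createHash } from "crypto";
-import { Connection, PublicKey } from "@solana/web3.js";
-
-// Anchor's default account discriminator: sha256("account:<StructName>")[0..8]
-function accountDiscriminator(structName: string): Buffer {
-  return createHash("sha256")
-    .update(`account:${structName}`)
-    .digest()
-    .subarray(0, 8);
-}
-
-// Returns true if the on-chain account data begins with the expected discriminator
-async function hasExpectedDiscriminator(
-  connection: Connection,
-  address: PublicKey,
-  structName: string,
-): Promise<boolean> {
-  const accountInfo = await connection.getAccountInfo(address);
-  if (!accountInfo) return false;
-  return accountInfo.data
-    .subarray(0, 8)
-    .equals(accountDiscriminator(structName));
-}
-```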
diff --git a/content/courses/solana-course/content/versioned-transaction.md b/content/courses/solana-course/content/versioned-transaction.md deleted file mode 100644 index d3769adf8..000000000 --- a/content/courses/solana-course/content/versioned-transaction.md +++ /dev/null @@ -1,814 +0,0 @@ ---- -title: Versioned Transactions and Lookup Tables -objectives: - - Create versioned transactions - - Create lookup tables - - Extend lookup tables - - Use lookup tables with versioned transactions ---- - -# TL;DR - -- **Versioned Transactions** refers to a way to support both legacy versions and - newer versions of transaction formats. The original transaction format is - "legacy" and new transaction versions start at version 0. Versioned - transactions were implemented in order to support the use of Address Lookup - Tables (also called lookup tables or LUTs). -- **Address Lookup Tables** are accounts used to store addresses of other - accounts, which can then be referenced in versioned transactions using a 1 - byte index instead of the full 32 bytes per address. This enables the creation - of more complex transactions than what was possible prior to the introduction - of LUTs. - -# Overview - -By design, Solana transactions are limited to 1232 bytes. Transactions exceeding -this size will fail. While this enables a number of network optimizations, it -can also limit the types of atomic operations that can be performed on the -network. - -To help get around the transaction size limitation, Solana released a new -transaction format that allows support for multiple versions of transaction -formats. At the time of writing, Solana supports two transaction versions: - -1. `legacy` - the original transaction format -2. `0` - the newest transaction format that includes support for Address Lookup - Tables - -Versioned transactions don't require any modifications to existing Solana -programs, but any client-side code created prior to the release of versioned -transactions should be updated. In this lesson, we'll cover the basics of -versioned transactions and how to use them, including: - -- Creating versioned transactions -- Creating and managing lookup tables -- Using lookup tables in versioned transactions - -## Versioned Transactions - -One of the items taking up the most space Solana transactions is the inclusion -of full account addresses. At 32 bytes each, 39 accounts will render a -transaction too large. That's not even accounting for instruction data. In -practice, most transactions will be too large with around 20 accounts. - -Solana released versioned transactions in order to support multiple transaction -formats. Alongside the release of versioned transactions, Solana released -version 0 of transactions to support Address Lookup Tables. Lookup tables are -separate accounts that store account addresses and then allow them to be -referenced in a transaction using a 1 byte index. This significantly decreases -the size of a transaction since each included account now only needs to use 1 -byte instead of 32 bytes. - -Even if you don't need to use lookup tables, you'll need to know how to support -versioned transactions in your client-side code. Fortunately, everything you -need to work with versioned transactions and lookup tables is included in the -`@solana/web3.js` library. 
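-
-Even fetching transactions requires opting in: RPC methods like
-`connection.getTransaction` will return an error for version `0` transactions
-unless you tell them the highest version your code supports. A minimal sketch:
-
-```ts
-import * as web3 from "@solana/web3.js";
-
-// Fetch a transaction in a way that tolerates both legacy and version 0 formats.
-// `signature` is a placeholder for a real transaction signature.
-async function getAnyVersionTransaction(
-  connection: web3.Connection,
-  signature: string,
-) {
-  return connection.getTransaction(signature, {
-    // Without this option, the call errors when it encounters a versioned transaction
-    maxSupportedTransactionVersion: 0,
-  });
-}
-```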
- -### Create versioned transaction - -To create a versioned transaction, you simply create a `TransactionMessage` with -the following parameters: - -- `payerKey` - the public key of the account that will pay for the transaction -- `recentBlockhash` - a recent blockhash from the network -- `instructions` - the instructions to include in the transaction - -You then transform this message object into a version `0` transaction using the -`compileToV0Message()` method. - -```ts -import * as web3 from "@solana/web3.js"; - -// Example transfer instruction -const transferInstruction = [ - web3.SystemProgram.transfer({ - fromPubkey: payer.publicKey, // Public key of account that will send the funds - toPubkey: toAccount.publicKey, // Public key of the account that will receive the funds - lamports: 1 * LAMPORTS_PER_SOL, // Amount of lamports to be transferred - }), -]; - -// Get the latest blockhash -let { blockhash } = await connection.getLatestBlockhash(); - -// Create the transaction message -const message = new web3.TransactionMessage({ - payerKey: payer.publicKey, // Public key of the account that will pay for the transaction - recentBlockhash: blockhash, // Latest blockhash - instructions: transferInstruction, // Instructions included in transaction -}).compileToV0Message(); -``` - -Finally, you pass the compiled message into `VersionedTransaction` constructor -to create a new versioned transaction. Your code can then sign and send the -transaction to the network, similar to a legacy transaction. - -```ts -// Create the versioned transaction using the message -const transaction = new web3.VersionedTransaction(message); - -// Sign the transaction -transaction.sign([payer]); - -// Send the signed transaction to the network -const transactionSignature = await connection.sendTransaction(transaction); -``` - -## Address Lookup Table - -Address Lookup Tables (also called lookup tables or LUTs) are accounts that -store a lookup table of other account addresses. These LUT accounts are owned by -the Address Lookup Table Program and are used to increase the number of accounts -that can be included in a single transaction. - -Versioned transactions can include the address of an LUT account and then -reference additional accounts with a 1-byte index instead of including the full -address of those accounts. This significantly reduces the amount of space used -for referencing accounts in a transaction. - -To simplify the process of working with LUTs, the `@solana/web3.js` library -includes an `AddressLookupTableProgram` class which provides a set of methods to -create instructions for managing LUTs. These methods include: - -- `createLookupTable` - creates a new LUT account -- `freezeLookupTable` - makes an existing LUT immutable -- `extendLookupTable` - adds addresses to an existing LUT -- `deactivateLookupTable` - puts an LUT in a “deactivation” period before it can - be closed -- `closeLookupTable` - permanently closes an LUT account - -### Create a lookup table - -You use the `createLookupTable` method to construct the instruction that creates -a lookup table. The function requires the following parameters: - -- `authority` - the account that will have permission to modify the lookup table -- `payer` - the account that will pay for the account creation -- `recentSlot` - a recent slot to derive the lookup table's address - -The function returns both the instruction to create the lookup table and the -address of the lookup table. 
- -```ts -// Get the current slot -const slot = await connection.getSlot(); - -// Create an instruction for creating a lookup table -// and retrieve the address of the new lookup table -const [lookupTableInst, lookupTableAddress] = - web3.AddressLookupTableProgram.createLookupTable({ - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentSlot: slot - 1, // The recent slot to derive lookup table's address - }); -``` - -Under the hood, the lookup table address is simply a PDA derived using the -`authority` and `recentSlot` as seeds. - -```ts -const [lookupTableAddress, bumpSeed] = PublicKey.findProgramAddressSync( - [params.authority.toBuffer(), toBufferLE(BigInt(params.recentSlot), 8)], - this.programId, -); -``` - -Note that using the most recent slot sometimes results in an error after sending -the transaction. To avoid this, you can use a slot that is one slot prior the -most recent one (e.g. `recentSlot: slot - 1`). However, if you still encounter -an error when sending the transaction, you can try resending the transaction. - -``` -"Program AddressLookupTab1e1111111111111111111111111 invoke [1]", -"188115589 is not a recent slot", -"Program AddressLookupTab1e1111111111111111111111111 failed: invalid instruction data"; -``` - -### Extend a lookup table - -You use the `extendLookupTable` method to create an instruction that adds -addresses to an existing lookup table. It takes the following parameters: - -- `payer` - the account that will pay for the transaction fees and any increased - rent -- `authority` - the account that has permission to change the lookup table -- `lookupTable` - the address of the lookup table to extend -- `addresses` - the addresses to add to the lookup table - -The function returns an instruction to extend the lookup table. - -```ts -const addresses = [ - new web3.PublicKey("31Jy3nFeb5hKVdB4GS4Y7MhU7zhNMFxwF7RGVhPc1TzR"), - new web3.PublicKey("HKSeapcvwJ7ri6mf3HwBtspLFTDKqaJrMsozdfXfg5y2"), - // add more addresses -]; - -// Create an instruction to extend a lookup table with the provided addresses -const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({ - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - lookupTable: lookupTableAddress, // The address of the lookup table to extend - addresses: addresses, // The addresses to add to the lookup table -}); -``` - -Note that when extending a lookup table, the number of addresses that can be -added in one instruction is limited by the transaction size limit, which is 1232 -bytes. This means you can add 30 addresses to a lookup table at a time. If you -need to add more than that, you'll need to send multiple transactions. Each -lookup table can store a maximum of 256 addresses. - -### Send Transaction - -After creating the instructions, you can add them to a transaction and sent to -the network. 
-
-```ts
-// Get the latest blockhash
-let { blockhash } = await connection.getLatestBlockhash();
-
-// Create the transaction message
-const message = new web3.TransactionMessage({
-  payerKey: payer.publicKey, // Public key of the account that will pay for the transaction
-  recentBlockhash: blockhash, // Latest blockhash
-  instructions: [lookupTableInst, extendInstruction], // Instructions included in transaction
-}).compileToV0Message();
-
-// Create the versioned transaction using the message
-const transaction = new web3.VersionedTransaction(message);
-
-// Sign the transaction
-transaction.sign([payer]);
-
-// Send the signed transaction to the network
-const transactionSignature = await connection.sendTransaction(transaction);
-```
-
-Note that when you first create or extend a lookup table, it needs to
-"warm up" for one slot before the LUT or new addresses can be used in
-transactions. In other words, you can only use a lookup table and access
-addresses that were added prior to the current slot.
-
-```
-SendTransactionError: failed to send transaction: invalid transaction: Transaction address table lookup uses an invalid index
-```
-
-If you encounter the error above or are unable to access addresses in a lookup
-table immediately after extending it, it's likely because you're attempting to
-access the lookup table or a specific address prior to the end of the warm up
-period. To avoid this issue, add a delay after extending the lookup table before
-sending a transaction that references the table.
-
-### Deactivate a lookup table
-
-When a lookup table is no longer needed, you can deactivate and close it to
-reclaim its rent balance. Address lookup tables can be deactivated at any time,
-but they can continue to be used by transactions until a specified
-"deactivation" slot is no longer "recent". This "cool-down" period ensures that
-in-flight transactions can't be censored by LUTs being closed and recreated in
-the same slot. The deactivation period is approximately 513 slots.
-
-To deactivate an LUT, use the `deactivateLookupTable` method and pass in the
-following parameters:
-
-- `lookupTable` - the address of the LUT to be deactivated
-- `authority` - the account with permission to deactivate the LUT
-
-```ts
-const deactivateInstruction =
-  web3.AddressLookupTableProgram.deactivateLookupTable({
-    lookupTable: lookupTableAddress, // The address of the lookup table to deactivate
-    authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-  });
-```
-
-### Close a lookup table
-
-To close a lookup table after its deactivation period, use the
-`closeLookupTable` method. This method creates an instruction to close a
-deactivated lookup table and reclaim its rent balance. It takes the following
-parameters:
-
-- `lookupTable` - the address of the LUT to be closed
-- `authority` - the account with permission to close the LUT
-- `recipient` - the account that will receive the reclaimed rent balance
-
-```ts
-const closeInstruction = web3.AddressLookupTableProgram.closeLookupTable({
-  lookupTable: lookupTableAddress, // The address of the lookup table to close
-  authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-  recipient: user.publicKey, // The recipient of closed account lamports
-});
-```
-
-Attempting to close a lookup table before it's been fully deactivated will
-result in an error.
- -``` -"Program AddressLookupTab1e1111111111111111111111111 invoke [1]", -"Table cannot be closed until it's fully deactivated in 513 blocks", -"Program AddressLookupTab1e1111111111111111111111111 failed: invalid program argument"; -``` - -### Freeze a lookup table - -In addition to standard CRUD operations, you can "freeze" a lookup table. This -makes it immutable so that it can no longer be extended, deactivated, or closed. - -You freeze a lookup table with the `freezeLookupTable` method. It takes the -following parameters: - -- `lookupTable` - the address of the LUT to be frozen -- `authority` - the account with permission to freeze the LUT - -```ts -const freezeInstruction = web3.AddressLookupTableProgram.freezeLookupTable({ - lookupTable: lookupTableAddress, // The address of the lookup table to freeze - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) -}); -``` - -Once an LUT is frozen, any further attempts to modify it will result in an -error. - -``` -"Program AddressLookupTab1e1111111111111111111111111 invoke [1]", -"Lookup table is frozen", -"Program AddressLookupTab1e1111111111111111111111111 failed: Account is immutable"; -``` - -### Using lookup tables in versioned transactions - -To use a lookup table in a versioned transaction, you need to retrieve the -lookup table account using its address. - -```ts -const lookupTableAccount = ( - await connection.getAddressLookupTable(lookupTableAddress) -).value; -``` - -You can then create a list of instructions to include in a transaction as usual. -When creating the `TransactionMessage`, you can include any lookup table -accounts by passing them as an array to the `compileToV0Message()` method. You -can also provide multiple lookup table accounts. - -```ts -const message = new web3.TransactionMessage({ - payerKey: payer.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentBlockhash: blockhash, // The blockhash of the most recent block - instructions: instructions, // The instructions to include in the transaction -}).compileToV0Message([lookupTableAccount]); // Include lookup table accounts - -// Create the versioned transaction using the message -const transaction = new web3.VersionedTransaction(message); - -// Sign the transaction -transaction.sign([payer]); - -// Send the signed transaction to the network -const transactionSignature = await connection.sendTransaction(transaction); -``` - -# Demo - -Let's go ahead and practice using lookup tables! - -This demo will guide you through the steps of creating, extending, and then -using a lookup table in a versioned transaction. - -### 1. Get the starter code - -To begin, download the starter code from the starter branch of this -[repository](https://github.com/Unboxed-Software/solana-versioned-transactions/tree/starter). -Once you have the starter code, run `npm install` in the terminal to install the -required dependencies. - -The starter code includes an example of creating a legacy transaction that -intends to atomically transfer SOL to 22 recipients. The transaction contains 22 -instructions where each instruction transfers SOL from the signer to a different -recipient. - -The purpose of the starter code is to illustrate the limitation on the number of -addresses that can be included in a legacy transaction. The transaction built in -the starter code is expected to fail when sent. - -The following starter code can be found in the `index.ts` file. 
- -```typescript -import { initializeKeypair } from "./initializeKeypair"; -import * as web3 from "@solana/web3.js"; - -async function main() { - // Connect to the devnet cluster - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - - // Initialize the user's keypair - const user = await initializeKeypair(connection); - console.log("PublicKey:", user.publicKey.toBase58()); - - // Generate 22 addresses - const recipients = []; - for (let i = 0; i < 22; i++) { - recipients.push(web3.Keypair.generate().publicKey); - } - - // Create an array of transfer instructions - const transferInstructions = []; - - // Add a transfer instruction for each address - for (const address of recipients) { - transferInstructions.push( - web3.SystemProgram.transfer({ - fromPubkey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - toPubkey: address, // The destination account for the transfer - lamports: web3.LAMPORTS_PER_SOL * 0.01, // The amount of lamports to transfer - }), - ); - } - - // Create a transaction and add the transfer instructions - const transaction = new web3.Transaction().add(...transferInstructions); - - // Send the transaction to the cluster (this will fail in this example if addresses > 21) - const txid = await connection.sendTransaction(transaction, [user]); - - // Get the latest blockhash and last valid block height - const { lastValidBlockHeight, blockhash } = - await connection.getLatestBlockhash(); - - // Confirm the transaction - await connection.confirmTransaction({ - blockhash: blockhash, - lastValidBlockHeight: lastValidBlockHeight, - signature: txid, - }); - - // Log the transaction URL on the Solana Explorer - console.log(`https://explorer.solana.com/tx/${txid}?cluster=devnet`); -} -``` - -To execute the code, run `npm start`. This will create a new keypair, write it -to the `.env` file, airdrop devnet SOL to the keypair, and send the transaction -built in the starter code. The transaction is expected to fail with the error -message `Transaction too large`. - -``` -Creating .env file -Current balance is 0 -Airdropping 1 SOL... -New balance is 1 -PublicKey: 5ZZzcDbabFHmoZU8vm3VzRzN5sSQhkf91VJzHAJGNM7B -Error: Transaction too large: 1244 > 1232 -``` - -In the next steps, we'll go over how to use lookup tables with versioned -transactions to increase the number of addresses that can be included in a -single transaction. - -Before we start, go ahead and delete the content of the `main` function to leave -only the following: - -```ts -async function main() { - // Connect to the devnet cluster - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - - // Initialize the user's keypair - const user = await initializeKeypair(connection); - console.log("PublicKey:", user.publicKey.toBase58()); - - // Generate 22 addresses - const addresses = []; - for (let i = 0; i < 22; i++) { - addresses.push(web3.Keypair.generate().publicKey); - } -} -``` - -### 2. Create a `sendV0Transaction` helper function - -We'll be sending multiple "version 0" transactions, so let's create a helper -function to facilitate this. - -This function should take parameters for a connection, a user's keypair, an -array of transaction instructions, and an optional array of lookup table -accounts. 
- -The function then performs the following tasks: - -- Retrieves the latest blockhash and last valid block height from the Solana - network -- Creates a new transaction message using the provided instructions -- Signs the transaction using the user's keypair -- Sends the transaction to the Solana network -- Confirms the transaction -- Logs the transaction URL on the Solana Explorer - -```ts -async function sendV0Transaction( - connection: web3.Connection, - user: web3.Keypair, - instructions: web3.TransactionInstruction[], - lookupTableAccounts?: web3.AddressLookupTableAccount[], -) { - // Get the latest blockhash and last valid block height - const { lastValidBlockHeight, blockhash } = - await connection.getLatestBlockhash(); - - // Create a new transaction message with the provided instructions - const messageV0 = new web3.TransactionMessage({ - payerKey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentBlockhash: blockhash, // The blockhash of the most recent block - instructions, // The instructions to include in the transaction - }).compileToV0Message(lookupTableAccounts ? lookupTableAccounts : undefined); - - // Create a new transaction object with the message - const transaction = new web3.VersionedTransaction(messageV0); - - // Sign the transaction with the user's keypair - transaction.sign([user]); - - // Send the transaction to the cluster - const txid = await connection.sendTransaction(transaction); - - // Confirm the transaction - await connection.confirmTransaction( - { - blockhash: blockhash, - lastValidBlockHeight: lastValidBlockHeight, - signature: txid, - }, - "finalized", - ); - - // Log the transaction URL on the Solana Explorer - console.log(`https://explorer.solana.com/tx/${txid}?cluster=devnet`); -} -``` - -### 3. Create a `waitForNewBlock` helper function - -Recall that lookup tables and the addresses contained in them can't be -referenced immediately after creation or extension. This means we'll end up -needing to wait for a new block before submitting transactions that reference -the newly created or extended lookup table. To make this simpler down the road, -let's create a `waitForNewBlock` helper function that we'll use to wait for -lookup tables to activate between sending transactions. - -This function will have parameters for a connection and a target block height. -It then starts an interval that checks the current block height of the network -every 1000ms. Once the new block height exceeds the target height, the interval -is cleared and the promise is resolved. - -```ts -function waitForNewBlock(connection: web3.Connection, targetHeight: number) { - console.log(`Waiting for ${targetHeight} new blocks`); - return new Promise(async (resolve: any) => { - // Get the last valid block height of the blockchain - const { lastValidBlockHeight } = await connection.getLatestBlockhash(); - - // Set an interval to check for new blocks every 1000ms - const intervalId = setInterval(async () => { - // Get the new valid block height - const { lastValidBlockHeight: newValidBlockHeight } = - await connection.getLatestBlockhash(); - // console.log(newValidBlockHeight) - - // Check if the new valid block height is greater than the target block height - if (newValidBlockHeight > lastValidBlockHeight + targetHeight) { - // If the target block height is reached, clear the interval and resolve the promise - clearInterval(intervalId); - resolve(); - } - }, 1000); - }); -} -``` - -### 4. 
Create an `initializeLookupTable` function
-
-Now that we have some helper functions ready to go, declare a function named
-`initializeLookupTable`. This function has parameters `user`, `connection`, and
-`addresses`. The function will:
-
-1. Retrieve the current slot
-2. Generate an instruction for creating a lookup table
-3. Generate an instruction for extending the lookup table with the provided
-   addresses
-4. Send and confirm a transaction with the instructions for creating and
-   extending the lookup table
-5. Return the address of the lookup table
-
-```ts
-async function initializeLookupTable(
-  user: web3.Keypair,
-  connection: web3.Connection,
-  addresses: web3.PublicKey[],
-): Promise<web3.PublicKey> {
-  // Get the current slot
-  const slot = await connection.getSlot();
-
-  // Create an instruction for creating a lookup table
-  // and retrieve the address of the new lookup table
-  const [lookupTableInst, lookupTableAddress] =
-    web3.AddressLookupTableProgram.createLookupTable({
-      authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-      payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-      recentSlot: slot - 1, // The recent slot to derive lookup table's address
-    });
-  console.log("lookup table address:", lookupTableAddress.toBase58());
-
-  // Create an instruction to extend a lookup table with the provided addresses
-  const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
-    payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees)
-    authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table)
-    lookupTable: lookupTableAddress, // The address of the lookup table to extend
-    addresses: addresses.slice(0, 30), // The addresses to add to the lookup table
-  });
-
-  await sendV0Transaction(connection, user, [
-    lookupTableInst,
-    extendInstruction,
-  ]);
-
-  return lookupTableAddress;
-}
-```
-
-### 5. Modify `main` to use lookup tables
-
-Now that we can initialize a lookup table with all of the recipients' addresses,
-let's update `main` to use versioned transactions and lookup tables. We'll need
-to:
-
-1. Call `initializeLookupTable`
-2. Call `waitForNewBlock`
-3. Get the lookup table using `connection.getAddressLookupTable`
-4. Create the transfer instruction for each recipient
-5.
Send the v0 transaction with all of the transfer instructions - -```ts -async function main() { - // Connect to the devnet cluster - const connection = new web3.Connection(web3.clusterApiUrl("devnet")); - - // Initialize the user's keypair - const user = await initializeKeypair(connection); - console.log("PublicKey:", user.publicKey.toBase58()); - - // Generate 22 addresses - const recipients = []; - for (let i = 0; i < 22; i++) { - recipients.push(web3.Keypair.generate().publicKey); - } - - const lookupTableAddress = await initializeLookupTable( - user, - connection, - recipients, - ); - - await waitForNewBlock(connection, 1); - - const lookupTableAccount = ( - await connection.getAddressLookupTable(lookupTableAddress) - ).value; - - if (!lookupTableAccount) { - throw new Error("Lookup table not found"); - } - - const transferInstructions = recipients.map(recipient => { - return web3.SystemProgram.transfer({ - fromPubkey: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - toPubkey: recipient, // The destination account for the transfer - lamports: web3.LAMPORTS_PER_SOL * 0.01, // The amount of lamports to transfer - }); - }); - - await sendV0Transaction(connection, user, transferInstructions, [ - lookupTableAccount, - ]); -} -``` - -Notice that you create the transfer instructions with the full recipient address -even though we created a lookup table. That's because by including the lookup -table in the versioned transaction, you tell the `web3.js` framework to replace -any recipient addresses that match addresses in the lookup table with pointers -to the lookup table instead. By the time the transaction is sent to the network, -addresses that exist in the lookup table will be referenced by a single byte -rather than the full 32 bytes. - -Use `npm start` in the command line to execute the `main` function. You should -see output similar to the following: - -```bash -Current balance is 1.38866636 -PublicKey: 8iGVBt3dcJdp9KfyTRcKuHY6gXCMFdnSG2F1pAwsUTMX -lookup table address: Cc46Wp1mtci3Jm9EcH35JcDQS3rLKBWzy9mV1Kkjjw7M -https://explorer.solana.com/tx/4JvCo2azy2u8XK2pU8AnJiHAucKTrZ6QX7EEHVuNSED8B5A8t9GqY5CP9xB8fZpTNuR7tbUcnj2MiL41xRJnLGzV?cluster=devnet -Waiting for 1 new blocks -https://explorer.solana.com/tx/rgpmxGU4QaAXw9eyqfMUqv8Lp6LHTuTyjQqDXpeFcu1ijQMmCH2V3Sb54x2wWAbnWXnMpJNGg4eLvuy3r8izGHt?cluster=devnet -Finished successfully -``` - -The first transaction link in the console represents the transaction for -creating and extending the lookup table. The second transaction represents the -transfers to all recipients. Feel free to inspect these transactions in the -explorer. - -Remember, this same transaction was failing when you first downloaded the -starter code. Now that we're using lookup tables, we can do all 22 transfers in -a single transaction. - -### 6. Add more address to the lookup table - -Keep in mind that the solution we've come up with so far only supports transfers -to up to 30 accounts since we only extend the lookup table once. When you factor -in the transfer instruction size, it's actually possible to extend the lookup -table with an additional 27 addresses and complete an atomic transfer to up to -57 recipients. Let's go ahead and add support for this now! - -All we need to do is go into `initializeLookupTable` and do two things: - -1. Modify the existing call to `extendLookupTable` to only add the first 30 - addressess (any more than that and the transaction will be too large) -2. 
Add a loop that will keep extending a lookup table 30 addresses at a time - until all addresses have been added - -```ts -async function initializeLookupTable( - user: web3.Keypair, - connection: web3.Connection, - addresses: web3.PublicKey[], -): Promise { - // Get the current slot - const slot = await connection.getSlot(); - - // Create an instruction for creating a lookup table - // and retrieve the address of the new lookup table - const [lookupTableInst, lookupTableAddress] = - web3.AddressLookupTableProgram.createLookupTable({ - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - recentSlot: slot - 1, // The recent slot to derive lookup table's address - }); - console.log("lookup table address:", lookupTableAddress.toBase58()); - - // Create an instruction to extend a lookup table with the provided addresses - const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({ - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - lookupTable: lookupTableAddress, // The address of the lookup table to extend - addresses: addresses.slice(0, 30), // The addresses to add to the lookup table - }); - - await sendV0Transaction(connection, user, [ - lookupTableInst, - extendInstruction, - ]); - - var remaining = addresses.slice(30); - - while (remaining.length > 0) { - const toAdd = remaining.slice(0, 30); - remaining = remaining.slice(30); - const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({ - payer: user.publicKey, // The payer (i.e., the account that will pay for the transaction fees) - authority: user.publicKey, // The authority (i.e., the account with permission to modify the lookup table) - lookupTable: lookupTableAddress, // The address of the lookup table to extend - addresses: toAdd, // The addresses to add to the lookup table - }); - - await sendV0Transaction(connection, user, [extendInstruction]); - } - - return lookupTableAddress; -} -``` - -Congratulations! If you feel good about this demo, you're probably ready to work -with lookup tables and versioned transactions on your own. If you want to take a -look at the final solution code you can find it on the solution branch -[here](https://github.com/Unboxed-Software/solana-versioned-transactions/tree/solution). - -# Challenge - -As a challenge, experiment with deactivating, closing and freezing lookup -tables. Remember that you need to wait for a lookup table to finish deactivating -before you can close it. Also, if a lookup table is frozen, it cannot be -modified (deactivated or closed), so you will have to test separately or use -separate lookup tables. - -1. Create a function for deactivating the lookup table. -2. Create a function for closing the lookup table -3. Create a function for freezing the lookup table -4. Test the functions by calling them in the `main()` function - -You can reuse the functions we created in the demo for sending the transaction -and waiting for the lookup table to activate/deactivate. Feel free to reference -this [solution code](https://github.com/Unboxed-Software/versioned-transaction/tree/challenge). 
diff --git a/content/courses/solana-course/metadata.json b/content/courses/solana-course/metadata.json deleted file mode 100644 index 3dfa4d0fa..000000000 --- a/content/courses/solana-course/metadata.json +++ /dev/null @@ -1,229 +0,0 @@ -{ - "title": "Solana Course", - "description": "This course is designed to be the absolute best starting point for Web Developers looking to learn Web3 Development.", - "structure": [ - { - "title": "Client Interaction with the Solana Network", - "number": 1, - "lessons": [ - { - "title": "Read data from the network", - "slug": "intro-to-reading-data", - "number": 1 - }, - { - "title": "Write data to the network", - "slug": "intro-to-writing-data", - "number": 2 - }, - { - "title": "Interact with wallets", - "slug": "interact-with-wallets", - "number": 3 - }, - { - "title": "Serialize custom instruction data", - "slug": "serialize-instruction-data", - "number": 4 - }, - { - "title": "Deserialize custom account data", - "slug": "deserialize-custom-data", - "number": 5 - }, - { - "title": "Page, Order, and Filter custom account data", - "slug": "paging-ordering-filtering-data", - "number": 6 - } - ] - }, - { - "title": "Client Interaction with Common Solana Programs", - "number": 2, - "lessons": [ - { - "title": "Create tokens with the Token Program", - "slug": "token-program", - "number": 1 - }, - { - "title": "Swap tokens with the Token Swap Program", - "slug": "token-swap", - "number": 2 - }, - { - "title": "Create Solana NFTs With Metaplex", - "slug": "nfts-with-metaplex", - "number": 3 - } - ] - }, - { - "title": "Basic Solana Program Development", - "number": 3, - "lessons": [ - { - "title": "Hello World", - "slug": "hello-world-program", - "number": 1 - }, - { - "title": "Create a Basic Program, Part 1 - Handle Instruction Data", - "slug": "deserialize-instruction-data", - "number": 2 - }, - { - "title": "Create a Basic Program, Part 2 - State Management", - "slug": "program-state-management", - "number": 3 - }, - { - "title": "Create a Basic Program, Part 3 - Basic Security and Validation", - "slug": "program-security", - "number": 4 - } - ] - }, - { - "title": "Intermediate Solana Program Development", - "number": 4, - "lessons": [ - { - "title": "Local Program Development", - "slug": "local-setup", - "number": 1 - }, - { - "title": "Program Derived Addresses", - "slug": "pda", - "number": 2 - }, - { - "title": "Cross Program Invocations", - "slug": "cpi", - "number": 3 - }, - { - "title": "Program Testing - COMING SOON", - "slug": "", - "hidden": true, - "number": 4 - } - ] - }, - { - "title": "Anchor Program Development", - "number": 5, - "lessons": [ - { - "title": "Intro to Anchor development", - "slug": "intro-to-anchor", - "number": 1 - }, - { - "title": "Intro to client-side Anchor development", - "slug": "intro-to-anchor-frontend", - "number": 2 - }, - { - "title": "Anchor PDAs and accounts", - "slug": "anchor-pdas", - "number": 3 - }, - { - "title": "Anchor CPIs and errors", - "slug": "anchor-cpi", - "number": 4 - } - ] - }, - { - "title": "Beyond the Basics", - "number": 6, - "lessons": [ - { - "title": "Environment variables in Solana programs", - "slug": "env-variables", - "number": 1 - }, - { - "title": "Solana Pay", - "slug": "solana-pay", - "number": 2 - }, - { - "title": "Versioned transactions and lookup tables", - "slug": "versioned-transaction", - "number": 3 - }, - { - "title": "Rust procedural macros", - "slug": "rust-macros", - "number": 4 - } - ] - }, - { - "title": "Solana Program Security", - "number": 7, - "lessons": [ - { 
- "title": "How to approach the Program Security module", - "slug": "security-intro", - "number": 1 - }, - { - "title": "Signer authorization", - "slug": "signer-auth", - "number": 2 - }, - { - "title": "Owner checks", - "slug": "owner-checks", - "number": 3 - }, - { - "title": "Account data matching", - "slug": "account-data-matching", - "number": 4 - }, - { - "title": "Reinitialization attacks", - "slug": "reinitialization-attacks", - "number": 5 - }, - { - "title": "Duplicate mutable accounts", - "slug": "duplicate-mutable-accounts", - "number": 6 - }, - { - "title": "Type cosplay", - "slug": "type-cosplay", - "number": 7 - }, - { - "title": "Arbitrary CPIs", - "slug": "arbitrary-cpi", - "number": 8 - }, - { - "title": "Bump seed canonicalization", - "slug": "bump-seed-canonicalization", - "number": 9 - }, - { - "title": "Closing accounts and revival attacks", - "slug": "closing-accounts", - "number": 10 - }, - { - "title": "PDA sharing", - "slug": "pda-sharing", - "number": 11 - } - ] - } - ] -} diff --git a/content/guides/getstarted/hello-world-in-your-browser.md b/content/guides/getstarted/hello-world-in-your-browser.md index cc9f76690..8232e844a 100644 --- a/content/guides/getstarted/hello-world-in-your-browser.md +++ b/content/guides/getstarted/hello-world-in-your-browser.md @@ -4,6 +4,7 @@ difficulty: intro featured: true featuredPriority: 0 title: "Intro to Solana development (using only your browser)" +seoTitle: "Introduction to Solana development" description: "Solana developer quickstart guide to learn how to setup, build, and deploy your first Solana program using only your browser with Solana Playground." @@ -54,9 +55,8 @@ especially on Windows. ### Import our example project -In a new tab in your browser, open our example "_Hello World_" project on Solana -Playground: -[https://beta.solpg.io/6314a69688a7fca897ad7d1d](https://beta.solpg.io/6314a69688a7fca897ad7d1d) +In a new tab in your browser, open our example "_Hello World_" +[project on Solana Playground](https://beta.solpg.io/6314a69688a7fca897ad7d1d) Next, import the project into your local workspace by clicking the "**Import**" icon and naming your project `hello_world`. diff --git a/content/guides/getstarted/solana-token-airdrop-and-faucets.md b/content/guides/getstarted/solana-token-airdrop-and-faucets.md index 80b3d1b65..f384e7dc2 100644 --- a/content/guides/getstarted/solana-token-airdrop-and-faucets.md +++ b/content/guides/getstarted/solana-token-airdrop-and-faucets.md @@ -2,6 +2,7 @@ date: Jul 29, 2023 difficulty: intro title: "How to get Solana devnet SOL (including airdrops and faucets)" +seoTitle: "Faucets: How to get Solana devnet SOL" description: "A list of the most common ways to get devnet and testnet SOL tokens for Solana development. Including: airdrop, web3.js, POW faucet, and more." @@ -58,12 +59,12 @@ _Available for Devnet_ RPC Providers can opt in to distributing Devnet SOL via their Devnet Validators. _\*If you are an RPC Provider and want to distribute SOL please get in touch -here_ +here: [Form](https://c852ena8x5c.typeform.com/to/cUj1iRhS)_ Currently Supported: 1. [Helius](https://www.helius.dev/) -2. [QuickNode](https://www.quicknode.com/chains/sol) +2. [QuickNode](https://faucet.quicknode.com/solana/devnet) ### Using the Solana CLI diff --git a/content/guides/intro/wallets-explained.md b/content/guides/intro/wallets-explained.md index efb13c8ca..ed56487c7 100644 --- a/content/guides/intro/wallets-explained.md +++ b/content/guides/intro/wallets-explained.md @@ -31,8 +31,8 @@ blockchain? 
![A screenshot of a wallet balance](/assets/guides/wallets-explained/Wallets---1-Phantom-Balance.png) -Skeptical? Take a look at this: -[https://explorer.solana.com/address/E35325pbtxCRsA4uVoC3cyBDZy8BMpmxvsvGcHNUa18k?cluster=devnet](https://explorer.solana.com/address/E35325pbtxCRsA4uVoC3cyBDZy8BMpmxvsvGcHNUa18k?cluster=devnet). +Skeptical? Take a look at +[this account's balance](https://explorer.solana.com/address/E35325pbtxCRsA4uVoC3cyBDZy8BMpmxvsvGcHNUa18k?cluster=devnet). If my crypto were stored inside a wallet, how come you can just look up what's inside my wallet on the Internet? @@ -65,8 +65,6 @@ Let's continue to unroll the thread. We have established that balance is stored on the blockchain and not inside your computer. The next exciting trail we could follow is to ask ourselves how data, in general, is stored in the blockchain. -> **Millions dollars FX Effect zoom into the blockchain piece** - **To understand why we need wallets, we first need to understand how data is stored in blockchains.** @@ -147,7 +145,7 @@ indeed the user; there are two problems: Yes! The key is signing using the secret key! We can "password protect our data" without communicating the password, thanks to asymmetric cryptography magic. -> What's asymmetric? _Think SSH keys_. +What's asymmetric? _Think SSH keys_. Can't wrap your head around it? Well, just think about the real world. For example, if a malicious imposter wants to withdraw money from your bank account, @@ -155,7 +153,7 @@ he would need your signature to do it. Well, that's precisely the same here: any modification to data identified by a public key also needs to be signed by the corresponding secret key. -> Yes, there was a world like that when mobile banking did not exist. +Yes, there was a world like that when mobile banking did not exist. **That's the true nature of wallets!! They are here to sign access to data. They are signatures or stamps.** diff --git a/content/guides/javascript/compressed-nfts.md b/content/guides/javascript/compressed-nfts.md index 08ad047e5..b849be038 100644 --- a/content/guides/javascript/compressed-nfts.md +++ b/content/guides/javascript/compressed-nfts.md @@ -613,7 +613,8 @@ The response fields to pay special attention to are: Transferring compressed NFTs is different from transferring uncompressed NFTs. Aside from using a different on-chain program, compressed NFTs require the use -of a asset's "merkle proof" (or `proof` for short) to actually change ownership. +of an asset's "merkle proof" (or `proof` for short) to actually change +ownership. ### What is a merkle proof? @@ -813,7 +814,7 @@ helper function. Since each of these hash values resemble and are formatted similar to PublicKeys, we can use the [`PublicKey`](https://solana-labs.github.io/solana-web3.js/classes/PublicKey.html) -class in web3.js to convert them into a accepted byte array format. +class in web3.js to convert them into an accepted byte array format. 
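+
+As a rough sketch of that conversion (illustrative only: `proofPath` is just a
+placeholder name, and `proof` is assumed to be the array of base58-encoded
+hashes returned by the asset proof request above), each proof node can be
+wrapped like this so it can be passed along with the transfer instruction as a
+read-only, non-signer account:
+
+```javascript
+import { PublicKey } from "@solana/web3.js";
+
+// Convert each base58-encoded proof hash into an account meta object
+const proofPath = proof.map(node => ({
+  pubkey: new PublicKey(node),
+  isSigner: false,
+  isWritable: false,
+}));
+```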
#### Send the transaction
diff --git a/content/guides/rpc/configure-solana-rpc-on-aws.md b/content/guides/rpc/configure-solana-rpc-on-aws.md
new file mode 100644
index 000000000..22eb84722
--- /dev/null
+++ b/content/guides/rpc/configure-solana-rpc-on-aws.md
@@ -0,0 +1,143 @@
+---
+date: Oct 26, 2023
+difficulty: intermediate
+featured: true
+featuredPriority: 0
+title: "Configure and run a Solana RPC on AWS"
+description:
+  "Running Solana nodes on AWS can help to reduce operational costs and improve
+  reliability of your validators or RPC services"
+tags:
+  - rpc
+  - infrastructure
+keywords:
+  - aws
+  - amazon
+  - node runner
+---
+
+# Solana on AWS
+
+Running Solana nodes on AWS can help to reduce operational costs and improve
+reliability of your validators or RPC services. In this guide, we will review
+the major use cases for running self-managed Solana nodes and understand their
+infrastructure requirements and cost considerations.
+
+## Solana node configurations on AWS
+
+There are four major use cases for running Solana nodes: Consensus, "Base" RPC
+node, "Extended" RPC node, and history node. Depending on the use case, you
+start your `solana-validator` process with a different set of parameters and
+might use different infrastructure configurations. Let's review them.
+
+### Consensus node
+
+Consensus nodes on both Mainnet Beta and testnets
+[support proof-of-stake consensus](https://solana.com/staking) and are run by
+operators to either stake themselves or to accept delegated stakes from other
+holders of Solana tokens (SOL). At the time of writing (September 2023), the
+configuration on AWS with a good cost/performance ratio is the
+[r6a.8xlarge EC2 instance type](https://aws.amazon.com/ec2/instance-types/r6a/)
+with three [EBS gp3 volumes](https://aws.amazon.com/ebs/general-purpose/): one
+for the root volume and two for storing Accounts and Data separately:
+
+- Root volume: EBS gp3 500 GB, 3K IOPS, 250 MB/s throughput,
+- Accounts volume: EBS gp3 500 GB, 5K IOPS, 700 MB/s throughput,
+- Data volume: EBS gp3 2 TB, 10K IOPS, 700 MB/s throughput.
+
+To set up a new validator node, you first need to generate the cryptographic
+keys that will be used in the process. After the keys are generated with the
+standard `solana-keygen` tool on the EC2 instance, you can keep a backup copy of
+the key pair in
+[AWS Secrets Manager](https://docs.aws.amazon.com/secretsmanager/latest/userguide/intro.html)
+(see the sketch at the end of this section).
+
+Based on the amount you have at stake, you can estimate with
+[community-developed calculators](https://www.stakingrewards.com/asset/solana)
+the amount of profit your validator node can generate, and then calculate the
+cost of the configuration with the [AWS calculator](https://calculator.aws/#/).
+Also note that, depending on the amount at stake, your validator node will
+generate between 150 TB and 300 TB of data transfer out to the Internet per
+month, so talk to your AWS account manager about potential cost optimization.
+
+- To run a single Consensus node on AWS, use the
+  [Solana Node Runner CDK application](https://github.com/aws-samples/aws-blockchain-node-runners/tree/solana/lib/solana)
+  in
+  [AWS Blockchain Node Runners](https://aws-samples.github.io/aws-blockchain-node-runners/)
+  and use the
+  [sample config for Consensus node](https://github.com/aws-samples/aws-blockchain-node-runners/blob/solana/lib/solana/sample-configs/.env-sample-validator).
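+
+As a minimal sketch of that backup step (assuming the keypair was written to
+`validator-keypair.json` by `solana-keygen new -o validator-keypair.json`, that
+the AWS SDK for JavaScript v3 is installed, and that the instance role is
+allowed to call `secretsmanager:CreateSecret`; the secret name is illustrative):
+
+```javascript
+import { readFileSync } from "fs";
+import {
+  SecretsManagerClient,
+  CreateSecretCommand,
+} from "@aws-sdk/client-secrets-manager";
+
+// Read the keypair file produced by `solana-keygen` on the instance
+const keypairJson = readFileSync("validator-keypair.json", "utf8");
+
+// Store a backup copy of the keypair in AWS Secrets Manager
+const client = new SecretsManagerClient({ region: "us-east-1" });
+await client.send(
+  new CreateSecretCommand({
+    Name: "solana/validator-keypair", // illustrative secret name
+    SecretString: keypairJson,
+  }),
+);
+```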
+
+### Base RPC nodes
+
+Base RPC nodes (or just "RPC nodes") can be used by your application to perform
+all RPC calls, except those that trigger a scan operation over the entire
+account set, like
+[getProgramAccounts](https://docs.solana.com/api/http#getprogramaccounts) and
+[SPL-token-specific requests](https://docs.solana.com/api/http#gettokenaccountsbydelegate):
+`getTokenAccountsByDelegate`, `getTokenAccountBalance`,
+`getTokenAccountsByOwner`, `getTokenLargestAccounts`, and `getTokenSupply`.
+These nodes can use the same infrastructure as the consensus nodes, but instead
+of validating transactions, they expose HTTP and WebSocket endpoints for your
+application to interact with the node through the JSON RPC API and RPC PubSub,
+respectively. On AWS, you can use the same `r6a.8xlarge` EC2 instance type as
+for the consensus node, with the same three EBS gp3 volumes:
+
+- Root volume: EBS gp3 500 GB, 3K IOPS, 250 MB/s throughput,
+- Accounts volume: EBS gp3 500 GB, 5K IOPS, 700 MB/s throughput,
+- Data volume: EBS gp3 2 TB, 10K IOPS, 700 MB/s throughput.
+
+Data transfer costs for this node can vary depending on whether you expose the
+RPC endpoints to the Internet (which generates more traffic to the Internet) or
+consume them within the same AWS Availability Zone (which costs you nothing). If
+you are not exposing the RPC interface for external consumption, your node will
+generate about 13-15 TB of outgoing data per month. That is less than a
+Consensus node, but can still be significant and is better discussed with your
+AWS account manager.
+
+- To run a Base RPC node on AWS, use the
+  [Solana Node Runner CDK application](https://github.com/aws-samples/aws-blockchain-node-runners/tree/solana/lib/solana)
+  in
+  [AWS Blockchain Node Runners](https://aws-samples.github.io/aws-blockchain-node-runners/)
+  and use the
+  [sample config for RPC node](https://github.com/aws-samples/aws-blockchain-node-runners/blob/solana/lib/solana/sample-configs/.env-sample-baserpc).
+  You can use either the single-node or the highly available setup.
+
+### Extended RPC nodes with secondary indexes
+
+RPC nodes with secondary indexes allow you to call the "extended" RPC functions
+mentioned above (a small client-side example follows at the end of this
+section). To use them, you need to enable extra indexes on your RPC node, which
+requires more hardware. At the time of writing (September 2023), it is
+recommended to use at least 1 TB of RAM with NVMe disks, or, on AWS, an instance
+like `x2idn.16xlarge`, which is also equipped with a physically attached NVMe
+SSD
+[Instance Store volume](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/InstanceStorage.html).
+The storage configuration looks like this:
+
+- Root volume: EBS gp3 500 GB, 3K IOPS, 250 MB/s throughput,
+- Accounts volume: Instance store (comes with the `x2idn.16xlarge` instance)
+  1.9 TB,
+- Data volume: EBS gp3 2 TB, 10K IOPS, 700 MB/s throughput.
+
+As with Base RPC nodes, the data transfer amount may vary; the estimate is about
+15 TB of outgoing data per month per node.
+
+- To run Extended RPC nodes on AWS, use the
+  [Solana Node Runner CDK application](https://github.com/aws-samples/aws-blockchain-node-runners/tree/solana/lib/solana)
+  in
+  [AWS Blockchain Node Runners](https://aws-samples.github.io/aws-blockchain-node-runners/)
+  and use the
+  [sample config for RPC with secondary indexes node](https://github.com/aws-samples/aws-blockchain-node-runners/blob/solana/lib/solana/sample-configs/.env-sample-extendedrpc).
+  You can use either the single-node or the highly available setup.
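+
+To illustrate what calling one of these "extended" methods looks like from a
+client, here is a minimal `@solana/web3.js` sketch (the endpoint URL and owner
+address are placeholders; point the connection at your own RPC node):
+
+```javascript
+import { Connection, PublicKey } from "@solana/web3.js";
+
+// Placeholder endpoint: replace with the address of your Extended RPC node
+const connection = new Connection("http://localhost:8899", "confirmed");
+
+// Placeholder owner address and the SPL Token program id
+const owner = new PublicKey("11111111111111111111111111111111");
+const tokenProgram = new PublicKey(
+  "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA",
+);
+
+// getTokenAccountsByOwner is one of the calls that relies on the secondary
+// indexes enabled on this node configuration
+const tokenAccounts = await connection.getTokenAccountsByOwner(owner, {
+  programId: tokenProgram,
+});
+console.log("Token accounts found:", tokenAccounts.value.length);
+```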
+
+### History nodes
+
+History nodes are not yet available on AWS because of a dependency on GCP's
+BigTable. However, the Solana community is doing some great work to make this
+data available in a cloud-agnostic way, so stay tuned.
+
+## Cost Optimization
+
+To optimize the cost, you can either use a
+[Compute Savings Plan for EC2](https://aws.amazon.com/savingsplans/compute-pricing/)
+or talk to one of the AWS partners like [Antimetal](https://www.antimetal.com/)
+or [Zesty](https://zesty.co/). We don't recommend using Spot Instances for
+Solana nodes because they can be terminated on short notice, and a similar
+instance might not be immediately available to you even with on-demand pricing.
diff --git a/content/guides/solang/getting-started.md b/content/guides/solang/getting-started.md
index 6c468c8ca..44c84c359 100644
--- a/content/guides/solang/getting-started.md
+++ b/content/guides/solang/getting-started.md
@@ -1,5 +1,5 @@
 ---
-featured: true
+featured: false
 date: Jul 17, 2023
 difficulty: intro
 title: "Getting started with Solang"
diff --git a/content/guides/token-extensions/mint-close-authority.md b/content/guides/token-extensions/mint-close-authority.md
new file mode 100644
index 000000000..a3f19f36e
--- /dev/null
+++ b/content/guides/token-extensions/mint-close-authority.md
@@ -0,0 +1,241 @@
+---
+date: Dec 05, 2023
+seoTitle: "Token Extensions: Mint Close Authority"
+title: How to use the Mint Close Authority extension
+description:
+  "With Token Extensions, it is possible to close token mint accounts by
+  initializing the MintCloseAuthority extension before initializing the mint."
+keywords:
+  - token 2022
+  - token extensions
+  - token program
+difficulty: beginner
+tags:
+  - token 2022
+  - token extensions
+---
+
+With the original SPL token program, there was no option to close Mint Accounts
+owned by the Token Program and reclaim the SOL allocated to these accounts.
+
+The `MintCloseAuthority` extension introduces a solution to this limitation by
+allowing a designated Close Authority to close a Mint Account if the supply of
+the mint is 0. This feature provides a mechanism to recover SOL allocated to
+Mint Accounts that are no longer in use.
+
+In this guide, we'll walk through an example of using Solana Playground. Here is
+the [final script](https://beta.solpg.io/65700c73fb53fa325bfd0c4a).
+
+## Getting Started
+
+Start by opening this Solana Playground
+[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following
+starter code.
+
+```javascript
+// Client
+console.log("My address:", pg.wallet.publicKey.toString());
+const balance = await pg.connection.getBalance(pg.wallet.publicKey);
+console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`);
+```
+
+If it is your first time using Solana Playground, you'll first need to create a
+Playground Wallet and fund the wallet with devnet SOL.
+
+
+
+If you do not have a Playground wallet, you may see a type error within the
+editor on all declarations of `pg.wallet.publicKey`. This type error will clear
+after you create a Playground wallet.
+
+
+
+To get devnet SOL, run the `solana airdrop` command in the Playground's
+terminal, or visit this [devnet faucet](https://faucet.solana.com/).
+
+```
+solana airdrop 5
+```
+
+Once you've created and funded the Playground wallet, click the "Run" button to
+run the starter code.
+
+## Add Dependencies
+
+Let's start by setting up our script. We'll be using the `@solana/web3.js` and
+`@solana/spl-token` libraries.
+ +Replace the starter code with the following: + +```javascript +import { + Connection, + Keypair, + SystemProgram, + Transaction, + clusterApiUrl, + sendAndConfirmTransaction, +} from "@solana/web3.js"; +import { + ExtensionType, + TOKEN_2022_PROGRAM_ID, + closeAccount, + createInitializeMintCloseAuthorityInstruction, + createInitializeMintInstruction, + getMintLen, +} from "@solana/spl-token"; + +// Playground wallet +const payer = pg.wallet.keypair; + +// Connection to devnet cluster +const connection = new Connection(clusterApiUrl("devnet"), "confirmed"); + +// Transaction signature returned from sent transaction +let transactionSignature: string; +``` + +## Mint Setup + +First, let's define the properties of the Mint Account we'll be creating in the +following step. + +```javascript +// Generate new keypair for Mint Account +const mintKeypair = Keypair.generate(); +// Address for Mint Account +const mint = mintKeypair.publicKey; +// Decimals for Mint Account +const decimals = 2; +// Authority that can mint new tokens +const mintAuthority = pg.wallet.publicKey; +// Authority that can close the Mint Account +const closeAuthority = pg.wallet.publicKey; +``` + +Next, let's determine the size of the new Mint Account and calculate the minimum +lamports needed for rent exemption. + +```javascript +// Size of Mint Account with extension +const mintLen = getMintLen([ExtensionType.MintCloseAuthority]); +// Minimum lamports required for Mint Account +const lamports = await connection.getMinimumBalanceForRentExemption(mintLen); +``` + +With Token Extensions, the size of the Mint Account will vary based on the +extensions enabled. + +## Build Instructions + +Next, let's build the set of instructions to: + +- Create a new account +- Initialize the `MintCloseAuthority` extension +- Initialize the remaining Mint Account data + +First, build the instruction to invoke the System Program to create an account +and assign ownership to the Token Extensions Program. + +```javascript +// Instruction to invoke System Program to create new account +const createAccountInstruction = SystemProgram.createAccount({ + fromPubkey: payer.publicKey, // Account that will transfer lamports to created account + newAccountPubkey: mint, // Address of the account to create + space: mintLen, // Amount of bytes to allocate to the created account + lamports, // Amount of lamports transferred to created account + programId: TOKEN_2022_PROGRAM_ID, // Program assigned as owner of created account +}); +``` + +Next, build the instruction to initialize the `MintCloseAuthority` extension for +the Mint Account. + +```javascript +// Instruction to initialize the MintCloseAuthority Extension +const initializeMintCloseAuthorityInstruction = + createInitializeMintCloseAuthorityInstruction( + mint, // Mint Account address + closeAuthority, // Designated Close Authority + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID + ); +``` + +Lastly, build the instruction to initialize the rest of the Mint Account data. +This is the same as with the original Token Program. + +```javascript +// Instruction to initialize Mint Account data +const initializeMintInstruction = createInitializeMintInstruction( + mint, // Mint Account Address + decimals, // Decimals of Mint + mintAuthority, // Designated Mint Authority + null, // Optional Freeze Authority + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +## Send Transaction + +Next, let's add the instructions to a new transaction and send it to the +network. 
This will create a Mint Account with the `MintCloseAuthority` extension +enabled. + +```javascript +// Add instructions to new transaction +const transaction = new Transaction().add( + createAccountInstruction, + initializeMintCloseAuthorityInstruction, + initializeMintInstruction, +); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer, mintKeypair], // Signers +); + +console.log( + "\nCreate Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on SolanaFM. + +## Close Mint Account + +With the `MintCloseAuthority` extension enabled, the Close Authority can close +the Mint Account to reclaim the lamports from the account. + +```javascript +// Send transaction to close Mint Account +transactionSignature = await closeAccount( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + payer.publicKey, // Account to receive lamports from closed account + closeAuthority, // Close Authority for Mint Account + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nClose Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on the SolanaFM. + +## Conclusion + +The `MintCloseAuthority` extension enables developers to reclaim SOL that +otherwise would have been permanently locked in a Mint Account. This feature is +particularly useful for applications or games involving single-use NFTs that are +meant to be burned. It ensures that the SOL allocated to Mint Accounts which are +no longer used can be reclaimed and reused. diff --git a/content/guides/token-extensions/non-transferable.md b/content/guides/token-extensions/non-transferable.md new file mode 100644 index 000000000..f13fa3ce0 --- /dev/null +++ b/content/guides/token-extensions/non-transferable.md @@ -0,0 +1,362 @@ +--- +date: Dec 06, 2023 +seoTitle: "Token Extensions: Non-transferable" +title: How to use the Non-transferable extension +description: + "In the world of digital collectibles, NFTs have plenty of uses outside of the + PFP meta. Enter the concept of 'soul-bound' tokens - assets that are tied to + an individual and cannot be transferred." +keywords: + - token 2022 + - token extensions + - token program +difficulty: beginner +tags: + - token 2022 + - token extensions +--- + +The `NonTransferable` extension makes it possible to create tokens that cannot +be transferred. This enables the creation of "soul-bound" tokens, where digital +assets are intrinsically linked to an individual. While these tokens cannot be +transferred, the owner can still burn tokens and close the Token Account. This +prevents users from being "stuck" with an unwanted asset. + +In this guide, we will walk through an example of creating "soul-bound" tokens +with the `NonTransferable` extension using Solana Playground. Here is the +[final script](https://beta.solpg.io/6570c54bfb53fa325bfd0c4d). + +## Getting Started + +Start by opening this Solana Playground +[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following +starter code. 
+ +```javascript +// Client +console.log("My address:", pg.wallet.publicKey.toString()); +const balance = await pg.connection.getBalance(pg.wallet.publicKey); +console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`); +``` + +If it is your first time using Solana Playground, you'll first need to create a +Playground Wallet and fund the wallet with devnet SOL. + + + +If you do not have a Playground wallet, you may see a type error within the +editor on all declarations of `pg.wallet.publicKey`. This type error will clear +after you create a Playground wallet. + + + +To get devnet SOL, run the `solana airdrop` command in the Playground's +terminal, or visit this [devnet faucet](https://faucet.solana.com/). + +``` +solana airdrop 5 +``` + +Once you've created and funded the Playground wallet, click the "Run" button to +run the starter code. + +## Add Dependencies + +Let's start by setting up our script. We'll be using the `@solana/web3.js` and +`@solana/spl-token` libraries. + +Replace the starter code with the following: + +```javascript +import { + Connection, + Keypair, + SystemProgram, + Transaction, + clusterApiUrl, + sendAndConfirmTransaction, +} from "@solana/web3.js"; +import { + ExtensionType, + TOKEN_2022_PROGRAM_ID, + createInitializeMintInstruction, + createInitializeNonTransferableMintInstruction, + getMintLen, + mintTo, + createAccount, + transfer, + burn, + closeAccount, +} from "@solana/spl-token"; + +// Playground wallet +const payer = pg.wallet.keypair; + +// Connection to devnet cluster +const connection = new Connection(clusterApiUrl("devnet"), "confirmed"); + +// Transaction signature returned from sent transaction +let transactionSignature: string; +``` + +## Mint Setup + +First, let's define the properties of the Mint Account we'll be creating in the +following step. + +```javascript +// Generate new keypair for Mint Account +const mintKeypair = Keypair.generate(); +// Address for Mint Account +const mint = mintKeypair.publicKey; +// Decimals for Mint Account +const decimals = 2; +// Authority that can mint new tokens +const mintAuthority = pg.wallet.publicKey; +``` + +Next, let's determine the size of the new Mint Account and calculate the minimum +lamports needed for rent exemption. + +```javascript +// Size of Mint Account with extension +const mintLen = getMintLen([ExtensionType.NonTransferable]); +// Minimum lamports required for Mint Account +const lamports = await connection.getMinimumBalanceForRentExemption(mintLen); +``` + +With Token Extensions, the size of the Mint Account will vary based on the +extensions enabled. + +## Build Instructions + +Next, let's build the set of instructions to: + +- Create a new account +- Initialize the `NonTransferable` extension +- Initialize the remaining Mint Account data + +First, build the instruction to invoke the System Program to create an account +and assign ownership to the Token Extensions Program. 
+ +```javascript +// Instruction to invoke System Program to create new account +const createAccountInstruction = SystemProgram.createAccount({ + fromPubkey: payer.publicKey, // Account that will transfer lamports to created account + newAccountPubkey: mint, // Address of the account to create + space: mintLen, // Amount of bytes to allocate to the created account + lamports, // Amount of lamports transferred to created account + programId: TOKEN_2022_PROGRAM_ID, // Program assigned as owner of created account +}); +``` + +Next, build the instruction to initialize the `NonTransferable` extension for +the Mint Account. + +```javascript +// Instruction to initialize the NonTransferable Extension +const initializeNonTransferableMintInstruction = + createInitializeNonTransferableMintInstruction( + mint, // Mint Account address + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID + ); +``` + +Lastly, build the instruction to initialize the rest of the Mint Account data. +This is the same as with the original Token Program. + +```javascript +// Instruction to initialize Mint Account data +const initializeMintInstruction = createInitializeMintInstruction( + mint, // Mint Account Address + decimals, // Decimals of Mint + mintAuthority, // Designated Mint Authority + null, // Optional Freeze Authority + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +## Send Transaction + +Next, let's add the instructions to a new transaction and send it to the +network. This will create a Mint Account with the `NonTransferable` extension +enabled. + +```javascript +// Add instructions to new transaction +const transaction = new Transaction().add( + createAccountInstruction, + initializeNonTransferableMintInstruction, + initializeMintInstruction, +); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer, mintKeypair], // Signers +); + +console.log( + "\nCreate Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction on the SolanaFM. + +## Create Token Accounts + +Next, let's set up two Token Accounts to demonstrate the functionality of the +`NonTransferable` extension. + +First, create a `sourceTokenAccount` owned by the Playground wallet. + +```javascript +// Create Token Account for Playground wallet +const sourceTokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + payer.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, generate a random keypair and use it as the owner of a +`destinationTokenAccount`. + +```javascript +// Random keypair to use as owner of Token Account +const randomKeypair = new Keypair(); +// Create Token Account for random keypair +const destinationTokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + randomKeypair.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Lastly, mint 1 token to the `sourceTokenAccount` to test the non-transferrable +enforcement. 
+ +```javascript +// Mint tokens to sourceTokenAccount +transactionSignature = await mintTo( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + sourceTokenAccount, // Mint to + mintAuthority, // Mint Authority address + 100, // Amount + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nMint Tokens:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +## Attempt Token Transfer + +Next, let's try to transfer tokens from the `sourceTokenAccount` to the +`destinationTokenAccount`. We expect this transaction to fail due to the +`NonTransferable` extension. + +```javascript +try { + // Attempt to Transfer tokens + await transfer( + connection, + payer, // Transaction fee payer + sourceTokenAccount, // Transfer from + destinationTokenAccount, // Transfer to + payer.publicKey, // Source Token Account owner + 100, // Amount + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID + ); +} catch (error) { + console.log("\nExpect Error:", error); +} +``` + +Run the script by clicking the `Run` button. You can then inspect the error in +the Playground terminal. You should see a message similar to the following: + +``` +Expect Error: { [Error: failed to send transaction: Transaction simulation failed: Error processing Instruction 0: custom program error: 0x25] + logs: + [ 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb invoke [1]', + 'Program log: Instruction: Transfer', + 'Program log: Transfer is disabled for this mint', + 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb consumed 3454 of 200000 compute units', + 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb failed: custom program error: 0x25' ] } +``` + +## Burn Tokens and Close Token Account + +While tokens can't be transferred, they can still be burned. + +```javascript +// Burn tokens +transactionSignature = await burn( + connection, + payer, // Transaction fee payer + sourceTokenAccount, // Burn from + mint, // Mint Account address + payer.publicKey, // Token Account owner + 100, // Amount + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nBurn Tokens:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +The Token Account can then be closed to recover the SOL that was allocated to +the account. Note that the token balance must be 0. + +```javascript +// Close Token Account +transactionSignature = await closeAccount( + connection, + payer, // Transaction fee payer + sourceTokenAccount, // Token Account to close + payer.publicKey, // Account to receive lamports from closed account + payer.publicKey, // Owner of Token Account + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nClose Token Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction on the SolanaFM. + +## Conclusion + +The `NonTransferable` mint extension enables the creation of "soul-bound" +tokens, ensuring that digital assets are bound to an individual account. 
This +feature enables a unique mechanism for digital ownership such as for personal +achievements, identity, or credentials that are inherently non-transferable. diff --git a/content/guides/token-extensions/permanent-delegate.md b/content/guides/token-extensions/permanent-delegate.md new file mode 100644 index 000000000..458cf210c --- /dev/null +++ b/content/guides/token-extensions/permanent-delegate.md @@ -0,0 +1,357 @@ +--- +date: Dec 06, 2023 +seoTitle: "Token Extensions: Permanent Delegate" +title: How to use the Permanent Delegate extension +description: + "Token Extensions allow you to specify a permanent account delegate for a mint + with unlimited delegate privileges over any account associated with that mint, + including burning or transferring any amount of tokens." +keywords: + - token 2022 + - token extensions + - token program +difficulty: beginner +tags: + - token 2022 + - token extensions +--- + +The `PermanentDelegate` extension allows for a designated Permanent Delegate for +a Mint Account. This permanent delegate has unrestricted delegate privileges +over all Token Accounts for that mint, enabling them to burn or transfer tokens +without limitation. + +In this guide, we'll walk through an example of creating a token with the +Permanent Delegate using Solana Playground. Here is the +[final script](https://beta.solpg.io/6570a56bfb53fa325bfd0c4b) that this guide +will walkthrough. + +## Understanding the Implications + +This is a very powerful feature, and its implications have to be clearly stated +for both users and app developers. + + +The Permanent Delegate is effectively a global owner of all Token Accounts for +the mint. Due to the unlimited powers of the Permanent Delegate, if the +delegate's keys are compromised, an attacker will have complete control over all +Token Accounts for that mint. + + +## Getting Started + +Start by opening this Solana Playground +[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following +starter code. + +```javascript +// Client +console.log("My address:", pg.wallet.publicKey.toString()); +const balance = await pg.connection.getBalance(pg.wallet.publicKey); +console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`); +``` + +If it is your first time using Solana Playground, you'll first need to create a +Playground Wallet and fund the wallet with devnet SOL. + + + +If you do not have a Playground wallet, you may see a type error within the +editor on all declarations of `pg.wallet.publicKey`. This type error will clear +after you create a Playground wallet. + + + +To get devnet SOL, run the `solana airdrop` command in the Playground's +terminal, or visit this [devnet faucet](https://faucet.solana.com/). + +``` +solana airdrop 5 +``` + +Once you've created and funded the Playground wallet, click the "Run" button to +run the starter code. + +## Add Dependencies + +Lets start by setting up our script. We'll be using the `@solana/web3.js` and +`@solana/spl-token` libraries. 
+
+Replace the starter code with the following:
+
+```javascript
+import {
+  Connection,
+  Keypair,
+  SystemProgram,
+  Transaction,
+  clusterApiUrl,
+  sendAndConfirmTransaction,
+} from "@solana/web3.js";
+import {
+  ExtensionType,
+  TOKEN_2022_PROGRAM_ID,
+  createInitializePermanentDelegateInstruction,
+  createInitializeMintInstruction,
+  getMintLen,
+  createAccount,
+  mintTo,
+  transferChecked,
+  burnChecked,
+} from "@solana/spl-token";
+
+// Playground wallet
+const payer = pg.wallet.keypair;
+
+// Connection to devnet cluster
+const connection = new Connection(clusterApiUrl("devnet"), "confirmed");
+
+// Transaction signature returned from sent transaction
+let transactionSignature: string;
+```
+
+## Mint Setup
+
+First, let's define the properties of the Mint Account we'll be creating in the
+following step.
+
+```javascript
+// Generate new keypair for Mint Account
+const mintKeypair = Keypair.generate();
+// Address for Mint Account
+const mint = mintKeypair.publicKey;
+// Decimals for Mint Account
+const decimals = 2;
+// Authority that can mint new tokens
+const mintAuthority = pg.wallet.publicKey;
+// Authority that can transfer or burn from any token account
+const permanentDelegate = pg.wallet.publicKey;
+```
+
+Next, let's determine the size of the new Mint Account and calculate the minimum
+lamports needed for rent exemption.
+
+```javascript
+// Size of Mint Account with extension
+const mintLen = getMintLen([ExtensionType.PermanentDelegate]);
+// Minimum lamports required for Mint Account
+const lamports = await connection.getMinimumBalanceForRentExemption(mintLen);
+```
+
+With Token Extensions, the size of the Mint Account will vary based on the
+extensions enabled.
+
+## Build Instructions
+
+We will need to build a set of instructions to:
+
+- Create a new account
+- Initialize the `PermanentDelegate` extension
+- Initialize the remaining Mint Account data
+
+First, build the instruction to invoke the System Program to create an account
+and assign ownership to the Token Extensions Program.
+
+```javascript
+// Instruction to invoke System Program to create new account
+const createAccountInstruction = SystemProgram.createAccount({
+  fromPubkey: payer.publicKey, // Account that will transfer lamports to created account
+  newAccountPubkey: mint, // Address of the account to create
+  space: mintLen, // Amount of bytes to allocate to the created account
+  lamports, // Amount of lamports transferred to created account
+  programId: TOKEN_2022_PROGRAM_ID, // Program assigned as owner of created account
+});
+```
+
+Next, build the instruction to initialize the `PermanentDelegate` extension for
+the Mint Account.
+
+```javascript
+// Instruction to initialize the PermanentDelegate Extension
+const initializePermanentDelegateInstruction =
+  createInitializePermanentDelegateInstruction(
+    mint, // Mint Account address
+    permanentDelegate, // Designated Permanent Delegate
+    TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+  );
+```
+
+Lastly, build the instruction to initialize the rest of the Mint Account data.
+This is the same as with the original Token Program.
+
+```javascript
+// Instruction to initialize Mint Account data
+const initializeMintInstruction = createInitializeMintInstruction(
+  mint, // Mint Account Address
+  decimals, // Decimals of Mint
+  mintAuthority, // Designated Mint Authority
+  null, // Optional Freeze Authority
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+```
+
+## Send Transaction
+
+Now add the instructions to a new transaction and send it to the network. This
+will create a Mint Account with the `PermanentDelegate` extension enabled.
+
+```javascript
+// Add instructions to new transaction
+const transaction = new Transaction().add(
+  createAccountInstruction,
+  initializePermanentDelegateInstruction,
+  initializeMintInstruction,
+);
+
+// Send transaction
+transactionSignature = await sendAndConfirmTransaction(
+  connection,
+  transaction,
+  [payer, mintKeypair], // Signers
+);
+
+console.log(
+  "\nCreate Mint Account:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+Run the script by clicking the `Run` button. You can then inspect the
+transaction on SolanaFM.
+
+## Create Token Accounts
+
+Next, we will set up two Token Accounts to demonstrate the functionality of the
+Permanent Delegate.
+
+First, generate a random keypair and use it as the owner of a
+`sourceTokenAccount`.
+
+```javascript
+// Random keypair to use as owner of Token Account
+const randomKeypair = new Keypair();
+// Create Token Account for random keypair
+const sourceTokenAccount = await createAccount(
+  connection,
+  payer, // Payer to create Token Account
+  mint, // Mint Account address
+  randomKeypair.publicKey, // Token Account owner
+  undefined, // Optional keypair, default to Associated Token Account
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+```
+
+Next, create a `destinationTokenAccount` owned by the Playground wallet.
+
+```javascript
+// Create Token Account for Playground wallet
+const destinationTokenAccount = await createAccount(
+  connection,
+  payer, // Payer to create Token Account
+  mint, // Mint Account address
+  payer.publicKey, // Token Account owner
+  undefined, // Optional keypair, default to Associated Token Account
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+```
+
+Lastly, mint 2 tokens to the `sourceTokenAccount` to fund it.
+
+```javascript
+// Mint tokens to sourceTokenAccount
+transactionSignature = await mintTo(
+  connection,
+  payer, // Transaction fee payer
+  mint, // Mint Account address
+  sourceTokenAccount, // Mint to
+  mintAuthority, // Mint Authority address
+  200, // Amount
+  undefined, // Additional signers
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+console.log(
+  "\nMint Tokens:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+## Transfer with Permanent Delegate
+
+Next, let's send a transaction to transfer 1 token from the `sourceTokenAccount`
+to the `destinationTokenAccount`. Remember, the `sourceTokenAccount` is owned by
+a randomly generated keypair.
+
+To transfer tokens using the Permanent Delegate, use the `transferChecked`
+instruction and specify the Permanent Delegate as the owner of the
+`sourceTokenAccount`.
+
+```javascript
+// Transfer tokens from source to destination
+transactionSignature = await transferChecked(
+  connection,
+  payer, // Transaction fee payer
+  sourceTokenAccount, // Transfer from
+  mint, // Mint Account address
+  destinationTokenAccount, // Transfer to
+  permanentDelegate, // Use Permanent Delegate as owner
+  100, // Amount
+  decimals, // Mint Account decimals
+  undefined, // Additional signers
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+console.log(
+  "\nTransfer Tokens:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+## Burn with Permanent Delegate
+
+Let's also send a transaction to burn 1 token from the `sourceTokenAccount`.
+
+To burn tokens using the Permanent Delegate, use the `burnChecked` instruction
+and specify the Permanent Delegate as the owner of the `sourceTokenAccount`.
+
+```javascript
+// Burn tokens from token account
+transactionSignature = await burnChecked(
+  connection,
+  payer, // Transaction fee payer
+  sourceTokenAccount, // Burn from
+  mint, // Mint Account address
+  permanentDelegate, // Use Permanent Delegate as owner
+  100, // Amount
+  decimals, // Mint Account decimals
+  undefined, // Additional signers
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+console.log(
+  "\nBurn Tokens:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+Run the script by clicking the `Run` button. You can then inspect the
+transactions on SolanaFM.
+
+
+Note that both the transfer and burn transactions complete successfully,
+even though the transactions are not signed by the owner of the Token Account.
+
+
+## Conclusion
+
+The `PermanentDelegate` extension is a powerful feature that enables developers
+to have much greater control over tokens they create, such as the ability to
+retrieve tokens that have been mistakenly transferred. While this extension
+offers greater flexibility, it's essential for users to be aware of the
+implications of holding tokens with this extension enabled, particularly the
+risks associated with compromised delegate keys.
diff --git a/content/guides/token-extensions/reallocate.md b/content/guides/token-extensions/reallocate.md
new file mode 100644
index 000000000..3583d5395
--- /dev/null
+++ b/content/guides/token-extensions/reallocate.md
@@ -0,0 +1,209 @@
+---
+date: Dec 7, 2023
+seoTitle: "Token Extensions: Reallocate instruction"
+title: How to use the Reallocate instruction
+description:
+  "The Token Extensions program has an account extension that can be applied
+  after initializing a Token Account, enabling support for more extensions to
+  be added after initial creation."
+keywords:
+  - token 2022
+  - token extensions
+  - token program
+difficulty: beginner
+tags:
+  - token 2022
+  - token extensions
+---
+
+To enable additional extensions on existing Token Accounts created via the Token
+Extensions program, additional space must first be reallocated to accommodate
+the extra data required by these extensions. This can be done using the
+`reallocate` instruction.
+
+For example, this allows existing Token Accounts to be updated to enable the
+[`MemoTransfer`](/content/guides/token-extensions/required-memo.md) and
+`CpiGuard` extensions.
+
+In this guide, we'll walk through an example of using Solana Playground. Here is
+the [final script](https://beta.solpg.io/65723a50fb53fa325bfd0c52).
+ +## Getting Started + +Start by opening this Solana Playground +[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following +starter code. + +```javascript +// Client +console.log("My address:", pg.wallet.publicKey.toString()); +const balance = await pg.connection.getBalance(pg.wallet.publicKey); +console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`); +``` + +If it is your first time using Solana Playground, you'll first need to create a +Playground Wallet and fund the wallet with devnet SOL. + + + +If you do not have a Playground wallet, you may see a type error within the +editor on all declarations of `pg.wallet.publicKey`. This type error will clear +after you create a Playground wallet. + + + +To get devnet SOL, run the `solana airdrop` command in the Playground's +terminal, or visit this [devnet faucet](https://faucet.solana.com/). + +``` +solana airdrop 5 +``` + +Once you've created and funded the Playground wallet, click the "Run" button to +run the starter code. + +## Add Dependencies + +Let's start by setting up our script. We'll be using the `@solana/web3.js` and +`@solana/spl-token` libraries. + +Replace the starter code with the following: + +```javascript +import { + Connection, + Transaction, + clusterApiUrl, + sendAndConfirmTransaction, +} from "@solana/web3.js"; +import { + ExtensionType, + TOKEN_2022_PROGRAM_ID, + createAccount, + createMint, + createReallocateInstruction, + createEnableRequiredMemoTransfersInstruction, +} from "@solana/spl-token"; + +// Playground wallet +const payer = pg.wallet.keypair; + +// Connection to devnet cluster +const connection = new Connection(clusterApiUrl("devnet"), "confirmed"); + +// Transaction signature returned from sent transaction +let transactionSignature: string; +``` + +## Create Mint and Token Account + +First, we'll need to create a new Mint Account. + +```javascript +// Authority that can mint new tokens +const mintAuthority = pg.wallet.publicKey; +// Decimals for Mint Account +const decimals = 2; + +// Create Mint Account +const mint = await createMint( + connection, + payer, // Payer of the transaction and initialization fees + mintAuthority, // Mint Authority + null, // Optional Freeze Authority + decimals, // Decimals of Mint + undefined, // Optional keypair + undefined, // Options for confirming the transaction + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, let's create a Token Account with no extensions enabled. + +```javascript +// Create Token Account for Playground wallet +const tokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + payer.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +## Build Instructions + +Next, let's build a transaction to enable the `MemoTransfer` extensions for an +existing Token Account. + +First, build the instruction to reallocate the Token Account with enough space +for the specified extension. The Token Extensions Program includes a +[reallocate instruction](https://github.com/solana-labs/solana-program-library/blob/master/token/program-2022/src/extension/reallocate.rs#L24) +that automatically calculates the space and lamports required. 
+ +```javascript +// Extensions to reallocate data for +const extensions = [ExtensionType.MemoTransfer]; +// Instruction to reallocate Token Account data +const reallocateInstruction = createReallocateInstruction( + tokenAccount, // Token Account address + payer.publicKey, // Payer to reallocate data + extensions, // Extensions to reallocate for + payer.publicKey, // Token Account owner + undefined, // Additional signers + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, build the instruction to enable the `MemoTransfer` extension for the Token +Account. + +```javascript +// Instruction to initialize the MemoTransfer Extension +const enableRequiredMemoTransfersInstruction = + createEnableRequiredMemoTransfersInstruction( + tokenAccount, // Token Account address + payer.publicKey, // Token Account Owner + undefined, // Additional signers + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID + ); +``` + +## Send Transaction + +Next, let's add the instructions to a new transaction and send it to the +network. This will update the Token Account with the `MemoTransfer` extension +enabled. + +```javascript +// Add instructions to new transaction +const transaction = new Transaction().add( + reallocateInstruction, + enableRequiredMemoTransfersInstruction, +); + +// Send Transactoin +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer], +); + +console.log( + "\nReallocate:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on SolanaFM. + +## Conclusion + +The reallocate instruction on the Token Extensions program offers a flexible way +to update existing Token Accounts with additional functionalities. This +instruction is useful for token owners who may not have foreseen the need for +certain extensions initially, but find themselves requiring these additional +features at a later time. diff --git a/content/guides/token-extensions/required-memo.md b/content/guides/token-extensions/required-memo.md new file mode 100644 index 000000000..a66b26aa2 --- /dev/null +++ b/content/guides/token-extensions/required-memo.md @@ -0,0 +1,445 @@ +--- +date: Dec 7, 2023 +seoTitle: "Token Extensions: Required Memo" +title: How to use the Required Memo token extension +description: + "Memos in financial transactions serve as a communication tool between sender + and recipient. It aids in the identification of both parties and offers + clarity on the purpose of the transfer." +keywords: + - token 2022 + - token extensions + - token program +difficulty: beginner +tags: + - token 2022 + - token extensions +--- + +The `MemoTransfer` extension enforces that every incoming transfer to a Token +Account is accompanied by a [memo](https://spl.solana.com/memo) instruction. +This memo instruction records a message in the transaction's program logs. This +feature is particularly useful for adding context to transactions, making it +easier to understand their purpose when reviewing the transaction logs later. + +In this guide, we'll walk through an example of using Solana Playground. Here is +the [final script](https://beta.solpg.io/65724a91fb53fa325bfd0c54). + +## Getting Started + +Start by opening this Solana Playground +[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following +starter code. 
+ +```javascript +// Client +console.log("My address:", pg.wallet.publicKey.toString()); +const balance = await pg.connection.getBalance(pg.wallet.publicKey); +console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`); +``` + +If it is your first time using Solana Playground, you'll first need to create a +Playground Wallet and fund the wallet with devnet SOL. + + + +If you do not have a Playground wallet, you may see a type error within the +editor on all declarations of `pg.wallet.publicKey`. This type error will clear +after you create a Playground wallet. + + + +To get devnet SOL, run the `solana airdrop` command in the Playground's +terminal, or visit this [devnet faucet](https://faucet.solana.com/). + +``` +solana airdrop 5 +``` + +Once you've created and funded the Playground wallet, click the "Run" button to +run the starter code. + +## Add Dependencies + +Let's start by setting up our script. We'll be using the `@solana/web3.js` and +`@solana/spl-token` libraries. + +Replace the starter code with the following: + +```javascript +import { + Connection, + Keypair, + SystemProgram, + Transaction, + clusterApiUrl, + sendAndConfirmTransaction, + TransactionInstruction, + PublicKey, +} from "@solana/web3.js"; +import { + ExtensionType, + TOKEN_2022_PROGRAM_ID, + createEnableRequiredMemoTransfersInstruction, + createInitializeAccountInstruction, + createMint, + disableRequiredMemoTransfers, + enableRequiredMemoTransfers, + getAccountLen, + createAccount, + mintTo, + createTransferInstruction, +} from "@solana/spl-token"; + +// Playground wallet +const payer = pg.wallet.keypair; + +// Connection to devnet cluster +const connection = new Connection(clusterApiUrl("devnet"), "confirmed"); + +// Transaction to send +let transaction: Transaction; +// Transaction signature returned from sent transaction +let transactionSignature: string; +``` + +## Mint Setup + +We'll first need to create a new Mint Account before we can create Token +Accounts. + +```javascript +// Authority that can mint new tokens +const mintAuthority = pg.wallet.publicKey; +// Decimals for Mint Account +const decimals = 2; + +// Create Mint Account +const mint = await createMint( + connection, + payer, // Payer of the transaction and initialization fees + mintAuthority, // Mint Authority + null, // Optional Freeze Authority + decimals, // Decimals of Mint + undefined, // Optional keypair + undefined, // Options for confirming the transaction + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +## Memo Transfer Token Account + +Next, let's build a transaction to enable the `MemoTransfer` extension for a new +Token Account. + +First, let's generate a new keypair to use as the address of the Token Account. + +```javascript +// Random keypair to use as owner of Token Account +const tokenAccountKeypair = Keypair.generate(); +// Address for Token Account +const tokenAccount = tokenAccountKeypair.publicKey; +``` + +Next, let's determine the size of the new Token Account and calculate the +minimum lamports needed for rent exemption. + +```javascript +// Size of Token Account with extension +const accountLen = getAccountLen([ExtensionType.MemoTransfer]); +// Minimum lamports required for Token Account +const lamports = await connection.getMinimumBalanceForRentExemption(accountLen); +``` + +With Token Extensions, the size of the Token Account will vary based on the +extensions enabled. 
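+
+For example, here is a minimal sketch comparing the space required for
+different sets of extensions, reusing the `getAccountLen` helper imported
+above. The use of `ExtensionType.CpiGuard` is purely illustrative of a second
+account extension and is not part of this guide's script:
+
+```javascript
+// Illustrative only: compare account sizes for different extension sets
+const baseLen = getAccountLen([]); // base Token Account, no extensions
+const memoLen = getAccountLen([ExtensionType.MemoTransfer]);
+const memoAndCpiGuardLen = getAccountLen([
+  ExtensionType.MemoTransfer,
+  ExtensionType.CpiGuard,
+]);
+
+console.log({ baseLen, memoLen, memoAndCpiGuardLen });
+```
+
+Each additional extension increases the account size, which is why the
+rent-exempt minimum is calculated from `accountLen` rather than a fixed
+constant.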
+ +## Build Instructions + +Next, let's build the set of instructions to: + +- Create a new account +- Initialize the Token Account data +- Enable the `MemoTransfer` extension + +First, build the instruction to invoke the System Program to create an account +and assign ownership to the Token Extensions Program. + +```javascript +// Instruction to invoke System Program to create new account +const createAccountInstruction = SystemProgram.createAccount({ + fromPubkey: payer.publicKey, // Account that will transfer lamports to created account + newAccountPubkey: tokenAccount, // Address of the account to create + space: accountLen, // Amount of bytes to allocate to the created account + lamports, // Amount of lamports transferred to created account + programId: TOKEN_2022_PROGRAM_ID, // Program assigned as owner of created account +}); +``` + +Next, build the instruction to initialize the Token Account data. + +```javascript +// Instruction to initialize Token Account data +const initializeAccountInstruction = createInitializeAccountInstruction( + tokenAccount, // Token Account Address + mint, // Mint Account + payer.publicKey, // Token Account Owner + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Lastly, build the instruction to enable the `MemoTransfer` extension for the +Token Account. + +```javascript +// Instruction to initialize the MemoTransfer Extension +const enableRequiredMemoTransfersInstruction = + createEnableRequiredMemoTransfersInstruction( + tokenAccount, // Token Account address + payer.publicKey, // Token Account Owner + undefined, // Additional signers + TOKEN_2022_PROGRAM_ID, // Token Program ID + ); +``` + +## Send Transaction + +Next, let's add the instructions to a new transaction and send it to the +network. This will create a Token Account with the `MemoTransfer` extension +enabled. + +```javascript +// Add instructions to new transaction +transaction = new Transaction().add( + createAccountInstruction, + initializeAccountInstruction, + enableRequiredMemoTransfersInstruction, +); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer, tokenAccountKeypair], // Signers +); + +console.log( + "\nCreate Token Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on SolanaFM. + +## Create and Fund Token Account + +Next, let's set up another Token Account to demonstrate the functionality of the +`MemoTransfer` extension. + +First, create a `sourceTokenAccount` owned by the Playground wallet. + +```javascript +// Create Token Account for Playground wallet +const sourceTokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + payer.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, mint 2 tokens to the `sourceTokenAccount` to fund it. 
+ +```javascript +// Mint tokens to sourceTokenAccount +transactionSignature = await mintTo( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + sourceTokenAccount, // Mint to + mintAuthority, // Mint Authority address + 200, // Amount + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nMint Tokens:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +## Transfer and Memo Instruction + +Next, let's prepare the token transfer and memo instructions. + +First, build the instruction to transfer tokens from the `sourceTokenAccount` to +the `tokenAccount` which has the `MemoTransfer` extension enabled. + +```javascript +// Instruction to transfer tokens +const transferInstruction = createTransferInstruction( + sourceTokenAccount, // Source Token Account + tokenAccount, // Destination Token Account + payer.publicKey, // Source Token Account owner + 100, // Amount + undefined, // Additional signers + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, build the memo instruction. The message will be included in the program +logs of the transaction the instruction is added to. + +```javascript +// Message for the memo +const message = "Hello, Solana"; +// Instruction to add memo +const memoInstruction = new TransactionInstruction({ + keys: [{ pubkey: payer.publicKey, isSigner: true, isWritable: true }], + data: Buffer.from(message, "utf-8"), + programId: new PublicKey("MemoSq4gqABAXKb96qnH8TysNcWxMyWCqXgDLGmfcHr"), +}); +``` + +## Attempt Transfer without Memo + +To demonstrate the functionality of the `MemoTransfer` extension, let's first +attempt to send a token transfer without a memo. + +```javascript +try { + // Attempt to transfer without memo + transaction = new Transaction().add(transferInstruction); + + // Send transaction + await sendAndConfirmTransaction( + connection, + transaction, + [payer], // Signers + ); +} catch (error) { + console.log("\nExpect Error:", error); +} +``` + +Run the script by clicking the `Run` button. You can then inspect the error in +the Playground terminal. You should see a message similar to the following: + +``` +Expect Error: { [Error: failed to send transaction: Transaction simulation failed: Error processing Instruction 0: custom program error: 0x24] + logs: + [ 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb invoke [1]', + 'Program log: Instruction: Transfer', + 'Program log: Error: No memo in previous instruction; required for recipient to receive a transfer', + 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb consumed 6571 of 200000 compute units', + 'Program TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb failed: custom program error: 0x24' ] } +``` + +## Transfer with Memo + +Next, send a token transfer with the memo instruction included on the +transaction. + +```javascript +// Add instructions to new transaction +transaction = new Transaction().add(memoInstruction, transferInstruction); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer], // Signers +); + +console.log( + "\nTransfer with Memo:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on SolanaFM. 
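+
+If you want to confirm the extension state programmatically, you can also fetch
+the Token Account and read its `MemoTransfer` extension data. This optional
+sketch assumes the `getAccount` and `getMemoTransfer` helpers from
+`@solana/spl-token` are added to the imports:
+
+```javascript
+// Optional: fetch the Token Account and inspect the MemoTransfer extension
+const accountInfo = await getAccount(
+  connection,
+  tokenAccount, // Token Account with the MemoTransfer extension
+  "confirmed", // Commitment level
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+const memoTransfer = getMemoTransfer(accountInfo);
+console.log(
+  "Require incoming transfer memos:",
+  memoTransfer?.requireIncomingTransferMemos,
+);
+```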
+ +## Enable and Disable Memo Transfer + +The `MemoTransfer` extension can also be freely enabled or disabled by the Token +Account owner. + +To enable the `MemoTransfer` extension, use the `enableRequiredMemoTransfers` +instruction. + +```javascript +// Enable Required Memo Transfers +transactionSignature = await enableRequiredMemoTransfers( + connection, // Connection to use + payer, // Payer of the transaction fee + tokenAccount, // Token Account to modify + payer.publicKey, // Owner of Token Account + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nEnable Required Memo Transfers:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +To disable the `MemoTransfer` extension, use the `disableRequiredMemoTransfers` +instruction. + +```javascript +// Disable Required Memo Transfers +transactionSignature = await disableRequiredMemoTransfers( + connection, // Connection to use + payer, // Payer of the transaction fee + tokenAccount, // Token Account to modify + payer.publicKey, // Owner of Token Account + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nDisable Required Memo Transfers:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Once the `MemoTransfer` extension is disabled, transactions to transfer tokens +without a memo instruction will complete successfully. + +```javascript +// Add instructions to new transaction +transaction = new Transaction().add(transferInstruction); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer], // Signers +); + +console.log( + "\nTransfer without Memo:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction details on SolanaFM. + +## Conclusion + +The `MemoTransfer` extension ensures every incoming transfer to a Token Account +includes a memo. By requiring a memo instruction with each transfer, a message +is recorded in the transaction's program logs. This feature is especially useful +for understanding the purpose of transactions when reviewing logs at a later +time. diff --git a/content/guides/token-extensions/transfer-fee.md b/content/guides/token-extensions/transfer-fee.md new file mode 100644 index 000000000..f9ba1af27 --- /dev/null +++ b/content/guides/token-extensions/transfer-fee.md @@ -0,0 +1,506 @@ +--- +date: Dec 06, 2023 +seoTitle: "Token Extensions: Transfer Fees" +title: How to use the Transfer Fee extension +description: + "With any form of transaction, there's often a desire to collect or apply a + fee. Similar to a small service charge every time you transfer money at a bank + or the way royalties or taxes are collected for particular transfers." +keywords: + - token 2022 + - token extensions + - token program +difficulty: beginner +tags: + - token 2022 + - token extensions +--- + +With any form of transaction, there's often a desire to collect or apply a fee. +Similar to a small service charge every time you transfer money at a bank or the +way royalties or taxes are collected for particular transfers. + +The `TransferFee` extension allows you to configure a transfer fee directly on +the Mint Account, enabling fees to be collected at a protocol level. 
Every time +tokens are transferred, the fee is set aside in the recipient's Token Account. +This fee is untouchable by the recipient and can only be accessed by the +Withdraw Authority. + +The design of pooling transfer fees at the recipient account is meant to +maximize parallelization of transactions. Otherwise, one configured fee +recipient account would be write-locked between parallel transfers, decreasing +throughput of the protocol. + +In this guide, we'll walk through an example of creating a mint with the +`TransferFee` extension enabled using Solana Playground. Here is the +[final script](https://beta.solpg.io/6570e5b7fb53fa325bfd0c4e). + + + +The Transfer Fee extension can ONLY take a fee from its same Token Mint. (e.g. +if you created `TokenA`, all transfer fees via the Transfer Fee extension will +be in `TokenA`). If you wish to achieve a similar transfer fee in a token other +that itself, use the Transfer Hook extension. + + + +## Getting Started + +Start by opening this Solana Playground +[link](https://beta.solpg.io/656e19acfb53fa325bfd0c46) with the following +starter code. + +```javascript +// Client +console.log("My address:", pg.wallet.publicKey.toString()); +const balance = await pg.connection.getBalance(pg.wallet.publicKey); +console.log(`My balance: ${balance / web3.LAMPORTS_PER_SOL} SOL`); +``` + +If it is your first time using Solana Playground, you'll first need to create a +Playground Wallet and fund the wallet with devnet SOL. + + + +If you do not have a Playground wallet, you may see a type error within the +editor on all declarations of `pg.wallet.publicKey`. This type error will clear +after you create a Playground wallet. + + + +To get devnet SOL, run the `solana airdrop` command in the Playground's +terminal, or visit this [devnet faucet](https://faucet.solana.com/). + +``` +solana airdrop 5 +``` + +Once you've created and funded the Playground wallet, click the "Run" button to +run the starter code. + +## Add Dependencies + +Let's start by setting up our script. We'll be using the `@solana/web3.js` and +`@solana/spl-token` libraries. + +Replace the starter code with the following: + +```javascript +import { + Connection, + Keypair, + SystemProgram, + Transaction, + clusterApiUrl, + sendAndConfirmTransaction, +} from "@solana/web3.js"; +import { + ExtensionType, + TOKEN_2022_PROGRAM_ID, + createAccount, + createInitializeMintInstruction, + createInitializeTransferFeeConfigInstruction, + getMintLen, + getTransferFeeAmount, + harvestWithheldTokensToMint, + mintTo, + transferCheckedWithFee, + unpackAccount, + withdrawWithheldTokensFromAccounts, + withdrawWithheldTokensFromMint, +} from "@solana/spl-token"; + +// Connection to devnet cluster +const connection = new Connection(clusterApiUrl("devnet"), "confirmed"); + +// Playground wallet +const payer = pg.wallet.keypair; + +// Transaction signature returned from sent transaction +let transactionSignature: string; +``` + +## Mint Setup + +First, let's define the properties of the Mint Account we'll be creating in the +following step. 
+ +```javascript +// Generate new keypair for Mint Account +const mintKeypair = Keypair.generate(); +// Address for Mint Account +const mint = mintKeypair.publicKey; +// Decimals for Mint Account +const decimals = 2; +// Authority that can mint new tokens +const mintAuthority = pg.wallet.publicKey; +// Authority that can modify transfer fees +const transferFeeConfigAuthority = pg.wallet.keypair; +// Authority that can move tokens withheld on mint or token accounts +const withdrawWithheldAuthority = pg.wallet.keypair; + +// Fee basis points for transfers (100 = 1%) +const feeBasisPoints = 100; +// Maximum fee for transfers in token base units +const maxFee = BigInt(100); +``` + +Next, let's determine the size of the new Mint Account and calculate the minimum +lamports needed for rent exemption. + +```javascript +// Size of Mint Account with extensions +const mintLen = getMintLen([ExtensionType.TransferFeeConfig]); +// Minimum lamports required for Mint Account +const lamports = await connection.getMinimumBalanceForRentExemption(mintLen); +``` + +With Token Extensions, the size of the Mint Account will vary based on the +extensions enabled. + +## Build Instructions + +Next, let's build the set of instructions to: + +- Create a new account +- Initialize the `TransferFee` extension +- Initialize the remaining Mint Account data + +First, build the instruction to invoke the System Program to create an account +and assign ownership to the Token Extensions Program. + +```javascript +// Instruction to invoke System Program to create new account +const createAccountInstruction = SystemProgram.createAccount({ + fromPubkey: payer.publicKey, // Account that will transfer lamports to created account + newAccountPubkey: mint, // Address of the account to create + space: mintLen, // Amount of bytes to allocate to the created account + lamports, // Amount of lamports transferred to created account + programId: TOKEN_2022_PROGRAM_ID, // Program assigned as owner of created account +}); +``` + +Next, build the instruction to initialize the `TransferFee` extension for the +Mint Account. + +```javascript +// Instruction to initialize TransferFeeConfig Extension +const initializeTransferFeeConfig = + createInitializeTransferFeeConfigInstruction( + mint, // Mint Account address + transferFeeConfigAuthority.publicKey, // Authority to update fees + withdrawWithheldAuthority.publicKey, // Authority to withdraw fees + feeBasisPoints, // Basis points for transfer fee calculation + maxFee, // Maximum fee per transfer + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID + ); +``` + +Lastly, build the instruction to initialize the rest of the Mint Account data. +This is the same as with the original Token Program. + +```javascript +// Instruction to initialize Mint Account data +const initializeMintInstruction = createInitializeMintInstruction( + mint, // Mint Account Address + decimals, // Decimals of Mint + mintAuthority, // Designated Mint Authority + null, // Optional Freeze Authority + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +## Send Transaction + +Finally, we add the instructions to a new transaction and send it to the +network. This will create a mint account with the `TransferFee` extension. 
+ +```javascript +// Add instructions to new transaction +const transaction = new Transaction().add( + createAccountInstruction, + initializeTransferFeeConfig, + initializeMintInstruction, +); + +// Send transaction +transactionSignature = await sendAndConfirmTransaction( + connection, + transaction, + [payer, mintKeypair], // Signers +); + +console.log( + "\nCreate Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transactions on the SolanaFM. + +## Create Token Accounts + +Next, let's set up two Token Accounts to demonstrate the functionality of the +`TransferFee` extension. + +First, create a `sourceTokenAccount` owned by the Playground wallet. + +```javascript +// Create Token Account for Playground wallet +const sourceTokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + payer.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Next, generate a random keypair and use it as the owner of a +`destinationTokenAccount`. + +```javascript +// Random keypair to use as owner of Token Account +const randomKeypair = new Keypair(); +// Create Token Account for random keypair +const destinationTokenAccount = await createAccount( + connection, + payer, // Payer to create Token Account + mint, // Mint Account address + randomKeypair.publicKey, // Token Account owner + undefined, // Optional keypair, default to Associated Token Account + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); +``` + +Lastly, mint 2000 tokens to the `sourceTokenAccount` to fund it. + +```javascript +// Mint tokens to sourceTokenAccount +transactionSignature = await mintTo( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + sourceTokenAccount, // Mint to + mintAuthority, // Mint Authority address + 2000_00, // Amount + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nMint Tokens:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +## Transfer Tokens + +Next, let's try to transfer tokens from the `sourceTokenAccount` to the +`destinationTokenAccount`. The transfer fee will automatically be deducted from +the transfer amount and remain in the `destinationTokenAccount` account. + +To transfer tokens, we have to use the either the `transferChecked` or +`transferCheckedWithFee` instructions. + +In this example, we'll use `transferCheckedWithFee`. The transfer only succeeds +if the correct transfer fee amount is passed into the instruction. + +```javascript +// Transfer amount +const transferAmount = BigInt(1000_00); +// Calculate transfer fee +const fee = (transferAmount * BigInt(feeBasisPoints)) / BigInt(10_000); +// Determine fee charged +const feeCharged = fee > maxFee ? 
maxFee : fee;
+
+// Transfer tokens with fee
+transactionSignature = await transferCheckedWithFee(
+  connection,
+  payer, // Transaction fee payer
+  sourceTokenAccount, // Source Token Account
+  mint, // Mint Account address
+  destinationTokenAccount, // Destination Token Account
+  payer.publicKey, // Owner of Source Account
+  transferAmount, // Amount to transfer
+  decimals, // Mint Account decimals
+  feeCharged, // Transfer fee
+  undefined, // Additional signers
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+console.log(
+  "\nTransfer Tokens:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+## Withdraw Fee from Token Accounts
+
+When tokens are transferred, transfer fees automatically accumulate in the
+recipient Token Accounts. The Withdraw Authority can freely withdraw these
+withheld tokens from each Token Account of the Mint.
+
+To find the Token Accounts that have accumulated fees, we need to fetch all
+Token Accounts for the mint and then filter for ones which have withheld tokens.
+
+First, we fetch all Token Accounts for the Mint Account.
+
+```javascript
+// Retrieve all Token Accounts for the Mint Account
+const allAccounts = await connection.getProgramAccounts(TOKEN_2022_PROGRAM_ID, {
+  commitment: "confirmed",
+  filters: [
+    {
+      memcmp: {
+        offset: 0,
+        bytes: mint.toString(), // Mint Account address
+      },
+    },
+  ],
+});
+```
+
+Next, we filter for Token Accounts that hold transfer fees.
+
+```javascript
+// List of Token Accounts to withdraw fees from
+const accountsToWithdrawFrom = [];
+
+for (const accountInfo of allAccounts) {
+  const account = unpackAccount(
+    accountInfo.pubkey, // Token Account address
+    accountInfo.account, // Token Account data
+    TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+  );
+
+  // Extract transfer fee data from each account
+  const transferFeeAmount = getTransferFeeAmount(account);
+
+  // Check if fees are available to be withdrawn
+  if (transferFeeAmount !== null && transferFeeAmount.withheldAmount > 0) {
+    accountsToWithdrawFrom.push(accountInfo.pubkey); // Add account to withdrawal list
+  }
+}
+```
+
+Finally, we use the `withdrawWithheldTokensFromAccounts` instruction to
+withdraw the fees from the Token Accounts to a specified destination Token
+Account.
+
+```javascript
+// Withdraw withheld tokens from Token Accounts
+transactionSignature = await withdrawWithheldTokensFromAccounts(
+  connection,
+  payer, // Transaction fee payer
+  mint, // Mint Account address
+  destinationTokenAccount, // Destination account for fee withdrawal
+  withdrawWithheldAuthority, // Authority for fee withdrawal
+  undefined, // Additional signers
+  accountsToWithdrawFrom, // Token Accounts to withdraw from
+  undefined, // Confirmation options
+  TOKEN_2022_PROGRAM_ID, // Token Extension Program ID
+);
+
+console.log(
+  "\nWithdraw Fee From Token Accounts:",
+  `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`,
+);
+```
+
+Run the script by clicking the `Run` button. You can then inspect the
+transaction on SolanaFM.
+
+## Harvest Fee to Mint Account
+
+Token Accounts holding any tokens, including withheld ones, cannot be closed.
+However, a user may want to close a Token Account with withheld transfer fees.
+
+Users can permissionlessly clear out Token Accounts of withheld tokens using the
+`harvestWithheldTokensToMint` instruction. This transfers the fees accumulated
+on the Token Account directly to the Mint Account.
+ +Let's first send another transfer so the `destinationTokenAccount` has withheld +transfer fees. + +```javascript +// Transfer tokens with fee +transactionSignature = await transferCheckedWithFee( + connection, + payer, // Transaction fee payer + sourceTokenAccount, // Source Token Account + mint, // Mint Account address + destinationTokenAccount, // Destination Token Account + payer.publicKey, // Owner of Source Account + transferAmount, // Amount to transfer + decimals, // Mint Account decimals + feeCharged, // Transfer fee + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nTransfer Tokens:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Next, we'll "harvest" the fees from the `destinationTokenAccount`. Note that +this can be done by anyone and not just the owner of the Token Account. + +```javascript +// Harvest withheld fees from Token Accounts to Mint Account +transactionSignature = await harvestWithheldTokensToMint( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + [destinationTokenAccount], // Source Token Accounts for fee harvesting + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nHarvest Fee To Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +## Withdraw Fee from Mint Account + +Tokens "harvested" to the Mint Account can then be withdrawn at any time by the +Withdraw Authority to a specified Token Account. + +```javascript +// Withdraw fees from Mint Account +transactionSignature = await withdrawWithheldTokensFromMint( + connection, + payer, // Transaction fee payer + mint, // Mint Account address + destinationTokenAccount, // Destination account for fee withdrawal + withdrawWithheldAuthority, // Withdraw Withheld Authority + undefined, // Additional signers + undefined, // Confirmation options + TOKEN_2022_PROGRAM_ID, // Token Extension Program ID +); + +console.log( + "\nWithdraw Fee from Mint Account:", + `https://solana.fm/tx/${transactionSignature}?cluster=devnet-solana`, +); +``` + +Run the script by clicking the `Run` button. You can then inspect the +transaction on the SolanaFM. + +## Conclusion + +The `TransferFee` extension enables token creators to enforce fees on each +transfer without requiring extra instructions or specialized programs. This +approach ensures that fees are collected in the same currency as the transferred +tokens, simplifying the transaction process. diff --git a/content/workshops/ship-your-first-xnft.md b/content/workshops/ship-your-first-xnft.md index ccfcd1d2b..72ff3a99f 100644 --- a/content/workshops/ship-your-first-xnft.md +++ b/content/workshops/ship-your-first-xnft.md @@ -14,6 +14,7 @@ objectives: tags: - NFT - Backpack + - Beginner # author details author: Valentin Madrid authorDescription: "External Developer Advocate at Solana Foundation" @@ -25,7 +26,8 @@ authorGithubUsername: valentinmadrid ## 🎒 What even is an xNFT ? -First of all, the "X" in xNFT stands for eXecutable. So basically an xNFT is an +First of all, the "X" in xNFT stands for eXecutable. So basically an xNFT is +an eXecutable NFT. But, where can it be executed? That's where [Backpack](https://www.backpack.app/) comes in. 
Backpack is a multi diff --git a/content/workshops/solana-101.md b/content/workshops/solana-101.md new file mode 100644 index 000000000..161c96d6d --- /dev/null +++ b/content/workshops/solana-101.md @@ -0,0 +1,175 @@ +--- +featured: true +date: 04 Jan 2023 +updatedDate: 04 Jan 2023 +title: Solana 101 +description: + Introduction to building on & interacting with the Solana blockchain +repoUrl: https://github.com/Solana-Workshops/solana-101 +duration: "2 hours" +objectives: + - The Solana Network + - Solana’s Programming Model + - Tokens & NFTs +tags: + - Introduction + - Beginner +video: "https://www.youtube.com/watch?v=56Erql9229E" +presentation: "https://docs.google.com/presentation/u/1/d/1e62b2hRbfREidLsVEnDNDKjXCRQDW2cyQOesy9Ozsbs/" +# author details +author: Joe Caulfield +authorDescription: DevRel at Solana Foundation +authorTwitterHandle: realbuffalojoe +authorGithubUsername: buffalojoec +--- + +# Solana 101 + +## Introduction to Programming on Solana + +### Topics Introduced + +--- + +#### The Solana Network + +- Technical Advantages +- Network Overview + +#### Solana’s Programming Model + +- Accounts + + - Accounts Overview + - Anatomy of an Account + +- Programs + + - Programs Overview + +- Instructions & Transactions + + - Anatomy of an Instruction + - Anatomy of a Transaction + +- Custom Program Data + - Data Ownership + - Program-Derived Addresses + +#### Tying it All Together + +- Lifecycle of a Transaction + +--- + +### Why Solana? + +Let’s talk about the main technological advantages to building a decentralized +application on Solana. +Solana has extremely fast block confirmation times, so users don’t have to wait +to make sure their action worked. + +Solana’s transaction fees are exceptionally low, so developers can build more +robust user experiences that cost less. + +Let’s take a brief look at how Solana’s network creates blocks and processes +transactions. + +Like most proof-of-stake networks, Solana elects a leader for each block +creation cycle, who’s responsible for creating a new block. + +Unlike Ethereum - Solana does not use a mempool. Instead, it forwards new +transactions to the next leader in the block creation cycle, which means when +that leader is elected, it already has all of the transactions it needs to pack +into a new block. + +Next, Solana leverages a high-throughput engine called Turbine that disseminates +information about a new block to the rest of the network. + +When a block’s transactions are executed, Solana’s runtime actually allows the +operations within each transaction to run in parallel wherever possible. The +combination of these 3 innovations leads to greatly increased speed and +throughput for the network. + +Solana’s most popular innovation is Proof-of-History, which leverages a +Verifiable-Delay Function (VDF) to allow all nodes in the network to agree on +the passage of time. + +Solana also has various upgrades like QUIC, and localised fee markets, Stake +Weighted QoS, makes it perfect for high-performance applications. + +### Programming on Solana + +Now let’s dive into the concepts you’ll need to know when programming on Solana. +The first thing we’ll want to understand is the concept of an account. + +#### Account + +An account on Solana is a slice of data from the blockchain. + +Everything on Solana is an account! You can kind of think of it like a +computer’s file system - where everything is a file! + +Every account has a unique address, holds some balance of SOL, and can store +arbitrary data. 
Based on the size of that arbitrary data, a user is required to
+pay some value of SOL for what’s called “Rent”.
+
+Since this is blockchain data, anyone can read from an account. Also, anyone can
+credit SOL or tokens to an account. However, only an account’s owner can modify
+its data - which includes debiting its SOL balance.
+
+```
+{
+  key: number,            // The address of the account
+  lamports: number,       // Lamports currently held
+  data: Uint8Array,       // Data stored in the account
+  is_executable: boolean, // Is this data a program?
+  owner: PublicKey,       // The program with write access
+}
+```
+
+If we take a look at what an actual account looks like in raw form, we can see
+some of the fields present on all accounts shown here.
+
+The “key” field is just that account’s address.
+
+The “lamports” field simply tracks that account’s current balance of SOL.
+Lamports are the smallest denomination of SOL: 1 Lamport = 10^-9 SOL.
+
+“Data” is where the arbitrary data is stored inside of an account.
+
+If that arbitrary data stored in this account is actually an executable program,
+the “is_executable” boolean will be set to true.
+
+Lastly, the “owner” field determines which Solana program has the authority to
+perform changes to this account’s data, including its balance of Lamports.
+
+#### Programs
+
+First of all, you may have heard the term “smart contract” from the world of
+Ethereum. On Solana, smart contracts are called “Programs”.
+
+Programs are a special type of account whose data is an executable program, like
+we mentioned before. Right now, Solana programs can be written in Rust, C/C++ or
+Python. Soon, we may be able to write programs in other languages - such as
+TypeScript and GoLang.
+
+Unlike Ethereum’s “smart contracts”, programs don’t actually have state of their
+own. Instead, they perform reads and writes on accounts from the blockchain. To
+perform a write, this program must be the designated owner of the account it’s
+attempting to modify. Programs are designed to process what are called
+“instructions”, and they can also send these instructions to other programs on
+the network.
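+
+To make the idea of an "instruction" more concrete, here is a rough sketch
+using the `@solana/web3.js` library (the keypairs and amount are placeholders
+for illustration, not part of this workshop's code):
+
+```javascript
+import { Keypair, SystemProgram, Transaction } from "@solana/web3.js";
+
+// Placeholder keypairs, for illustration only
+const payer = Keypair.generate();
+const recipient = Keypair.generate();
+
+// Build an instruction using the System Program's transfer helper
+const instruction = SystemProgram.transfer({
+  fromPubkey: payer.publicKey,
+  toPubkey: recipient.publicKey,
+  lamports: 1_000_000, // 0.001 SOL
+});
+
+// Every instruction carries: the accounts it reads/writes, the program to
+// invoke, and program-specific data
+console.log(instruction.keys); // accounts, with isSigner / isWritable flags
+console.log(instruction.programId.toBase58()); // program that will process it
+console.log(instruction.data); // serialized instruction data
+
+// One or more instructions are then packed into a transaction and signed
+const transaction = new Transaction().add(instruction);
+```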
+ +#### Takeaways + +- Everything is an account +- All accounts hold SOL +- Accounts can store arbitrary data +- Accounts can also store executable programs +- Accounts are passed into programs, allowing for parallel execution + +~ Read more about this workshop in the Video or Presentation.~ diff --git a/contentlayer.config.ts b/contentlayer.config.ts index 53d1ac418..6638b2ba9 100644 --- a/contentlayer.config.ts +++ b/contentlayer.config.ts @@ -104,6 +104,40 @@ const basicContentFields: FieldDefs = { required: false, default: false, }, + + /** + * Custom fields that are used for the generated `nav.json` sidebar data + */ + sidebarLabel: { + type: "string", + description: "Custom sidebar label to use, instead of the document's title", + required: false, + }, + sidebarSortOrder: { + type: "number", + description: "Sort order of the doc, relative to its siblings", + required: false, + }, + hideTableOfContents: { + type: "boolean", + description: "Force hide the table of contents displayed on page", + required: false, + }, + + /** + * Custom SEO specific details + */ + seoTitle: { + type: "string", + description: "Custom title to be used for SEO purposes", + required: false, + }, + seoDescription: { + type: "string", + description: + "Custom description to be used for SEO purposes (recommended max of 155 characters)", + required: false, + }, }; /** @@ -177,6 +211,16 @@ export const DeveloperWorkshop = defineDocumentType(() => ({ description: "Estimated duration of this workshop", required: true, }, + video: { + type: "string", + description: "Video recording of the workshop (if Available)", + required: false, + }, + presentation: { + type: "string", + description: "Presentation for this workshop (if Available)", + required: false, + }, /** * Author specific details @@ -265,21 +309,18 @@ export const SolanaDoc = defineDocumentType(() => ({ * Custom fields for this specific content record type */ // none + }, +})); - /** - * Custom fields that are used for the generated `nav.json` sidebar data - */ - sidebarLabel: { - type: "string", - description: - "Custom sidebar label to use, instead of the document's title", - required: false, - }, - sidebarSortOrder: { - type: "number", - description: "Sort order of the doc, relative to its siblings", - required: false, - }, +/** + * Content record schema a single Solana RPC documentation record + */ +export const SolanaRPCDoc = defineDocumentType(() => ({ + name: "SolanaRPCDoc", + filePathPattern: "docs/rpc/**/*.mdx", + fields: { + // use the standard content fields + ...basicContentFields, }, })); @@ -303,6 +344,7 @@ export default makeSource({ // set the base content directories to search for content records contentDirPath: ".", contentDirInclude: [ + "docs/rpc/**", "docs/**", "content/guides/**", "content/courses/**", @@ -321,6 +363,7 @@ export default makeSource({ // developer specific content SolanaDoc, + SolanaRPCDoc, DeveloperGuide, DeveloperResource, DeveloperWorkshop, diff --git a/docs/advanced/index.md b/docs/advanced/index.md new file mode 100644 index 000000000..c0c86871b --- /dev/null +++ b/docs/advanced/index.md @@ -0,0 +1,5 @@ +--- +metaOnly: true +title: Advanced Concepts +# sidebarSortOrder: 3 +--- diff --git a/docs/advanced/lookup-tables.md b/docs/advanced/lookup-tables.md new file mode 100644 index 000000000..9382e948c --- /dev/null +++ b/docs/advanced/lookup-tables.md @@ -0,0 +1,189 @@ +--- +title: Address Lookup Tables +description: "" +--- + +Address Lookup Tables, commonly referred to as "_lookup tables_" or "_ALTs_" for +short, 
allow developers to create a collection of related addresses to +efficiently load more addresses in a single transaction. + +Since each transaction on the Solana blockchain requires a listing of every +address that is interacted with as part of the transaction, this listing would +effectively be capped at 32 addresses per transaction. With the help of +[Address Lookup Tables](/docs/advanced/lookup-tables.md), a transaction would +now be able to raise that limit to 256 addresses per transaction. + +## Compressing on chain addresses + +After all the desired addresses have been stored on chain in an Address Lookup +Table, each address can be referenced inside a transaction by its 1-byte index +within the table (instead of their full 32-byte address). This lookup method +effectively "_compresses_" a 32-byte address into a 1-byte index value. + +This "_compression_" enables storing up to 256 addresses in a single lookup +table for use inside any given transaction. + +## Versioned Transactions + +To utilize an Address Lookup Table inside a transaction, developers must use v0 +transactions that were introduced with the new +[Versioned Transaction format](/docs/core/transactions/versions.md). + +## How to create an address lookup table + +Creating a new lookup table with the `@solana/web3.js` library is similar to the +older `legacy` transactions, but with some differences. + +Using the `@solana/web3.js` library, you can use the +[`createLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/AddressLookupTableProgram.html#createLookupTable) +function to construct the instruction needed to create a new lookup table, as +well as determine its address: + +```js +const web3 = require("@solana/web3.js"); + +// connect to a cluster and get the current `slot` +const connection = new web3.Connection(web3.clusterApiUrl("devnet")); +const slot = await connection.getSlot(); + +// Assumption: +// `payer` is a valid `Keypair` with enough SOL to pay for the execution + +const [lookupTableInst, lookupTableAddress] = + web3.AddressLookupTableProgram.createLookupTable({ + authority: payer.publicKey, + payer: payer.publicKey, + recentSlot: slot, + }); + +console.log("lookup table address:", lookupTableAddress.toBase58()); + +// To create the Address Lookup Table on chain: +// send the `lookupTableInst` instruction in a transaction +``` + +> NOTE: Address lookup tables can be **created** with either a `v0` transaction +> or a `legacy` transaction. But the Solana runtime can only retrieve and handle +> the additional addresses within a lookup table while using +> [v0 Versioned Transactions](/docs/core/transactions/versions.md#current-transaction-versions). + +## Add addresses to a lookup table + +Adding addresses to a lookup table is known as "_extending_". 
Using the
+`@solana/web3.js` library, you can create a new _extend_ instruction using the
+[`extendLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/AddressLookupTableProgram.html#extendLookupTable)
+method:
+
+```js
+// add addresses to the `lookupTableAddress` table via an `extend` instruction
+const extendInstruction = web3.AddressLookupTableProgram.extendLookupTable({
+  payer: payer.publicKey,
+  authority: payer.publicKey,
+  lookupTable: lookupTableAddress,
+  addresses: [
+    payer.publicKey,
+    web3.SystemProgram.programId,
+    // list more `publicKey` addresses here
+  ],
+});
+
+// Send this `extendInstruction` in a transaction to the cluster
+// to insert the listing of `addresses` into your lookup table with address `lookupTableAddress`
+```
+
+> NOTE: Due to the same memory limits of `legacy` transactions, any transaction
+> used to _extend_ an Address Lookup Table is also limited in how many addresses
+> can be added at a time. Because of this, you will need to use multiple
+> transactions to _extend_ any table with more addresses (~20) than can fit
+> within a single transaction's memory limits.
+
+Once these addresses have been inserted into the table, and stored on chain, you
+will be able to utilize the Address Lookup Table in future transactions.
+Enabling up to 256 addresses in those future transactions.
+
+## Fetch an Address Lookup Table
+
+Similar to requesting another account (or PDA) from the cluster, you can fetch a
+complete Address Lookup Table with the
+[`getAddressLookupTable`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getAddressLookupTable)
+method:
+
+```js
+// define the `PublicKey` of the lookup table to fetch
+const lookupTableAddress = new web3.PublicKey("");
+
+// get the table from the cluster
+const lookupTableAccount = (
+  await connection.getAddressLookupTable(lookupTableAddress)
+).value;
+
+// `lookupTableAccount` will now be an `AddressLookupTableAccount` object
+
+console.log("Table address from cluster:", lookupTableAccount.key.toBase58());
+```
+
+Our `lookupTableAccount` variable will now be an `AddressLookupTableAccount`
+object which we can parse to read the listing of all the addresses stored on
+chain in the lookup table:
+
+```js
+// loop through and parse all the addresses stored in the table
+for (let i = 0; i < lookupTableAccount.state.addresses.length; i++) {
+  const address = lookupTableAccount.state.addresses[i];
+  console.log(i, address.toBase58());
+}
+```
+
+## How to use an address lookup table in a transaction
+
+After you have created your lookup table, and stored your needed addresses on
+chain (via extending the lookup table), you can create a `v0` transaction to
+utilize the on chain lookup capabilities.
+
+Just like older `legacy` transactions, you can create all the
+[instructions](/docs/terminology.md#instruction) your transaction will execute
+on chain. You can then provide an array of these instructions to the
+[Message](/docs/terminology.md#message) used in the `v0` transaction.
+
+> NOTE: The instructions used inside a `v0` transaction can be constructed using
+> the same methods and functions used to create the instructions in the past.
+> There is no required change to the instructions used involving an Address
+> Lookup Table.
+
+```js
+// Assumptions:
+// - `arrayOfInstructions` has been created as an `array` of `TransactionInstruction`
+// - we are using the `lookupTableAccount` obtained above
+
+// construct a v0 compatible transaction `Message`
+const messageV0 = new web3.TransactionMessage({
+  payerKey: payer.publicKey,
+  recentBlockhash: blockhash,
+  instructions: arrayOfInstructions, // note this is an array of instructions
+}).compileToV0Message([lookupTableAccount]);
+
+// create a v0 transaction from the v0 message
+const transactionV0 = new web3.VersionedTransaction(messageV0);
+
+// sign the v0 transaction using the file system wallet we created named `payer`
+transactionV0.sign([payer]);
+
+// send and confirm the transaction
+// (NOTE: There is NOT an array of Signers here; see the note below...)
+const txid = await web3.sendAndConfirmTransaction(connection, transactionV0);
+
+console.log(
+  `Transaction: https://explorer.solana.com/tx/${txid}?cluster=devnet`,
+);
+```
+
+> NOTE: When sending a `VersionedTransaction` to the cluster, it must be signed
+> BEFORE calling the `sendAndConfirmTransaction` method. If you pass an array of
+> `Signer` (like with `legacy` transactions) the method will trigger an error!
+
+## More Resources
+
+- Read the
+  [proposal](https://docs.solanalabs.com/proposals/versioned-transactions) for
+  Address Lookup Tables and Versioned transactions
+- [Example Rust program using Address Lookup Tables](https://github.com/TeamRaccoons/address-lookup-table-multi-swap)
diff --git a/docs/advanced/state-compression.md b/docs/advanced/state-compression.md
new file mode 100644
index 000000000..b234b3f7f
--- /dev/null
+++ b/docs/advanced/state-compression.md
@@ -0,0 +1,326 @@
+---
+title: State Compression
+description:
+  'State Compression is the method of cheaply and securely storing
+  "fingerprints" of off-chain data in the Solana ledger, instead of expensive
+  accounts.'
+---
+
+On Solana, [State Compression](/docs/advanced/state-compression.md) is the
+method of creating a "fingerprint" (or hash) of off-chain data and storing this
+fingerprint on-chain for secure verification. Effectively using the security of
+the Solana ledger to securely validate off-chain data, verifying it has not been
+tampered with.
+
+This method of "compression" allows Solana programs and dApps to use cheap
+blockchain [ledger](/docs/terminology.md#ledger) space, instead of the more
+expensive [account](/docs/terminology.md#account) space, to securely store data.
+
+This is accomplished by using a special binary tree structure, known as a
+[concurrent merkle tree](#what-is-a-concurrent-merkle-tree), to create a hash of
+each piece of data (called a `leaf`), hashing those together, and only storing
+this final hash on-chain.
+
+## What is State Compression?
+
+In simple terms, state compression uses "**_tree_**" structures to
+cryptographically hash off-chain data together, in a deterministic way, to
+compute a single final hash that gets stored on-chain.
+
+These _trees_ are created in this "_deterministic_" process by:
+
+- taking any piece of data
+- creating a hash of this data
+- storing this hash as a `leaf` at the bottom of the tree
+- each `leaf` pair is then hashed together, creating a `branch`
+- each `branch` is then hashed together
+- continually climbing the tree and hashing adjacent branches together
+- once at the top of the tree, a final `root hash` is produced
+
+This `root hash` is then stored on chain, as a verifiable **_proof_** of all of
+the data within every leaf. 
Allowing anyone to cryptographically verify all the
+off-chain data within the tree, while only actually storing a **minimal** amount
+of data on-chain. Therefore, significantly reducing the cost to store/prove
+large amounts of data due to this "state compression".
+
+## Merkle trees and concurrent merkle trees
+
+Solana's state compression uses a special type of
+[merkle tree](#what-is-a-merkle-tree) that allows for multiple changes to any
+given tree to happen, while still maintaining the integrity and validity of the
+tree.
+
+This special tree, known as a
+"[concurrent merkle tree](#what-is-a-concurrent-merkle-tree)", effectively
+retains a "changelog" of the tree on-chain. Allowing for multiple rapid changes
+to the same tree (i.e. all in the same block), before a proof is invalidated.
+
+### What is a merkle tree?
+
+A [merkle tree](https://en.wikipedia.org/wiki/merkle_tree), sometimes called a
+"hash tree", is a hash-based binary tree structure where each `leaf` node is
+represented as a cryptographic hash of its inner data. And every node that is
+**not** a leaf, called a `branch`, is represented as a hash of its child leaf
+hashes.
+
+Each branch is then also hashed together, climbing the tree, until eventually
+only a single hash remains. This final hash, called the `root hash` or "root",
+can then be used in combination with a "proof path" to verify any piece of data
+stored within a leaf node.
+
+Once a final `root hash` has been computed, any piece of data stored within a
+`leaf` node can be verified by rehashing the specific leaf's data and the hash
+label of each adjacent branch climbing the tree (known as the `proof` or "proof
+path"). Comparing this "rehash" to the `root hash` is the verification of the
+underlying leaf data. If they match, the data is verified accurate. If they do
+not match, the leaf data was changed.
+
+Whenever desired, the original leaf data can be changed by simply hashing the
+**new leaf** data and recomputing the root hash in the same manner as the
+original root. This **new root hash** is then used to verify any of the data,
+and effectively invalidates the previous root hash and previous proof.
+Therefore, each change to these _traditional merkle trees_ is required to be
+performed in series.
+
+> This process of changing leaf data, and computing a new root hash can be a
+> **very common** thing when using merkle trees! While it is one of the design
+> points of the tree, it can result in one of the most notable drawbacks: rapid
+> changes.
+
+### What is a Concurrent merkle tree?
+
+In high throughput applications, like within the
+[Solana runtime](/docs/core/runtime.md), requests to change an on-chain
+_traditional merkle tree_ could be received by validators in relatively rapid
+succession (e.g. within the same slot). Each leaf data change would still be
+required to be performed in series. Resulting in each subsequent request for
+change to fail, due to the root hash and proof being invalidated by the previous
+change request in the slot.
+
+Enter, Concurrent merkle trees.
+
+A **Concurrent merkle tree** stores a **secure changelog** of the most recent
+changes, their root hash, and the proof to derive it. This changelog "buffer" is
+stored on-chain in an account specific to each tree, with a maximum number of
+changelog "records" (aka `maxBufferSize`).
+
+When multiple leaf data change requests are received by validators in the same
+slot, the on-chain _concurrent merkle tree_ can use this "changelog buffer" as a
+source of truth for more acceptable proofs. Effectively allowing for up to
+`maxBufferSize` changes to the same tree in the same slot. Significantly
+boosting throughput.
+
+## Sizing a concurrent merkle tree
+
+When creating one of these on-chain trees, there are 3 values that will
+determine the size of your tree, the cost to create your tree, and the number of
+concurrent changes to your tree:
+
+1. max depth
+2. max buffer size
+3. canopy depth
+
+### Max depth
+
+The "max depth" of a tree is the **maximum number** of hops to get from any data
+`leaf` to the `root` of the tree.
+
+Since merkle trees are binary trees, every leaf is connected to **only one**
+other leaf; existing as a `leaf pair`.
+
+Therefore, the `maxDepth` of a tree is used to determine the maximum number of
+nodes (aka pieces of data or `leafs`) to store within the tree using a simple
+calculation:
+
+```text
+nodes_count = 2 ^ maxDepth
+```
+
+Since a tree's depth must be set at tree creation, you must decide how many
+pieces of data you want your tree to store. Then using the simple calculation
+above, you can determine the lowest `maxDepth` to store your data.
+
+#### Example 1: minting 100 NFTs
+
+If we want to create a tree to store 100 compressed NFTs, we will need a
+minimum of "100 leafs" or "100 nodes".
+
+```text
+// maxDepth=6 -> 64 nodes
+2^6 = 64
+
+// maxDepth=7 -> 128 nodes
+2^7 = 128
+```
+
+We must use a `maxDepth` of `7` to ensure we can store all of our data.
+
+#### Example 2: minting 15000 NFTs
+
+If we want to create a tree to store 15000 compressed NFTs, we will need a
+minimum of "15000 leafs" or "15000 nodes".
+
+```text
+// maxDepth=13 -> 8192 nodes
+2^13 = 8192
+
+// maxDepth=14 -> 16384 nodes
+2^14 = 16384
+```
+
+We must use a `maxDepth` of `14` to ensure we can store all of our data.
+
+#### The higher the max depth, the higher the cost
+
+The `maxDepth` value will be one of the primary drivers of cost when creating a
+tree since you will pay this cost upfront at tree creation. The higher the max
+tree depth, the more data fingerprints (aka hashes) you can store, the higher
+the cost.
+
+### Max buffer size
+
+The "max buffer size" is effectively the maximum number of changes that can
+occur on a tree, with the `root hash` still being valid.
+
+Due to the root hash effectively being a single hash of all leaf data, changing
+any single leaf would invalidate the proof needed for all subsequent attempts to
+change any leaf of a regular tree.
+
+But with a [concurrent tree](#what-is-a-concurrent-merkle-tree), there is
+effectively a changelog of updates for these proofs. This changelog buffer is
+sized and set at tree creation via this `maxBufferSize` value.
+
+### Canopy depth
+
+The "canopy depth", sometimes called the canopy size, is the number of proof
+nodes that are cached/stored on-chain for any given proof path.
+
+When performing an update action on a `leaf`, like transferring ownership (e.g.
+selling a compressed NFT), the **complete** proof path must be used to verify
+original ownership of the leaf and therefore allow for the update action. This
+verification is performed using the **complete** proof path to correctly compute
+the current `root hash` (or any cached `root hash` via the on-chain "concurrent
+buffer").
+
+The larger a tree's max depth is, the more proof nodes are required to perform
+this verification. For example, if your max depth is `14`, then `14` total proof
+nodes are required for verification. As a tree gets larger, the complete proof
+path gets larger.
+
+Normally, each of these proof nodes would be required to be included within each
+tree update transaction. Since each proof node value takes up `32 bytes` in a
+transaction (similar to providing a Public Key), larger trees would very quickly
+exceed the maximum transaction size limit.
+
+Enter the canopy. The canopy enables storing a set number of proof nodes on
+chain (for any given proof path), allowing fewer proof nodes to be included
+within each update transaction and therefore keeping the overall transaction
+size below the limit.
+
+For example, a tree with a max depth of `14` would require `14` total proof
+nodes. With a canopy of `10`, only `4` proof nodes are required to be submitted
+per update transaction.
+
+#### The larger the canopy depth value, the higher the cost
+
+The `canopyDepth` value is also a primary factor of cost when creating a tree
+since you will pay this cost upfront at tree creation. The higher the canopy
+depth, the more proof nodes are stored on chain, and the higher the cost.
+
+#### Smaller canopy limits composability
+
+While a tree's creation costs are higher with a higher canopy, having a lower
+`canopyDepth` will require more proof nodes to be included within each update
+transaction. The more nodes required to be submitted, the larger the transaction
+size, and therefore the easier it is to exceed the transaction size limits.
+
+This will also be the case for any other Solana program or dApp that attempts to
+interact with your tree/leafs. If your tree requires too many proof nodes
+(because of a low canopy depth), then any additional actions another on-chain
+program **could** offer will be **limited** by their specific instruction size
+plus your proof node list size, limiting composability and the potential
+additional utility for your specific tree.
+
+For example, if your tree is being used for compressed NFTs and has a very low
+canopy depth, an NFT marketplace may only be able to support simple NFT
+transfers, and not be able to support an on-chain bidding system.
+
+## Cost of creating a tree
+
+The cost of creating a concurrent merkle tree is based on the tree's size
+parameters: `maxDepth`, `maxBufferSize`, and `canopyDepth`. These values are all
+used to calculate the on-chain storage (in bytes) required for a tree to exist
+on chain.
+
+Once the required space (in bytes) has been calculated, use the
+[`getMinimumBalanceForRentExemption`](/docs/rpc/http/getminimumbalanceforrentexemption)
+RPC method to request the cost (in lamports) to allocate this amount of bytes
+on-chain.
+
+### Calculate tree cost in JavaScript
+
+Within the
+[`@solana/spl-account-compression`](https://www.npmjs.com/package/@solana/spl-account-compression)
+package, developers can use the
+[`getConcurrentMerkleTreeAccountSize`](https://solana-labs.github.io/solana-program-library/account-compression/sdk/docs/modules/index.html#getConcurrentMerkleTreeAccountSize)
+function to calculate the required space for a given set of tree size
+parameters.
+
+Then use the
+[`getMinimumBalanceForRentExemption`](https://solana-labs.github.io/solana-web3.js/classes/Connection.html#getMinimumBalanceForRentExemption)
+function to get the final cost (in lamports) to allocate the required space for
+the tree on-chain. This determines the cost in lamports to make an account of
+this size rent exempt, similar to any other account creation.
+
+```ts
+// calculate the space required for the tree
+const requiredSpace = getConcurrentMerkleTreeAccountSize(
+  maxDepth,
+  maxBufferSize,
+  canopyDepth,
+);
+
+// get the cost (in lamports) to store the tree on-chain
+const storageCost =
+  await connection.getMinimumBalanceForRentExemption(requiredSpace);
+```
+
+### Example costs
+
+Listed below are several example costs for different tree sizes, including how
+many leaf nodes are possible for each:
+
+**Example #1: 16,384 nodes costing 0.222 SOL**
+
+- max depth of `14` and max buffer size of `64`
+- maximum number of leaf nodes: `16,384`
+- canopy depth of `0` costs approximately `0.222 SOL` to create
+
+**Example #2: 16,384 nodes costing 1.134 SOL**
+
+- max depth of `14` and max buffer size of `64`
+- maximum number of leaf nodes: `16,384`
+- canopy depth of `11` costs approximately `1.134 SOL` to create
+
+**Example #3: 1,048,576 nodes costing 1.673 SOL**
+
+- max depth of `20` and max buffer size of `256`
+- maximum number of leaf nodes: `1,048,576`
+- canopy depth of `10` costs approximately `1.673 SOL` to create
+
+**Example #4: 1,048,576 nodes costing 15.814 SOL**
+
+- max depth of `20` and max buffer size of `256`
+- maximum number of leaf nodes: `1,048,576`
+- canopy depth of `15` costs approximately `15.814 SOL` to create
+
+## Compressed NFTs
+
+Compressed NFTs are one of the most popular use cases for State Compression on
+Solana. With compression, a one million NFT collection could be minted for
+`~50 SOL`, versus `~12,000 SOL` for its uncompressed equivalent collection.
+
+If you are interested in creating compressed NFTs yourself, read our developer
+guide for
+[minting and transferring compressed NFTs](/content/guides/javascript/compressed-nfts.md).
diff --git a/docs/clients/index.md b/docs/clients/index.md
new file mode 100644
index 000000000..e24dd3aa1
--- /dev/null
+++ b/docs/clients/index.md
@@ -0,0 +1,5 @@
+---
+metaOnly: true
+title: Solana Clients
+# sidebarSortOrder: 3
+---
diff --git a/docs/clients/javascript-reference.md b/docs/clients/javascript-reference.md
new file mode 100644
index 000000000..c07b49dea
--- /dev/null
+++ b/docs/clients/javascript-reference.md
@@ -0,0 +1,858 @@
+---
+title: Web3.js API Examples
+---
+
+## Web3 API Reference Guide
+
+The `@solana/web3.js` library provides coverage of the
+[Solana JSON RPC API](/docs/rpc).
+
+You can find the full documentation for the `@solana/web3.js` library
+[here](https://solana-labs.github.io/solana-web3.js/).
+
+## General
+
+### Connection
+
+[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Connection.html)
+
+Connection is used to interact with the [Solana JSON RPC](/docs/rpc). You can
+use Connection to confirm transactions, get account info, and more.
+
+You create a connection by defining the JSON RPC cluster endpoint and the
+desired commitment. Once this is complete, you can use this connection object to
+interact with any of the Solana JSON RPC API methods.
+ +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); + +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let slot = await connection.getSlot(); +console.log(slot); +// 93186439 + +let blockTime = await connection.getBlockTime(slot); +console.log(blockTime); +// 1630747045 + +let block = await connection.getBlock(slot); +console.log(block); + +/* +{ + blockHeight: null, + blockTime: 1630747045, + blockhash: 'AsFv1aV5DGip9YJHHqVjrGg6EKk55xuyxn2HeiN9xQyn', + parentSlot: 93186438, + previousBlockhash: '11111111111111111111111111111111', + rewards: [], + transactions: [] +} +*/ + +let slotLeader = await connection.getSlotLeader(); +console.log(slotLeader); +//49AqLYbpJYc2DrzGUAH1fhWJy62yxBxpLEkfJwjKy2jr +``` + +The above example shows only a few of the methods on Connection. Please see the +[source generated docs](https://solana-labs.github.io/solana-web3.js/classes/Connection.html) +for the full list. + +### Transaction + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Transaction.html) + +A transaction is used to interact with programs on the Solana blockchain. These +transactions are constructed with TransactionInstructions, containing all the +accounts possible to interact with, as well as any needed data or program +addresses. Each TransactionInstruction consists of keys, data, and a programId. +You can do multiple instructions in a single transaction, interacting with +multiple programs at once. + +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); +const nacl = require("tweetnacl"); + +// Airdrop SOL for paying transactions +let payer = web3.Keypair.generate(); +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let airdropSignature = await connection.requestAirdrop( + payer.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +let toAccount = web3.Keypair.generate(); + +// Create Simple Transaction +let transaction = new web3.Transaction(); + +// Add an instruction to execute +transaction.add( + web3.SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: toAccount.publicKey, + lamports: 1000, + }), +); + +// Send and confirm transaction +// Note: feePayer is by default the first signer, or payer, if the parameter is not set +await web3.sendAndConfirmTransaction(connection, transaction, [payer]); + +// Alternatively, manually construct the transaction +let recentBlockhash = await connection.getRecentBlockhash(); +let manualTransaction = new web3.Transaction({ + recentBlockhash: recentBlockhash.blockhash, + feePayer: payer.publicKey, +}); +manualTransaction.add( + web3.SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: toAccount.publicKey, + lamports: 1000, + }), +); + +let transactionBuffer = manualTransaction.serializeMessage(); +let signature = nacl.sign.detached(transactionBuffer, payer.secretKey); + +manualTransaction.addSignature(payer.publicKey, signature); + +let isVerifiedSignature = manualTransaction.verifySignatures(); +console.log(`The signatures were verified: ${isVerifiedSignature}`); + +// The signatures were verified: true + +let rawTransaction = manualTransaction.serialize(); + +await web3.sendAndConfirmRawTransaction(connection, rawTransaction); +``` + +### Keypair + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Keypair.html) + +The keypair is used to create an account with a public key and secret key 
within +Solana. You can either generate, generate from a seed, or create from a secret +key. + +#### Example Usage + +```javascript +const { Keypair } = require("@solana/web3.js"); + +let account = Keypair.generate(); + +console.log(account.publicKey.toBase58()); +console.log(account.secretKey); + +// 2DVaHtcdTf7cm18Zm9VV8rKK4oSnjmTkKE6MiXe18Qsb +// Uint8Array(64) [ +// 152, 43, 116, 211, 207, 41, 220, 33, 193, 168, 118, +// 24, 176, 83, 206, 132, 47, 194, 2, 203, 186, 131, +// 197, 228, 156, 170, 154, 41, 56, 76, 159, 124, 18, +// 14, 247, 32, 210, 51, 102, 41, 43, 21, 12, 170, +// 166, 210, 195, 188, 60, 220, 210, 96, 136, 158, 6, +// 205, 189, 165, 112, 32, 200, 116, 164, 234 +// ] + +let seed = Uint8Array.from([ + 70, 60, 102, 100, 70, 60, 102, 100, 70, 60, 102, 100, 70, 60, 102, 100, 70, + 60, 102, 100, 70, 60, 102, 100, 70, 60, 102, 100, 70, 60, 102, 100, +]); +let accountFromSeed = Keypair.fromSeed(seed); + +console.log(accountFromSeed.publicKey.toBase58()); +console.log(accountFromSeed.secretKey); + +// 3LDverZtSC9Duw2wyGC1C38atMG49toPNW9jtGJiw9Ar +// Uint8Array(64) [ +// 70, 60, 102, 100, 70, 60, 102, 100, 70, 60, 102, +// 100, 70, 60, 102, 100, 70, 60, 102, 100, 70, 60, +// 102, 100, 70, 60, 102, 100, 70, 60, 102, 100, 34, +// 164, 6, 12, 9, 193, 196, 30, 148, 122, 175, 11, +// 28, 243, 209, 82, 240, 184, 30, 31, 56, 223, 236, +// 227, 60, 72, 215, 47, 208, 209, 162, 59 +// ] + +let accountFromSecret = Keypair.fromSecretKey(account.secretKey); + +console.log(accountFromSecret.publicKey.toBase58()); +console.log(accountFromSecret.secretKey); + +// 2DVaHtcdTf7cm18Zm9VV8rKK4oSnjmTkKE6MiXe18Qsb +// Uint8Array(64) [ +// 152, 43, 116, 211, 207, 41, 220, 33, 193, 168, 118, +// 24, 176, 83, 206, 132, 47, 194, 2, 203, 186, 131, +// 197, 228, 156, 170, 154, 41, 56, 76, 159, 124, 18, +// 14, 247, 32, 210, 51, 102, 41, 43, 21, 12, 170, +// 166, 210, 195, 188, 60, 220, 210, 96, 136, 158, 6, +// 205, 189, 165, 112, 32, 200, 116, 164, 234 +// ] +``` + +Using `generate` generates a random Keypair for use as an account on Solana. +Using `fromSeed`, you can generate a Keypair using a deterministic constructor. +`fromSecret` creates a Keypair from a secret Uint8array. You can see that the +publicKey for the `generate` Keypair and `fromSecret` Keypair are the same +because the secret from the `generate` Keypair is used in `fromSecret`. + +**Warning**: Do not use `fromSeed` unless you are creating a seed with high +entropy. Do not share your seed. Treat the seed like you would a private key. + +### PublicKey + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/PublicKey.html) + +PublicKey is used throughout `@solana/web3.js` in transactions, keypairs, and +programs. You require publickey when listing each account in a transaction and +as a general identifier on Solana. + +A PublicKey can be created with a base58 encoded string, buffer, Uint8Array, +number, and an array of numbers. 
+ +#### Example Usage + +```javascript +const { Buffer } = require("buffer"); +const web3 = require("@solana/web3.js"); +const crypto = require("crypto"); + +// Create a PublicKey with a base58 encoded string +let base58publicKey = new web3.PublicKey( + "5xot9PVkphiX2adznghwrAuxGs2zeWisNSxMW6hU6Hkj", +); +console.log(base58publicKey.toBase58()); + +// 5xot9PVkphiX2adznghwrAuxGs2zeWisNSxMW6hU6Hkj + +// Create a Program Address +let highEntropyBuffer = crypto.randomBytes(31); +let programAddressFromKey = await web3.PublicKey.createProgramAddress( + [highEntropyBuffer.slice(0, 31)], + base58publicKey, +); +console.log(`Generated Program Address: ${programAddressFromKey.toBase58()}`); + +// Generated Program Address: 3thxPEEz4EDWHNxo1LpEpsAxZryPAHyvNVXJEJWgBgwJ + +// Find Program address given a PublicKey +let validProgramAddress = await web3.PublicKey.findProgramAddress( + [Buffer.from("", "utf8")], + programAddressFromKey, +); +console.log(`Valid Program Address: ${validProgramAddress}`); + +// Valid Program Address: C14Gs3oyeXbASzwUpqSymCKpEyccfEuSe8VRar9vJQRE,253 +``` + +### SystemProgram + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/SystemProgram.html) + +The SystemProgram grants the ability to create accounts, allocate account data, +assign an account to programs, work with nonce accounts, and transfer lamports. +You can use the SystemInstruction class to help with decoding and reading +individual instructions + +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); + +// Airdrop SOL for paying transactions +let payer = web3.Keypair.generate(); +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let airdropSignature = await connection.requestAirdrop( + payer.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +// Allocate Account Data +let allocatedAccount = web3.Keypair.generate(); +let allocateInstruction = web3.SystemProgram.allocate({ + accountPubkey: allocatedAccount.publicKey, + space: 100, +}); +let transaction = new web3.Transaction().add(allocateInstruction); + +await web3.sendAndConfirmTransaction(connection, transaction, [ + payer, + allocatedAccount, +]); + +// Create Nonce Account +let nonceAccount = web3.Keypair.generate(); +let minimumAmountForNonceAccount = + await connection.getMinimumBalanceForRentExemption(web3.NONCE_ACCOUNT_LENGTH); +let createNonceAccountTransaction = new web3.Transaction().add( + web3.SystemProgram.createNonceAccount({ + fromPubkey: payer.publicKey, + noncePubkey: nonceAccount.publicKey, + authorizedPubkey: payer.publicKey, + lamports: minimumAmountForNonceAccount, + }), +); + +await web3.sendAndConfirmTransaction( + connection, + createNonceAccountTransaction, + [payer, nonceAccount], +); + +// Advance nonce - Used to create transactions as an account custodian +let advanceNonceTransaction = new web3.Transaction().add( + web3.SystemProgram.nonceAdvance({ + noncePubkey: nonceAccount.publicKey, + authorizedPubkey: payer.publicKey, + }), +); + +await web3.sendAndConfirmTransaction(connection, advanceNonceTransaction, [ + payer, +]); + +// Transfer lamports between accounts +let toAccount = web3.Keypair.generate(); + +let transferTransaction = new web3.Transaction().add( + web3.SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: toAccount.publicKey, + lamports: 1000, + }), +); +await web3.sendAndConfirmTransaction(connection, transferTransaction, [payer]); + +// Assign a new account to 
a program +let programId = web3.Keypair.generate(); +let assignedAccount = web3.Keypair.generate(); + +let assignTransaction = new web3.Transaction().add( + web3.SystemProgram.assign({ + accountPubkey: assignedAccount.publicKey, + programId: programId.publicKey, + }), +); + +await web3.sendAndConfirmTransaction(connection, assignTransaction, [ + payer, + assignedAccount, +]); +``` + +### Secp256k1Program + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Secp256k1Program.html) + +The Secp256k1Program is used to verify Secp256k1 signatures, which are used by +both Bitcoin and Ethereum. + +#### Example Usage + +```javascript +const { keccak_256 } = require("js-sha3"); +const web3 = require("@solana/web3.js"); +const secp256k1 = require("secp256k1"); + +// Create a Ethereum Address from secp256k1 +let secp256k1PrivateKey; +do { + secp256k1PrivateKey = web3.Keypair.generate().secretKey.slice(0, 32); +} while (!secp256k1.privateKeyVerify(secp256k1PrivateKey)); + +let secp256k1PublicKey = secp256k1 + .publicKeyCreate(secp256k1PrivateKey, false) + .slice(1); + +let ethAddress = + web3.Secp256k1Program.publicKeyToEthAddress(secp256k1PublicKey); +console.log(`Ethereum Address: 0x${ethAddress.toString("hex")}`); + +// Ethereum Address: 0xadbf43eec40694eacf36e34bb5337fba6a2aa8ee + +// Fund a keypair to create instructions +let fromPublicKey = web3.Keypair.generate(); +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let airdropSignature = await connection.requestAirdrop( + fromPublicKey.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +// Sign Message with Ethereum Key +let plaintext = Buffer.from("string address"); +let plaintextHash = Buffer.from(keccak_256.update(plaintext).digest()); +let { signature, recid: recoveryId } = secp256k1.ecdsaSign( + plaintextHash, + secp256k1PrivateKey, +); + +// Create transaction to verify the signature +let transaction = new Transaction().add( + web3.Secp256k1Program.createInstructionWithEthAddress({ + ethAddress: ethAddress.toString("hex"), + plaintext, + signature, + recoveryId, + }), +); + +// Transaction will succeed if the message is verified to be signed by the address +await web3.sendAndConfirmTransaction(connection, transaction, [fromPublicKey]); +``` + +### Message + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Message.html) + +Message is used as another way to construct transactions. You can construct a +message using the accounts, header, instructions, and recentBlockhash that are a +part of a transaction. A [Transaction](/docs/clients/javascript.md#Transaction) +is a Message plus the list of required signatures required to execute the +transaction. 
+ +#### Example Usage + +```javascript +const { Buffer } = require("buffer"); +const bs58 = require("bs58"); +const web3 = require("@solana/web3.js"); + +let toPublicKey = web3.Keypair.generate().publicKey; +let fromPublicKey = web3.Keypair.generate(); + +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let airdropSignature = await connection.requestAirdrop( + fromPublicKey.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +let type = web3.SYSTEM_INSTRUCTION_LAYOUTS.Transfer; +let data = Buffer.alloc(type.layout.span); +let layoutFields = Object.assign({ instruction: type.index }); +type.layout.encode(layoutFields, data); + +let recentBlockhash = await connection.getRecentBlockhash(); + +let messageParams = { + accountKeys: [ + fromPublicKey.publicKey.toString(), + toPublicKey.toString(), + web3.SystemProgram.programId.toString(), + ], + header: { + numReadonlySignedAccounts: 0, + numReadonlyUnsignedAccounts: 1, + numRequiredSignatures: 1, + }, + instructions: [ + { + accounts: [0, 1], + data: bs58.encode(data), + programIdIndex: 2, + }, + ], + recentBlockhash, +}; + +let message = new web3.Message(messageParams); + +let transaction = web3.Transaction.populate(message, [ + fromPublicKey.publicKey.toString(), +]); + +await web3.sendAndConfirmTransaction(connection, transaction, [fromPublicKey]); +``` + +### Struct + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Struct.html) + +The struct class is used to create Rust compatible structs in javascript. This +class is only compatible with Borsh encoded Rust structs. + +#### Example Usage + +Struct in Rust: + +```rust +pub struct Fee { + pub denominator: u64, + pub numerator: u64, +} +``` + +Using web3: + +```javascript +import BN from "bn.js"; +import { Struct } from "@solana/web3.js"; + +export class Fee extends Struct { + denominator: BN; + numerator: BN; +} +``` + +### Enum + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Enum.html) + +The Enum class is used to represent a Rust compatible Enum in javascript. The +enum will just be a string representation if logged but can be properly +encoded/decoded when used in conjunction with +[Struct](/docs/clients/javascript.md#Struct). This class is only compatible with +Borsh encoded Rust enumerations. + +#### Example Usage + +Rust: + +```rust +pub enum AccountType { + Uninitialized, + StakePool, + ValidatorList, +} +``` + +Web3: + +```javascript +import { Enum } from "@solana/web3.js"; + +export class AccountType extends Enum {} +``` + +### NonceAccount + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/NonceAccount.html) + +Normally a transaction is rejected if a transaction's `recentBlockhash` field is +too old. To provide for certain custodial services, Nonce Accounts are used. +Transactions which use a `recentBlockhash` captured on-chain by a Nonce Account +do not expire as long at the Nonce Account is not advanced. + +You can create a nonce account by first creating a normal account, then using +`SystemProgram` to make the account a Nonce Account. 
+ +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); + +// Create connection +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +// Generate accounts +let account = web3.Keypair.generate(); +let nonceAccount = web3.Keypair.generate(); + +// Fund account +let airdropSignature = await connection.requestAirdrop( + account.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +// Get Minimum amount for rent exemption +let minimumAmount = await connection.getMinimumBalanceForRentExemption( + web3.NONCE_ACCOUNT_LENGTH, +); + +// Form CreateNonceAccount transaction +let transaction = new web3.Transaction().add( + web3.SystemProgram.createNonceAccount({ + fromPubkey: account.publicKey, + noncePubkey: nonceAccount.publicKey, + authorizedPubkey: account.publicKey, + lamports: minimumAmount, + }), +); +// Create Nonce Account +await web3.sendAndConfirmTransaction(connection, transaction, [ + account, + nonceAccount, +]); + +let nonceAccountData = await connection.getNonce( + nonceAccount.publicKey, + "confirmed", +); + +console.log(nonceAccountData); +// NonceAccount { +// authorizedPubkey: PublicKey { +// _bn: +// }, +// nonce: '93zGZbhMmReyz4YHXjt2gHsvu5tjARsyukxD4xnaWaBq', +// feeCalculator: { lamportsPerSignature: 5000 } +// } + +let nonceAccountInfo = await connection.getAccountInfo( + nonceAccount.publicKey, + "confirmed", +); + +let nonceAccountFromInfo = web3.NonceAccount.fromAccountData( + nonceAccountInfo.data, +); + +console.log(nonceAccountFromInfo); +// NonceAccount { +// authorizedPubkey: PublicKey { +// _bn: +// }, +// nonce: '93zGZbhMmReyz4YHXjt2gHsvu5tjARsyukxD4xnaWaBq', +// feeCalculator: { lamportsPerSignature: 5000 } +// } +``` + +The above example shows both how to create a `NonceAccount` using +`SystemProgram.createNonceAccount`, as well as how to retrieve the +`NonceAccount` from accountInfo. Using the nonce, you can create transactions +offline with the nonce in place of the `recentBlockhash`. + +### VoteAccount + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/VoteAccount.html) + +Vote account is an object that grants the capability of decoding vote accounts +from the native vote account program on the network. + +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); + +let voteAccountInfo = await connection.getProgramAccounts(web3.VOTE_PROGRAM_ID); +let voteAccountFromData = web3.VoteAccount.fromAccountData( + voteAccountInfo[0].account.data, +); +console.log(voteAccountFromData); +/* +VoteAccount { + nodePubkey: PublicKey { + _bn: + }, + authorizedWithdrawer: PublicKey { + _bn: + }, + commission: 10, + rootSlot: 104570885, + votes: [ + { slot: 104570886, confirmationCount: 31 }, + { slot: 104570887, confirmationCount: 30 }, + { slot: 104570888, confirmationCount: 29 }, + { slot: 104570889, confirmationCount: 28 }, + { slot: 104570890, confirmationCount: 27 }, + { slot: 104570891, confirmationCount: 26 }, + { slot: 104570892, confirmationCount: 25 }, + { slot: 104570893, confirmationCount: 24 }, + { slot: 104570894, confirmationCount: 23 }, + ... 
+ ], + authorizedVoters: [ { epoch: 242, authorizedVoter: [PublicKey] } ], + priorVoters: [ + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object], [Object], + [Object], [Object] + ], + epochCredits: [ + { epoch: 179, credits: 33723163, prevCredits: 33431259 }, + { epoch: 180, credits: 34022643, prevCredits: 33723163 }, + { epoch: 181, credits: 34331103, prevCredits: 34022643 }, + { epoch: 182, credits: 34619348, prevCredits: 34331103 }, + { epoch: 183, credits: 34880375, prevCredits: 34619348 }, + { epoch: 184, credits: 35074055, prevCredits: 34880375 }, + { epoch: 185, credits: 35254965, prevCredits: 35074055 }, + { epoch: 186, credits: 35437863, prevCredits: 35254965 }, + { epoch: 187, credits: 35672671, prevCredits: 35437863 }, + { epoch: 188, credits: 35950286, prevCredits: 35672671 }, + { epoch: 189, credits: 36228439, prevCredits: 35950286 }, + ... + ], + lastTimestamp: { slot: 104570916, timestamp: 1635730116 } +} +*/ +``` + +## Staking + +### StakeProgram + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/StakeProgram.html) + +The StakeProgram facilitates staking SOL and delegating them to any validators +on the network. You can use StakeProgram to create a stake account, stake some +SOL, authorize accounts for withdrawal of your stake, deactivate your stake, and +withdraw your funds. The StakeInstruction class is used to decode and read more +instructions from transactions calling the StakeProgram + +#### Example Usage + +```javascript +const web3 = require("@solana/web3.js"); + +// Fund a key to create transactions +let fromPublicKey = web3.Keypair.generate(); +let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed"); + +let airdropSignature = await connection.requestAirdrop( + fromPublicKey.publicKey, + web3.LAMPORTS_PER_SOL, +); +await connection.confirmTransaction({ signature: airdropSignature }); + +// Create Account +let stakeAccount = web3.Keypair.generate(); +let authorizedAccount = web3.Keypair.generate(); +/* Note: This is the minimum amount for a stake account -- Add additional Lamports for staking + For example, we add 50 lamports as part of the stake */ +let lamportsForStakeAccount = + (await connection.getMinimumBalanceForRentExemption( + web3.StakeProgram.space, + )) + 50; + +let createAccountTransaction = web3.StakeProgram.createAccount({ + fromPubkey: fromPublicKey.publicKey, + authorized: new web3.Authorized( + authorizedAccount.publicKey, + authorizedAccount.publicKey, + ), + lamports: lamportsForStakeAccount, + lockup: new web3.Lockup(0, 0, fromPublicKey.publicKey), + stakePubkey: stakeAccount.publicKey, +}); +await web3.sendAndConfirmTransaction(connection, createAccountTransaction, [ + fromPublicKey, + stakeAccount, +]); + +// Check that stake is available +let stakeBalance = await connection.getBalance(stakeAccount.publicKey); +console.log(`Stake balance: ${stakeBalance}`); +// Stake balance: 2282930 + +// We can verify the state of our stake. 
This may take some time to become active +let stakeState = await connection.getStakeActivation(stakeAccount.publicKey); +console.log(`Stake state: ${stakeState.state}`); +// Stake state: inactive + +// To delegate our stake, we get the current vote accounts and choose the first +let voteAccounts = await connection.getVoteAccounts(); +let voteAccount = voteAccounts.current.concat(voteAccounts.delinquent)[0]; +let votePubkey = new web3.PublicKey(voteAccount.votePubkey); + +// We can then delegate our stake to the voteAccount +let delegateTransaction = web3.StakeProgram.delegate({ + stakePubkey: stakeAccount.publicKey, + authorizedPubkey: authorizedAccount.publicKey, + votePubkey: votePubkey, +}); +await web3.sendAndConfirmTransaction(connection, delegateTransaction, [ + fromPublicKey, + authorizedAccount, +]); + +// To withdraw our funds, we first have to deactivate the stake +let deactivateTransaction = web3.StakeProgram.deactivate({ + stakePubkey: stakeAccount.publicKey, + authorizedPubkey: authorizedAccount.publicKey, +}); +await web3.sendAndConfirmTransaction(connection, deactivateTransaction, [ + fromPublicKey, + authorizedAccount, +]); + +// Once deactivated, we can withdraw our funds +let withdrawTransaction = web3.StakeProgram.withdraw({ + stakePubkey: stakeAccount.publicKey, + authorizedPubkey: authorizedAccount.publicKey, + toPubkey: fromPublicKey.publicKey, + lamports: stakeBalance, +}); + +await web3.sendAndConfirmTransaction(connection, withdrawTransaction, [ + fromPublicKey, + authorizedAccount, +]); +``` + +### Authorized + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Authorized.html) + +Authorized is an object used when creating an authorized account for staking +within Solana. You can designate a `staker` and `withdrawer` separately, +allowing for a different account to withdraw other than the staker. + +You can find more usage of the `Authorized` object under +[`StakeProgram`](/docs/clients/javascript.md#StakeProgram) + +### Lockup + +[Source Documentation](https://solana-labs.github.io/solana-web3.js/classes/Lockup.html) + +Lockup is used in conjunction with the +[StakeProgram](/docs/clients/javascript.md#StakeProgram) to create an account. +The Lockup is used to determine how long the stake will be locked, or unable to +be retrieved. If the Lockup is set to 0 for both epoch and the Unix timestamp, +the lockup will be disabled for the stake account. + +#### Example Usage + +```javascript +const { + Authorized, + Keypair, + Lockup, + StakeProgram, +} = require("@solana/web3.js"); + +let account = Keypair.generate(); +let stakeAccount = Keypair.generate(); +let authorized = new Authorized(account.publicKey, account.publicKey); +let lockup = new Lockup(0, 0, account.publicKey); + +let createStakeAccountInstruction = StakeProgram.createAccount({ + fromPubkey: account.publicKey, + authorized: authorized, + lamports: 1000, + lockup: lockup, + stakePubkey: stakeAccount.publicKey, +}); +``` + +The above code creates a `createStakeAccountInstruction` to be used when +creating an account with the `StakeProgram`. The Lockup is set to 0 for both the +epoch and Unix timestamp, disabling lockup for the account. + +See [StakeProgram](/docs/clients/javascript.md#StakeProgram) for more. 
diff --git a/docs/clients/javascript.md b/docs/clients/javascript.md new file mode 100644 index 000000000..7a27bd05b --- /dev/null +++ b/docs/clients/javascript.md @@ -0,0 +1,405 @@ +--- +sidebarLabel: JavaScript / TypeScript +title: JavaScript Client for Solana +sidebarSortOrder: 2 +--- + +## What is Solana-Web3.js? + +The Solana-Web3.js library aims to provide complete coverage of Solana. The +library was built on top of the [Solana JSON RPC API](/docs/rpc). + +You can find the full documentation for the `@solana/web3.js` library +[here](https://solana-labs.github.io/solana-web3.js/). + +## Common Terminology + +| Term | Definition | +| ----------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| Program | Stateless executable code written to interpret instructions. Programs are capable of performing actions based on the instructions provided. | +| Instruction | The smallest unit of a program that a client can include in a transaction. Within its processing code, an instruction may contain one or more cross-program invocations. | +| Transaction | One or more instructions signed by the client using one or more Keypairs and executed atomically with only two possible outcomes: success or failure. | + +For the full list of terms, see +[Solana terminology](/docs/terminology.md#cross-program-invocation-cpi) + +## Getting Started + +### Installation + +#### yarn + +```bash +yarn add @solana/web3.js +``` + +#### npm + +```bash +npm install --save @solana/web3.js +``` + +#### Bundle + +```html + + + + + +``` + +### Usage + +#### Javascript + +```javascript +const solanaWeb3 = require("@solana/web3.js"); +console.log(solanaWeb3); +``` + +#### ES6 + +```javascript +import * as solanaWeb3 from "@solana/web3.js"; +console.log(solanaWeb3); +``` + +#### Browser Bundle + +```javascript +// solanaWeb3 is provided in the global namespace by the bundle script +console.log(solanaWeb3); +``` + +## Quickstart + +### Connecting to a Wallet + +To allow users to use your dApp or application on Solana, they will need to get +access to their Keypair. A Keypair is a private key with a matching public key, +used to sign transactions. + +There are two ways to obtain a Keypair: + +1. Generate a new Keypair +2. Obtain a Keypair using the secret key + +You can obtain a new Keypair with the following: + +```javascript +const { Keypair } = require("@solana/web3.js"); + +let keypair = Keypair.generate(); +``` + +This will generate a brand new Keypair for a user to fund and use within your +application. + +You can allow entry of the secretKey using a textbox, and obtain the Keypair +with `Keypair.fromSecretKey(secretKey)`. + +```javascript +const { Keypair } = require("@solana/web3.js"); + +let secretKey = Uint8Array.from([ + 202, 171, 192, 129, 150, 189, 204, 241, 142, 71, 205, 2, 81, 97, 2, 176, 48, + 81, 45, 1, 96, 138, 220, 132, 231, 131, 120, 77, 66, 40, 97, 172, 91, 245, 84, + 221, 157, 190, 9, 145, 176, 130, 25, 43, 72, 107, 190, 229, 75, 88, 191, 136, + 7, 167, 109, 91, 170, 164, 186, 15, 142, 36, 12, 23, +]); + +let keypair = Keypair.fromSecretKey(secretKey); +``` + +Many wallets today allow users to bring their Keypair using a variety of +extensions or web wallets. The general recommendation is to use wallets, not +Keypairs, to sign transactions. The wallet creates a layer of separation between +the dApp and the Keypair, ensuring that the dApp never has access to the secret +key. 
You can find ways to connect to external wallets with the +[wallet-adapter](https://github.com/solana-labs/wallet-adapter) library. + +### Creating and Sending Transactions + +To interact with programs on Solana, you create, sign, and send transactions to +the network. Transactions are collections of instructions with signatures. The +order that instructions exist in a transaction determines the order they are +executed. + +A transaction in Solana-Web3.js is created using the +[`Transaction`](/docs/clients/javascript.md#Transaction) object and adding +desired messages, addresses, or instructions. + +Take the example of a transfer transaction: + +```javascript +const { + Keypair, + Transaction, + SystemProgram, + LAMPORTS_PER_SOL, +} = require("@solana/web3.js"); + +let fromKeypair = Keypair.generate(); +let toKeypair = Keypair.generate(); +let transaction = new Transaction(); + +transaction.add( + SystemProgram.transfer({ + fromPubkey: fromKeypair.publicKey, + toPubkey: toKeypair.publicKey, + lamports: LAMPORTS_PER_SOL, + }), +); +``` + +The above code achieves creating a transaction ready to be signed and +broadcasted to the network. The `SystemProgram.transfer` instruction was added +to the transaction, containing the amount of lamports to send, and the `to` and +`from` public keys. + +All that is left is to sign the transaction with keypair and send it over the +network. You can accomplish sending a transaction by using +`sendAndConfirmTransaction` if you wish to alert the user or do something after +a transaction is finished, or use `sendTransaction` if you don't need to wait +for the transaction to be confirmed. + +```javascript +const { + sendAndConfirmTransaction, + clusterApiUrl, + Connection, +} = require("@solana/web3.js"); + +let keypair = Keypair.generate(); +let connection = new Connection(clusterApiUrl("testnet")); + +sendAndConfirmTransaction(connection, transaction, [keypair]); +``` + +The above code takes in a `TransactionInstruction` using `SystemProgram`, +creates a `Transaction`, and sends it over the network. You use `Connection` in +order to define which Solana network you are connecting to, namely +`mainnet-beta`, `testnet`, or `devnet`. + +### Interacting with Custom Programs + +The previous section visits sending basic transactions. In Solana everything you +do interacts with different programs, including the previous section's transfer +transaction. At the time of writing programs on Solana are either written in +Rust or C. + +Let's look at the `SystemProgram`. The method signature for allocating space in +your account on Solana in Rust looks like this: + +```rust +pub fn allocate( + pubkey: &Pubkey, + space: u64 +) -> Instruction +``` + +In Solana when you want to interact with a program you must first know all the +accounts you will be interacting with. + +You must always provide every account that the program will be interacting +within the instruction. Not only that, but you must provide whether or not the +account is `isSigner` or `isWritable`. + +In the `allocate` method above, a single account `pubkey` is required, as well +as an amount of `space` for allocation. We know that the `allocate` method +writes to the account by allocating space within it, making the `pubkey` +required to be `isWritable`. `isSigner` is required when you are designating the +account that is running the instruction. In this case, the signer is the account +calling to allocate space within itself. 
+ +Let's look at how to call this instruction using solana-web3.js: + +```javascript +let keypair = web3.Keypair.generate(); +let payer = web3.Keypair.generate(); +let connection = new web3.Connection(web3.clusterApiUrl("testnet")); + +let airdropSignature = await connection.requestAirdrop( + payer.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); +``` + +First, we set up the account Keypair and connection so that we have an account +to make allocate on the testnet. We also create a payer Keypair and airdrop some +sol so we can pay for the allocate transaction. + +```javascript +let allocateTransaction = new web3.Transaction({ + feePayer: payer.publicKey, +}); +let keys = [{ pubkey: keypair.publicKey, isSigner: true, isWritable: true }]; +let params = { space: 100 }; +``` + +We create the transaction `allocateTransaction`, keys, and params objects. +`feePayer` is an optional field when creating a transaction that specifies who +is paying for the transaction, defaulting to the pubkey of the first signer in +the transaction. `keys` represents all accounts that the program's `allocate` +function will interact with. Since the `allocate` function also required space, +we created `params` to be used later when invoking the `allocate` function. + +```javascript +let allocateStruct = { + index: 8, + layout: struct([u32("instruction"), ns64("space")]), +}; +``` + +The above is created using `u32` and `ns64` from `@solana/buffer-layout` to +facilitate the payload creation. The `allocate` function takes in the parameter +`space`. To interact with the function we must provide the data as a Buffer +format. The `buffer-layout` library helps with allocating the buffer and +encoding it correctly for Rust programs on Solana to interpret. + +Let's break down this struct. + +```javascript +{ + index: 8, /* <-- */ + layout: struct([ + u32('instruction'), + ns64('space'), + ]) +} +``` + +`index` is set to 8 because the function `allocate` is in the 8th position in +the instruction enum for `SystemProgram`. + +```rust +/* https://github.com/solana-labs/solana/blob/21bc43ed58c63c827ba4db30426965ef3e807180/sdk/program/src/system_instruction.rs#L142-L305 */ +pub enum SystemInstruction { + /** 0 **/CreateAccount {/**/}, + /** 1 **/Assign {/**/}, + /** 2 **/Transfer {/**/}, + /** 3 **/CreateAccountWithSeed {/**/}, + /** 4 **/AdvanceNonceAccount, + /** 5 **/WithdrawNonceAccount(u64), + /** 6 **/InitializeNonceAccount(Pubkey), + /** 7 **/AuthorizeNonceAccount(Pubkey), + /** 8 **/Allocate {/**/}, + /** 9 **/AllocateWithSeed {/**/}, + /** 10 **/AssignWithSeed {/**/}, + /** 11 **/TransferWithSeed {/**/}, + /** 12 **/UpgradeNonceAccount, +} +``` + +Next up is `u32('instruction')`. + +```javascript +{ + index: 8, + layout: struct([ + u32('instruction'), /* <-- */ + ns64('space'), + ]) +} +``` + +The `layout` in the allocate struct must always have `u32('instruction')` first +when you are using it to call an instruction. + +```javascript +{ + index: 8, + layout: struct([ + u32('instruction'), + ns64('space'), /* <-- */ + ]) +} +``` + +`ns64('space')` is the argument for the `allocate` function. You can see in the +original `allocate` function in Rust that space was of the type `u64`. `u64` is +an unsigned 64bit integer. Javascript by default only provides up to 53bit +integers. `ns64` comes from `@solana/buffer-layout` to help with type +conversions between Rust and Javascript. 
You can find more type conversions +between Rust and Javascript at +[solana-labs/buffer-layout](https://github.com/solana-labs/buffer-layout). + +```javascript +let data = Buffer.alloc(allocateStruct.layout.span); +let layoutFields = Object.assign({ instruction: allocateStruct.index }, params); +allocateStruct.layout.encode(layoutFields, data); +``` + +Using the previously created bufferLayout, we can allocate a data buffer. We +then assign our params `{ space: 100 }` so that it maps correctly to the layout, +and encode it to the data buffer. Now the data is ready to be sent to the +program. + +```javascript +allocateTransaction.add( + new web3.TransactionInstruction({ + keys, + programId: web3.SystemProgram.programId, + data, + }), +); + +await web3.sendAndConfirmTransaction(connection, allocateTransaction, [ + payer, + keypair, +]); +``` + +Finally, we add the transaction instruction with all the account keys, payer, +data, and programId and broadcast the transaction to the network. + +The full code can be found below. + +```javascript +const { struct, u32, ns64 } = require("@solana/buffer-layout"); +const { Buffer } = require("buffer"); +const web3 = require("@solana/web3.js"); + +let keypair = web3.Keypair.generate(); +let payer = web3.Keypair.generate(); + +let connection = new web3.Connection(web3.clusterApiUrl("testnet")); + +let airdropSignature = await connection.requestAirdrop( + payer.publicKey, + web3.LAMPORTS_PER_SOL, +); + +await connection.confirmTransaction({ signature: airdropSignature }); + +let allocateTransaction = new web3.Transaction({ + feePayer: payer.publicKey, +}); +let keys = [{ pubkey: keypair.publicKey, isSigner: true, isWritable: true }]; +let params = { space: 100 }; + +let allocateStruct = { + index: 8, + layout: struct([u32("instruction"), ns64("space")]), +}; + +let data = Buffer.alloc(allocateStruct.layout.span); +let layoutFields = Object.assign({ instruction: allocateStruct.index }, params); +allocateStruct.layout.encode(layoutFields, data); + +allocateTransaction.add( + new web3.TransactionInstruction({ + keys, + programId: web3.SystemProgram.programId, + data, + }), +); + +await web3.sendAndConfirmTransaction(connection, allocateTransaction, [ + payer, + keypair, +]); +``` diff --git a/docs/clients/rust.md b/docs/clients/rust.md new file mode 100644 index 000000000..17fb6dfe6 --- /dev/null +++ b/docs/clients/rust.md @@ -0,0 +1,38 @@ +--- +sidebarLabel: Rust +title: Rust Client for Solana +sidebarSortOrder: 1 +--- + +Solana's Rust crates are [published to crates.io][crates.io] and can be found +[on docs.rs with the "solana-" prefix][docs.rs]. + +[crates.io]: https://crates.io/search?q=solana- +[docs.rs]: https://docs.rs/releases/search?query=solana- + +Some important crates: + +- [`solana-program`] — Imported by programs running on Solana, compiled to + SBF. This crate contains many fundamental data types and is re-exported from + [`solana-sdk`], which cannot be imported from a Solana program. + +- [`solana-sdk`] — The basic off-chain SDK, it re-exports + [`solana-program`] and adds more APIs on top of that. Most Solana programs + that do not run on-chain will import this. + +- [`solana-client`] — For interacting with a Solana node via the + [JSON RPC API](/docs/rpc). + +- [`solana-cli-config`] — Loading and saving the Solana CLI configuration + file. + +- [`solana-clap-utils`] — Routines for setting up a CLI, using [`clap`], + as used by the main Solana CLI. Includes functions for loading all types of + signers supported by the CLI. 
+ +[`solana-program`]: https://docs.rs/solana-program +[`solana-sdk`]: https://docs.rs/solana-sdk +[`solana-client`]: https://docs.rs/solana-client +[`solana-cli-config`]: https://docs.rs/solana-cli-config +[`solana-clap-utils`]: https://docs.rs/solana-clap-utils +[`clap`]: https://docs.rs/clap diff --git a/docs/core/accounts.md b/docs/core/accounts.md new file mode 100644 index 000000000..f8fb1263b --- /dev/null +++ b/docs/core/accounts.md @@ -0,0 +1,177 @@ +--- +sidebarLabel: Accounts +sidebarSortOrder: 1 +title: Accounts and Storing State +--- + +## Storing State between Transactions + +If the program needs to store state between transactions, it does so using +_accounts_. Accounts are similar to files in operating systems such as Linux in +that they may hold arbitrary data that persists beyond the lifetime of a +program. Also like a file, an account includes metadata that tells the runtime +who is allowed to access the data and how. + +Unlike a file, the account includes metadata for the lifetime of the file. That +lifetime is expressed by a number of fractional native tokens called _lamports_. +Accounts are held in validator memory and pay ["rent"](#rent) to stay there. +Each validator periodically scans all accounts and collects rent. Any account +that drops to zero lamports is purged. Accounts can also be marked +[rent-exempt](#rent-exemption) if they contain a sufficient number of lamports. + +In the same way that a Linux user uses a path to look up a file, a Solana client +uses an _address_ to look up an account. The address is a 256-bit public key. + +## Signers + +Transactions include one or more digital +[signatures](/docs/terminology.md#signature) each corresponding to an account +address referenced by the transaction. Each of these addresses must be the +public key of an ed25519 keypair, and the signature signifies that the holder of +the matching private key signed, and thus, "authorized" the transaction. In this +case, the account is referred to as a _signer_. Whether an account is a signer +or not is communicated to the program as part of the account's metadata. +Programs can then use that information to make authority decisions. + +## Read-only + +Transactions can [indicate](/docs/core/transactions.md#message-header-format) +that some of the accounts it references be treated as _read-only accounts_ in +order to enable parallel account processing between transactions. The runtime +permits read-only accounts to be read concurrently by multiple programs. If a +program attempts to modify a read-only account, the transaction is rejected by +the runtime. + +## Executable + +If an account is marked "executable" in its metadata, then it is considered a +program which can be executed by including the account's public key in an +instruction's [program id](/docs/core/transactions.md#program-id). Accounts are +marked as executable during a successful program deployment process by the +loader that owns the account. When a program is deployed to the execution engine +(SBF deployment), the loader determines that the bytecode in the account's data +is valid. If so, the loader permanently marks the program account as executable. + +If a program is marked as final (non-upgradeable), the runtime enforces that the +account's data (the program) is immutable. Through the upgradeable loader, it is +possible to upload a totally new program to an existing program address. 
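+
+To make the account metadata discussed in this section more concrete, here is a
+minimal sketch using `@solana/web3.js` that fetches an account and prints a few
+of its fields (its lamport balance, its owner program id, and its `executable`
+flag). The System program's address is used only as an arbitrary example of an
+executable account; any address can be queried the same way.
+
+```javascript
+const web3 = require("@solana/web3.js");
+
+let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed");
+
+// Fetch the account stored at a given address
+let accountInfo = await connection.getAccountInfo(web3.SystemProgram.programId);
+
+console.log(`lamports: ${accountInfo.lamports}`);
+console.log(`owner: ${accountInfo.owner.toBase58()}`);
+console.log(`executable: ${accountInfo.executable}`);
+console.log(`data length: ${accountInfo.data.length} bytes`);
+```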
+ +## Creating + +To create an account, a client generates a _keypair_ and registers its public +key using the `SystemProgram::CreateAccount` instruction with a fixed storage +size in bytes preallocated. The current maximum size of an account's data is 10 +MiB, which can be changed (increased or decreased) at a rate over all accounts +of 20 MiB per transaction, and the size can be increased by 10 KiB per account +and per instruction. + +An account address can be any arbitrary 256 bit value, and there are mechanisms +for advanced users to create derived addresses +(`SystemProgram::CreateAccountWithSeed`, +[`Pubkey::CreateProgramAddress`](/docs/core/cpi.md#program-derived-addresses)). + +Accounts that have never been created via the system program can also be passed +to programs. When an instruction references an account that hasn't been +previously created, the program will be passed an account with no data and zero +lamports that is owned by the system program. + +Such newly created accounts reflect whether they sign the transaction, and +therefore, can be used as an authority. Authorities in this context convey to +the program that the holder of the private key associated with the account's +public key signed the transaction. The account's public key may be known to the +program or recorded in another account, signifying some kind of ownership or +authority over an asset or operation the program controls or performs. + +## Ownership and Assignment to Programs + +A created account is initialized to be _owned_ by a built-in program called the +System program and is called a _system account_ aptly. An account includes +"owner" metadata. The owner is a program id. The runtime grants the program +write access to the account if its id matches the owner. For the case of the +System program, the runtime allows clients to transfer lamports and importantly +_assign_ account ownership, meaning changing the owner to a different program +id. If an account is not owned by a program, the program is only permitted to +read its data and credit the account. + +## Verifying validity of unmodified, reference-only accounts + +For security purposes, it is recommended that programs check the validity of any +account it reads, but does not modify. + +This is because a malicious user could create accounts with arbitrary data and +then pass these accounts to the program in place of valid accounts. The +arbitrary data could be crafted in a way that leads to unexpected or harmful +program behavior. + +The security model enforces that an account's data can only be modified by the +account's `Owner` program. This allows the program to trust that the data is +passed to them via accounts they own. The runtime enforces this by rejecting any +transaction containing a program that attempts to write to an account it does +not own. + +If a program were to not check account validity, it might read an account it +thinks it owns, but doesn't. Anyone can issue instructions to a program, and the +runtime does not know that those accounts are expected to be owned by the +program. + +To check an account's validity, the program should either check the account's +address against a known value, or check that the account is indeed owned +correctly (usually owned by the program itself). + +One example is when programs use a sysvar account. Unless the program checks the +account's address or owner, it's impossible to be sure whether it's a real and +valid sysvar account merely by successful deserialization of the account's data. 
+
+Accordingly, the Solana SDK
+[checks the sysvar account's validity during deserialization](https://github.com/solana-labs/solana/blob/a95675a7ce1651f7b59443eb146b356bc4b3f374/sdk/program/src/sysvar/mod.rs#L65).
+An alternative and safer way to read a sysvar is via the sysvar's
+[`get()` function](https://github.com/solana-labs/solana/blob/64bfc14a75671e4ec3fe969ded01a599645080eb/sdk/program/src/sysvar/mod.rs#L73),
+which doesn't require these checks.
+
+If the program always modifies the account in question, the address/owner check
+isn't required because modifying an unowned account will be rejected by the
+runtime, and the containing transaction will be thrown out.
+
+## Rent
+
+Keeping accounts alive on Solana incurs a storage cost called _rent_ because the
+blockchain cluster must actively maintain the data to process any future
+transactions. This is different from Bitcoin and Ethereum, where storing
+accounts doesn't incur any costs.
+
+Currently, all new accounts are required to be rent-exempt.
+
+### Rent exemption
+
+An account is considered rent-exempt if it holds at least 2 years' worth of
+rent. This is checked every time an account's balance is reduced, and
+transactions that would reduce the balance to below the minimum amount will
+fail.
+
+Program executable accounts are required by the runtime to be rent-exempt to
+avoid being purged.
+
+> Note: Use the
+> [`getMinimumBalanceForRentExemption`](/docs/rpc/http/getMinimumBalanceForRentExemption.mdx)
+> RPC endpoint to calculate the minimum balance for a particular account size.
+> The following calculation is illustrative only.
+
+For example, a program executable with a size of 15,000 bytes requires a
+balance of 105,290,880 lamports (=~ 0.105 SOL) to be rent-exempt:
+
+```text
+105,290,880 = 19.055441478439427 (fee rate) * (128 + 15_000)(account size including metadata) * ((365.25/2) * 2)(epochs in 2 years)
+```
+
+Rent can also be estimated via the
+[`solana rent` CLI subcommand](https://docs.solanalabs.com/cli/usage#solana-rent):
+
+```text
+$ solana rent 15000
+Rent per byte-year: 0.00000348 SOL
+Rent per epoch: 0.000288276 SOL
+Rent-exempt minimum: 0.10529088 SOL
+```
+
+Note: Rest assured that, should the storage rent rate need to be increased at
+some point in the future, steps will be taken to ensure that accounts that are
+rent-exempt before the increase will remain rent-exempt afterwards.
diff --git a/docs/core/clusters.md b/docs/core/clusters.md
new file mode 100644
index 000000000..b764f7340
--- /dev/null
+++ b/docs/core/clusters.md
@@ -0,0 +1,148 @@
+---
+sidebarLabel: Clusters & Endpoints
+title: Clusters and Public RPC Endpoints
+---
+
+The Solana blockchain has several different groups of validators, known as
+[Clusters](/docs/core/clusters.md), each serving a different purpose within the
+overall ecosystem and containing dedicated API nodes to fulfill
+[JSON-RPC](/docs/rpc) requests for their respective Cluster.
+
+The individual nodes within a Cluster are owned and operated by third parties,
+with a public endpoint available for each.
+
+## Solana public RPC endpoints
+
+The Solana Labs organization operates a public RPC endpoint for each Cluster.
+Each of these public endpoints is subject to rate limits, but they are available
+for users and developers to interact with the Solana blockchain.
+
+> Note: Public endpoint rate limits are subject to change. The specific rate
+> limits listed in this document are not guaranteed to be the most up-to-date.
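+
+As a minimal sketch (assuming the `@solana/web3.js` package is installed), you
+can connect to one of these public endpoints from JavaScript and issue a simple
+request to confirm the endpoint is reachable. The endpoint URLs for each Cluster
+are listed in the sections below.
+
+```javascript
+const web3 = require("@solana/web3.js");
+
+// clusterApiUrl returns the public Solana Labs endpoint for the given Cluster
+// ("devnet", "testnet", or "mainnet-beta")
+let connection = new web3.Connection(web3.clusterApiUrl("devnet"), "confirmed");
+
+// logs the node's solana-core version and feature set
+let version = await connection.getVersion();
+console.log(version);
+```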
+ +### Using explorers with different Clusters + +Many of the popular Solana blockchain explorers support selecting any of the +Clusters, often allowing advanced users to add a custom/private RPC endpoint as +well. + +An example of some of these Solana blockchain explorers include: + +- [http://explorer.solana.com/](https://explorer.solana.com/). +- [http://solana.fm/](https://solana.fm/). +- [http://solscan.io/](https://solscan.io/). +- [http://solanabeach.io/](http://solanabeach.io/). +- [http://validators.app/](http://validators.app/). + +## Devnet + +Devnet serves as a playground for anyone who wants to take Solana for a test +drive, as a user, token holder, app developer, or validator. + +- Application developers should target Devnet. +- Potential validators should first target Devnet. +- Key differences between Devnet and Mainnet Beta: + - Devnet tokens are **not real** + - Devnet includes a token faucet for airdrops for application testing + - Devnet may be subject to ledger resets + - Devnet typically runs the same software release branch version as Mainnet + Beta, but may run a newer minor release version than Mainnet Beta. +- Gossip entrypoint for Devnet: `entrypoint.devnet.solana.com:8001` + +### Devnet endpoint + +- `https://api.devnet.solana.com` - single Solana Labs hosted api node; + rate-limited + +#### Example `solana` command-line configuration + +To connect to the `devnet` Cluster using the Solana CLI: + +```bash +solana config set --url https://api.devnet.solana.com +``` + +### Devnet rate limits + +- Maximum number of requests per 10 seconds per IP: 100 +- Maximum number of requests per 10 seconds per IP for a single RPC: 40 +- Maximum concurrent connections per IP: 40 +- Maximum connection rate per 10 seconds per IP: 40 +- Maximum amount of data per 30 second: 100 MB + +## Testnet + +Testnet is where the Solana core contributors stress test recent release +features on a live cluster, particularly focused on network performance, +stability and validator behavior. + +- Testnet tokens are **not real** +- Testnet may be subject to ledger resets. +- Testnet includes a token faucet for airdrops for application testing +- Testnet typically runs a newer software release branch than both Devnet and + Mainnet Beta +- Gossip entrypoint for Testnet: `entrypoint.testnet.solana.com:8001` + +### Testnet endpoint + +- `https://api.testnet.solana.com` - single Solana Labs api node; rate-limited + +#### Example `solana` command-line configuration + +To connect to the `testnet` Cluster using the Solana CLI: + +```bash +solana config set --url https://api.testnet.solana.com +``` + +### Testnet rate limits + +- Maximum number of requests per 10 seconds per IP: 100 +- Maximum number of requests per 10 seconds per IP for a single RPC: 40 +- Maximum concurrent connections per IP: 40 +- Maximum connection rate per 10 seconds per IP: 40 +- Maximum amount of data per 30 second: 100 MB + +## Mainnet beta + +A permissionless, persistent cluster for Solana users, builders, validators and +token holders. 
+ +- Tokens that are issued on Mainnet Beta are **real** SOL +- Gossip entrypoint for Mainnet Beta: `entrypoint.mainnet-beta.solana.com:8001` + +### Mainnet beta endpoint + +- `https://api.mainnet-beta.solana.com` - Solana Labs hosted api node cluster, + backed by a load balancer; rate-limited + +#### Example `solana` command-line configuration + +To connect to the `mainnet-beta` Cluster using the Solana CLI: + +```bash +solana config set --url https://api.mainnet-beta.solana.com +``` + +### Mainnet beta rate limits + +- Maximum number of requests per 10 seconds per IP: 100 +- Maximum number of requests per 10 seconds per IP for a single RPC: 40 +- Maximum concurrent connections per IP: 40 +- Maximum connection rate per 10 seconds per IP: 40 +- Maximum amount of data per 30 second: 100 MB + +> The public RPC endpoints are not intended for production applications. Please +> use dedicated/private RPC servers when you launch your application, drop NFTs, +> etc. The public services are subject to abuse and rate limits may change +> without prior notice. Likewise, high-traffic websites may be blocked without +> prior notice. + +## Common HTTP Error Codes + +- 403 -- Your IP address or website has been blocked. It is time to run your own + RPC server(s) or find a private service. +- 429 -- Your IP address is exceeding the rate limits. Slow down! Use the + [Retry-After](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After) + HTTP response header to determine how long to wait before making another + request. diff --git a/docs/core/cpi.md b/docs/core/cpi.md new file mode 100644 index 000000000..d403eb102 --- /dev/null +++ b/docs/core/cpi.md @@ -0,0 +1,360 @@ +--- +sidebarLabel: CPI +# sidebarSortOrder: 5 +title: Cross-program Invocation (aka CPI) +--- + +The Solana runtime allows programs to call each other via a mechanism called +"[cross-program invocation](/docs/core/cpi.md)", or `cpi` for short. Calling +between programs is achieved by one program invoking an instruction of the +other. The invoking program is halted until the invoked program finishes +processing the instruction. + +For example, a client could create a transaction that modifies two accounts, +each owned by separate on-chain programs: + +```rust +let message = Message::new(vec![ + token_instruction::pay(&alice_pubkey), + acme_instruction::launch_missiles(&bob_pubkey), +]); +client.send_and_confirm_message(&[&alice_keypair, &bob_keypair], &message); +``` + +A client may instead allow the `acme` program to conveniently invoke `token` +instructions on the client's behalf: + +```rust +let message = Message::new(vec![ + acme_instruction::pay_and_launch_missiles(&alice_pubkey, &bob_pubkey), +]); +client.send_and_confirm_message(&[&alice_keypair, &bob_keypair], &message); +``` + +Given two on-chain programs, `token` and `acme`, each implementing instructions +`pay()` and `launch_missiles()` respectively, `acme` can be implemented with a +call to a function defined in the `token` module by issuing a cross-program +invocation: + +```rust +mod acme { + use token_instruction; + + fn launch_missiles(accounts: &[AccountInfo]) -> Result<()> { + ... 
+ } + + fn pay_and_launch_missiles(accounts: &[AccountInfo]) -> Result<()> { + let alice_pubkey = accounts[1].key; + let instruction = token_instruction::pay(&alice_pubkey); + invoke(&instruction, accounts)?; + + launch_missiles(accounts)?; + } +``` + +`invoke()` is built into Solana's runtime and is responsible for routing the +given instruction to the `token` program via the instruction's `program_id` +field. + +Note that `invoke` requires the caller to pass all the accounts required by the +instruction being invoked, except for the executable account (the `program_id`). + +Before invoking `pay()`, the runtime must ensure that `acme` didn't modify any +accounts owned by `token`. It does this by applying the runtime's policy to the +current state of the accounts at the time `acme` calls `invoke` vs. the initial +state of the accounts at the beginning of the `acme`'s instruction. After +`pay()` completes, the runtime must again ensure that `token` didn't modify any +accounts owned by `acme` by again applying the runtime's policy, but this time +with the `token` program ID. Lastly, after `pay_and_launch_missiles()` +completes, the runtime must apply the runtime policy one more time where it +normally would, but using all updated `pre_*` variables. If executing +`pay_and_launch_missiles()` up to `pay()` made no invalid account changes, +`pay()` made no invalid changes, and executing from `pay()` until +`pay_and_launch_missiles()` returns made no invalid changes, then the runtime +can transitively assume `pay_and_launch_missiles()` as a whole made no invalid +account changes, and therefore commit all these account modifications. + +### Instructions that require privileges + +The runtime uses the privileges granted to the caller program to determine what +privileges can be extended to the callee. Privileges in this context refer to +signers and writable accounts. For example, if the instruction the caller is +processing contains a signer or writable account, then the caller can invoke an +instruction that also contains that signer and/or writable account. + +This privilege extension relies on the fact that programs are immutable, except +during the special case of program upgrades. + +In the case of the `acme` program, the runtime can safely treat the +transaction's signature as a signature of a `token` instruction. When the +runtime sees the `token` instruction references `alice_pubkey`, it looks up the +key in the `acme` instruction to see if that key corresponds to a signed +account. In this case, it does and thereby authorizes the `token` program to +modify Alice's account. + +### Program signed accounts + +Programs can issue instructions that contain signed accounts that were not +signed in the original transaction by using +[Program derived addresses](#program-derived-addresses). + +To sign an account with program derived addresses, a program may +`invoke_signed()`. + +```rust + invoke_signed( + &instruction, + accounts, + &[&["First addresses seed"], + &["Second addresses first seed", "Second addresses second seed"]], + )?; +``` + +### Call Depth + +Cross-program invocations allow programs to invoke other programs directly, but +the depth is constrained currently to 4. + +### Reentrancy + +Reentrancy is currently limited to direct self recursion, capped at a fixed +depth. This restriction prevents situations where a program might invoke another +from an intermediary state without the knowledge that it might later be called +back into. 
Direct recursion gives the program full control of its state at the +point that it gets called back. + +## Program Derived Addresses + +Program derived addresses allow programmatically generated signatures to be used +when [calling between programs](/docs/core/cpi.md). + +Using a program derived address, a program may be given the authority over an +account and later transfer that authority to another. This is possible because +the program can act as the signer in the transaction that gives authority. + +For example, if two users want to make a wager on the outcome of a game in +Solana, they must each transfer their wager's assets to some intermediary that +will honor their agreement. Currently, there is no way to implement this +intermediary as a program in Solana because the intermediary program cannot +transfer the assets to the winner. + +This capability is necessary for many DeFi applications since they require +assets to be transferred to an escrow agent until some event occurs that +determines the new owner. + +- Decentralized Exchanges that transfer assets between matching bid and ask + orders. + +- Auctions that transfer assets to the winner. + +- Games or prediction markets that collect and redistribute prizes to the + winners. + +Program derived address: + +1. Allow programs to control specific addresses, called program addresses, in + such a way that no external user can generate valid transactions with + signatures for those addresses. + +2. Allow programs to programmatically sign for program addresses that are + present in instructions invoked via + [Cross-Program Invocations](/docs/core/cpi.md). + +Given the two conditions, users can securely transfer or assign the authority of +on-chain assets to program addresses, and the program can then assign that +authority elsewhere at its discretion. + +### Private keys for program addresses + +A program address does not lie on the ed25519 curve and therefore has no valid +private key associated with it, and thus generating a signature for it is +impossible. While it has no private key of its own, it can be used by a program +to issue an instruction that includes the program address as a signer. + +### Hash-based generated program addresses + +Program addresses are deterministically derived from a collection of seeds and a +program id using a 256-bit pre-image resistant hash function. Program address +must not lie on the ed25519 curve to ensure there is no associated private key. +During generation, an error will be returned if the address is found to lie on +the curve. There is about a 50/50 chance of this happening for a given +collection of seeds and program id. If this occurs a different set of seeds or a +seed bump (additional 8 bit seed) can be used to find a valid program address +off the curve. + +Deterministic program addresses for programs follow a similar derivation path as +Accounts created with `SystemInstruction::CreateAccountWithSeed` which is +implemented with `Pubkey::create_with_seed`. + +For reference, that implementation is as follows: + +```rust +pub fn create_with_seed( + base: &Pubkey, + seed: &str, + program_id: &Pubkey, +) -> Result { + if seed.len() > MAX_ADDRESS_SEED_LEN { + return Err(SystemError::MaxSeedLengthExceeded); + } + + Ok(Pubkey::new( + hashv(&[base.as_ref(), seed.as_ref(), program_id.as_ref()]).as_ref(), + )) +} +``` + +Programs can deterministically derive any number of addresses by using seeds. +These seeds can symbolically identify how the addresses are used. 
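+
+For instance (the "vault" label and the one-address-per-user layout here are
+purely illustrative), a program might pair a fixed seed string with a user's
+public key, using the `find_program_address` helper shown below:
+
+```rust
+use solana_program::pubkey::Pubkey;
+
+// Derive one "vault" address per user: the fixed label plus the user's public
+// key form the seed list, and the returned bump keeps the address off the curve.
+fn vault_address(program_id: &Pubkey, user: &Pubkey) -> (Pubkey, u8) {
+    Pubkey::find_program_address(&[b"vault".as_ref(), user.as_ref()], program_id)
+}
+```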
+ +From `Pubkey`:: + +```rust +/// Generate a derived program address +/// * seeds, symbolic keywords used to derive the key +/// * program_id, program that the address is derived for +pub fn create_program_address( + seeds: &[&[u8]], + program_id: &Pubkey, +) -> Result + +/// Find a valid off-curve derived program address and its bump seed +/// * seeds, symbolic keywords used to derive the key +/// * program_id, program that the address is derived for +pub fn find_program_address( + seeds: &[&[u8]], + program_id: &Pubkey, +) -> Option<(Pubkey, u8)> { + let mut bump_seed = [std::u8::MAX]; + for _ in 0..std::u8::MAX { + let mut seeds_with_bump = seeds.to_vec(); + seeds_with_bump.push(&bump_seed); + if let Ok(address) = create_program_address(&seeds_with_bump, program_id) { + return Some((address, bump_seed[0])); + } + bump_seed[0] -= 1; + } + None +} +``` + +**Warning**: Because of the way the seeds are hashed there is a potential for +program address collisions for the same program id. The seeds are hashed +sequentially which means that seeds {"abcdef"}, {"abc", "def"}, and {"ab", "cd", +"ef"} will all result in the same program address given the same program id. +Since the chance of collision is local to a given program id, the developer of +that program must take care to choose seeds that do not collide with each other. +For seed schemes that are susceptible to this type of hash collision, a common +remedy is to insert separators between seeds, e.g. transforming {"abc", "def"} +into {"abc", "-", "def"}. + +### Using program addresses + +Clients can use the `create_program_address` function to generate a destination +address. In this example, we assume that +`create_program_address(&[&["escrow"]], &escrow_program_id)` generates a valid +program address that is off the curve. + +```rust +// deterministically derive the escrow key +let escrow_pubkey = create_program_address(&[&["escrow"]], &escrow_program_id); + +// construct a transfer message using that key +let message = Message::new(vec![ + token_instruction::transfer(&alice_pubkey, &escrow_pubkey, 1), +]); + +// process the message which transfer one 1 token to the escrow +client.send_and_confirm_message(&[&alice_keypair], &message); +``` + +Programs can use the same function to generate the same address. In the function +below the program issues a `token_instruction::transfer` from a program address +as if it had the private key to sign the transaction. + +```rust +fn transfer_one_token_from_escrow( + program_id: &Pubkey, + accounts: &[AccountInfo], +) -> ProgramResult { + // User supplies the destination + let alice_pubkey = keyed_accounts[1].unsigned_key(); + + // Deterministically derive the escrow pubkey. + let escrow_pubkey = create_program_address(&[&["escrow"]], program_id); + + // Create the transfer instruction + let instruction = token_instruction::transfer(&escrow_pubkey, &alice_pubkey, 1); + + // The runtime deterministically derives the key from the currently + // executing program ID and the supplied keywords. + // If the derived address matches a key marked as signed in the instruction + // then that key is accepted as signed. + invoke_signed(&instruction, accounts, &[&["escrow"]]) +} +``` + +Note that the address generated using `create_program_address` is not guaranteed +to be a valid program address off the curve. For example, let's assume that the +seed `"escrow2"` does not generate a valid program address. 
+ +To generate a valid program address using `"escrow2"` as a seed, use +`find_program_address`, iterating through possible bump seeds until a valid +combination is found. The preceding example becomes: + +```rust +// find the escrow key and valid bump seed +let (escrow_pubkey2, escrow_bump_seed) = find_program_address(&[&["escrow2"]], &escrow_program_id); + +// construct a transfer message using that key +let message = Message::new(vec![ + token_instruction::transfer(&alice_pubkey, &escrow_pubkey2, 1), +]); + +// process the message which transfer one 1 token to the escrow +client.send_and_confirm_message(&[&alice_keypair], &message); +``` + +Within the program, this becomes: + +```rust +fn transfer_one_token_from_escrow2( + program_id: &Pubkey, + accounts: &[AccountInfo], +) -> ProgramResult { + // User supplies the destination + let alice_pubkey = keyed_accounts[1].unsigned_key(); + + // Iteratively derive the escrow pubkey + let (escrow_pubkey2, bump_seed) = find_program_address(&[&["escrow2"]], program_id); + + // Create the transfer instruction + let instruction = token_instruction::transfer(&escrow_pubkey2, &alice_pubkey, 1); + + // Include the generated bump seed to the list of all seeds + invoke_signed(&instruction, accounts, &[&["escrow2", &[bump_seed]]]) +} +``` + +Since `find_program_address` requires iterating over a number of calls to +`create_program_address`, it may use more +[compute budget](/docs/core/runtime.md#compute-budget) when used on-chain. To +reduce the compute cost, use `find_program_address` off-chain and pass the +resulting bump seed to the program. + +### Instructions that require signers + +The addresses generated with `create_program_address` and `find_program_address` +are indistinguishable from any other public key. The only way for the runtime to +verify that the address belongs to a program is for the program to supply the +seeds used to generate the address. + +The runtime will internally call `create_program_address`, and compare the +result against the addresses supplied in the instruction. + +## Examples + +Refer to [Developing with Rust](/docs/programs/lang-rust.md#examples) and +[Developing with C](/docs/programs/lang-c.md#examples) for examples of how to +use cross-program invocation. diff --git a/docs/core/index.md b/docs/core/index.md new file mode 100644 index 000000000..519926a25 --- /dev/null +++ b/docs/core/index.md @@ -0,0 +1,5 @@ +--- +metaOnly: true +title: Core Concepts +sidebarSortOrder: 2 +--- diff --git a/docs/core/programs.md b/docs/core/programs.md new file mode 100644 index 000000000..a31c044c7 --- /dev/null +++ b/docs/core/programs.md @@ -0,0 +1,92 @@ +--- +sidebarLabel: Programs +sidebarSortOrder: 2 +title: What are Solana Programs? +description: + "A Solana Program, aka smart contract, is the executable code that interprets + the instructions on the blockchain. There are two types: Native and on chain." +--- + +Solana Programs, often referred to as "_smart contracts_" on other blockchains, +are the executable code that interprets the instructions sent inside of each +transaction on the blockchain. They can be deployed directly into the core of +the network as [Native Programs](#native-programs), or published by anyone as +[On Chain Programs](#on-chain-programs). Programs are the core building blocks +of the network and handle everything from sending tokens between wallets, to +accepting votes of a DAOs, to tracking ownership of NFTs. 
+
+Both types of programs run on top of the
+[Sealevel runtime](https://medium.com/solana-labs/sealevel-parallel-processing-thousands-of-smart-contracts-d814b378192),
+which is Solana's _parallel processing_ model that helps enable the blockchain's
+high transaction speeds.
+
+## Key points
+
+- Programs are essentially a special type of [Account](/docs/core/accounts.md)
+  that is marked as "_executable_"
+- Programs can own other Accounts
+- Programs can only _change the data_ or _debit_ accounts they own
+- Any program can _read_ or _credit_ another account
+- Programs are considered stateless since the primary data stored in a program
+  account is the compiled SBF code
+- Programs can be upgraded by their owner (see more on that below)
+
+## Types of programs
+
+The Solana blockchain has two types of programs:
+
+- Native programs
+- On chain programs
+
+### On chain programs
+
+These user-written programs, often referred to as "_smart contracts_" on other
+blockchains, are deployed directly to the blockchain for anyone to interact with
+and execute. Hence the name "on chain"!
+
+In effect, "on chain programs" are any program that is not baked directly into
+the Solana cluster's core code (like the native programs discussed below).
+
+And even though Solana Labs maintains a small subset of these on chain programs
+(collectively known as the [Solana Program Library](https://spl.solana.com/)),
+anyone can create or publish one. On chain programs can also be updated directly
+on the blockchain by the respective program's Account owner.
+
+### Native programs
+
+_Native programs_ are programs that are built directly into the core of the
+Solana blockchain.
+
+Similar to other "on chain" programs in Solana, native programs can be called by
+any other program/user. However, they can only be upgraded as part of the core
+blockchain and cluster updates. These native program upgrades are controlled via
+releases to the [different clusters](/docs/core/clusters.md).
+
+#### Examples of native programs include:
+
+- [System Program](https://docs.solanalabs.com/runtime/programs#system-program):
+  Create new accounts, transfer tokens, and more
+- [BPF Loader Program](https://docs.solanalabs.com/runtime/programs#bpf-loader):
+  Deploys, upgrades, and executes programs on chain
+- [Vote program](https://docs.solanalabs.com/runtime/programs#vote-program):
+  Create and manage accounts that track validator voting state and rewards.
+
+## Executable
+
+When a Solana program is deployed onto the network, it is marked as "executable"
+by the
+[BPF Loader Program](https://docs.solanalabs.com/runtime/programs#bpf-loader).
+This allows the Solana runtime to efficiently and properly execute the compiled
+program code.
+
+## Upgradable
+
+Unlike other blockchains, Solana programs can be upgraded after they are
+deployed to the network.
+
+Native programs can only be upgraded as part of cluster updates when new
+software releases are made.
+
+On chain programs can be upgraded by the account that is marked as the "_Upgrade
+Authority_", which is usually the Solana account/address that deployed the
+program to begin with.
diff --git a/docs/core/rent.md b/docs/core/rent.md
new file mode 100644
index 000000000..cdb2a25fa
--- /dev/null
+++ b/docs/core/rent.md
@@ -0,0 +1,71 @@
+---
+sidebarLabel: Rent
+sidebarSortOrder: 3
+title: What is rent?
+description:
+  "Rent: the small fee Solana accounts incur to store data on the blockchain.
+
+  Accounts with >2 years of rent are rent exempt and do not pay the periodic
+  fee."
+---
+
+The fee for every Solana Account to store data on the blockchain is called
+"_rent_". This _time and space_ based fee is required to keep an account, and
+therefore its data, alive on the blockchain since
+[Clusters](/docs/core/clusters.md) must actively maintain this data.
+
+All Solana Accounts (and therefore Programs) are required to maintain a high
+enough LAMPORT balance to become [rent exempt](#rent-exempt) and remain on the
+Solana blockchain.
+
+When an Account no longer has enough LAMPORTS to pay its rent, it will be
+removed from the network in a process known as
+[Garbage Collection](#garbage-collection).
+
+> **Note:** Rent is different from
+> [transaction fees](/docs/core/transactions/fees.md). Rent is paid (or held in
+> an Account) to keep data stored on the Solana blockchain, whereas transaction
+> fees are paid to process
+> [instructions](/docs/core/transactions.md#instructions) on the network.
+
+### Rent rate
+
+The Solana rent rate is set on a network-wide basis, primarily based on the set
+LAMPORTS _per_ byte _per_ year.
+
+Currently, the rent rate is a static amount and stored in the
+[Rent sysvar](https://docs.solanalabs.com/runtime/sysvars#rent).
+
+## Rent exempt
+
+Accounts that maintain a minimum LAMPORT balance greater than 2 years worth of
+rent payments are considered "_rent exempt_" and will not incur rent
+collection.
+
+> At the time of writing this, new Accounts and Programs **are required** to be
+> initialized with enough LAMPORTS to become rent-exempt. The RPC endpoints can
+> calculate this
+> [estimated rent exempt balance](/docs/rpc/http/getMinimumBalanceForRentExemption.mdx)
+> for you, and using them is recommended.
+
+Every time an account's balance is reduced, a check is performed to see if the
+account is still rent exempt. Transactions that would cause an account's balance
+to drop below the rent exempt threshold will fail.
+
+## Garbage collection
+
+Accounts that do not maintain their rent exempt status, or do not have a balance
+high enough to pay rent, are removed from the network in a process known as
+_garbage collection_. This process helps reduce the network-wide storage of data
+that is no longer used or maintained.
+
+You can learn more about
+[garbage collection here](https://docs.solanalabs.com/implemented-proposals/persistent-account-storage#garbage-collection)
+in this implemented proposal.
+
+## Learn more about Rent
+
+You can learn more about Solana Rent with the following articles and
+documentation:
+
+- [Implemented Proposals - Rent](https://docs.solanalabs.com/implemented-proposals/rent)
+- [Implemented Proposals - Account Storage](https://docs.solanalabs.com/implemented-proposals/persistent-account-storage)
diff --git a/docs/core/runtime.md b/docs/core/runtime.md
new file mode 100644
index 000000000..39dc1dff9
--- /dev/null
+++ b/docs/core/runtime.md
@@ -0,0 +1,176 @@
+---
+sidebarLabel: Runtime
+title: "Overview of the Solana Runtime"
+# sidebarSortOrder: 6
+---
+
+## Capability of Programs
+
+The runtime only permits the owner program to debit the account or modify its
+data. The program then defines additional rules for whether the client can
+modify accounts it owns. In the case of the System program, it allows users to
+transfer lamports by recognizing transaction signatures. If it sees the client
+signed the transaction using the keypair's _private key_, it knows the client
+authorized the token transfer.
+ +In other words, the entire set of accounts owned by a given program can be +regarded as a key-value store, where a key is the account address and value is +program-specific arbitrary binary data. A program author can decide how to +manage the program's whole state, possibly as many accounts. + +After the runtime executes each of the transaction's instructions, it uses the +account metadata to verify that the access policy was not violated. If a program +violates the policy, the runtime discards all account changes made by all +instructions in the transaction, and marks the transaction as failed. + +### Policy + +After a program has processed an instruction, the runtime verifies that the +program only performed operations it was permitted to, and that the results +adhere to the runtime policy. + +The policy is as follows: + +- Only the owner of the account may change owner. + - And only if the account is writable. + - And only if the account is not executable. + - And only if the data is zero-initialized or empty. +- An account not assigned to the program cannot have its balance decrease. +- The balance of read-only and executable accounts may not change. +- Only the owner may change account size and data. + - And if the account is writable. + - And if the account is not executable. +- Executable is one-way (false->true) and only the account owner may set it. +- No one can make modifications to the rent_epoch associated with this account. + +## Balancing the balances + +Before and after each instruction, the sum of all account balances must stay the +same. E.g. if one account's balance is increased, another's must be decreased by +the same amount. Because the runtime can not see changes to accounts which were +not passed to it, all accounts for which the balances were modified must be +passed, even if they are not needed in the called instruction. + +## Compute Budget + +To prevent abuse of computational resources, each transaction is allocated a +compute budget. The budget specifies a maximum number of compute units that a +transaction can consume, the costs associated with different types of operations +the transaction may perform, and operational bounds the transaction must adhere +to. + +As the transaction is processed compute units are consumed by its instruction's +programs performing operations such as executing SBF instructions, calling +syscalls, etc... When the transaction consumes its entire budget, or exceeds a +bound such as attempting a call stack that is too deep, or loaded account data +size exceeds limit, the runtime halts the transaction processing and returns an +error. + +The following operations incur a compute cost: + +- Executing SBF instructions +- Passing data between programs +- Calling system calls + - logging + - creating program addresses + - cross-program invocations + - ... + +For cross-program invocations, the instructions invoked inherit the budget of +their parent. If an invoked instruction consumes the transactions remaining +budget, or exceeds a bound, the entire invocation chain and the top level +transaction processing are halted. + +The current +[compute budget](https://github.com/solana-labs/solana/blob/090e11210aa7222d8295610a6ccac4acda711bb9/program-runtime/src/compute_budget.rs#L26-L87) +can be found in the Solana Program Runtime. 
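+
+A transaction can also request its own compute unit limit (and, as described
+under prioritization fees below, a compute unit price) by including Compute
+Budget instructions. As a hedged sketch, with purely illustrative values:
+
+```rust
+use solana_sdk::compute_budget::ComputeBudgetInstruction;
+
+// Request a 300k compute unit cap and a price of 1 micro-lamport per compute
+// unit; these instructions are added to the transaction alongside the program
+// instructions it already contains.
+let limit_ix = ComputeBudgetInstruction::set_compute_unit_limit(300_000);
+let price_ix = ComputeBudgetInstruction::set_compute_unit_price(1);
+```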
+
+#### Example Compute Budget
+
+For example, if the compute budget set in the Solana runtime is:
+
+```rust
+max_units: 1,400,000,
+log_u64_units: 100,
+create_program_address_units: 1500,
+invoke_units: 1000,
+max_invoke_stack_height: 5,
+max_instruction_trace_length: 64,
+max_call_depth: 64,
+stack_frame_size: 4096,
+log_pubkey_units: 100,
+...
+```
+
+Then any transaction:
+
+- Could execute 1,400,000 SBF instructions, if it did nothing else.
+- Cannot exceed 4k of stack usage.
+- Cannot exceed an SBF call depth of 64.
+- Cannot exceed an invoke stack height of 5 (4 levels of cross-program
+  invocations).
+
+> **NOTE:** Since the compute budget is consumed incrementally as the
+> transaction executes, the total budget consumption will be a combination of
+> the various costs of the operations it performs.
+
+At runtime a program may log how much of the compute budget remains. See
+[debugging](/docs/programs/debugging.md#monitoring-compute-budget-consumption)
+for more information.
+
+### Prioritization fees
+
+As part of the Compute Budget, the runtime allows a transaction to include an
+**optional** fee, known as a
+[prioritization fee](/docs/intro/transaction_fees.md#prioritization-fee), to
+prioritize itself against other transactions.
+
+This _prioritization fee_ is calculated by multiplying the number of _compute
+units_ by the _compute unit price_ (measured in micro-lamports). These values
+may be set via the Compute Budget instructions `SetComputeUnitLimit` and
+`SetComputeUnitPrice` once per transaction.
+
+> You can learn more of the specifics of _how_ and _when_ to set a
+> prioritization fee on the
+> [transaction fees](/docs/intro/transaction_fees.md#prioritization-fee) page.
+
+### Accounts data size limit
+
+A transaction should request the maximum bytes of accounts data it is allowed to
+load by including a `SetLoadedAccountsDataSizeLimit` instruction; the requested
+limit is capped by `MAX_LOADED_ACCOUNTS_DATA_SIZE_BYTES`. If no
+`SetLoadedAccountsDataSizeLimit` is provided, the transaction defaults to a
+limit of `MAX_LOADED_ACCOUNTS_DATA_SIZE_BYTES`.
+
+The `ComputeBudgetInstruction::set_loaded_accounts_data_size_limit` function can
+be used to create this instruction:
+
+```rust
+let instruction = ComputeBudgetInstruction::set_loaded_accounts_data_size_limit(100_000);
+```
+
+## New Features
+
+As Solana evolves, new features or patches may be introduced that change the
+behavior of the cluster and how programs run. Changes in behavior must be
+coordinated between the various nodes of the cluster. If nodes do not
+coordinate, then these changes can result in a break-down of consensus. Solana
+supports a mechanism called runtime features to facilitate the smooth adoption
+of changes.
+
+Runtime features are epoch-coordinated events where one or more behavior changes
+to the cluster will occur. New changes to Solana that will change behavior are
+wrapped with feature gates and disabled by default. The Solana tools are then
+used to activate a feature, which marks it pending; once marked pending, the
+feature will be activated at the next epoch.
+
+To determine which features are activated, use the
+[Solana command-line tools](https://docs.solanalabs.com/cli/install):
+
+```bash
+solana feature status
+```
+
+If you encounter problems, first ensure that the Solana tools version you are
+using matches the version returned by `solana cluster-version`. If they do not
+match,
+[install the correct tool suite](https://docs.solanalabs.com/cli/install).
diff --git a/docs/core/transactions.md b/docs/core/transactions.md new file mode 100644 index 000000000..bc62a3347 --- /dev/null +++ b/docs/core/transactions.md @@ -0,0 +1,235 @@ +--- +title: "Transactions" +sidebarSortOrder: 4 +description: + "A Solana transaction consists of one or more instructions, an array of + accounts to read and write data from, and one or more signatures." +--- + +On the Solana blockchain, program execution begins with a +[transaction](/docs/terminology.md#transaction) being submitted to the cluster. +With each transaction consisting of one or many +[instructions](/docs/terminology.md#instruction), the runtime will process each +of the instructions contained within the transaction, in order, and atomically. +If any part of an instruction fails, then the entire transaction will fail. + +## Overview of a Transaction + +On Solana, clients update the runtime (for example, debiting an account) by +submitting a transaction to the cluster. + +This transaction consists of three parts: + +- one or more instructions +- an array of accounts to read or write from +- one or more signatures + +An [instruction](/docs/terminology.md#instruction) is the smallest execution +logic on Solana. Instructions are basically a call to update the global Solana +state. Instructions invoke programs that make calls to the Solana runtime to +update the state (for example, calling the token program to transfer tokens from +your account to another account). + +[Programs](/docs/core/programs.md) on Solana don’t store data/state; rather, +data/state is stored in accounts. + +[Signatures](/docs/terminology.md#signature) verify that we have the authority +to read or write data to the accounts that we list. + +## Anatomy of a Transaction + +This section covers the binary format of a transaction. + +### Transaction Format + +A transaction contains a [compact-array](#compact-array-format) of signatures, +followed by a [message](#message-format). Each item in the signatures array is a +[digital signature](#signature-format) of the given message. The Solana runtime +verifies that the number of signatures matches the number in the first 8 bits of +the [message header](#message-header-format). It also verifies that each +signature was signed by the private key corresponding to the public key at the +same index in the message's account addresses array. + +#### Signature Format + +Each digital signature is in the ed25519 binary format and consumes 64 bytes. + +### Message Format + +A message contains a [header](#message-header-format), followed by a +compact-array of [account addresses](#account-addresses-format), followed by a +recent [blockhash](#blockhash-format), followed by a compact-array of +[instructions](#instruction-format). + +#### Message Header Format + +The message header contains three unsigned 8-bit values. The first value is the +number of required signatures in the containing transaction. The second value is +the number of those corresponding account addresses that are read-only. The +third value in the message header is the number of read-only account addresses +not requiring signatures. + +#### Account Addresses Format + +The addresses that require signatures appear at the beginning of the account +address array, with addresses requesting read-write access first, and read-only +accounts following. The addresses that do not require signatures follow the +addresses that do, again with read-write accounts first and read-only accounts +following. 
+ +#### Blockhash Format + +A blockhash contains a 32-byte SHA-256 hash. It is used to indicate when a +client last observed the ledger. Validators will reject transactions when the +blockhash is too old. + +### Instruction Format + +An instruction contains a program id index, followed by a compact-array of +account address indexes, followed by a compact-array of opaque 8-bit data. The +program id index is used to identify an on-chain program that can interpret the +opaque data. The program id index is an unsigned 8-bit index to an account +address in the message's array of account addresses. The account address indexes +are each an unsigned 8-bit index into that same array. + +### Compact-Array Format + +A compact-array is serialized as the array length, followed by each array item. +The array length is a special multi-byte encoding called compact-u16. + +#### Compact-u16 Format + +A compact-u16 is a multi-byte encoding of 16 bits. The first byte contains the +lower 7 bits of the value in its lower 7 bits. If the value is above 0x7f, the +high bit is set and the next 7 bits of the value are placed into the lower 7 +bits of a second byte. If the value is above 0x3fff, the high bit is set and the +remaining 2 bits of the value are placed into the lower 2 bits of a third byte. + +### Account Address Format + +An account address is 32-bytes of arbitrary data. When the address requires a +digital signature, the runtime interprets it as the public key of an ed25519 +keypair. + +## Instructions + +Each [instruction](/docs/terminology.md#instruction) specifies a single program, +a subset of the transaction's accounts that should be passed to the program, and +a data byte array that is passed to the program. The program interprets the data +array and operates on the accounts specified by the instructions. The program +can return successfully, or with an error code. An error return causes the +entire transaction to fail immediately. + +Programs typically provide helper functions to construct instructions they +support. For example, the system program provides the following Rust helper to +construct a +[`SystemInstruction::CreateAccount`](https://github.com/solana-labs/solana/blob/6606590b8132e56dab9e60b3f7d20ba7412a736c/sdk/program/src/system_instruction.rs#L63) +instruction: + +```rust +pub fn create_account( + from_pubkey: &Pubkey, + to_pubkey: &Pubkey, + lamports: u64, + space: u64, + owner: &Pubkey, +) -> Instruction { + let account_metas = vec![ + AccountMeta::new(*from_pubkey, true), + AccountMeta::new(*to_pubkey, true), + ]; + Instruction::new_with_bincode( + system_program::id(), + &SystemInstruction::CreateAccount { + lamports, + space, + owner: *owner, + }, + account_metas, + ) +} +``` + +### Program Id + +The instruction's [program id](/docs/terminology.md#program-id) specifies which +program will process this instruction. The program's account's owner specifies +which loader should be used to load and execute the program, and the data +contains information about how the runtime should execute the program. + +In the case of [on-chain SBF programs](/docs/programs.md), the owner is the SBF +Loader and the account data holds the BPF bytecode. Program accounts are +permanently marked as executable by the loader once they are successfully +deployed. The runtime will reject transactions that specify programs that are +not executable. 
+ +Unlike on-chain programs, +[Native Programs](https://docs.solanalabs.com/runtime/programs) are handled +differently in that they are built directly into the Solana runtime. + +### Accounts + +The accounts referenced by an instruction represent on-chain state and serve as +both the inputs and outputs of a program. More information about accounts can be +found in the [Accounts](/docs/core/accounts.md) section. + +### Instruction data + +Each instruction carries a general purpose byte array that is passed to the +program along with the accounts. The contents of the instruction data is program +specific and typically used to convey what operations the program should +perform, and any additional information those operations may need above and +beyond what the accounts contain. + +Programs are free to specify how information is encoded into the instruction +data byte array. The choice of how data is encoded should consider the overhead +of decoding, since that step is performed by the program on-chain. It's been +observed that some common encodings (Rust's bincode for example) are very +inefficient. + +The +[Solana Program Library's Token program](https://github.com/solana-labs/solana-program-library/tree/master/token) +gives one example of how instruction data can be encoded efficiently, but note +that this method only supports fixed sized types. Token utilizes the +[Pack](https://github.com/solana-labs/solana/blob/master/sdk/program/src/program_pack.rs) +trait to encode/decode instruction data for both token instructions as well as +token account states. + +### Multiple instructions in a single transaction + +A transaction can contain instructions in any order. This means a malicious user +could craft transactions that may pose instructions in an order that the program +has not been protected against. Programs should be hardened to properly and +safely handle any possible instruction sequence. + +One not so obvious example is account deinitialization. Some programs may +attempt to deinitialize an account by setting its lamports to zero, with the +assumption that the runtime will delete the account. This assumption may be +valid between transactions, but it is not between instructions or cross-program +invocations. To harden against this, the program should also explicitly zero out +the account's data. + +An example of where this could be a problem is if a token program, upon +transferring the token out of an account, sets the account's lamports to zero, +assuming it will be deleted by the runtime. If the program does not zero out the +account's data, a malicious user could trail this instruction with another that +transfers the tokens a second time. + +## Signatures + +Each transaction explicitly lists all account public keys referenced by the +transaction's instructions. A subset of those public keys are each accompanied +by a transaction signature. Those signatures signal on-chain programs that the +account holder has authorized the transaction. Typically, the program uses the +authorization to permit debiting the account or modifying its data. More +information about how the authorization is communicated to a program can be +found in [Accounts](/docs/core/accounts.md#signers) + +## Recent Blockhash + +A transaction includes a recent [blockhash](/docs/terminology.md#blockhash) to +prevent duplication and to give transactions lifetimes. 
Any transaction that is +completely identical to a previous one is rejected, so adding a newer blockhash +allows multiple transactions to repeat the exact same action. Transactions also +have lifetimes that are defined by the blockhash, as any transaction whose +blockhash is too old will be rejected. diff --git a/docs/core/transactions/confirmation.md b/docs/core/transactions/confirmation.md new file mode 100644 index 000000000..d2c8bef7c --- /dev/null +++ b/docs/core/transactions/confirmation.md @@ -0,0 +1,374 @@ +--- +title: "Transaction Confirmation" +--- + +Problems relating to +[transaction confirmation](/docs/terminology.md#transaction-confirmations) are +common with many newer developers while building applications. This article aims +to boost the overall understanding of the confirmation mechanism used on the +Solana blockchain, including some recommended best practices. + +## Brief background on transactions + +Let’s first make sure we’re all on the same page and thinking about the same +things... + +### What is a transaction? + +Transactions consist of two components: a +[message](/docs/terminology.md#message) and a +[list of signatures](/docs/terminology.md#signature). The transaction message is +where the magic happens and at a high level it consists of three components: + +- a **list of instructions** to invoke, +- a **list of accounts** to load, and +- a **“recent blockhash.”** + +In this article, we’re going to be focusing a lot on a transaction’s +[recent blockhash](/docs/terminology.md#blockhash) because it plays a big role +in transaction confirmation. + +### Transaction lifecycle refresher + +Below is a high level view of the lifecycle of a transaction. This article will +touch on everything except steps 1 and 4. + +1. Create a list of instructions along with the list of accounts that + instructions need to read and write +2. Fetch a recent blockhash and use it to prepare a transaction message +3. Simulate the transaction to ensure it behaves as expected +4. Prompt user to sign the prepared transaction message with their private key +5. Send the transaction to an RPC node which attempts to forward it to the + current block producer +6. Hope that a block producer validates and commits the transaction into their + produced block +7. Confirm the transaction has either been included in a block or detect when it + has expired + +## What is a Blockhash? + +A [“blockhash”](/docs/terminology.md#blockhash) refers to the last Proof of +History (PoH) hash for a [“slot”](/docs/terminology.md#slot) (description +below). Since Solana uses PoH as a trusted clock, a transaction’s recent +blockhash can be thought of as a **timestamp**. + +### Proof of History refresher + +Solana’s Proof of History mechanism uses a very long chain of recursive SHA-256 +hashes to build a trusted clock. The “history” part of the name comes from the +fact that block producers hash transaction id’s into the stream to record which +transactions were processed in their block. + +[PoH hash calculation](https://github.com/solana-labs/solana/blob/9488a73f5252ad0d7ea830a0b456d9aa4bfbb7c1/entry/src/poh.rs#L82): +`next_hash = hash(prev_hash, hash(transaction_ids))` + +PoH can be used as a trusted clock because each hash must be produced +sequentially. Each produced block contains a blockhash and a list of hash +checkpoints called “ticks” so that validators can verify the full chain of +hashes in parallel and prove that some amount of time has actually passed. 
The +stream of hashes can be broken up into the following time units: + +# Transaction Expiration + +By default, all Solana transactions will expire if not committed to a block in a +certain amount of time. The **vast majority** of transaction confirmation issues +are related to how RPC nodes and validators detect and handle **expired** +transactions. A solid understanding of how transaction expiration works should +help you diagnose the bulk of your transaction confirmation issues. + +## How does transaction expiration work? + +Each transaction includes a “recent blockhash” which is used as a PoH clock +timestamp and expires when that blockhash is no longer “recent” enough. More +concretely, Solana validators look up the corresponding slot number for each +transaction’s blockhash that they wish to process in a block. If the validator +[can’t find a slot number for the blockhash](https://github.com/solana-labs/solana/blob/9488a73f5252ad0d7ea830a0b456d9aa4bfbb7c1/runtime/src/bank.rs#L3687) +or if the looked up slot number is more than 151 slots lower than the slot +number of the block being processed, the transaction will be rejected. + +Slots are configured to last about +[400ms](https://github.com/solana-labs/solana/blob/47b938e617b77eb3fc171f19aae62222503098d7/sdk/program/src/clock.rs#L12) +but often fluctuate between 400ms and 600ms, so a given blockhash can only be +used by transactions for about 60 to 90 seconds. + +Transaction has expired pseudocode: +`currentBankSlot > slotForTxRecentBlockhash + 151` + +Transaction not expired pseudocode: +`currentBankSlot - slotForTxRecentBlockhash < 152` + +### Example of transaction expiration + +Let’s walk through a quick example: + +1. A validator is producing a new block for slot #1000 +2. The validator receives a transaction with recent blockhash `1234...` from a + user +3. The validator checks the `1234...` blockhash against the list of recent + blockhashes leading up to its new block and discovers that it was the + blockhash for slot #849 +4. Since slot #849 is exactly 151 slots lower than slot #1000, the transaction + hasn’t expired yet and can still be processed! +5. But wait, before actually processing the transaction, the validator finished + the block for slot #1000 and starts producing the block for slot #1001 + (validators get to produce blocks for 4 consecutive slots). +6. The validator checks the same transaction again and finds that it’s now too + old and drops it because it’s now 152 slots lower than the current slot :( + +## Why do transactions expire? + +There’s a very good reason for this actually, it’s to help validators avoid +processing the same transaction twice. + +A naive brute force approach to prevent double processing could be to check +every new transaction against the blockchain’s entire transaction history. But +by having transactions expire after a short amount of time, validators only need +to check if a new transaction is in a relatively small set of _recently_ +processed transactions. + +### Other blockchains + +Solana’s approach of prevent double processing is quite different from other +blockchains. For example, Ethereum tracks a counter (nonce) for each transaction +sender and will only process transactions that use the next valid nonce. + +Ethereum’s approach is simple for validators to implement, but it can be +problematic for users. 
Many people have encountered situations when their +Ethereum transactions got stuck in a _pending_ state for a long time and all the +later transactions, which used higher nonce values, were blocked from +processing. + +### Advantages on Solana + +There are a few advantages to Solana’s approach: + +1. A single fee payer can submit multiple transactions at the same time that are + allowed to be processed in any order. This might happen if you’re using + multiple applications at the same time. +2. If a transaction doesn’t get committed to a block and expires, users can try + again knowing that their previous transaction won’t ever be processed. + +By not using counters, the Solana wallet experience may be easier for users to +understand because they can get to success, failure, or expiration states +quickly and avoid annoying pending states. + +### Disadvantages on Solana + +Of course there are some disadvantages too: + +1. Validators have to actively track a set of all processed transaction id’s to + prevent double processing. +2. If the expiration time period is too short, users might not be able to submit + their transaction before it expires. + +These disadvantages highlight a tradeoff in how transaction expiration is +configured. If the expiration time of a transaction is increased, validators +need to use more memory to track more transactions. If expiration time is +decreased, users don’t have enough time to submit their transaction. + +Currently, Solana clusters require that transactions use blockhashes that are no +more than +[151 slots](https://github.com/solana-labs/solana/blob/9488a73f5252ad0d7ea830a0b456d9aa4bfbb7c1/sdk/program/src/clock.rs#L65) +old. + +> This [Github issue](https://github.com/solana-labs/solana/issues/23582) +> contains some calculations that estimate that mainnet-beta validators need +> about 150MB of memory to track transactions. This could be slimmed down in the +> future if necessary without decreasing expiration time as I’ve detailed in +> that issue. + +## Transaction confirmation tips + +As mentioned before, blockhashes expire after a time period of only 151 slots +which can pass as quickly as **one minute** when slots are processed within the +target time of 400ms. + +One minute is not a lot of time considering that a client needs to fetch a +recent blockhash, wait for the user to sign, and finally hope that the +broadcasted transaction reaches a leader that is willing to accept it. Let’s go +through some tips to help avoid confirmation failures due to transaction +expiration! + +### Fetch blockhashes with the appropriate commitment level + +Given the short expiration time frame, it’s imperative that clients help users +create transactions with blockhash that is as recent as possible. + +When fetching blockhashes, the current recommended RPC API is called +[`getLatestBlockhash`](/docs/rpc/http/getLatestBlockhash.mdx). By default, this +API uses the `"finalized"` commitment level to return the most recently +finalized block’s blockhash. However, you can override this behavior by +[setting the `commitment` parameter](/docs/rpc/index.mdx#configuring-state-commitment) +to a different commitment level. + +**Recommendation** + +The `"confirmed"` commitment level should almost always be used for RPC requests +because it’s usually only a few slots behind the `"processed"` commitment and +has a very low chance of belonging to a dropped +[fork](https://docs.solanalabs.com/consensus/fork-generation). 
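+
+As a sketch of this recommendation using the Rust RPC client (the endpoint URL
+and error handling are illustrative, not a requirement):
+
+```rust
+use solana_client::rpc_client::RpcClient;
+use solana_sdk::{commitment_config::CommitmentConfig, hash::Hash};
+
+// Fetch the latest blockhash at the "confirmed" commitment level.
+fn fetch_confirmed_blockhash() -> Hash {
+    let rpc = RpcClient::new_with_commitment(
+        "https://api.devnet.solana.com".to_string(),
+        CommitmentConfig::confirmed(),
+    );
+    rpc.get_latest_blockhash()
+        .expect("failed to fetch a confirmed blockhash")
+}
+```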
+ +But feel free to consider the other options: + +- Choosing `"processed"` will let you fetch the most recent blockhash compared + to other commitment levels and therefore gives you the most time to prepare + and process a transaction. But due to the prevalence of forking in the Solana + protocol, roughly 5% of blocks don’t end up being finalized by the cluster so + there’s a real chance that your transaction uses a blockhash that belongs to a + dropped fork. Transactions that use blockhashes for abandoned blocks won’t + ever be considered recent by any blocks that are in the finalized blockchain. +- Using the default commitment level `"finalized"` will eliminate any risk that + the blockhash you choose will belong to a dropped fork. The tradeoff is that + there is typically at least a 32 slot difference between the most recent + confirmed block and the most recent finalized block. This tradeoff is pretty + severe and effectively reduces the expiration of your transactions by about 13 + seconds but this could be even more during unstable cluster conditions. + +### Use an appropriate preflight commitment level + +If your transaction uses a blockhash that was fetched from one RPC node then you +send, or simulate, that transaction with a different RPC node, you could run +into issues due to one node lagging behind the other. + +When RPC nodes receive a `sendTransaction` request, they will attempt to +determine the expiration block of your transaction using the most recent +finalized block or with the block selected by the `preflightCommitment` +parameter. A **VERY** common issue is that a received transaction’s blockhash +was produced after the block used to calculate the expiration for that +transaction. If an RPC node can’t determine when your transaction expires, it +will only forward your transaction **one time** and then will **drop** the +transaction. + +Similarly, when RPC nodes receive a `simulateTransaction` request, they will +simulate your transaction using the most recent finalized block or with the +block selected by the `preflightCommitment` parameter. If the block chosen for +simulation is older than the block used for your transaction’s blockhash, the +simulation will fail with the dreaded “blockhash not found” error. + +**Recommendation** + +Even if you use `skipPreflight`, **ALWAYS** set the `preflightCommitment` +parameter to the same commitment level used to fetch your transaction’s +blockhash for both `sendTransaction` and `simulateTransaction` requests. + +### Be wary of lagging RPC nodes when sending transactions + +When your application uses an RPC pool service or when the RPC endpoint differs +between creating a transaction and sending a transaction, you need to be wary of +situations where one RPC node is lagging behind the other. For example, if you +fetch a transaction blockhash from one RPC node then you send that transaction +to a second RPC node for forwarding or simulation, the second RPC node might be +lagging behind the first. + +**Recommendation** + +For `sendTransaction` requests, clients should keep resending a transaction to a +RPC node on a frequent interval so that if an RPC node is slightly lagging +behind the cluster, it will eventually catch up and detect your transaction’s +expiration properly. 
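+
+A minimal resend loop along these lines might look like the following sketch.
+The two second interval and the error handling are assumptions, and
+`last_valid_block_height` is assumed to come from the same `getLatestBlockhash`
+response used to build the transaction:
+
+```rust
+use std::{thread::sleep, time::Duration};
+
+use solana_client::rpc_client::RpcClient;
+use solana_sdk::transaction::Transaction;
+
+// Rebroadcast a signed transaction until the RPC node reports it confirmed or
+// the cluster has moved past the blockhash's last valid block height.
+fn send_until_confirmed(
+    rpc: &RpcClient,
+    tx: &Transaction,
+    last_valid_block_height: u64,
+) -> bool {
+    loop {
+        // Expired: the caller can safely rebuild the transaction with a fresh
+        // blockhash and try again.
+        if rpc.get_block_height().unwrap_or(0) > last_valid_block_height {
+            return false;
+        }
+        if let Ok(signature) = rpc.send_transaction(tx) {
+            if rpc.confirm_transaction(&signature).unwrap_or(false) {
+                return true;
+            }
+        }
+        sleep(Duration::from_secs(2));
+    }
+}
+```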
+
+For `simulateTransaction` requests, clients should use the
+[`replaceRecentBlockhash`](/docs/rpc/http/simulateTransaction.mdx) parameter to
+tell the RPC node to replace the simulated transaction’s blockhash with a
+blockhash that will always be valid for simulation.
+
+### Avoid reusing stale blockhashes
+
+Even if your application has fetched a very recent blockhash, be sure that
+you’re not reusing that blockhash in transactions for too long. The ideal
+scenario is that a recent blockhash is fetched right before a user signs their
+transaction.
+
+**Recommendation for applications**
+
+Poll for new recent blockhashes on a frequent basis to ensure that whenever a
+user triggers an action that creates a transaction, your application already has
+a fresh blockhash that’s ready to go.
+
+**Recommendation for wallets**
+
+Poll for new recent blockhashes on a frequent basis and replace a transaction’s
+recent blockhash right before they sign the transaction to ensure the blockhash
+is as fresh as possible.
+
+### Use healthy RPC nodes when fetching blockhashes
+
+When you fetch the latest blockhash with the `"confirmed"` commitment level from
+an RPC node, it responds with the blockhash for the latest confirmed block that
+it’s aware of. Solana’s block propagation protocol prioritizes sending blocks to
+staked nodes, so RPC nodes naturally lag about a block behind the rest of the
+cluster. They also have to do more work to handle application requests and can
+lag a lot more under heavy user traffic.
+
+Lagging RPC nodes can therefore respond to blockhash requests with blockhashes
+that were confirmed by the cluster quite a while ago. By default, a lagging RPC
+node that detects it is more than 150 slots behind the cluster will stop
+responding to requests, but just before hitting that threshold it can still
+return a blockhash that is just about to expire.
+
+**Recommendation**
+
+Monitor the health of your RPC nodes to ensure that they have an up-to-date view
+of the cluster state with one of the following methods:
+
+1. Fetch your RPC node’s highest processed slot by using the
+   [`getSlot`](/docs/rpc/http/getSlot.mdx) RPC API with the `"processed"`
+   commitment level and then call the
+   [`getMaxShredInsertSlot`](/docs/rpc/http/getMaxShredInsertSlot.mdx) RPC API to
+   get the highest slot that your RPC node has received a “shred” of a block
+   for. If the difference between these responses is very large, the cluster is
+   producing blocks far ahead of what the RPC node has processed.
+2. Call the `getLatestBlockhash` RPC API with the `"confirmed"` commitment level
+   on a few different RPC API nodes and use the blockhash from the node that
+   returns the highest slot for its
+   [context slot](/docs/rpc/index.mdx#rpcresponse-structure).
+
+### Wait long enough for expiration
+
+**Recommendation**
+
+When calling the [`getLatestBlockhash`](/docs/rpc/http/getLatestBlockhash.mdx)
+RPC API to get a recent blockhash for your transaction, take note of the
+`"lastValidBlockHeight"` in the response.
+
+Then, poll the [`getBlockHeight`](/docs/rpc/http/getBlockHeight.mdx) RPC API
+with the `"confirmed"` commitment level until it returns a block height greater
+than the previously returned last valid block height.
+
+### Consider using “durable” transactions
+
+Sometimes transaction expiration issues are really hard to avoid (e.g. offline
+signing, cluster instability).
If the previous tips are still not sufficient for +your use-case, you can switch to using durable transactions (they just require a +bit of setup). + +To start using durable transactions, a user first needs to submit a transaction +that +[invokes instructions that create a special on-chain “nonce” account](https://docs.rs/solana-program/latest/solana_program/system_instruction/fn.create_nonce_account.html) +and stores a “durable blockhash” inside of it. At any point in the future (as +long as the nonce account hasn’t been used yet), the user can create a durable +transaction by following these 2 rules: + +1. The instruction list must start with an + [“advance nonce” system instruction](https://docs.rs/solana-program/latest/solana_program/system_instruction/fn.advance_nonce_account.html) + which loads their on-chain nonce account +2. The transaction’s blockhash must be equal to the durable blockhash stored by + the on-chain nonce account + +Here’s how these transactions are processed by the Solana runtime: + +1. If the transaction’s blockhash is no longer “recent”, the runtime checks if + the transaction’s instruction list begins with an “advance nonce” system + instruction +2. If so, it then loads the nonce account specified by the “advance nonce” + instruction +3. Then it checks that the stored durable blockhash matches the transaction’s + blockhash +4. Lastly it makes sure to advance the nonce account’s stored blockhash to the + latest recent blockhash to ensure that the same transaction can never be + processed again + +For more details about how these durable transactions work, you can read the +[original proposal](https://docs.solanalabs.com/implemented-proposals/durable-tx-nonces) +and [check out an example](/docs/clients/javascript-reference.md#nonceaccount) +in the Solana docs. diff --git a/docs/core/transactions/fees.md b/docs/core/transactions/fees.md new file mode 100644 index 000000000..4409d9a29 --- /dev/null +++ b/docs/core/transactions/fees.md @@ -0,0 +1,130 @@ +--- +title: Transaction Fees +description: + "Transaction fees are the small fees paid to process instructions on the + network. These fees are based on computation and an optional prioritization + fee." +keywords: + - instruction fee + - processing fee + - storage fee + - low fee blockchain + - gas + - gwei + - cheap network + - affordable blockchain +--- + +The small fees paid to process [instructions](/docs/terminology.md#instruction) +on the Solana blockchain are known as "_transaction fees_". + +As each transaction (which contains one or more instructions) is sent through +the network, it gets processed by the current leader validation-client. Once +confirmed as a global state transaction, this _transaction fee_ is paid to the +network to help support the economic design of the Solana blockchain. + +> NOTE: Transactions fees are different from the blockchain's data storage fee +> called [rent](/docs/core/rent.md) + +### Transaction Fee Calculation + +Currently, the amount of resources consumed by a transaction do not impact fees +in any way. This is because the runtime imposes a small cap on the amount of +resources that transaction instructions can use, not to mention that the size of +transactions is limited as well. So right now, transaction fees are solely +determined by the number of signatures that need to be verified in a +transaction. The only limit on the number of signatures in a transaction is the +max size of transaction itself. 
Each signature (64 bytes) in a transaction (max +1232 bytes) must reference a unique public key (32 bytes) so a single +transaction could contain as many as 12 signatures (not sure why you would do +that). The fee per transaction signature can be fetched with the `solana` cli: + +```bash +$ solana fees +Blockhash: 8eULQbYYp67o5tGF2gxACnBCKAE39TetbYYMGTx3iBFc +Lamports per signature: 5000 +Last valid block height: 94236543 +``` + +The `solana` cli `fees` subcommand calls the `getFees` RPC API method to +retrieve the above output information, so your application can call that method +directly as well: + +```bash +$ curl http://api.mainnet-beta.solana.com -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getFees"} +' +``` + +Result: (lastValidSlot removed since it's inaccurate) + +```js +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 106818885 + }, + "value": { + "blockhash": "78e3YBCMXJBiPD1HpyVtVfFzZFPG6nUycnQcyNMSUQzB", + "feeCalculator": { + "lamportsPerSignature": 5000 + }, + "lastValidBlockHeight": 96137823 + } + }, + "id": 1 +} +``` + +### Fee Determinism + +It's important to keep in mind that fee rates (such as `lamports_per_signature`) +are subject to change from block to block (though that hasn't happened in the +full history of the `mainnet-beta` cluster). Despite the fact that fees can +fluctuate, fees for a transaction can still be calculated deterministically when +creating (and before signing) a transaction. This determinism comes from the +fact that fees are applied using the rates from the block whose blockhash +matches the `recent_blockhash` field in a transaction. Blockhashes can only be +referenced by a transaction for a few minutes before they expire. + +Transactions with expired blockhashes will be ignored and dropped by the +cluster, so it's important to understand how expiration actually works. Before +transactions are added to a block and during block validation, +[each transaction's recent blockhash is checked](https://github.com/solana-labs/solana/blob/647aa926673e3df4443d8b3d9e3f759e8ca2c44b/runtime/src/bank.rs#L3482) +to ensure it hasn't expired yet. The max age of a transaction's blockhash is +only 150 blocks. This means that if no slots are skipped in between, the +blockhash for block 100 would be usable by transactions processed in blocks 101 +to 252, inclusive (during block 101 the age of block 100 is "0" and during block +252 its age is "150"). However, it's important to remember that slots may be +skipped and that age checks use "block height" _not_ "slot height". Since slots +are skipped occasionally, the actual age of a blockhash can be a bit longer than +150 slots. At the time of writing, slot times are about 500ms and skip rate is +about 5% so the expected lifetime of a transaction which uses the most recent +blockhash is about 1min 19s. + +### Fee Collection + +Transactions are required to have at least one account which has signed the +transaction and is writable. Writable signer accounts are serialized first in +the list of transaction accounts and the first of these accounts is always used +as the "fee payer". + +Before any transaction instructions are processed, the fee payer account balance +will be deducted to pay for transaction fees. If the fee payer balance is not +sufficient to cover transaction fees, the transaction will be dropped by the +cluster. If the balance was sufficient, the fees will be deducted whether the +transaction is processed successfully or not. 
In fact, if any of the transaction +instructions return an error or violate runtime restrictions, all account +changes _except_ the transaction fee deduction will be rolled back. + +### Fee Distribution + +Transaction fees are partially burned and the remaining fees are collected by +the validator that produced the block that the corresponding transactions were +included in. The transaction fee burn rate was initialized as 50% when inflation +rewards were enabled at the beginning of 2021 and has not changed so far. These +fees incentivize a validator to process as many transactions as possible during +its slots in the leader schedule. Collected fees are deposited in the +validator's account (listed in the leader schedule for the current slot) after +processing all of the transactions included in a block. diff --git a/docs/core/transactions/retry.md b/docs/core/transactions/retry.md new file mode 100644 index 000000000..401bb5991 --- /dev/null +++ b/docs/core/transactions/retry.md @@ -0,0 +1,321 @@ +--- +title: Retrying Transactions +--- + +# Retrying Transactions + +On some occasions, a seemingly valid transaction may be dropped before it is +included in a block. This most often occurs during periods of network +congestion, when an RPC node fails to rebroadcast the transaction to the +[leader](/docs/terminology.md#leader). To an end-user, it may appear as if their +transaction disappears entirely. While RPC nodes are equipped with a generic +rebroadcasting algorithm, application developers are also capable of developing +their own custom rebroadcasting logic. + +## TLDR; + +- RPC nodes will attempt to rebroadcast transactions using a generic algorithm +- Application developers can implement their own custom rebroadcasting logic +- Developers should take advantage of the `maxRetries` parameter on the + `sendTransaction` JSON-RPC method +- Developers should enable preflight checks to raise errors before transactions + are submitted +- Before re-signing any transaction, it is **very important** to ensure that the + initial transaction’s blockhash has expired + +## The Journey of a Transaction + +### How Clients Submit Transactions + +In Solana, there is no concept of a mempool. All transactions, whether they are +initiated programmatically or by an end-user, are efficiently routed to leaders +so that they can be processed into a block. There are two main ways in which a +transaction can be sent to leaders: + +1. By proxy via an RPC server and the + [sendTransaction](/docs/rpc/http/sendTransaction.mdx) JSON-RPC method +2. Directly to leaders via a + [TPU Client](https://docs.rs/solana-client/latest/solana_client/tpu_client/index.html) + +The vast majority of end-users will submit transactions via an RPC server. When +a client submits a transaction, the receiving RPC node will in turn attempt to +broadcast the transaction to both the current and next leaders. Until the +transaction is processed by a leader, there is no record of the transaction +outside of what the client and the relaying RPC nodes are aware of. In the case +of a TPU client, rebroadcast and leader forwarding is handled entirely by the +client software. + +![Overview of a transactions journey, from client to leader](/assets/docs/rt-tx-journey.png) + +### How RPC Nodes Broadcast Transactions + +After an RPC node receives a transaction via `sendTransaction`, it will convert +the transaction into a +[UDP](https://en.wikipedia.org/wiki/User_Datagram_Protocol) packet before +forwarding it to the relevant leaders. 
UDP allows validators to quickly +communicate with one another, but does not provide any guarantees regarding +transaction delivery. + +Because Solana’s leader schedule is known in advance of every +[epoch](/docs/terminology.md#epoch) (~2 days), an RPC node will broadcast its +transaction directly to the current and next leaders. This is in contrast to +other gossip protocols such as Ethereum that propagate transactions randomly and +broadly across the entire network. By default, RPC nodes will try to forward +transactions to leaders every two seconds until either the transaction is +finalized or the transaction’s blockhash expires (150 blocks or ~1 minute 19 +seconds as of the time of this writing). If the outstanding rebroadcast queue +size is greater than +[10,000 transactions](https://github.com/solana-labs/solana/blob/bfbbc53dac93b3a5c6be9b4b65f679fdb13e41d9/send-transaction-service/src/send_transaction_service.rs#L20), +newly submitted transactions are dropped. There are command-line +[arguments](https://github.com/solana-labs/solana/blob/bfbbc53dac93b3a5c6be9b4b65f679fdb13e41d9/validator/src/main.rs#L1172) +that RPC operators can adjust to change the default behavior of this retry +logic. + +When an RPC node broadcasts a transaction, it will attempt to forward the +transaction to a leader’s +[Transaction Processing Unit (TPU)](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/validator.rs#L867). +The TPU processes transactions in five distinct phases: + +- [Fetch Stage](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/fetch_stage.rs#L21) +- [SigVerify Stage](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/tpu.rs#L91) +- [Banking Stage](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/banking_stage.rs#L249) +- [Proof of History Service](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/poh/src/poh_service.rs) +- [Broadcast Stage](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/core/src/tpu.rs#L136) + +![Overview of the Transaction Processing Unit (TPU)](/assets/docs/rt-tpu-jito-labs.png) + +Of these five phases, the Fetch Stage is responsible for receiving transactions. +Within the Fetch Stage, validators will categorize incoming transactions +according to three ports: + +- [tpu](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/gossip/src/contact_info.rs#L27) + handles regular transactions such as token transfers, NFT mints, and program + instructions +- [tpu_vote](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/gossip/src/contact_info.rs#L31) + focuses exclusively on voting transactions +- [tpu_forwards](https://github.com/solana-labs/solana/blob/cd6f931223181d5a1d47cba64e857785a175a760/gossip/src/contact_info.rs#L29) + forwards unprocessed packets to the next leader if the current leader is + unable to process all transactions + +For more information on the TPU, please refer to +[this excellent writeup by Jito Labs](https://jito-labs.medium.com/solana-validator-101-transaction-processing-90bcdc271143). + +## How Transactions Get Dropped + +Throughout a transaction’s journey, there are a few scenarios in which the +transaction can be unintentionally dropped from the network. 
+ +### Before a transaction is processed + +If the network drops a transaction, it will most likely do so before the +transaction is processed by a leader. UDP +[packet loss](https://en.wikipedia.org/wiki/Packet_loss) is the simplest reason +why this might occur. During times of intense network load, it’s also possible +for validators to become overwhelmed by the sheer number of transactions +required for processing. While validators are equipped to forward surplus +transactions via `tpu_forwards`, there is a limit to the amount of data that can +be +[forwarded](https://github.com/solana-labs/solana/blob/master/core/src/banking_stage.rs#L389). +Furthermore, each forward is limited to a single hop between validators. That +is, transactions received on the `tpu_forwards` port are not forwarded on to +other validators. + +There are also two lesser known reasons why a transaction may be dropped before +it is processed. The first scenario involves transactions that are submitted via +an RPC pool. Occasionally, part of the RPC pool can be sufficiently ahead of the +rest of the pool. This can cause issues when nodes within the pool are required +to work together. In this example, the transaction’s +[recentBlockhash](/docs/core/transactions.md#recent-blockhash) is queried from +the advanced part of the pool (Backend A). When the transaction is submitted to +the lagging part of the pool (Backend B), the nodes will not recognize the +advanced blockhash and will drop the transaction. This can be detected upon +transaction submission if developers enable +[preflight checks](/docs/rpc/http/sendTransaction.mdx) on `sendTransaction`. + +![Transaction dropped via an RPC Pool](/assets/docs/rt-dropped-via-rpc-pool.png) + +Temporarily network forks can also result in dropped transactions. If a +validator is slow to replay its blocks within the Banking Stage, it may end up +creating a minority fork. When a client builds a transaction, it’s possible for +the transaction to reference a `recentBlockhash` that only exists on the +minority fork. After the transaction is submitted, the cluster can then switch +away from its minority fork before the transaction is processed. In this +scenario, the transaction is dropped due to the blockhash not being found. + +![Transaction dropped due to minority fork (before processed)](/assets/docs/rt-dropped-minority-fork-pre-process.png) + +### After a transaction is processed and before it is finalized + +In the event a transaction references a `recentBlockhash` from a minority fork, +it’s still possible for the transaction to be processed. In this case, however, +it would be processed by the leader on the minority fork. When this leader +attempts to share its processed transactions with the rest of the network, it +would fail to reach consensus with the majority of validators that do not +recognize the minority fork. At this time, the transaction would be dropped +before it could be finalized. + +![Transaction dropped due to minority fork (after processed)](/assets/docs/rt-dropped-minority-fork-post-process.png) + +## Handling Dropped Transactions + +While RPC nodes will attempt to rebroadcast transactions, the algorithm they +employ is generic and often ill-suited for the needs of specific applications. +To prepare for times of network congestion, application developers should +customize their own rebroadcasting logic. 
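+
+To tie the preflight-check suggestion above to code, a hedged sketch (assuming
+`rawTransaction` is a fully signed, serialized transaction) that surfaces the
+error an RPC node raises when it does not recognize the transaction's blockhash:
+
+```ts
+import { Connection, SendTransactionError } from "@solana/web3.js";
+
+async function sendWithPreflight(
+  connection: Connection,
+  rawTransaction: Uint8Array,
+) {
+  try {
+    // skipPreflight defaults to false, so the node simulates the transaction first
+    return await connection.sendRawTransaction(rawTransaction, {
+      skipPreflight: false,
+    });
+  } catch (err) {
+    if (err instanceof SendTransactionError) {
+      // e.g. a "Blockhash not found" preflight failure when the node is on a
+      // lagging backend or a different fork than the one the blockhash came from
+      console.error("Preflight rejected the transaction:", err.message);
+    }
+    throw err;
+  }
+}
+```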
+ +### An In-Depth Look at sendTransaction + +When it comes to submitting transactions, the `sendTransaction` RPC method is +the primary tool available to developers. `sendTransaction` is only responsible +for relaying a transaction from a client to an RPC node. If the node receives +the transaction, `sendTransaction` will return the transaction id that can be +used to track the transaction. A successful response does not indicate whether +the transaction will be processed or finalized by the cluster. + +### Request Parameters + +- `transaction`: `string` - fully-signed Transaction, as encoded string +- (optional) `configuration object`: `object` + - `skipPreflight`: `boolean` - if true, skip the preflight transaction checks + (default: false) + - (optional) `preflightCommitment`: `string` - + [Commitment](/docs/rpc/index.mdx#configuring-state-commitment) level to use + for preflight simulations against the bank slot (default: "finalized"). + - (optional) `encoding`: `string` - Encoding used for the transaction data. + Either "base58" (slow), or "base64". (default: "base58"). + - (optional) `maxRetries`: `usize` - Maximum number of times for the RPC node + to retry sending the transaction to the leader. If this parameter is not + provided, the RPC node will retry the transaction until it is finalized or + until the blockhash expires. + +**Response:** + +- `transaction id`: `string` - First transaction signature embedded in the + transaction, as base-58 encoded string. This transaction id can be used with + [`getSignatureStatuses`](/docs/rpc/http/getSignatureStatuses.mdx) to poll for + status updates. + +## Customizing Rebroadcast Logic + +In order to develop their own rebroadcasting logic, developers should take +advantage of `sendTransaction`’s `maxRetries` parameter. If provided, +`maxRetries` will override an RPC node’s default retry logic, allowing +developers to manually control the retry process +[within reasonable bounds](https://github.com/solana-labs/solana/blob/98707baec2385a4f7114d2167ef6dfb1406f954f/validator/src/main.rs#L1258-L1274). + +A common pattern for manually retrying transactions involves temporarily storing +the `lastValidBlockHeight` that comes from +[getLatestBlockhash](/docs/rpc/http/getLatestBlockhash.mdx). Once stashed, an +application can then +[poll the cluster’s blockheight](/docs/rpc/http/getBlockHeight.mdx) and manually +retry the transaction at an appropriate interval. In times of network +congestion, it’s advantageous to set `maxRetries` to 0 and manually rebroadcast +via a custom algorithm. While some applications may employ an +[exponential backoff](https://en.wikipedia.org/wiki/Exponential_backoff) +algorithm, others such as [Mango](https://www.mango.markets/) opt to +[continuously resubmit](https://github.com/blockworks-foundation/mango-ui/blob/b6abfc6c13b71fc17ebbe766f50b8215fa1ec54f/src/utils/send.tsx#L713) +transactions at a constant interval until some timeout has occurred. 
+ +```ts +import { + Keypair, + Connection, + LAMPORTS_PER_SOL, + SystemProgram, + Transaction, +} from "@solana/web3.js"; +import * as nacl from "tweetnacl"; + +const sleep = async (ms: number) => { + return new Promise(r => setTimeout(r, ms)); +}; + +(async () => { + const payer = Keypair.generate(); + const toAccount = Keypair.generate().publicKey; + + const connection = new Connection("http://127.0.0.1:8899", "confirmed"); + + const airdropSignature = await connection.requestAirdrop( + payer.publicKey, + LAMPORTS_PER_SOL, + ); + + await connection.confirmTransaction({ signature: airdropSignature }); + + const blockhashResponse = await connection.getLatestBlockhashAndContext(); + const lastValidBlockHeight = blockhashResponse.context.slot + 150; + + const transaction = new Transaction({ + feePayer: payer.publicKey, + blockhash: blockhashResponse.value.blockhash, + lastValidBlockHeight: lastValidBlockHeight, + }).add( + SystemProgram.transfer({ + fromPubkey: payer.publicKey, + toPubkey: toAccount, + lamports: 1000000, + }), + ); + const message = transaction.serializeMessage(); + const signature = nacl.sign.detached(message, payer.secretKey); + transaction.addSignature(payer.publicKey, Buffer.from(signature)); + const rawTransaction = transaction.serialize(); + let blockheight = await connection.getBlockHeight(); + + while (blockheight < lastValidBlockHeight) { + connection.sendRawTransaction(rawTransaction, { + skipPreflight: true, + }); + await sleep(500); + blockheight = await connection.getBlockHeight(); + } +})(); +``` + +When polling via `getLatestBlockhash`, applications should specify their +intended [commitment](/docs/rpc/index.mdx#configuring-state-commitment) level. +By setting its commitment to `confirmed` (voted on) or `finalized` (~30 blocks +after `confirmed`), an application can avoid polling a blockhash from a minority +fork. + +If an application has access to RPC nodes behind a load balancer, it can also +choose to divide its workload amongst specific nodes. RPC nodes that serve +data-intensive requests such as +[getProgramAccounts](https://solanacookbook.com/guides/get-program-accounts.html) +may be prone to falling behind and can be ill-suited for also forwarding +transactions. For applications that handle time-sensitive transactions, it may +be prudent to have dedicated nodes that only handle `sendTransaction`. + +### The Cost of Skipping Preflight + +By default, `sendTransaction` will perform three preflight checks prior to +submitting a transaction. Specifically, `sendTransaction` will: + +- Verify that all signatures are valid +- Check that the referenced blockhash is within the last 150 blocks +- Simulate the transaction against the bank slot specified by the + `preflightCommitment` + +In the event that any of these three preflight checks fail, `sendTransaction` +will raise an error prior to submitting the transaction. Preflight checks can +often be the difference between losing a transaction and allowing a client to +gracefully handle an error. To ensure that these common errors are accounted +for, it is recommended that developers keep `skipPreflight` set to `false`. + +### When to Re-Sign Transactions + +Despite all attempts to rebroadcast, there may be times in which a client is +required to re-sign a transaction. Before re-signing any transaction, it is +**very important** to ensure that the initial transaction’s blockhash has +expired. If the initial blockhash is still valid, it is possible for both +transactions to be accepted by the network. 
To an end-user, this would appear as +if they unintentionally sent the same transaction twice. + +In Solana, a dropped transaction can be safely discarded once the blockhash it +references is older than the `lastValidBlockHeight` received from +`getLatestBlockhash`. Developers should keep track of this +`lastValidBlockHeight` by querying +[`getEpochInfo`](/docs/rpc/http/getEpochInfo.mdx) and comparing with +`blockHeight` in the response. Once a blockhash is invalidated, clients may +re-sign with a newly-queried blockhash. diff --git a/docs/core/transactions/versions.md b/docs/core/transactions/versions.md new file mode 100644 index 000000000..6bfd01254 --- /dev/null +++ b/docs/core/transactions/versions.md @@ -0,0 +1,186 @@ +--- +title: Versioned Transactions +description: "" +--- + +[Versioned Transactions](/docs/core/transactions/versions.md) are the new +transaction format that allow for additional functionality in the Solana +runtime, including [Address Lookup Tables](/docs/advanced/lookup-tables.md). + +While changes to [on chain](/docs/programs/index.md) programs are **NOT** +required to support the new functionality of versioned transactions (or for +backwards compatibility), developers **WILL** need update their client side code +to prevent +[errors due to different transaction versions](#max-supported-transaction-version). + +## Current Transaction Versions + +The Solana runtime supports two transaction versions: + +- `legacy` - older transaction format with no additional benefit +- `0` - added support for + [Address Lookup Tables](/docs/advanced/lookup-tables.md) + +## Max supported transaction version + +All RPC requests that return a transaction **_should_** specify the highest +version of transactions they will support in their application using the +`maxSupportedTransactionVersion` option, including +[`getBlock`](/docs/rpc/http/getBlock.mdx) and +[`getTransaction`](/docs/rpc/http/getTransaction.mdx). + +An RPC request will fail if a +[Versioned Transaction](/docs/core/transactions/versions.md) is returned that is +higher than the set `maxSupportedTransactionVersion`. (i.e. if a version `0` +transaction is returned when `legacy` is selected) + +> WARNING: If no `maxSupportedTransactionVersion` value is set, then only +> `legacy` transactions will be allowed in the RPC response. Therefore, your RPC +> requests **WILL** fail if any version `0` transactions are returned. + +## How to set max supported version + +You can set the `maxSupportedTransactionVersion` using both the +[`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/) library and +JSON formatted requests directly to an RPC endpoint. 
+
+### Using web3.js
+
+Using the [`@solana/web3.js`](https://solana-labs.github.io/solana-web3.js/)
+library, you can retrieve the most recent block or get a specific transaction:
+
+```js
+// connect to the `devnet` cluster and get the current `slot`
+const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
+const slot = await connection.getSlot();
+
+// get the latest block (allowing for v0 transactions)
+const block = await connection.getBlock(slot, {
+  maxSupportedTransactionVersion: 0,
+});
+
+// get a specific transaction (allowing for v0 transactions)
+const getTx = await connection.getTransaction(
+  "3jpoANiFeVGisWRY5UP648xRXs3iQasCHABPWRWnoEjeA93nc79WrnGgpgazjq4K9m8g2NJoyKoWBV1Kx5VmtwHQ",
+  {
+    maxSupportedTransactionVersion: 0,
+  },
+);
+```
+
+### JSON requests to the RPC
+
+Using a standard JSON formatted POST request, you can set the
+`maxSupportedTransactionVersion` when retrieving a specific block:
+
+```bash
+curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d \
+'{"jsonrpc": "2.0", "id":1, "method": "getBlock", "params": [430, {
+  "encoding":"json",
+  "maxSupportedTransactionVersion":0,
+  "transactionDetails":"full",
+  "rewards":false
+}]}'
+```
+
+## How to create a Versioned Transaction
+
+Versioned transactions can be created similarly to the older method of creating
+transactions. There are differences in using certain libraries that should be
+noted.
+
+Below is an example of how to create a Versioned Transaction, using the
+`@solana/web3.js` library, to perform a SOL transfer between two accounts.
+
+#### Notes:
+
+- `payer` is a valid `Keypair` wallet, funded with SOL
+- `toAccount` is a valid `Keypair`
+
+First, import the web3.js library and create a `connection` to your desired
+cluster.
+
+We then define the recent `blockhash` and `minRent` we will need for our
+transaction and the account:
+
+```js
+const web3 = require("@solana/web3.js");
+
+// connect to the cluster and get the minimum rent for rent exempt status
+const connection = new web3.Connection(web3.clusterApiUrl("devnet"));
+let minRent = await connection.getMinimumBalanceForRentExemption(0);
+let blockhash = await connection
+  .getLatestBlockhash()
+  .then(res => res.blockhash);
+```
+
+Create an `array` of all the `instructions` you desire to send in your
+transaction.
+In the example below, we are creating a simple SOL transfer instruction:
+
+```js
+// create an array with your desired `instructions`
+const instructions = [
+  web3.SystemProgram.transfer({
+    fromPubkey: payer.publicKey,
+    toPubkey: toAccount.publicKey,
+    lamports: minRent,
+  }),
+];
+```
+
+Next, construct a `MessageV0` formatted transaction message with your desired
+`instructions`:
+
+```js
+// create v0 compatible message
+const messageV0 = new web3.TransactionMessage({
+  payerKey: payer.publicKey,
+  recentBlockhash: blockhash,
+  instructions,
+}).compileToV0Message();
+```
+
+Then, create a new `VersionedTransaction`, passing in our v0 compatible message:
+
+```js
+const transaction = new web3.VersionedTransaction(messageV0);
+
+// sign your transaction with the required `Signers`
+transaction.sign([payer]);
+```
+
+You can sign the transaction by either:
+
+- passing an array of `signatures` into the `VersionedTransaction` constructor,
+  or
+- calling the `transaction.sign()` method, passing an array of the required
+  `Signers`
+
+> NOTE: After calling the `transaction.sign()` method, all the previous
+> transaction `signatures` will be fully replaced by new signatures created from
+> the provided `Signers`.
+
+After your `VersionedTransaction` has been signed by all required accounts, you
+can send it to the cluster and `await` the response:
+
+```js
+// send our v0 transaction to the cluster
+const txid = await connection.sendTransaction(transaction);
+console.log(`https://explorer.solana.com/tx/${txid}?cluster=devnet`);
+```
+
+> NOTE: Unlike `legacy` transactions, sending a `VersionedTransaction` via
+> `sendTransaction` does **NOT** support transaction signing via passing in an
+> array of `Signers` as the second parameter. You will need to sign the
+> transaction before calling `connection.sendTransaction()`.
+
+## More Resources
+
+- using
+  [Versioned Transactions for Address Lookup Tables](/docs/advanced/lookup-tables.md#how-to-create-an-address-lookup-table)
+- view an
+  [example of a v0 transaction](https://explorer.solana.com/tx/3jpoANiFeVGisWRY5UP648xRXs3iQasCHABPWRWnoEjeA93nc79WrnGgpgazjq4K9m8g2NJoyKoWBV1Kx5VmtwHQ/?cluster=devnet)
+  on Solana Explorer
+- read the
+  [accepted proposal](https://docs.solanalabs.com/proposals/versioned-transactions)
+  for Versioned Transactions and Address Lookup Tables
diff --git a/docs/economics/inflation/_adjusted_staking_yield.md b/docs/economics/inflation/_adjusted_staking_yield.md
new file mode 100644
index 000000000..56925bf4a
--- /dev/null
+++ b/docs/economics/inflation/_adjusted_staking_yield.md
@@ -0,0 +1,170 @@
+---
+title: Adjusted Staking Yield
+---
+
+### Token Dilution
+
+Similarly we can look at the expected _Staked Dilution_ (i.e. _Adjusted Staking
+Yield_) and _Un-staked Dilution_ as previously defined. Again, _dilution_ in
+this context is defined as the change in fractional representation (i.e.
+ownership) of a set of tokens within a larger set. In this sense, dilution can
+be a positive value: an increase in fractional ownership (staked dilution /
+_Adjusted Staking Yield_), or a negative value: a decrease in fractional
+ownership (un-staked dilution).
+
+We are interested in the relative change in ownership of staked vs un-staked
+tokens as the overall token pool increases with inflation issuance. As
+discussed, this issuance is distributed only to staked token holders, increasing
+the staked token fractional representation of the _Total Current Supply_.
+ +Continuing with the same _Inflation Schedule_ parameters as above, we see the +fraction of staked supply grow as shown below. + +![Graph of example growth of staked supply](/assets/docs/economics/example_staked_supply_w_range_initial_stake.png) + +Due to this relative change in representation, the proportion of stake of any +token holder will also change as a function of the _Inflation Schedule_ and the +proportion of all tokens that are staked. + +Of initial interest, however, is the _dilution of **un-staked** tokens_, or +$D_{us}$. In the case of un-staked tokens, token dilution is only a function of +the _Inflation Schedule_ because the amount of un-staked tokens doesn't change +over time. + +This can be seen by explicitly calculating un-staked dilution as $D_{us}$. The +un-staked proportion of the token pool at time $t$ is $P_{us}(t_{N})$ and +$I_{t}$ is the incremental inflation rate applied between any two consecutive +time points. $SOL_{us}(t)$ and $SOL_{total}(t)$ is the amount of un-staked and +total SOL on the network, respectively, at time $t$. Therefore +$P_{us}(t) = SOL_{us}(t)/SOL_{total}(t)$. + +$$ +\begin{aligned} + D_{us} &= \left( \frac{P_{us}(t_{1}) - P_{us}(t_{0})}{P_{us}(t_{0})} \right)\\ + &= \left( \frac{ \left( \frac{SOL_{us}(t_{2})}{SOL_{total}(t_{2})} \right) - \left( \frac{SOL_{us}(t_{1})}{SOL_{total}(t_{1})} \right)}{ \left( \frac{SOL_{us}(t_{1})}{SOL_{total}(t_{1})} \right) } \right)\\ + +\end{aligned} +$$ + +However, because inflation issuance only increases the total amount and the +un-staked supply doesn't change: + +$$ +\begin{aligned} + SOL_{us}(t_2) &= SOL_{us}(t_1)\\ + SOL_{total}(t_2) &= SOL_{total}(t_1)\times (1 + I_{t_1})\\ +\end{aligned} +$$ + +So $D_{us}$ becomes: + +$$ +\begin{aligned} + D_{us} &= \left( \frac{ \left( \frac{SOL_{us}(t_{1})}{SOL_{total}(t_{1})\times (1 + I_{1})} \right) - \left( \frac{SOL_{us}(t_{1})}{SOL_{total}(t_{1})} \right)}{ \left( \frac{SOL_{us}(t_{1})}{SOL_{total}(t_{1})} \right) } \right)\\ + D_{us} &= \frac{1}{(1 + I_{1})} - 1\\ +\end{aligned} +$$ + +Or generally, dilution for un-staked tokens over any time frame undergoing +inflation $I$: + +$$ +D_{us} = -\frac{I}{I + 1} \\ +$$ + +So as guessed, this dilution is independent of the total proportion of staked +tokens and only depends on inflation rate. This can be seen with our example +_Inflation Schedule_ here: + +![Graph of an example dilution of unstaked SOL](/assets/docs/economics/example_unstaked_dilution.png) + +### Estimated Adjusted Staked Yield + +We can do a similar calculation to determine the _dilution_ of staked token +holders, or as we've defined here as the **_Adjusted Staked Yield_**, keeping in +mind that dilution in this context is an _increase_ in proportional ownership +over time. We'll use the terminology _Adjusted Staked Yield_ to avoid confusion +going forward. + +To see the functional form, we calculate, $Y_{adj}$, or the _Adjusted Staked +Yield_ (to be compared to _D\_{us}_ the dilution of un-staked tokens above), +where $P_{s}(t)$ is the staked proportion of token pool at time $t$ and $I_{t}$ +is the incremental inflation rate applied between any two consecutive time +points. The definition of $Y_{adj}$ is therefore: + +$$ + Y_{adj} = \frac{P_s(t_2) - P_s(t_1)}{P_s(t_1)}\\ +$$ + +As seen in the plot above, the proportion of staked tokens increases with +inflation issuance. 
Letting $SOL_s(t)$ and $SOL_{\text{total}}(t)$ represent the
+amount of staked and total SOL at time $t$ respectively:
+
+$$
+ P_s(t_2) = \frac{SOL_s(t_1) + SOL_{\text{total}}(t_1)\times I(t_1)}{SOL_{\text{total}}(t_1)\times (1 + I(t_1))}\\
+$$
+
+Where $SOL_{\text{total}}(t_1)\times I(t_1)$ is the additional inflation
+issuance added to the staked token pool. Now we can write $Y_{adj}$ in common
+terms $t_1 = t$:
+
+$$
+\begin{aligned}
+Y_{adj} &= \frac{\frac{SOL_s(t) + SOL_{\text{total}}(t)\times I(t)}{SOL_{\text{total}}(t)\times (1 + I(t))} - \frac{SOL_s(t)}{SOL_{\text{total}}(t)} }{ \frac{SOL_s(t)}{SOL_{\text{total}}(t)} } \\
+ &= \frac{ SOL_{\text{total}}(t)\times (SOL_s(t) + SOL_{\text{total}}(t)\times I(t)) }{ SOL_s(t)\times SOL_{\text{total}}(t)\times (1 + I(t)) } -1 \\
+\end{aligned}
+$$
+
+which simplifies to:
+
+$$
+Y_{adj} = \frac{ 1 + I(t)/P_s(t) }{ 1 + I(t) } - 1\\
+$$
+
+So we see that the _Adjusted Staked Yield_ is a function of the inflation rate
+and the percent of staked tokens on the network. We can see this plotted for
+various staking fractions here:
+
+![Graph of example adjusted staking yields](/assets/docs/economics/example_adjusted_staking_yields.png)
+
+It is also clear that in all cases, dilution of un-staked tokens $>$ adjusted
+staked yield (i.e. dilution of staked tokens). Explicitly we can look at the
+_relative dilution of un-staked tokens to staked tokens:_ $D_{us}/Y_{adj}$. Here
+the relationship to inflation drops out and the relative dilution, i.e. the
+impact of staking tokens vs not staking tokens, is purely a function of the % of
+the total token supply staked. From above
+
+$$
+\begin{aligned}
+Y_{adj} &= \frac{ 1 + I/P_s }{ 1 + I } - 1,~\text{and}\\
+D_{us} &= -\frac{I}{I + 1},~\text{so} \\
+\frac{D_{us}}{Y_{adj}} &= \frac{ \frac{I}{I + 1} }{ \frac{ 1 + I/P_s }{ 1 + I } - 1 } \\
+\end{aligned}
+$$
+
+which simplifies to:
+
+$$
+  \begin{aligned}
+  \frac{D_{us}}{Y_{adj}} &= \frac{ I }{ 1 + \frac{I}{P_s} - (1 + I)}\\
+  &= \frac{ I }{ \frac{I}{P_s} - I}\\
+  \frac{D_{us}}{Y_{adj}}&= \frac{ P_s }{ 1 - P_s}\\
+  \end{aligned}
+$$
+
+Here we can see that the relative dilution of un-staked tokens to staked tokens
+depends primarily on the proportion of total tokens staked. As shown above, the
+proportion of total tokens staked changes over time (i.e. $P_s = P_s(t)$) due to
+the re-staking of inflation issuance, thus we see relative dilution grow over
+time as:
+
+![Graph of example relative un-staked vs staked SOL dilution](/assets/docs/economics/example_relative_dilution.png)
+
+As might be intuitive, as the total fraction of staked tokens increases the
+relative dilution of un-staked tokens grows dramatically. E.g. with $80\%$ of
+the network tokens staked, an un-staked token holder will experience ~$400\%$
+more dilution than a staked holder.
+
+Again, this represents the fractional change in ownership of staked tokens and
+illustrates the built-in incentive for token holders to stake their tokens to
+earn _Staked Yield_ and avoid _Un-staked Dilution_.
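+
+To make these relationships concrete, a small worked example using illustrative
+values (an inflation rate of $I = 8\%$ and a staked fraction of $P_s = 75\%$,
+not values measured from the network):
+
+$$
+\begin{aligned}
+D_{us} &= -\frac{0.08}{1.08} \approx -7.4\%\\
+Y_{adj} &= \frac{1 + 0.08/0.75}{1 + 0.08} - 1 \approx 2.5\%\\
+\left|\frac{D_{us}}{Y_{adj}}\right| &= \frac{0.75}{1 - 0.75} = 3\\
+\end{aligned}
+$$
+
+That is, with three quarters of the supply staked, an un-staked holder is
+diluted about three times as much as a staked holder gains in fractional
+ownership, consistent with the ~$400\%$ figure quoted above for $80\%$ staked.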
diff --git a/docs/economics/inflation/inflation_schedule.md b/docs/economics/inflation/inflation_schedule.md new file mode 100644 index 000000000..d71ef731e --- /dev/null +++ b/docs/economics/inflation/inflation_schedule.md @@ -0,0 +1,85 @@ +--- +sidebarLabel: Proposed Inflation Schedule +title: Proposed Inflation Schedule +--- + +As mentioned above, the network's _Inflation Schedule_ is uniquely described by +three parameters: _Initial Inflation Rate_, _Disinflation Rate_ and _Long-term +Inflation Rate_. When considering these numbers, there are many factors to take +into account: + +- A large portion of the SOL issued via inflation will be distributed to + stake-holders in proportion to the SOL they have staked. We want to ensure + that the _Inflation Schedule_ design results in reasonable _Staking Yields_ + for token holders who delegate SOL and for validation service providers (via + commissions taken from _Staking Yields_). +- The primary driver of _Staked Yield_ is the amount of SOL staked divided by + the total amount of SOL (% of total SOL staked). Therefore the distribution + and delegation of tokens across validators are important factors to understand + when determining initial inflation parameters. +- Yield throttling is a current area of research that would impact + _staking-yields_. This is not taken into consideration in the discussion here + or the modeling below. +- Overall token issuance - i.e. what do we expect the Current Total Supply to be + in 10 years, or 20 years? +- Long-term, steady-state inflation is an important consideration not only for + sustainable support for the validator ecosystem and the Solana Foundation + grant programs, but also should be tuned in consideration with expected token + losses and burning over time. +- The rate at which we expect network usage to grow, as a consideration to the + disinflationary rate. Over time, we plan for inflation to drop and expect that + usage will grow. + +Based on these considerations and the community discussions following the +initial design, the Solana Foundation proposes the following Inflation Schedule +parameters: + +- Initial Inflation Rate: 8% +- Disinflation Rate: -15% +- Long-term Inflation Rate: 1.5% + +These parameters define the proposed _Inflation Schedule_. Below we show +implications of these parameters. These plots only show the impact of inflation +issuances given the Inflation Schedule as parameterized above. They _do not +account_ for other factors that may impact the Total Supply such as fee/rent +burning, slashing or other unforeseen future token destruction events. +Therefore, what is presented here is an **upper limit** on the amount of SOL +issued via inflation. + +![Example proposed inflation schedule graph](/assets/docs/economics/proposed_inflation_schedule.png) + +In the above graph we see the annual inflation rate percentage over time, given +the inflation parameters proposed above. + +![Example proposed total supply graph](/assets/docs/economics/proposed_total_supply.png) + +Similarly, here we see the _Total Current Supply_ of SOL [MM] over time, +assuming an initial _Total Current Supply_ of `488,587,349 SOL` (i.e. for this +example, taking the _Total Current Supply_ as of `2020-01-25` and simulating +inflation starting from that day). + +Setting aside validator uptime and commissions, the expected Staking Yield and +Adjusted Staking Yield metrics are then primarily a function of the % of total +SOL staked on the network. 
Therefore we can model _Staking Yield_, if we
+introduce an additional parameter _% of Staked SOL_:
+
+
+
+This parameter must be estimated because it is a dynamic property of the token
+holders and staking incentives. The values of _% of Staked SOL_ presented here
+range from 60% - 90%, which we feel covers the likely range we expect to
+observe, based on feedback from the investor and validator communities as well
+as what is observed on comparable Proof-of-Stake protocols.
+
+![Example staked yields graph](/assets/docs/economics/example_staked_yields.png)
+
+Again, the above shows an example _Staked Yield_ that a staker might expect over
+time on the Solana network with the _Inflation Schedule_ as specified. This is
+an idealized _Staked Yield_ as it neglects validator uptime impact on rewards,
+validator commissions, potential yield throttling and potential slashing
+incidents. It additionally ignores that _% of Staked SOL_ is dynamic by design -
+the economic incentives set up by this _Inflation Schedule_ are more clearly
+seen when _Token Dilution_ is taken into account (see the **Adjusted Staking
+Yield** section below).
diff --git a/docs/economics/inflation/terminology.md b/docs/economics/inflation/terminology.md
new file mode 100644
index 000000000..56b1c8154
--- /dev/null
+++ b/docs/economics/inflation/terminology.md
@@ -0,0 +1,112 @@
+---
+sidebarLabel: Inflation Terminology
+title: Inflation Related Terminology
+---
+
+Many terms are thrown around when discussing inflation and the related
+components (e.g. rewards/yield/interest); here we try to define and clarify some
+commonly used concepts:
+
+### Total Current Supply [SOL]
+
+The total amount of tokens (locked or unlocked) that have been generated (via
+genesis block or protocol inflation) minus any tokens that have been burnt (via
+transaction fees or other mechanism) or slashed. At network launch, 500,000,000
+SOL were instantiated in the genesis block. Since then the Total Current Supply
+has been reduced by the burning of transaction fees and a planned token
+reduction event. Solana’s _Total Current Supply_ can be found at
+https://explorer.solana.com/supply
+
+### Inflation Rate [%]
+
+The Solana protocol will automatically create new tokens on a predetermined
+inflation schedule (discussed below). The _Inflation Rate [%]_ is the annualized
+growth rate of the _Total Current Supply_ at any point in time.
+
+### Inflation Schedule
+
+A deterministic description of token issuance over time. The Solana Foundation
+is proposing a disinflationary _Inflation Schedule_. I.e. inflation starts at
+its highest value, the rate reduces over time until stabilizing at a
+predetermined long-term inflation rate (see discussion below). This schedule is
+completely and uniquely parameterized by three numbers:
+
+- **Initial Inflation Rate [%]**: The starting _Inflation Rate_ for when
+  inflation is first enabled. Token issuance rate can only decrease from this
+  point.
+- **Disinflation Rate [%]**: The rate at which the _Inflation Rate_ is reduced.
+- **Long-term Inflation Rate [%]**: The stable, long-term _Inflation Rate_ to be
+  expected.
+
+### Effective Inflation Rate [%]
+
+The inflation rate actually observed on the Solana network after accounting for
+other factors that might decrease the _Total Current Supply_. Note that it is
+not possible for tokens to be created outside of what is described by the
+_Inflation Schedule_.
+ +- While the _Inflation Schedule_ determines how the protocol issues SOL, this + neglects the concurrent elimination of tokens in the ecosystem due to various + factors. The primary token burning mechanism is the burning of a portion of + each transaction fee. $50\%$ of each transaction fee is burned, with the + remaining fee retained by the validator that processes the transaction. +- Additional factors such as loss of private keys and slashing events should + also be considered in a holistic analysis of the _Effective Inflation Rate_. + For example, it’s estimated that $10-20\%$ of all BTC have been lost and are + unrecoverable and that networks may experience similar yearly losses at the + rate of $1-2\%$. + +### Staking Yield [%] + +The rate of return (aka _interest_) earned on SOL staked on the network. It is +often quoted as an annualized rate (e.g. "the network _staking yield_ is +currently $10\%$ per year"). + +- _Staking yield_ is of great interest to validators and token holders who wish + to delegate their tokens to avoid token dilution due to inflation (the extent + of which is discussed below). +- $100\%$ of inflationary issuances are to be distributed to staked + token-holders in proportion to their staked SOL and to validators who charge a + commission on the rewards earned by their delegated SOL. + - There may be future consideration for an additional split of inflation + issuance with the introduction of _Archivers_ into the economy. _Archivers_ + are network participants who provide a decentralized storage service and + should also be incentivized with token distribution from inflation issuances + for this service. - Similarly, early designs specified a fixed percentage of + inflationary issuance to be delivered to the Foundation treasury for + operational expenses and future grants. However, inflation will be launching + without any portion allocated to the Foundation. +- _Staking yield_ can be calculated from the _Inflation Schedule_ along with the + fraction of the _Total Current Supply_ that is staked at any given time. The + explicit relationship is given by: + + + +### Token Dilution [%] + +Dilution is defined here as the change in proportional representation of a set +of tokens within a larger set due to the introduction of new tokens. In +practical terms, we discuss the dilution of staked or un-staked tokens due to +the introduction and distribution of inflation issuance across the network. As +will be shown below, while dilution impacts every token holder, the _relative_ +dilution between staked and un-staked tokens should be the primary concern to +un-staked token holders. Staking tokens, which will receive their proportional +distribution of inflation issuance, should assuage any dilution concerns for +staked token holders. I.e. dilution from 'inflation' is offset by the +distribution of new tokens to staked token holders, nullifying the 'dilutive' +effects of the inflation for that group. + +### Adjusted Staking Yield [%] + +A complete appraisal of earning potential from staking tokens should take into +account staked _Token Dilution_ and its impact on the _Staking Yield_. For this, +we define the _Adjusted Staking Yield_ as the change in fractional token supply +ownership of staked tokens due to the distribution of inflation issuance. I.e. +the positive dilutive effects of inflation. 
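+
+As a rough, simplified sketch of the relationships described in the _Staking
+Yield_ and _Adjusted Staking Yield_ entries above (ignoring validator uptime and
+commission, and writing $I$ for the inflation rate and $P_s$ for the fraction of
+the _Total Current Supply_ that is staked):
+
+$$
+\begin{aligned}
+\text{Staking Yield} &\approx \frac{I}{P_s}\\
+\text{Adjusted Staking Yield} &= \frac{1 + I/P_s}{1 + I} - 1\\
+\end{aligned}
+$$
+
+The second expression is derived in the **Adjusted Staking Yield** section of
+these docs; both are idealized and neglect the other factors (uptime,
+commission, slashing) mentioned above.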
diff --git a/docs/economics/staking/index.md b/docs/economics/staking/index.md
new file mode 100644
index 000000000..5d32d948b
--- /dev/null
+++ b/docs/economics/staking/index.md
@@ -0,0 +1,103 @@
+---
+sidebarLabel: Staking
+title: Staking on Solana
+---
+
+_Note before reading: All references to increases in values are in absolute
+terms with regards to balance of SOL. This document makes no suggestion as to
+the monetary value of SOL at any time._
+
+By staking your SOL tokens, you help secure the network and
+[earn rewards](https://docs.solanalabs.com/implemented-proposals/staking-rewards)
+while doing so.
+
+You can stake by delegating your tokens to validators who process transactions
+and run the network.
+
+Delegating stake is a shared-risk shared-reward financial model that may provide
+returns to holders of tokens delegated for a long period. This is achieved by
+aligning the financial incentives of the token-holders (delegators) and the
+validators to whom they delegate.
+
+The more stake delegated to a validator, the more often this validator is chosen
+to write new transactions to the ledger. The more transactions the validator
+writes, the more rewards the validator and its delegators earn. Validators who
+configure their systems to be able to process more transactions earn
+proportionally more rewards because they keep the network running as fast and as
+smoothly as possible.
+
+Validators incur costs by running and maintaining their systems, and this is
+passed on to delegators in the form of a fee collected as a percentage of
+rewards earned. This fee is known as a _commission_. Since validators earn more
+rewards the more stake is delegated to them, they may compete with one another
+to offer the lowest commission for their services.
+
+You risk losing tokens when staking through a process known as _slashing_.
+Slashing involves the removal and destruction of a portion of a validator's
+delegated stake in response to intentional malicious behavior, such as creating
+invalid transactions or censoring certain types of transactions or network
+participants.
+
+When a validator is slashed, all token holders who have delegated stake to that
+validator lose a portion of their delegation. While this means an immediate loss
+for the token holder, it also is a loss of future rewards for the validator due
+to their reduced total delegation. More details on the slashing roadmap can be
+found
+[here](https://docs.solanalabs.com/proposals/optimistic-confirmation-and-slashing#slashing-roadmap).
+
+Rewards and slashing align validator and token holder interests, which helps
+keep the network secure, robust and performant.
+
+## How do I stake my SOL tokens?
+
+You can stake SOL by moving your tokens into a wallet that supports staking. The
+wallet provides steps to create a stake account and do the delegation.
+
+#### Supported Wallets
+
+Many web and mobile wallets support Solana staking operations. Please check with
+your favorite wallet's maintainers regarding the current status of their staking
+support.
+
+#### Solana command line tools
+
+- Solana command line tools can perform all stake operations in conjunction with
+  a CLI-generated keypair file wallet, a paper wallet, or with a connected
+  Ledger Nano. See
+  [Staking commands using the Solana Command Line Tools](https://docs.solanalabs.com/cli/examples/delegate-stake).
+
+#### Create a Stake Account
+
+Follow the wallet's instructions for creating a staking account. This account
+will be of a different type than one used to simply send and receive tokens.
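+
+For developers who prefer to create and delegate a stake account
+programmatically rather than through a wallet, a rough sketch using
+`@solana/web3.js` (the `payer` keypair, the stake amount, and the validator
+`votePubkey`, chosen as described in the next section, are placeholders you
+would supply yourself):
+
+```ts
+import {
+  Authorized,
+  Connection,
+  Keypair,
+  LAMPORTS_PER_SOL,
+  Lockup,
+  PublicKey,
+  sendAndConfirmTransaction,
+  StakeProgram,
+} from "@solana/web3.js";
+
+async function createAndDelegateStake(
+  connection: Connection,
+  payer: Keypair,
+  votePubkey: PublicKey, // vote account of the validator you selected
+) {
+  const stakeAccount = Keypair.generate();
+  const rentExempt = await connection.getMinimumBalanceForRentExemption(
+    StakeProgram.space,
+  );
+
+  // Create the stake account, funded with the rent-exempt reserve plus 1 SOL to delegate
+  const createTx = StakeProgram.createAccount({
+    fromPubkey: payer.publicKey,
+    stakePubkey: stakeAccount.publicKey,
+    authorized: new Authorized(payer.publicKey, payer.publicKey),
+    lockup: new Lockup(0, 0, payer.publicKey), // no lockup
+    lamports: rentExempt + 1 * LAMPORTS_PER_SOL,
+  });
+  await sendAndConfirmTransaction(connection, createTx, [payer, stakeAccount]);
+
+  // Delegate the stake account to the chosen validator
+  const delegateTx = StakeProgram.delegate({
+    stakePubkey: stakeAccount.publicKey,
+    authorizedPubkey: payer.publicKey,
+    votePubkey,
+  });
+  await sendAndConfirmTransaction(connection, delegateTx, [payer]);
+}
+```
+
+The Solana CLI's `solana create-stake-account` and `solana delegate-stake`
+commands wrap the same instructions.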
+ +#### Select a Validator + +Follow the wallet's instructions for selecting a validator. You can get +information about potentially performant validators from the links below. The +Solana Foundation does not recommend any particular validator. + +The site solanabeach.io is built and maintained by one of our validators, +Staking Facilities. It provides a some high-level graphical information about +the network as a whole, as well as a list of each validator and some recent +performance statistics about each one. + +- https://solanabeach.io + +To view block production statistics, use the Solana command-line tools: + +- `solana validators` +- `solana block-production` + +The Solana team does not make recommendations on how to interpret this +information. Do your own due diligence. + +#### Delegate your Stake + +Follow the wallet's instructions for delegating your stake to your chosen +validator. + +## Stake Account Details + +For more information about the operations and permissions associated with a +stake account, please see +[Stake Accounts](/docs/economics/staking/stake-accounts.md) diff --git a/docs/economics/staking/stake-accounts.md b/docs/economics/staking/stake-accounts.md new file mode 100644 index 000000000..1e8d9d5cc --- /dev/null +++ b/docs/economics/staking/stake-accounts.md @@ -0,0 +1,146 @@ +--- +sidebarLabel: Stake Accounts +title: Stake Accounts +--- + +A stake account on Solana can be used to delegate tokens to validators on the +network to potentially earn rewards for the owner of the stake account. Stake +accounts are created and managed differently than a traditional wallet address, +known as a _system account_. A system account is only able to send and receive +SOL from other accounts on the network, whereas a stake account supports more +complex operations needed to manage a delegation of tokens. + +Stake accounts on Solana also work differently than those of other +Proof-of-Stake blockchain networks that you may be familiar with. This document +describes the high-level structure and functions of a Solana stake account. + +#### Account Address + +Each stake account has a unique address which can be used to look up the account +information in the command line or in any network explorer tools. However, +unlike a wallet address in which the holder of the address's keypair controls +the wallet, the keypair associated with a stake account address does not +necessarily have any control over the account. In fact, a keypair or private key +may not even exist for a stake account's address. + +The only time a stake account's address has a keypair file is when +[creating a stake account using the command line tools](https://docs.solanalabs.com/cli/examples/delegate-stake#create-a-stake-account). +A new keypair file is created first only to ensure that the stake account's +address is new and unique. + +#### Understanding Account Authorities + +Certain types of accounts may have one or more _signing authorities_ associated +with a given account. An account authority is used to sign certain transactions +for the account it controls. This is different from some other blockchain +networks where the holder of the keypair associated with the account's address +controls all of the account's activity. + +Each stake account has two signing authorities specified by their respective +address, each of which is authorized to perform certain operations on the stake +account. 
+ +The _stake authority_ is used to sign transactions for the following operations: + +- Delegating stake +- Deactivating the stake delegation +- Splitting the stake account, creating a new stake account with a portion of + the funds in the first account +- Merging two stake accounts into one +- Setting a new stake authority + +The _withdraw authority_ signs transactions for the following: + +- Withdrawing un-delegated stake into a wallet address +- Setting a new withdraw authority +- Setting a new stake authority + +The stake authority and withdraw authority are set when the stake account is +created, and they can be changed to authorize a new signing address at any time. +The stake and withdraw authority can be the same address or two different +addresses. + +The withdraw authority keypair holds more control over the account as it is +needed to liquidate the tokens in the stake account, and can be used to reset +the stake authority if the stake authority keypair becomes lost or compromised. + +Securing the withdraw authority against loss or theft is of utmost importance +when managing a stake account. + +#### Multiple Delegations + +Each stake account may only be used to delegate to one validator at a time. All +of the tokens in the account are either delegated or un-delegated, or in the +process of becoming delegated or un-delegated. To delegate a fraction of your +tokens to a validator, or to delegate to multiple validators, you must create +multiple stake accounts. + +This can be accomplished by creating multiple stake accounts from a wallet +address containing some tokens, or by creating a single large stake account and +using the stake authority to split the account into multiple accounts with token +balances of your choosing. + +The same stake and withdraw authorities can be assigned to multiple stake +accounts. + +#### Merging stake accounts + +Two stake accounts that have the same authorities and lockup can be merged into +a single resulting stake account. A merge is possible between two stakes in the +following states with no additional conditions: + +- two deactivated stakes +- an inactive stake into an activating stake during its activation epoch + +For the following cases, the voter pubkey and vote credits observed must match: + +- two activated stakes +- two activating accounts that share an activation epoch, during the activation + epoch + +All other combinations of stake states will fail to merge, including all +"transient" states, where a stake is activating or deactivating with a non-zero +effective stake. + +#### Delegation Warmup and Cooldown + +When a stake account is delegated, or a delegation is deactivated, the operation +does not take effect immediately. + +A delegation or deactivation takes several [epochs](/docs/terminology.md#epoch) +to complete, with a fraction of the delegation becoming active or inactive at +each epoch boundary after the transaction containing the instructions has been +submitted to the cluster. + +There is also a limit on how much total stake can become delegated or +deactivated in a single epoch, to prevent large sudden changes in stake across +the network as a whole. Since warmup and cooldown are dependent on the behavior +of other network participants, their exact duration is difficult to predict. +Details on the warmup and cooldown timing can be found +[here](https://docs.solanalabs.com/consensus/stake-delegation-and-rewards#stake-warmup-cooldown-withdrawal). 
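
To see where a particular delegation sits in this warmup/cooldown process, one
option is the `getStakeActivation` helper in `@solana/web3.js`. This is only a
sketch: the address is a placeholder and the underlying RPC method may not be
available on every node.

```js
import { Connection, PublicKey } from "@solana/web3.js";

const connection = new Connection("https://api.devnet.solana.com", "confirmed");

// Replace the placeholder with your stake account address.
const stakeAccountAddress = new PublicKey("<STAKE_ACCOUNT_ADDRESS>");

// Reports "activating", "active", "deactivating" or "inactive", along with
// how many lamports are currently effective versus still in transition.
const { state, active, inactive } =
  await connection.getStakeActivation(stakeAccountAddress);
console.log({ state, active, inactive });
```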
+ +#### Lockups + +Stake accounts can have a lockup which prevents the tokens they hold from being +withdrawn before a particular date or epoch has been reached. While locked up, +the stake account can still be delegated, un-delegated, or split, and its stake +authority can be changed as normal. Only withdrawal into another wallet or +updating the withdraw authority is not allowed. + +A lockup can only be added when a stake account is first created, but it can be +modified later, by the _lockup authority_ or _custodian_, the address of which +is also set when the account is created. + +#### Destroying a Stake Account + +Like other types of accounts on the Solana network, a stake account that has a +balance of 0 SOL is no longer tracked. If a stake account is not delegated and +all of the tokens it contains are withdrawn to a wallet address, the account at +that address is effectively destroyed, and will need to be manually re-created +for the address to be used again. + +#### Viewing Stake Accounts + +Stake account details can be viewed on the +[Solana Explorer](http://explorer.solana.com/accounts) by copying and pasting an +account address into the search bar. diff --git a/docs/economics/staking/stake-programming.md b/docs/economics/staking/stake-programming.md new file mode 100644 index 000000000..afac9e315 --- /dev/null +++ b/docs/economics/staking/stake-programming.md @@ -0,0 +1,28 @@ +--- +title: Stake Programming +--- + +To maximize stake distribution, decentralization, and censorship resistance on +the Solana network, staking can be performed programmatically. The team and +community have developed several on-chain and off-chain programs to make stakes +easier to manage. + +#### Stake-o-matic aka Auto-delegation Bots + +This off-chain program manages a large population of validators staked by a +central authority. The Solana Foundation uses an auto-delegation bot to +regularly delegate its stake to "non-delinquent" validators that meet specified +performance requirements. + +#### Stake Pools + +This on-chain program pools together SOL to be staked by a manager, allowing SOL +holders to stake and earn rewards without managing stakes. Users deposit SOL in +exchange for SPL tokens (staking derivatives) that represent their ownership in +the stake pool. The pool manager stakes deposited SOL according to their +strategy, perhaps using a variant of an auto-delegation bot as described above. +As stakes earn rewards, the pool and pool tokens grow proportionally in value. +Finally, pool token holders can send SPL tokens back to the stake pool to redeem +SOL, thereby participating in decentralization with much less work required. +More information can be found at the +[SPL stake pool documentation](https://spl.solana.com/stake-pool). diff --git a/docs/index.md b/docs/index.md index 7b95218d8..c86f83b12 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,6 +1,7 @@ --- sidebarSortOrder: 0 title: Solana Documentation +seoTitle: Learn how the Solana blockchain works description: "Solana is a high performance network that is utilized for a range of use cases, \ including finance, NFTs, payments, and gaming." @@ -15,42 +16,42 @@ interoperable and decentralized. Dive right into Solana to start building or setup your tooling. 
-- [Setup local environment](/cli) - Install the Solana CLI to get your local - development environment setup -- [Hello World in your browser](getstarted/hello-world) - Build and deploy your - first on-chain Solana program, directly in your browser using Solana - Playground +- [Setup local environment](/content/guides/getstarted/setup-local-development.md) - + Install the Solana CLI to get your local development environment setup +- [Hello World in your browser](/content/guides/getstarted/hello-world-in-your-browser.md) - + Build and deploy your first on-chain Solana program, directly in your browser + using Solana Playground ## Start learning Build a strong understanding of the core concepts that make Solana different from other blockchains. -- [Transactions](./developing/programming-model/transactions) - Collection of - instructions for the blockchain to execute -- [Accounts](./developing/programming-model/accounts) - Data and state storage - mechanism for Solana -- [Programs](./developing/intro/programs) - The executable code used to perform +- [Transactions](/docs/core/transactions.md) - Collection of instructions for + the blockchain to execute +- [Accounts](/docs/core/accounts.md) - Data and state storage mechanism for + Solana +- [Programs](/docs/core/programs.md) - The executable code used to perform actions on the blockchain -- [Cross-Program Invocation](./developing/programming-model/calling-between-programs) - - Core of the "composability" of Solana, this is how programs can "call" each - other. +- [Cross-Program Invocation](/docs/core/cpi.md) - Core of the "composability" of + Solana, this is how programs can "call" each other. ## Understanding the architecture Get to know the underlying architecture of how the proof-of-stake blockchain works. -- [Validators](./validator/anatomy) - the individual nodes that are the backbone - of the network -- [Clusters](./cluster/overview) - a collection of validators that work together - for consensus +- [Validators](https://docs.solanalabs.com/validator/anatomy) - the individual + nodes that are the backbone of the network +- [Clusters](https://docs.solanalabs.com/clusters/overview) - a collection of + validators that work together for consensus ## Running a validator Explore what it takes to operate a Solana validator and help secure the network. -- [System requirements](./running-validator/validator-reqs) - Recommended - hardware requirements and expected SOL needed to operate a validator -- [Quick start guide](./validator/get-started/setup-a-validator) - Setup a - validator and get connected to a cluster for the first time +- [System requirements](https://docs.solanalabs.com/operations/requirements) - + Recommended hardware requirements and expected SOL needed to operate a + validator +- [Quick start guide](https://docs.solanalabs.com/operations/setup-a-validator) - + Setup a validator and get connected to a cluster for the first time diff --git a/docs/intro/overview.md b/docs/intro/dev.md similarity index 82% rename from docs/intro/overview.md rename to docs/intro/dev.md index ecc2dff9f..715bea79f 100644 --- a/docs/intro/overview.md +++ b/docs/intro/dev.md @@ -1,9 +1,10 @@ --- +sidebarLabel: Intro to Development title: "Introduction to Solana Development" -# sidebarSortOrder: 1 -# sidebarLabel: Overview description: "Learn about the basic development concepts of the Solana blockchain." 
+# sort order is set really high to ensure this doc is listed last +sidebarSortOrder: 9999 keywords: - accounts - transactions @@ -18,9 +19,9 @@ keywords: Welcome to the Solana developer docs! -This guide contains step-by-step instructions on how to get started. Before we -get into the hands on part of the guide, we'll cover basic concepts that all -developers need to be familiar with to build on Solana: +This guide contains step-by-step instructions on how to get started. Before the +hands-on part of the guide, we'll cover basic concepts that all developers need +to be familiar with to build on Solana: - Transactions - Accounts @@ -43,17 +44,17 @@ Here's a high level representation of this. It’s important to note that this i an oversimplification of the Solana network for the purposes of learning in an easy-to-understand way. -![Solana developer workflows program-client model](/img/quickstarts/solana-overview-client-program.png) +![Solana developer workflows program-client model](/assets/docs/quickstarts/solana-overview-client-program.png) ### Program development -The first development workflow allows you to to create and deploy custom Rust, C +The first development workflow allows you to create and deploy custom Rust, C and C++ programs directly to the blockchain. Once these programs are deployed, anyone who knows how to communicate with them can use them. You can communicate with these programs by writing dApps with any of the -available client SDKs (or the [CLI](../cli.md)), all of which use the -[JSON RPC API](../api) under the hood. +available client SDKs (or the [CLI](https://docs.solanalabs.com/cli)), all of +which use the [JSON RPC API](/docs/rpc/index.mdx) under the hood. ### Client development @@ -62,14 +63,14 @@ communicate with deployed programs. Your apps can submit transactions with instructions to these programs via a client SDK to create a wide variety of applications such as wallets, exchanges and more. The most popular apps are browser extension wallets and web apps, but you can build mobile/desktop apps or -anything that can communicate with the JSON RPC API. +anything that can communicate with the [JSON RPC API](/docs/rpc/index.mdx). These two pieces work together to create a network of dApps and programs that can communicate with each other to update the state and query the blockchain. ## Wallets -A wallet is a pair of public and secret keys that are used to verify actions on +A wallet is a pair of public and private keys that are used to verify actions on the blockchain. The public key is used to identify the account and the private key is used to sign transactions. @@ -89,7 +90,7 @@ them and update the state of the blockchain. Think of it like a write command that can be rejected if certain conditions aren't met. Here's a visual representation of what a transaction contains: -![Visual layout of a transaction](/img/transaction.svg) +![Visual layout of a transaction](/assets/docs/transaction.svg) - Signatures: An array of digital signatures from the transaction's signers. - Message: The actual instructions that the transaction is issuing to the @@ -106,8 +107,7 @@ Here's a visual representation of what a transaction contains: Transactions can be created and signed using clients via SDKs, or even on-chain programs. -You can learn more about transactions -[here](../developing/programming-model/transactions.md). +You can learn more about transactions [here](/docs/core/transactions.md). 
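
To make that structure concrete, here is a minimal sketch using
`@solana/web3.js` against devnet with throwaway keypairs (the sender would still
need to be funded, e.g. from the devnet faucet): one transfer instruction is
wrapped in a transaction, signed, and sent.

```js
import {
  Connection,
  Keypair,
  LAMPORTS_PER_SOL,
  sendAndConfirmTransaction,
  SystemProgram,
  Transaction,
} from "@solana/web3.js";

const connection = new Connection("https://api.devnet.solana.com", "confirmed");

// Throwaway keypairs for illustration only.
const sender = Keypair.generate();
const recipient = Keypair.generate();

// A single instruction: ask the System Program to move 0.01 SOL.
const instruction = SystemProgram.transfer({
  fromPubkey: sender.publicKey,
  toPubkey: recipient.publicKey,
  lamports: 0.01 * LAMPORTS_PER_SOL,
});

// The transaction wraps the instruction; signing fills in the signatures array.
const transaction = new Transaction().add(instruction);
const signature = await sendAndConfirmTransaction(connection, transaction, [sender]);
console.log("Transaction signature:", signature);
```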
### Instructions @@ -125,7 +125,7 @@ Here's what an instruction looks like: | `Data` | Input data provided to the program as additional information or parameters in the format of a byte array | You can read more about instructions -[here](../developing/programming-model/transactions#instructions). +[here](/docs/core/transactions.md#instructions). ### Transaction Fees @@ -140,12 +140,13 @@ Transactions fees are calculated based on two main parts: - a statically set base fee per signature, and - the computational resources used during the transaction, measured in - "[_compute units_](../terminology.md#compute-units)" + "[_compute units_](/docs/terminology.md#compute-units)" The more work a transaction requires, the more compute units it will use, and the more it will cost. -You can read more about transaction fees [here](../transaction_fees.md). +You can read more about transaction fees +[here](/docs/core/transactions/fees.md). ## Accounts @@ -172,8 +173,7 @@ account doesn't have enough tokens to cover the rent, it will be removed. However, if the account does hold enough tokens to cover the rent for two years, it's considered "rent-exempt" and won't be deleted. -You can read more about accounts -[here](../developing/programming-model/accounts.md). +You can read more about accounts [here](/docs/core/accounts.md). ## Programs @@ -186,7 +186,7 @@ programs are stateless: any data they interact with is stored in separate accounts that are passed in via instructions. There are two sets of programs that are maintained by the Solana Labs team: -[Native Programs](../developing/runtime-facilities/programs.md) and the +[Native Programs](https://docs.solanalabs.com/runtime/programs) and the [Solana Program Library (SPL)](https://spl.solana.com/). These serve as core building blocks for on-chain interactions. Native programs are used for core blockchain functionality like creating new accounts, assigning ownership, @@ -204,7 +204,7 @@ Developers most commonly write programs in Rust using frameworks such as Anchor. However, programs can be written in any language that compiles to BPF, including C++ and Move. -You can learn more about programs [here](../developing/intro/programs.md). +You can learn more about programs [here](/docs/core/programs.md). ## Testing and developing environments @@ -214,9 +214,10 @@ The easiest and quickest way to get started is the [Solana Playground](https://beta.solpg.io) - a browser based IDE that allows you to write, deploy, and test programs. -The most popular setup is [local development](local.md) with a local validator -that you run on your machine - this allows you to test your programs locally -before deploying them to any network. +The most popular setup is +[local development](/content/guides/getstarted/setup-local-development.md) with +a local validator that you run on your machine - this allows you to test your +programs locally before deploying them to any network. In each environment, you'll be using one of three networks: @@ -230,13 +231,14 @@ In each environment, you'll be using one of three networks: Devnet has a faucet that allows you to get free SOL to test with. It costs $0 to do development on Solana. -Check out the [clusters page](../clusters.md) for more information on these. +Check out the [clusters page](/docs/core/clusters.md) for more information on +these. ## Next steps You're now ready to get started building on Solana! 
-- [Deploy your first Solana program in the browser](./hello-world.md) -- [Setup your local development environment](./local.md) -- [Get started building programs locally with Rust](./rust.md) -- [Overview of writing Solana programs](../developing/on-chain-programs/overview) +- [Deploy your first Solana program in the browser](/content/guides/getstarted/hello-world-in-your-browser.md) +- [Setup your local development environment](/content/guides/getstarted/setup-local-development.md) +- [Get started building programs locally with Rust](/content/guides/getstarted/local-rust-hello-world.md) +- [Overview of writing Solana programs](/docs/programs/index.md) diff --git a/docs/intro/economics.md b/docs/intro/economics.md new file mode 100644 index 000000000..4f8bf7d04 --- /dev/null +++ b/docs/intro/economics.md @@ -0,0 +1,49 @@ +--- +sidebarLabel: Economics +title: Solana Economics Overview +--- + +**Subject to change.** + +Solana’s crypto-economic system is designed to promote a healthy, long term +self-sustaining economy with participant incentives aligned to the security and +decentralization of the network. The main participants in this economy are +validation-clients. Their contributions to the network, state validation, and +their requisite incentive mechanisms are discussed below. + +The main channels of participant remittances are referred to as protocol-based +rewards and transaction fees. Protocol-based rewards are generated from +inflationary issuances from a protocol-defined inflation schedule. These rewards +will constitute the total protocol-based reward delivered to validation clients, +the remaining sourced from transaction fees. In the early days of the network, +it is likely that protocol-based rewards, deployed based on predefined issuance +schedule, will drive the majority of participant incentives to participate in +the network. + +These protocol-based rewards are calculated per epoch and distributed across the +active delegated stake and validator set (per validator commission). As +discussed further below, the per annum inflation rate is based on a +pre-determined disinflationary schedule. This provides the network with supply +predictability which supports long term economic stability and security. + +Transaction fees are participant-to-participant transfers, attached to network +interactions as a motivation and compensation for the inclusion and execution of +a proposed transaction. A mechanism for long-term economic stability and forking +protection through partial burning of each transaction fee is also discussed +below. + +First, an overview of the inflation design is presented. This section starts +with defining and clarifying +[Terminology](/docs/economics/inflation/terminology.md) commonly used +subsequently in the discussion of inflation and the related components. +Following that, we outline Solana's proposed +[Inflation Schedule](/docs/economics/inflation/inflation_schedule.md), i.e. the +specific parameters that uniquely parameterize the protocol-driven inflationary +issuance over time. Next is a brief section on +[Adjusted Staking Yield](/docs/economics/inflation/_adjusted_staking_yield.md), +and how token dilution might influence staking behavior. + +An overview of [Transaction Fees](/docs/core/transactions/fees.md) on Solana is +followed by a discussion of [Storage Rent Economics](/docs/intro/economics.md) +in which we describe an implementation of storage rent to account for the +externality costs of maintaining the active state of the ledger. 
diff --git a/docs/intro/history.md b/docs/intro/history.md new file mode 100644 index 000000000..6ab645fa6 --- /dev/null +++ b/docs/intro/history.md @@ -0,0 +1,61 @@ +--- +sidebarLabel: History +title: Brief History of Solana +--- + +In November of 2017, Anatoly Yakovenko published a whitepaper describing Proof +of History, a technique for keeping time between computers that do not trust one +another. From Anatoly's previous experience designing distributed systems at +Qualcomm, Mesosphere and Dropbox, he knew that a reliable clock makes network +synchronization very simple. When synchronization is simple the resulting +network can be blazing fast, bound only by network bandwidth. + +Anatoly watched as blockchain systems without clocks, such as Bitcoin and +Ethereum, struggled to scale beyond 15 transactions per second worldwide when +centralized payment systems such as Visa required peaks of 65,000 tps. Without a +clock, it was clear they'd never graduate to being the global payment system or +global supercomputer most had dreamed them to be. When Anatoly solved the +problem of getting computers that don’t trust each other to agree on time, he +knew he had the key to bring 40 years of distributed systems research to the +world of blockchain. The resulting cluster wouldn't be just 10 times faster, or +a 100 times, or a 1,000 times, but 10,000 times faster, right out of the gate! + +Anatoly's implementation began in a private codebase and was implemented in the +C programming language. Greg Fitzgerald, who had previously worked with Anatoly +at semiconductor giant Qualcomm Incorporated, encouraged him to reimplement the +project in the Rust programming language. Greg had worked on the LLVM compiler +infrastructure, which underlies both the Clang C/C++ compiler as well as the +Rust compiler. Greg claimed that the language's safety guarantees would improve +software productivity and that its lack of a garbage collector would allow +programs to perform as well as those written in C. Anatoly gave it a shot and +just two weeks later, had migrated his entire codebase to Rust. Sold. With plans +to weave all the world's transactions together on a single, scalable blockchain, +Anatoly called the project Loom. + +On February 13th of 2018, Greg began prototyping the first open source +implementation of Anatoly's whitepaper. The project was published to GitHub +under the name Silk in the `loomprotocol` organization. On February 28th, Greg +made his first release, demonstrating 10 thousand signed transactions could be +verified and processed in just over half a second. Shortly after, another former +Qualcomm cohort, Stephen Akridge, demonstrated throughput could be massively +improved by offloading signature verification to graphics processors. Anatoly +recruited Greg, Stephen and three others to co-found a company, then called +Loom. + +Around the same time, Ethereum-based project Loom Network sprung up and many +people were confused about whether they were the same project. The Loom team +decided it would rebrand. They chose the name Solana, a nod to a small beach +town North of San Diego called Solana Beach, where Anatoly, Greg and Stephen +lived and surfed for three years when they worked for Qualcomm. On March 28th, +the team created the Solana GitHub organization and renamed Greg's prototype +Silk to Solana. 
+ +In June of 2018, the team scaled up the technology to run on cloud-based +networks and on July 19th, published a 50-node, permissioned, public testnet +consistently supporting bursts of 250,000 transactions per second. In a later +release in December, called v0.10 Pillbox, the team published a permissioned +testnet running 150 nodes on a gigabit network and demonstrated soak tests +processing an _average_ of 200 thousand transactions per second with bursts over +500 thousand. The project was also extended to support on-chain programs written +in the C programming language and run concurrently in a safe execution +environment called SBF. diff --git a/docs/intro/index.md b/docs/intro/index.md index b643ce6fa..57aebcf02 100644 --- a/docs/intro/index.md +++ b/docs/intro/index.md @@ -1,6 +1,109 @@ --- -# metaOnly mode prevents this from becoming a linked record in the DocsNav -metaOnly: true title: Introduction sidebarSortOrder: 1 --- + +## What is Solana? + +Solana is an open source project implementing a new, high-performance, +permissionless blockchain. The Solana Foundation is based in Geneva, Switzerland +and maintains the open source project. + +## Why Solana? + +It is possible for a centralized database to process 710,000 transactions per +second on a standard gigabit network if the transactions are, on average, no +more than 176 bytes. A centralized database can also replicate itself and +maintain high availability without significantly compromising that transaction +rate using the distributed system technique known as Optimistic Concurrency +Control +[\[H.T.Kung, J.T.Robinson (1981)\]](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.65.4735). +At Solana, we are demonstrating that these same theoretical limits apply just as +well to blockchain on an adversarial network. The key ingredient? Finding a way +to share time when nodes cannot rely upon one another. Once nodes can rely upon +time, suddenly ~40 years of distributed systems research becomes applicable to +blockchain! + +> Perhaps the most striking difference between algorithms obtained by our method +> and ones based upon timeout is that using timeout produces a traditional +> distributed algorithm in which the processes operate asynchronously, while our +> method produces a globally synchronous one in which every process does the +> same thing at (approximately) the same time. Our method seems to contradict +> the whole purpose of distributed processing, which is to permit different +> processes to operate independently and perform different functions. However, +> if a distributed system is really a single system, then the processes must be +> synchronized in some way. Conceptually, the easiest way to synchronize +> processes is to get them all to do the same thing at the same time. Therefore, +> our method is used to implement a kernel that performs the necessary +> synchronization--for example, making sure that two different processes do not +> try to modify a file at the same time. Processes might spend only a small +> fraction of their time executing the synchronizing kernel; the rest of the +> time, they can operate independently--e.g., accessing different files. This is +> an approach we have advocated even when fault-tolerance is not required. The +> method's basic simplicity makes it easier to understand the precise properties +> of a system, which is crucial if one is to know just how fault-tolerant the +> system is. 
+> [\[L.Lamport (1984)\]](http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.71.1078) + +Furthermore, and much to our surprise, it can be implemented using a mechanism +that has existed in Bitcoin since day one. The Bitcoin feature is called +`nLocktime` and it can be used to postdate transactions using block height +instead of a timestamp. As a Bitcoin client, you would use block height instead +of a timestamp if you don't rely upon the network. Block height turns out to be +an instance of what's being called a Verifiable Delay Function in cryptography +circles. It's a cryptographically secure way to say time has passed. In Solana, +we use a far more granular verifiable delay function, a SHA 256 hash chain, to +checkpoint the ledger and coordinate consensus. With it, we implement Optimistic +Concurrency Control and are now well en route towards that theoretical limit of +710,000 transactions per second. + +## Documentation Overview + +The Solana docs describe the Solana open source project, a blockchain built from +the ground up for scale. They cover why Solana is useful, how to use it, how it +works, and why it will continue to work long after the company Solana closes its +doors. The goal of the Solana architecture is to demonstrate there exists a set +of software algorithms that when used in combination to implement a blockchain, +removes software as a performance bottleneck, allowing transaction throughput to +scale proportionally with network bandwidth. The architecture goes on to satisfy +all three desirable properties of a proper blockchain: it is scalable, secure +and decentralized. + +The architecture describes a theoretical upper bound of 710 thousand +transactions per second \(tps\) on a standard gigabit network and 28.4 million +tps on 40 gigabit. Furthermore, the architecture supports safe, concurrent +execution of programs authored in general-purpose programming languages such as +C or Rust. + +## What is a Solana Cluster? + +A cluster is a set of computers that work together and can be viewed from the +outside as a single system. A Solana cluster is a set of independently owned +computers working together \(and sometimes against each other\) to verify the +output of untrusted, user-submitted programs. A Solana cluster can be utilized +any time a user wants to preserve an immutable record of events in time or +programmatic interpretations of those events. One use is to track which of the +computers did meaningful work to keep the cluster running. Another use might be +to track the possession of real-world assets. In each case, the cluster produces +a record of events called the ledger. It will be preserved for the lifetime of +the cluster. As long as someone somewhere in the world maintains a copy of the +ledger, the output of its programs \(which may contain a record of who possesses +what\) will forever be reproducible, independent of the organization that +launched it. + +## What are SOLs? + +A SOL is the name of Solana's native token, which can be passed to nodes in a +Solana cluster in exchange for running an on-chain program or validating its +output. The system may perform micro-payments of fractional SOLs, which are +called _lamports_. They are named in honor of Solana's biggest technical +influence, [Leslie Lamport](https://en.wikipedia.org/wiki/Leslie_Lamport). A +lamport has a value of 0.000000001 SOL. 
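
As a small illustration, `@solana/web3.js` exposes this conversion as a
constant:

```js
import { LAMPORTS_PER_SOL } from "@solana/web3.js";

console.log(LAMPORTS_PER_SOL); // 1000000000 lamports per SOL
console.log(1 / LAMPORTS_PER_SOL); // 1e-9 SOL -- the value of a single lamport
```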
+ +## Disclaimer + +All claims, content, designs, algorithms, estimates, roadmaps, specifications, +and performance measurements described in this project are done with the +author's best effort. It is up to the reader to check and validate their +accuracy and truthfulness. Furthermore, nothing in this project constitutes a +solicitation for investment. diff --git a/docs/intro/rent.md b/docs/intro/rent.md new file mode 100644 index 000000000..690dbfc3e --- /dev/null +++ b/docs/intro/rent.md @@ -0,0 +1,40 @@ +--- +sidebarLabel: Rent +title: Storage Rent Economics +--- + +Each transaction that is submitted to the Solana ledger imposes costs. +Transaction fees paid by the submitter, and collected by a validator, in theory, +account for the acute, transactional, costs of validating and adding that data +to the ledger. Unaccounted in this process is the mid-term storage of active +ledger state, necessarily maintained by the rotating validator set. This type of +storage imposes costs not only to validators but also to the broader network as +active state grows so does data transmission and validation overhead. To account +for these costs, we describe here our preliminary design and implementation of +storage rent. + +Storage rent can be paid via one of two methods: + +Method 1: Set it and forget it + +With this approach, accounts with two-years worth of rent deposits secured are +exempt from network rent charges. By maintaining this minimum-balance, the +broader network benefits from reduced liquidity and the account holder can rest +assured that their `Account::data` will be retained for continual access/usage. + +Method 2: Pay per byte + +If an account has less than two-years worth of deposited rent the network +charges rent on a per-epoch basis, in credit for the next epoch. This rent is +deducted at a rate specified in genesis, in lamports per kilobyte-year. + +For information on the technical implementation details of this design, see the +[Rent](https://docs.solanalabs.com/implemented-proposals/rent) section. + +**Note:** New accounts now **are required** to be initialized with enough +lamports to be rent exempt. Additionally, transactions that leave an account's +balance below the rent exempt minimum (and non-zero) will **fail**. This +essentially renders all accounts rent exempt. Rent-paying accounts that were +created before this requirement will continue paying rent until either (1) their +balance falls to zero, or (2) a transaction increases the account's balance to +be rent exempt. diff --git a/docs/intro/transaction_fees.md b/docs/intro/transaction_fees.md new file mode 100644 index 000000000..4b8ab0993 --- /dev/null +++ b/docs/intro/transaction_fees.md @@ -0,0 +1,232 @@ +--- +title: Transaction Fees +description: + "Transaction fees are the small fees paid to process instructions on the + network. These fees are based on computation and an optional prioritization + fee." +keywords: + - instruction fee + - processing fee + - storage fee + - low fee blockchain + - gas + - gwei + - cheap network + - affordable blockchain +--- + +The small fees paid to process [instructions](/docs/terminology.md#instruction) +on the Solana blockchain are known as "_transaction fees_". + +As each transaction (which contains one or more instructions) is sent through +the network, it gets processed by the current leader validation-client. 
Once +confirmed as a global state transaction, this _transaction fee_ is paid to the +network to help support the [economic design](#basic-economic-design) of the +Solana blockchain. + +> **NOTE:** Transaction fees are different from +> [account rent](/docs/terminology.md#rent)! While transaction fees are paid to +> process instructions on the Solana network, rent is paid to store data on the +> blockchain. You can learn more about rent here: +> [What is rent?](/docs/core/rent.md) + +## Why pay transaction fees? + +Transaction fees offer many benefits in the Solana +[economic design](#basic-economic-design) described below. Mainly: + +- they provide compensation to the validator network for the CPU/GPU resources + necessary to process transactions, +- reduce network spam by introducing real cost to transactions, +- and provide long-term economic stability to the network through a + protocol-captured minimum fee amount per transaction + +> **NOTE:** Network consensus votes are sent as normal system transfers, which +> means that validators pay transaction fees to participate in consensus. + +## Basic economic design + +Many blockchain networks \(e.g. Bitcoin and Ethereum\), rely on inflationary +_protocol-based rewards_ to secure the network in the short-term. Over the +long-term, these networks will increasingly rely on _transaction fees_ to +sustain security. + +The same is true on Solana. Specifically: + +- A fixed proportion (initially 50%) of each transaction fee is _burned_ + (destroyed), with the remaining going to the current + [leader](/docs/terminology.md#leader) processing the transaction. +- A scheduled global inflation rate provides a source for + [rewards](https://docs.solanalabs.com/implemented-proposals/staking-rewards) + distributed to [Solana Validators](https://docs.solanalabs.com/operations). + +### Why burn some fees? + +As mentioned above, a fixed proportion of each transaction fee is _burned_ +(destroyed). This is intended to cement the economic value of SOL and thus +sustain the network's security. Unlike a scheme where transactions fees are +completely burned, leaders are still incentivized to include as many +transactions as possible in their slots. + +Burnt fees can also help prevent malicious validators from censoring +transactions by being considered in [fork](/docs/terminology.md#fork) selection. + +#### Example of an attack: + +In the case of a +[Proof of History (PoH)](/docs/terminology.md#proof-of-history-poh) fork with a +malicious, censoring leader: + +- due to the fees lost from censoring, we would expect the total fees burned to + be **_less than_** a comparable honest fork +- if the censoring leader is to compensate for these lost protocol fees, they + would have to replace the burnt fees on their fork themselves +- thus potentially reducing the incentive to censor in the first place + +## Calculating transaction fees + +Transactions fees are calculated based on two main parts: + +- a statically set base fee per signature, and +- the computational resources used during the transaction, measured in + "[_compute units_](/docs/terminology.md#compute-units)" + +Since each transaction may require a different amount of computational +resources, they are allotted a maximum number of _compute units_ per transaction +known as the "[_compute budget_](/docs/terminology.md#compute-budget)". + +The execution of each instruction within a transaction consumes a different +number of _compute units_. 
After the maximum number of _compute units_ has been +consumed (aka compute budget exhaustion), the runtime will halt the transaction +and return an error. This results in a failed transaction. + +> **Learn more:** compute units and the +> [Compute Budget](/docs/core/runtime.md#compute-budget) in the Runtime and +> [requesting a fee estimate](/docs/rpc/http/getFeeForMessage.mdx) from the RPC. + +## Prioritization fee + +A Solana transaction can include an **optional** fee to prioritize itself +against others known as a +"_[prioritization fee](/docs/terminology.md#prioritization-fee)_". Paying this +additional fee helps boost how a transaction is prioritized against others, +resulting in faster execution times. + +### How the prioritization fee is calculated + +A transaction's [prioritization fee](/docs/terminology.md#prioritization-fee) is +calculated by multiplying the maximum number of **_compute units_** by the +**_compute unit price_** (measured in _micro-lamports_). + +Each transaction can set the maximum number of compute units it is allowed to +consume and the compute unit price by including a `SetComputeUnitLimit` and +`SetComputeUnitPrice` compute budget instruction respectively. + +> Note: Unlike other instructions inside a Solana transaction, +> [Compute Budget instructions](https://github.com/solana-labs/solana/blob/master/sdk/src/compute_budget.rs) +> do **NOT** require any accounts. + +If no `SetComputeUnitLimit` instruction is provided, the limit will be +calculated as the product of the number of instructions in the transaction and +the default per-instruction units, which is currently +[200k](https://github.com/solana-labs/solana/blob/4293f11cf13fc1e83f1baa2ca3bb2f8ea8f9a000/program-runtime/src/compute_budget.rs#L13). + +If no `SetComputeUnitPrice` instruction is provided, the transaction will +default to no additional elevated fee and the lowest priority. + +### How to set the prioritization fee + +A transaction's prioritization fee is set by including a `SetComputeUnitPrice` +instruction, and optionally a `SetComputeUnitLimit` instruction. The runtime +will use these values to calculate the prioritization fee, which will be used to +prioritize the given transaction within the block. + +You can craft each of these instructions via their `rust` or `@solana/web3.js` +functions. Each of these instructions can then be included in the transaction +and sent to the cluster like normal. See also the +[best practices](#prioritization-fee-best-practices) below. + +> Caution: Transactions can only contain **one of each type** of compute budget +> instruction. Duplicate types will result in an +> [`TransactionError::DuplicateInstruction`](https://github.com/solana-labs/solana/blob/master/sdk/src/transaction/error.rs#L144-145) +> error, and ultimately transaction failure. 
+ +#### Rust + +The rust `solana-sdk` crate includes functions within +[`ComputeBudgetInstruction`](https://docs.rs/solana-sdk/latest/solana_sdk/compute_budget/enum.ComputeBudgetInstruction.html) +to craft instructions for setting the _compute unit limit_ and _compute unit +price_: + +```rust +let instruction = ComputeBudgetInstruction::set_compute_unit_limit(300_000); +``` + +```rust +let instruction = ComputeBudgetInstruction::set_compute_unit_price(1); +``` + +#### Javascript + +The `@solana/web3.js` library includes functions within the +[`ComputeBudgetProgram`](https://solana-labs.github.io/solana-web3.js/classes/ComputeBudgetProgram.html) +class to craft instructions for setting the _compute unit limit_ and _compute +unit price_: + +```js +const instruction = ComputeBudgetProgram.setComputeUnitLimit({ + units: 300_000, +}); +``` + +```js +const instruction = ComputeBudgetProgram.setComputeUnitPrice({ + microLamports: 1, +}); +``` + +### Prioritization fee best practices + +#### Request the minimum compute units + +Transactions should request the minimum amount of compute units required for +execution to minimize fees. Also note that fees are not adjusted when the number +of requested compute units exceeds the number of compute units actually consumed +by an executed transaction. + +#### Get recent prioritization fees + +Prior to sending a transaction to the cluster, you can use the +[`getRecentPrioritizationFees`](/docs/rpc/http/getRecentPrioritizationFees.mdx) +RPC method to get a list of the recent paid prioritization fees within the +recent blocks processed by the node. + +You could then use this data to estimate an appropriate prioritization fee for +your transaction to both (a) better ensure it gets processed by the cluster and +(b) minimize the fees paid. + +## Fee Collection + +Transactions are required to have at least one account which has signed the +transaction and is writable. Writable signer accounts are serialized first in +the list of transaction accounts and the first of these accounts is always used +as the "fee payer". + +Before any transaction instructions are processed, the fee payer account balance +will be deducted to pay for transaction fees. If the fee payer balance is not +sufficient to cover transaction fees, the transaction will be dropped by the +cluster. If the balance was sufficient, the fees will be deducted whether the +transaction is processed successfully or not. In fact, if any of the transaction +instructions return an error or violate runtime restrictions, all account +changes _except_ the transaction fee deduction will be rolled back. + +## Fee Distribution + +Transaction fees are partially burned and the remaining fees are collected by +the validator that produced the block that the corresponding transactions were +included in. The transaction fee burn rate was initialized as 50% when inflation +rewards were enabled at the beginning of 2021 and has not changed so far. These +fees incentivize a validator to process as many transactions as possible during +its slots in the leader schedule. Collected fees are deposited in the +validator's account (listed in the leader schedule for the current slot) after +processing all of the transactions included in a block. 
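
Putting the pieces above together, the sketch below (a devnet illustration with
placeholder keys and amounts, not a production fee strategy) queries
`getRecentPrioritizationFees` as suggested in the best practices and attaches
both compute budget instructions to a simple transfer:

```js
import {
  ComputeBudgetProgram,
  Connection,
  Keypair,
  SystemProgram,
  Transaction,
} from "@solana/web3.js";

const connection = new Connection("https://api.devnet.solana.com", "confirmed");

// Placeholder fee payer and recipient.
const payer = Keypair.generate();
const recipient = Keypair.generate().publicKey;

// Ask the RPC node what prioritization fees were recently paid for the
// writable accounts this transaction will touch.
const recentFees = await connection.getRecentPrioritizationFees({
  lockedWritableAccounts: [payer.publicKey],
});
const maxRecentFee = Math.max(0, ...recentFees.map((f) => f.prioritizationFee));

const transaction = new Transaction().add(
  // Request only the compute units the transaction actually needs.
  ComputeBudgetProgram.setComputeUnitLimit({ units: 300_000 }),
  // Pay a compute unit price in line with recently observed fees.
  ComputeBudgetProgram.setComputeUnitPrice({ microLamports: maxRecentFee }),
  SystemProgram.transfer({
    fromPubkey: payer.publicKey,
    toPubkey: recipient,
    lamports: 1_000,
  }),
);
// The transaction can now be signed and sent as usual.
```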
diff --git a/docs/intro/wallets.md b/docs/intro/wallets.md index 4dcd3bcea..4fc381220 100644 --- a/docs/intro/wallets.md +++ b/docs/intro/wallets.md @@ -1,4 +1,5 @@ --- +sidebarLabel: Wallets title: Solana Wallet Guide --- @@ -46,11 +47,11 @@ first will need to create a wallet.** ## Supported Wallets -Several browser and mobile app based wallets support Solana. Find the right one -for you on the +Several browser and mobile app based wallets support Solana. Find some options +that might be right for you on the [Solana Ecosystem](https://solana.com/ecosystem/explore?categories=wallet) page. For advanced users or developers, the -[command-line wallets](wallet-guide/cli.md) may be more appropriate, as new -features on the Solana blockchain will always be supported on the command line -first before being integrated into third-party solutions. +[command-line wallets](https://docs.solanalabs.com/cli/wallets) may be more +appropriate, as new features on the Solana blockchain will always be supported +on the command line first before being integrated into third-party solutions. diff --git a/docs/more/exchange.md b/docs/more/exchange.md new file mode 100644 index 000000000..32b5ae405 --- /dev/null +++ b/docs/more/exchange.md @@ -0,0 +1,946 @@ +--- +title: Add Solana to Your Exchange +--- + +This guide describes how to add Solana's native token SOL to your cryptocurrency +exchange. + +## Node Setup + +We highly recommend setting up at least two nodes on high-grade computers/cloud +instances, upgrading to newer versions promptly, and keeping an eye on service +operations with a bundled monitoring tool. + +This setup enables you: + +- to have a self-administered gateway to the Solana mainnet-beta cluster to get + data and submit withdrawal transactions +- to have full control over how much historical block data is retained +- to maintain your service availability even if one node fails + +Solana nodes demand relatively high computing power to handle our fast blocks +and high TPS. For specific requirements, please see +[hardware recommendations](https://docs.solanalabs.com/operations/requirements). + +To run an api node: + +1. [Install the Solana command-line tool suite](https://docs.solanalabs.com/cli/install) +2. Start the validator with at least the following parameters: + +```bash +solana-validator \ + --ledger \ + --identity \ + --entrypoint \ + --expected-genesis-hash \ + --rpc-port 8899 \ + --no-voting \ + --enable-rpc-transaction-history \ + --limit-ledger-size \ + --known-validator \ + --only-known-rpc +``` + +Customize `--ledger` to your desired ledger storage location, and `--rpc-port` +to the port you want to expose. + +The `--entrypoint` and `--expected-genesis-hash` parameters are all specific to +the cluster you are joining. +[Current parameters for Mainnet Beta](https://docs.solanalabs.com/clusters/available#example-solana-validator-command-line-2) + +The `--limit-ledger-size` parameter allows you to specify how many ledger +[shreds](/docs/terminology.md#shred) your node retains on disk. If you do not +include this parameter, the validator will keep the entire ledger until it runs +out of disk space. The default value attempts to keep the ledger disk usage +under 500GB. More or less disk usage may be requested by adding an argument to +`--limit-ledger-size` if desired. Check `solana-validator --help` for the +default limit value used by `--limit-ledger-size`. 
More information about
+selecting a custom limit value is
+[available here](https://github.com/solana-labs/solana/blob/583cec922b6107e0f85c7e14cb5e642bc7dfb340/core/src/ledger_cleanup_service.rs#L15-L26).
+
+Specifying one or more `--known-validator` parameters can protect you from
+booting from a malicious snapshot.
+[More on the value of booting with known validators](https://docs.solanalabs.com/operations/guides/validator-start#known-validators)
+
+Optional parameters to consider:
+
+- `--private-rpc` prevents your RPC port from being published for use by other
+  nodes
+- `--rpc-bind-address` allows you to specify a different IP address to bind the
+  RPC port
+
+### Automatic Restarts and Monitoring
+
+We recommend configuring each of your nodes to restart automatically on exit, to
+ensure you miss as little data as possible. Running the solana software as a
+systemd service is one great option.
+
+For monitoring, we provide
+[`solana-watchtower`](https://github.com/solana-labs/solana/blob/master/watchtower/README.md),
+which can monitor your validator and detect when the `solana-validator` process
+is unhealthy. It can be configured to alert you directly via Slack, Telegram,
+Discord, or Twilio. For details, run `solana-watchtower --help`.
+
+```bash
+solana-watchtower --validator-identity
+```
+
+> You can find more information about the
+> [best practices for Solana Watchtower](https://docs.solanalabs.com/operations/best-practices/monitoring#solana-watchtower)
+> here in the docs.
+
+#### New Software Release Announcements
+
+We release new software frequently (around 1 release / week). Sometimes newer
+versions include incompatible protocol changes, which necessitate timely
+software updates to avoid errors in processing blocks.
+
+Our official release announcements for all kinds of releases (normal and
+security) are communicated via a [discord](https://solana.com/discord) channel
+called `#mb-announcement` (`mb` stands for `mainnet-beta`).
+
+Like staked validators, we expect any exchange-operated validators to be updated
+at your earliest convenience within a business day or two after a normal release
+announcement. For security-related releases, more urgent action may be needed.
+
+### Ledger Continuity
+
+By default, each of your nodes will boot from a snapshot provided by one of your
+known validators. This snapshot reflects the current state of the chain, but
+does not contain the complete historical ledger. If one of your nodes exits and
+boots from a new snapshot, there may be a gap in the ledger on that node. In
+order to prevent this issue, add the `--no-snapshot-fetch` parameter to your
+`solana-validator` command to receive historical ledger data instead of a
+snapshot.
+
+Do not pass the `--no-snapshot-fetch` parameter on your initial boot as it's not
+possible to boot the node all the way from the genesis block. Instead, boot from
+a snapshot first and then add the `--no-snapshot-fetch` parameter for reboots.
+
+It is important to note that the amount of historical ledger available to your
+nodes from the rest of the network is limited at any point in time. Once
+operational, if your validators experience significant downtime they may not be
+able to catch up to the network and will need to download a new snapshot from a
+known validator. In doing so, your validators will have a gap in their
+historical ledger data that cannot be filled.
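
One lightweight way to notice that a node is falling behind (and may soon need a
fresh snapshot) is to compare its slot height against a trusted reference
endpoint. The endpoints below are placeholders and this is only a sketch, not a
replacement for `solana-watchtower`:

```js
import { Connection } from "@solana/web3.js";

// Placeholder endpoints: your own api node and any trusted reference node.
const localNode = new Connection("http://localhost:8899", "confirmed");
const referenceNode = new Connection("https://api.mainnet-beta.solana.com", "confirmed");

const [localSlot, referenceSlot] = await Promise.all([
  localNode.getSlot(),
  referenceNode.getSlot(),
]);

// A steadily growing difference suggests the node is not catching up and may
// eventually need to boot from a fresh snapshot.
console.log(`local=${localSlot} reference=${referenceSlot} behind=${referenceSlot - localSlot}`);
```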
+ +### Minimizing Validator Port Exposure + +The validator requires that various UDP and TCP ports be open for inbound +traffic from all other Solana validators. While this is the most efficient mode +of operation, and is strongly recommended, it is possible to restrict the +validator to only require inbound traffic from one other Solana validator. + +First add the `--restricted-repair-only-mode` argument. This will cause the +validator to operate in a restricted mode where it will not receive pushes from +the rest of the validators, and instead will need to continually poll other +validators for blocks. The validator will only transmit UDP packets to other +validators using the _Gossip_ and _ServeR_ ("serve repair") ports, and only +receive UDP packets on its _Gossip_ and _Repair_ ports. + +The _Gossip_ port is bi-directional and allows your validator to remain in +contact with the rest of the cluster. Your validator transmits on the _ServeR_ +to make repair requests to obtaining new blocks from the rest of the network, +since Turbine is now disabled. Your validator will then receive repair responses +on the _Repair_ port from other validators. + +To further restrict the validator to only requesting blocks from one or more +validators, first determine the identity pubkey for that validator and add the +`--gossip-pull-validator PUBKEY --repair-validator PUBKEY` arguments for each +PUBKEY. This will cause your validator to be a resource drain on each validator +that you add, so please do this sparingly and only after consulting with the +target validator. + +Your validator should now only be communicating with the explicitly listed +validators and only on the _Gossip_, _Repair_ and _ServeR_ ports. + +## Setting up Deposit Accounts + +Solana accounts do not require any on-chain initialization; once they contain +some SOL, they exist. To set up a deposit account for your exchange, simply +generate a Solana keypair using any of our +[wallet tools](https://docs.solanalabs.com/cli/wallets). + +We recommend using a unique deposit account for each of your users. + +Solana accounts must be made rent-exempt by containing 2-years worth of +[rent](/docs/core/accounts.md#rent) in SOL. In order to find the minimum +rent-exempt balance for your deposit accounts, query the +[`getMinimumBalanceForRentExemption` endpoint](/docs/rpc/http/getMinimumBalanceForRentExemption.mdx): + +```bash +curl localhost:8899 -X POST -H "Content-Type: application/json" -d '{ + "jsonrpc": "2.0", + "id": 1, + "method": "getMinimumBalanceForRentExemption", + "params":[0] +}' + +# Result +{"jsonrpc":"2.0","result":890880,"id":1} +``` + +### Offline Accounts + +You may wish to keep the keys for one or more collection accounts offline for +greater security. If so, you will need to move SOL to hot accounts using our +[offline methods](https://docs.solanalabs.com/cli/examples/offline-signing). + +## Listening for Deposits + +When a user wants to deposit SOL into your exchange, instruct them to send a +transfer to the appropriate deposit address. + +### Versioned Transaction Migration + +When the Mainnet Beta network starts processing versioned transactions, +exchanges **MUST** make changes. If no changes are made, deposit detection will +no longer work properly because fetching a versioned transaction or a block +containing versioned transactions will return an error. 
+ +- `{"maxSupportedTransactionVersion": 0}` + + The `maxSupportedTransactionVersion` parameter must be added to `getBlock` and + `getTransaction` requests to avoid disruption to deposit detection. The latest + transaction version is `0` and should be specified as the max supported + transaction version value. + +It's important to understand that versioned transactions allow users to create +transactions that use another set of account keys loaded from on-chain address +lookup tables. + +- `{"encoding": "jsonParsed"}` + + When fetching blocks and transactions, it's now recommended to use the + `"jsonParsed"` encoding because it includes all transaction account keys + (including those from lookup tables) in the message `"accountKeys"` list. This + makes it straightforward to resolve balance changes detailed in `preBalances` + / `postBalances` and `preTokenBalances` / `postTokenBalances`. + + If the `"json"` encoding is used instead, entries in `preBalances` / + `postBalances` and `preTokenBalances` / `postTokenBalances` may refer to + account keys that are **NOT** in the `"accountKeys"` list and need to be + resolved using `"loadedAddresses"` entries in the transaction metadata. + +### Poll for Blocks + +To track all the deposit accounts for your exchange, poll for each confirmed +block and inspect for addresses of interest, using the JSON-RPC service of your +Solana API node. + +- To identify which blocks are available, send a + [`getBlocks`](/docs/rpc/http/getBlocks.mdx) request, passing the last block + you have already processed as the start-slot parameter: + +```bash +curl https://api.devnet.solana.com -X POST -H "Content-Type: application/json" -d '{ + "jsonrpc": "2.0", + "id": 1, + "method": "getBlocks", + "params": [160017005, 160017015] +}' + +# Result +{"jsonrpc":"2.0","result":[160017005,160017006,160017007,160017012,160017013,160017014,160017015],"id":1} +``` + +Not every slot produces a block, so there may be gaps in the sequence of +integers. + +- For each block, request its contents with a + [`getBlock`](/docs/rpc/http/getBlock.mdx) request: + +### Block Fetching Tips + +- `{"rewards": false}` + +By default, fetched blocks will return information about validator fees on each +block and staking rewards on epoch boundaries. If you don't need this +information, disable it with the "rewards" parameter. + +- `{"transactionDetails": "accounts"}` + +By default, fetched blocks will return a lot of transaction info and metadata +that isn't necessary for tracking account balances. Set the "transactionDetails" +parameter to speed up block fetching. + +```bash +curl https://api.devnet.solana.com -X POST -H 'Content-Type: application/json' -d '{ + "jsonrpc": "2.0", + "id": 1, + "method": "getBlock", + "params": [ + 166974442, + { + "encoding": "jsonParsed", + "maxSupportedTransactionVersion": 0, + "transactionDetails": "accounts", + "rewards": false + } + ] +}' + +# Result +{ + "jsonrpc": "2.0", + "result": { + "blockHeight": 157201607, + "blockTime": 1665070281, + "blockhash": "HKhao674uvFc4wMK1Cm3UyuuGbKExdgPFjXQ5xtvsG3o", + "parentSlot": 166974441, + "previousBlockhash": "98CNLU4rsYa2HDUyp7PubU4DhwYJJhSX9v6pvE7SWsAo", + "transactions": [ + ... 
(omit) + { + "meta": { + "err": null, + "fee": 5000, + "postBalances": [ + 1110663066, + 1, + 1040000000 + ], + "postTokenBalances": [], + "preBalances": [ + 1120668066, + 1, + 1030000000 + ], + "preTokenBalances": [], + "status": { + "Ok": null + } + }, + "transaction": { + "accountKeys": [ + { + "pubkey": "9aE476sH92Vz7DMPyq5WLPkrKWivxeuTKEFKd2sZZcde", + "signer": true, + "source": "transaction", + "writable": true + }, + { + "pubkey": "11111111111111111111111111111111", + "signer": false, + "source": "transaction", + "writable": false + }, + { + "pubkey": "G1wZ113tiUHdSpQEBcid8n1x8BAvcWZoZgxPKxgE5B7o", + "signer": false, + "source": "lookupTable", + "writable": true + } + ], + "signatures": [ + "2CxNRsyRT7y88GBwvAB3hRg8wijMSZh3VNYXAdUesGSyvbRJbRR2q9G1KSEpQENmXHmmMLHiXumw4dp8CvzQMjrM" + ] + }, + "version": 0 + }, + ... (omit) + ] + }, + "id": 1 +} +``` + +The `preBalances` and `postBalances` fields allow you to track the balance +changes in every account without having to parse the entire transaction. They +list the starting and ending balances of each account in +[lamports](/docs/terminology.md#lamport), indexed to the `accountKeys` list. For +example, if the deposit address of interest is +`G1wZ113tiUHdSpQEBcid8n1x8BAvcWZoZgxPKxgE5B7o`, this transaction represents a +transfer of 1040000000 - 1030000000 = 10,000,000 lamports = 0.01 SOL + +If you need more information about the transaction type or other specifics, you +can request the block from RPC in binary format, and parse it using either our +[Rust SDK](https://github.com/solana-labs/solana) or +[Javascript SDK](https://github.com/solana-labs/solana-web3.js). + +### Address History + +You can also query the transaction history of a specific address. This is +generally _not_ a viable method for tracking all your deposit addresses over all +slots, but may be useful for examining a few accounts for a specific period of +time. 
+ +- Send a [`getSignaturesForAddress`](/docs/rpc/http/getSignaturesForAddress.mdx) + request to the api node: + +```bash +curl localhost:8899 -X POST -H "Content-Type: application/json" -d '{ + "jsonrpc": "2.0", + "id": 1, + "method": "getSignaturesForAddress", + "params": [ + "3M2b3tLji7rvscqrLAHMukYxDK2nB96Q9hwfV6QkdzBN", + { + "limit": 3 + } + ] +}' + +# Result +{ + "jsonrpc": "2.0", + "result": [ + { + "blockTime": 1662064640, + "confirmationStatus": "finalized", + "err": null, + "memo": null, + "signature": "3EDRvnD5TbbMS2mCusop6oyHLD8CgnjncaYQd5RXpgnjYUXRCYwiNPmXb6ZG5KdTK4zAaygEhfdLoP7TDzwKBVQp", + "slot": 148697216 + }, + { + "blockTime": 1662064434, + "confirmationStatus": "finalized", + "err": null, + "memo": null, + "signature": "4rPQ5wthgSP1kLdLqcRgQnkYkPAZqjv5vm59LijrQDSKuL2HLmZHoHjdSLDXXWFwWdaKXUuryRBGwEvSxn3TQckY", + "slot": 148696843 + }, + { + "blockTime": 1662064341, + "confirmationStatus": "finalized", + "err": null, + "memo": null, + "signature": "36Q383JMiqiobuPV9qBqy41xjMsVnQBm9rdZSdpbrLTGhSQDTGZJnocM4TQTVfUGfV2vEX9ZB3sex6wUBUWzjEvs", + "slot": 148696677 + } + ], + "id": 1 +} +``` + +- For each signature returned, get the transaction details by sending a + [`getTransaction`](/docs/rpc/http/getTransaction.mdx) request: + +```bash +curl https://api.devnet.solana.com -X POST -H 'Content-Type: application/json' -d '{ + "jsonrpc":"2.0", + "id":1, + "method":"getTransaction", + "params":[ + "2CxNRsyRT7y88GBwvAB3hRg8wijMSZh3VNYXAdUesGSyvbRJbRR2q9G1KSEpQENmXHmmMLHiXumw4dp8CvzQMjrM", + { + "encoding":"jsonParsed", + "maxSupportedTransactionVersion":0 + } + ] +}' + +# Result +{ + "jsonrpc": "2.0", + "result": { + "blockTime": 1665070281, + "meta": { + "err": null, + "fee": 5000, + "innerInstructions": [], + "logMessages": [ + "Program 11111111111111111111111111111111 invoke [1]", + "Program 11111111111111111111111111111111 success" + ], + "postBalances": [ + 1110663066, + 1, + 1040000000 + ], + "postTokenBalances": [], + "preBalances": [ + 1120668066, + 1, + 1030000000 + ], + "preTokenBalances": [], + "rewards": [], + "status": { + "Ok": null + } + }, + "slot": 166974442, + "transaction": { + "message": { + "accountKeys": [ + { + "pubkey": "9aE476sH92Vz7DMPyq5WLPkrKWivxeuTKEFKd2sZZcde", + "signer": true, + "source": "transaction", + "writable": true + }, + { + "pubkey": "11111111111111111111111111111111", + "signer": false, + "source": "transaction", + "writable": false + }, + { + "pubkey": "G1wZ113tiUHdSpQEBcid8n1x8BAvcWZoZgxPKxgE5B7o", + "signer": false, + "source": "lookupTable", + "writable": true + } + ], + "addressTableLookups": [ + { + "accountKey": "4syr5pBaboZy4cZyF6sys82uGD7jEvoAP2ZMaoich4fZ", + "readonlyIndexes": [], + "writableIndexes": [ + 3 + ] + } + ], + "instructions": [ + { + "parsed": { + "info": { + "destination": "G1wZ113tiUHdSpQEBcid8n1x8BAvcWZoZgxPKxgE5B7o", + "lamports": 10000000, + "source": "9aE476sH92Vz7DMPyq5WLPkrKWivxeuTKEFKd2sZZcde" + }, + "type": "transfer" + }, + "program": "system", + "programId": "11111111111111111111111111111111" + } + ], + "recentBlockhash": "BhhivDNgoy4L5tLtHb1s3TP19uUXqKiy4FfUR34d93eT" + }, + "signatures": [ + "2CxNRsyRT7y88GBwvAB3hRg8wijMSZh3VNYXAdUesGSyvbRJbRR2q9G1KSEpQENmXHmmMLHiXumw4dp8CvzQMjrM" + ] + }, + "version": 0 + }, + "id": 1 +} +``` + +## Sending Withdrawals + +To accommodate a user's request to withdraw SOL, you must generate a Solana +transfer transaction, and send it to the api node to be forwarded to your +cluster. 
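+
+A withdrawal transfer can be produced either with the command-line tool (shown
+in the next section) or programmatically via the
+[Solana Javascript SDK](https://github.com/solana-labs/solana-web3.js). For
+reference, below is a minimal sketch assuming the `@solana/web3.js` 1.x API and
+a funded hot-wallet keypair; the destination shown is the example address used
+elsewhere in this guide, and a production system would load its keypair from
+secure storage instead of generating one on the fly.
+
+```typescript
+import {
+  Connection,
+  Keypair,
+  LAMPORTS_PER_SOL,
+  PublicKey,
+  SystemProgram,
+  Transaction,
+  sendAndConfirmTransaction,
+} from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+
+  // Demonstration only: a real exchange would load a funded hot-wallet keypair.
+  const hotWallet = Keypair.generate();
+  const destination = new PublicKey("G1wZ113tiUHdSpQEBcid8n1x8BAvcWZoZgxPKxgE5B7o");
+
+  // Build a transaction containing a single System Program transfer instruction.
+  const tx = new Transaction().add(
+    SystemProgram.transfer({
+      fromPubkey: hotWallet.publicKey,
+      toPubkey: destination,
+      lamports: 0.01 * LAMPORTS_PER_SOL,
+    }),
+  );
+
+  // Sign, submit, and wait for confirmation of the withdrawal transfer.
+  const signature = await sendAndConfirmTransaction(connection, tx, [hotWallet]);
+  console.log("Withdrawal signature:", signature);
+})();
+```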
+
+### Synchronous
+
+Sending a synchronous transfer to the Solana cluster allows you to easily ensure
+that a transfer is successful and finalized by the cluster.
+
+Solana's command-line tool offers a simple command, `solana transfer`, to
+generate, submit, and confirm transfer transactions. By default, this method
+will wait and track progress on stderr until the transaction has been finalized
+by the cluster. If the transaction fails, it will report any transaction errors.
+
+```bash
+solana transfer <RECIPIENT_ADDRESS> <AMOUNT> --allow-unfunded-recipient \
+  --keypair <KEYPAIR> --url http://localhost:8899
+```
+
+The [Solana Javascript SDK](https://github.com/solana-labs/solana-web3.js)
+offers a similar approach for the JS ecosystem. Use the `SystemProgram` to build
+a transfer transaction, and submit it using the `sendAndConfirmTransaction`
+method.
+
+### Asynchronous
+
+For greater flexibility, you can submit withdrawal transfers asynchronously. In
+these cases, it is your responsibility to verify that the transaction succeeded
+and was finalized by the cluster.
+
+**Note:** Each transaction contains a
+[recent blockhash](/docs/core/transactions.md#blockhash-format) to indicate its
+liveness. It is **critical** to wait until this blockhash expires before
+retrying a withdrawal transfer that does not appear to have been confirmed or
+finalized by the cluster. Otherwise, you risk a double spend. See more on
+[blockhash expiration](#blockhash-expiration) below.
+
+First, get a recent blockhash using the
+[`getFees`](/docs/rpc/deprecated/getFees.mdx) endpoint or the CLI command:
+
+```bash
+solana fees --url http://localhost:8899
+```
+
+In the command-line tool, pass the `--no-wait` argument to send a transfer
+asynchronously, and include your recent blockhash with the `--blockhash`
+argument:
+
+```bash
+solana transfer <RECIPIENT_ADDRESS> <AMOUNT> --no-wait --allow-unfunded-recipient \
+  --blockhash <RECENT_BLOCKHASH> --keypair <KEYPAIR> --url http://localhost:8899
+```
+
+You can also build, sign, and serialize the transaction manually, and fire it
+off to the cluster using the JSON-RPC
+[`sendTransaction`](/docs/rpc/http/sendTransaction.mdx) endpoint.
+
+#### Transaction Confirmations & Finality
+
+Get the status of a batch of transactions using the
+[`getSignatureStatuses`](/docs/rpc/http/getSignatureStatuses.mdx) JSON-RPC
+endpoint. The `confirmations` field reports how many
+[confirmed blocks](/docs/terminology.md#confirmed-block) have elapsed since the
+transaction was processed. If `confirmations: null`, it is
+[finalized](/docs/terminology.md#finality).
+
+```bash
+curl localhost:8899 -X POST -H "Content-Type: application/json" -d '{
+  "jsonrpc":"2.0",
+  "id":1,
+  "method":"getSignatureStatuses",
+  "params":[
+    [
+      "5VERv8NMvzbJMEkV8xnrLkEaWRtSz9CosKDYjCJjBRnbJLgp8uirBgmQpjKhoR4tjF3ZpRzrFmBV6UjKdiSZkQUW",
+      "5j7s6NiJS3JAkvgkoc18WVAsiSaci2pxB2A6ueCJP4tprA2TFg9wSyTLeYouxPBJEMzJinENTkpA52YStRW5Dia7"
+    ]
+  ]
+}'
+
+# Result
+{
+  "jsonrpc": "2.0",
+  "result": {
+    "context": {
+      "slot": 82
+    },
+    "value": [
+      {
+        "slot": 72,
+        "confirmations": 10,
+        "err": null,
+        "status": {
+          "Ok": null
+        }
+      },
+      {
+        "slot": 48,
+        "confirmations": null,
+        "err": null,
+        "status": {
+          "Ok": null
+        }
+      }
+    ]
+  },
+  "id": 1
+}
+```
+
+#### Blockhash Expiration
+
+You can check whether a particular blockhash is still valid by sending a
+[`getFeeCalculatorForBlockhash`](/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx)
+request with the blockhash as a parameter. If the response value is `null`, the
+blockhash is expired, and the withdrawal transaction using that blockhash should
+never succeed.
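+
+The same expiration check can be scripted. The sketch below assumes the
+`@solana/web3.js` 1.x API, which still exposes a `getFeeCalculatorForBlockhash`
+helper mirroring the deprecated RPC method; a `null` value means the blockhash
+has expired and the pending withdrawal can be safely retried with a new
+blockhash.
+
+```typescript
+import { Connection } from "@solana/web3.js";
+
+// Returns true while the given blockhash is still valid, i.e. a withdrawal
+// signed with it could still land and must not be blindly re-signed and re-sent.
+async function blockhashStillValid(
+  connection: Connection,
+  blockhash: string,
+): Promise<boolean> {
+  const { value } = await connection.getFeeCalculatorForBlockhash(blockhash);
+  return value !== null; // null => the blockhash has expired
+}
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+  // Example value; use the blockhash recorded for the pending withdrawal.
+  const pending = "BhhivDNgoy4L5tLtHb1s3TP19uUXqKiy4FfUR34d93eT";
+  console.log("still valid:", await blockhashStillValid(connection, pending));
+})();
+```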
+
+### Validating User-supplied Account Addresses for Withdrawals
+
+As withdrawals are irreversible, it may be a good practice to validate a
+user-supplied account address before authorizing a withdrawal in order to
+prevent accidental loss of user funds.
+
+#### Basic verification
+
+Solana addresses are a 32-byte array, encoded with the bitcoin base58 alphabet.
+This results in an ASCII text string matching the following regular expression:
+
+```text
+[1-9A-HJ-NP-Za-km-z]{32,44}
+```
+
+This check is insufficient on its own as Solana addresses are not checksummed,
+so typos cannot be detected. To further validate the user's input, the string
+can be decoded and the resulting byte array's length confirmed to be 32.
+However, there are some addresses that can decode to 32 bytes despite a typo
+such as a single missing character, reversed characters, or ignored case.
+
+#### Advanced verification
+
+Due to the vulnerability to typos described above, it is recommended that the
+balance be queried for candidate withdrawal addresses and the user prompted to
+confirm their intentions if a non-zero balance is discovered.
+
+#### Valid ed25519 pubkey check
+
+The address of a normal account in Solana is a Base58-encoded string of a
+256-bit ed25519 public key. Not all bit patterns are valid public keys for the
+ed25519 curve, so it is possible to ensure user-supplied account addresses are
+at least correct ed25519 public keys.
+
+#### Java
+
+Here is a Java example of validating a user-supplied address as a valid ed25519
+public key:
+
+The following code sample assumes you're using Maven.
+
+`pom.xml`:
+
+```xml
+<repositories>
+  ...
+  <repository>
+    <id>spring</id>
+    <url>https://repo.spring.io/libs-release/</url>
+  </repository>
+</repositories>
+...
+<dependencies>
+  ...
+  <dependency>
+    <groupId>io.github.novacrypto</groupId>
+    <artifactId>Base58</artifactId>
+    <version>0.1.3</version>
+  </dependency>
+  <dependency>
+    <groupId>cafe.cryptography</groupId>
+    <artifactId>curve25519-elisabeth</artifactId>
+    <version>0.1.0</version>
+  </dependency>
+</dependencies>
+```
+
+```java
+import io.github.novacrypto.base58.Base58;
+import cafe.cryptography.curve25519.CompressedEdwardsY;
+
+public class PubkeyValidator
+{
+    public static boolean verifyPubkey(String userProvidedPubkey)
+    {
+        try {
+            return _verifyPubkeyInternal(userProvidedPubkey);
+        } catch (Exception e) {
+            return false;
+        }
+    }
+
+    public static boolean _verifyPubkeyInternal(String maybePubkey) throws Exception
+    {
+        byte[] bytes = Base58.base58Decode(maybePubkey);
+        return !(new CompressedEdwardsY(bytes)).decompress().isSmallOrder();
+    }
+}
+```
+
+## Minimum Deposit & Withdrawal Amounts
+
+Every deposit and withdrawal of SOL must be greater than or equal to the minimum
+rent-exempt balance for the account at the wallet address (a basic SOL account
+holding no data), currently: 0.000890880 SOL.
+
+Similarly, every deposit account must contain at least this balance.
+
+```bash
+curl localhost:8899 -X POST -H "Content-Type: application/json" -d '{
+  "jsonrpc": "2.0",
+  "id": 1,
+  "method": "getMinimumBalanceForRentExemption",
+  "params": [0]
+}'
+
+# Result
+{"jsonrpc":"2.0","result":890880,"id":1}
+```
+
+## Supporting the SPL Token Standard
+
+[SPL Token](https://spl.solana.com/token) is the standard for wrapped/synthetic
+token creation and exchange on the Solana blockchain.
+
+The SPL Token workflow is similar to that of native SOL tokens, but there are a
+few differences which will be discussed in this section.
+
+### Token Mints
+
+Each _type_ of SPL Token is declared by creating a _mint_ account. This account
+stores metadata describing token features like the supply, number of decimals,
+and various authorities with control over the mint.
+Each SPL Token account references its associated mint and may only interact
+with SPL Tokens of that type.
+
+### Installing the `spl-token` CLI Tool
+
+SPL Token accounts are queried and modified using the `spl-token` command line
+utility. The examples provided in this section depend upon having it installed
+on the local system.
+
+`spl-token` is distributed from
+[crates.io](https://crates.io/crates/spl-token) via the Rust `cargo` command
+line utility. The latest version of `cargo` can be installed using a handy
+one-liner for your platform at [rustup.rs](https://rustup.rs). Once `cargo` is
+installed, `spl-token` can be obtained with the following command:
+
+```bash
+cargo install spl-token-cli
+```
+
+You can then check the installed version to verify the installation:
+
+```bash
+spl-token --version
+```
+
+Which should result in something like:
+
+```text
+spl-token-cli 2.0.1
+```
+
+### Account Creation
+
+SPL Token accounts carry additional requirements that native System Program
+accounts do not:
+
+1. SPL Token accounts must be created before an amount of tokens can be
+   deposited. Token accounts can be created explicitly with the
+   `spl-token create-account` command, or implicitly by the
+   `spl-token transfer --fund-recipient ...` command.
+1. SPL Token accounts must remain
+   [rent-exempt](/docs/core/accounts.md#rent-exemption) for the duration of
+   their existence and therefore require a small amount of native SOL tokens to
+   be deposited at account creation. For SPL Token v2 accounts, this amount is
+   0.00203928 SOL (2,039,280 lamports).
+
+#### Command Line
+
+To create an SPL Token account with the following properties:
+
+1. Associated with the given mint
+1. Owned by the funding account's keypair
+
+```bash
+spl-token create-account <TOKEN_MINT_ADDRESS>
+```
+
+#### Example
+
+```bash
+$ spl-token create-account AkUFCWTXb3w9nY2n6SFJvBV6VwvFUCe4KBMCcgLsa2ir
+Creating account 6VzWGL51jLebvnDifvcuEDec17sK6Wupi4gYhm5RzfkV
+Signature: 4JsqZEPra2eDTHtHpB4FMWSfk3UgcCVmkKkP7zESZeMrKmFFkDkNd91pKP3vPVVZZPiu5XxyJwS73Vi5WsZL88D7
+```
+
+Or to create an SPL Token account with a specific keypair:
+
+```bash
+$ solana-keygen new -o token-account.json
+$ spl-token create-account AkUFCWTXb3w9nY2n6SFJvBV6VwvFUCe4KBMCcgLsa2ir token-account.json
+Creating account 6VzWGL51jLebvnDifvcuEDec17sK6Wupi4gYhm5RzfkV
+Signature: 4JsqZEPra2eDTHtHpB4FMWSfk3UgcCVmkKkP7zESZeMrKmFFkDkNd91pKP3vPVVZZPiu5XxyJwS73Vi5WsZL88D7
+```
+
+### Checking an Account's Balance
+
+#### Command Line
+
+```bash
+spl-token balance <TOKEN_ACCOUNT_ADDRESS>
+```
+
+#### Example
+
+```bash
+$ spl-token balance 6VzWGL51jLebvnDifvcuEDec17sK6Wupi4gYhm5RzfkV
+0
+```
+
+### Token Transfers
+
+The source account for a transfer is the actual token account that contains the
+amount.
+
+The recipient address, however, can be a normal wallet account. If an associated
+token account for the given mint does not yet exist for that wallet, the
+transfer will create it provided that the `--fund-recipient` argument is
+provided.
+
+#### Command Line
+
+```bash
+spl-token transfer --fund-recipient <SENDER_TOKEN_ACCOUNT_ADDRESS> <AMOUNT> <RECIPIENT_WALLET_ADDRESS>
+```
+
+#### Example
+
+```bash
+$ spl-token transfer 6B199xxzw3PkAm25hGJpjj3Wj3WNYNHzDAnt1tEqg5BN 1 6VzWGL51jLebvnDifvcuEDec17sK6Wupi4gYhm5RzfkV
+Transfer 1 tokens
+  Sender: 6B199xxzw3PkAm25hGJpjj3Wj3WNYNHzDAnt1tEqg5BN
+  Recipient: 6VzWGL51jLebvnDifvcuEDec17sK6Wupi4gYhm5RzfkV
+Signature: 3R6tsog17QM8KfzbcbdP4aoMfwgo6hBggJDVy7dZPVmH2xbCWjEj31JKD53NzMrf25ChFjY7Uv2dfCDq4mGFFyAj
+```
+
+### Depositing
+
+Since each `(wallet, mint)` pair requires a separate account on chain, it is
+recommended that the addresses for these accounts be derived from SOL deposit
+wallets using the
+[Associated Token Account](https://spl.solana.com/associated-token-account)
+(ATA) scheme and that _only_ deposits from ATA addresses be accepted.
+
+Monitoring for deposit transactions should follow the
+[block polling](#poll-for-blocks) method described above. Each new block should
+be scanned for successful transactions referencing user token-account derived
+addresses. The `preTokenBalances` and `postTokenBalances` fields from the
+transaction's metadata must then be used to determine the effective balance
+change. These fields will identify the token mint and account owner (main wallet
+address) of the affected account.
+
+Note that if a receiving account is created during the transaction, it will have
+no `preTokenBalances` entry as there is no existing account state. In this case,
+the initial balance can be assumed to be zero.
+
+### Withdrawing
+
+The withdrawal address a user provides must be that of their SOL wallet.
+
+Before executing a withdrawal [transfer](#token-transfers), the exchange should
+check the address as
+[described above](#validating-user-supplied-account-addresses-for-withdrawals).
+Additionally, this address must be owned by the System Program and have no
+account data. If the address has no SOL balance, user confirmation should be
+obtained before proceeding with the withdrawal. All other withdrawal addresses
+must be rejected.
+
+From the withdrawal address, the
+[Associated Token Account](https://spl.solana.com/associated-token-account)
+(ATA) for the correct mint is derived and the transfer issued to that account
+via a
+[TransferChecked](https://github.com/solana-labs/solana-program-library/blob/fc0d6a2db79bd6499f04b9be7ead0c400283845e/token/program/src/instruction.rs#L268)
+instruction. Note that it is possible that the ATA address does not yet exist,
+at which point the exchange should fund the account on behalf of the user. For
+SPL Token v2 accounts, funding the withdrawal account will require 0.00203928
+SOL (2,039,280 lamports).
+
+Template `spl-token transfer` command for a withdrawal:
+
+```bash
+$ spl-token transfer --fund-recipient <EXCHANGE_TOKEN_ACCOUNT_ADDRESS> <WITHDRAWAL_AMOUNT> <WITHDRAWAL_ADDRESS>
+```
+
+### Other Considerations
+
+#### Freeze Authority
+
+For regulatory compliance reasons, an SPL Token issuing entity may optionally
+choose to hold "Freeze Authority" over all accounts created in association with
+its mint. This allows them to
+[freeze](https://spl.solana.com/token#freezing-accounts) the assets in a given
+account at will, rendering the account unusable until thawed. If this feature is
+in use, the freeze authority's pubkey will be registered in the SPL Token's mint
+account.
+
+## Testing the Integration
+
+Be sure to test your complete workflow on Solana devnet and testnet
+[clusters](/docs/core/clusters.md) before moving to production on mainnet-beta.
+Devnet is the most open and flexible, and ideal for initial development, while
+testnet offers more realistic cluster configuration. Both devnet and testnet
+support a faucet; run `solana airdrop 1` to obtain some devnet or testnet SOL
+for development and testing.
diff --git a/docs/more/index.md b/docs/more/index.md new file mode 100644 index 000000000..0c7f22484 --- /dev/null +++ b/docs/more/index.md @@ -0,0 +1,6 @@ +--- +metaOnly: true +title: More Information +# note: sort order is set to a really high number so this section is at the bottom of the sidebar +sidebarSortOrder: 9999 +--- diff --git a/docs/programs/debugging.md b/docs/programs/debugging.md new file mode 100644 index 000000000..46e752bc2 --- /dev/null +++ b/docs/programs/debugging.md @@ -0,0 +1,273 @@ +--- +title: "Debugging Programs" +--- + +Solana programs run on-chain, so debugging them in the wild can be challenging. +To make debugging programs easier, developers can write unit tests that directly +test their program's execution via the Solana runtime, or run a local cluster +that will allow RPC clients to interact with their program. + +## Running unit tests + +- [Testing with Rust](/docs/programs/lang-rust.md#how-to-test) +- [Testing with C](/docs/programs/lang-c.md#how-to-test) + +## Logging + +During program execution both the runtime and the program log status and error +messages. + +For information about how to log from a program see the language specific +documentation: + +- [Logging from a Rust program](/docs/programs/lang-rust.md#logging) +- [Logging from a C program](/docs/programs/lang-c.md#logging) + +When running a local cluster the logs are written to stdout as long as they are +enabled via the `RUST_LOG` log mask. From the perspective of program development +it is helpful to focus on just the runtime and program logs and not the rest of +the cluster logs. To focus in on program specific information the following log +mask is recommended: + +```bash +export RUST_LOG=solana_runtime::system_instruction_processor=trace,solana_runtime::message_processor=info,solana_bpf_loader=debug,solana_rbpf=debug +``` + +Log messages coming directly from the program (not the runtime) will be +displayed in the form: + +`Program log: ` + +## Error Handling + +The amount of information that can be communicated via a transaction error is +limited but there are many points of possible failures. The following are +possible failure points and information about what errors to expect and where to +get more information: + +- The SBF loader may fail to parse the program, this should not happen since the + loader has already _finalized_ the program's account data. + - `InstructionError::InvalidAccountData` will be returned as part of the + transaction error. +- The SBF loader may fail to setup the program's execution environment + - `InstructionError::Custom(0x0b9f_0001)` will be returned as part of the + transaction error. "0x0b9f_0001" is the hexadecimal representation of + [`VirtualMachineCreationFailed`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/programs/bpf_loader/src/lib.rs#L44). +- The SBF loader may have detected a fatal error during program executions + (things like panics, memory violations, system call errors, etc...) + - `InstructionError::Custom(0x0b9f_0002)` will be returned as part of the + transaction error. "0x0b9f_0002" is the hexadecimal representation of + [`VirtualMachineFailedToRunProgram`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/programs/bpf_loader/src/lib.rs#L46). +- The program itself may return an error + - `InstructionError::Custom()` will be returned. 
The "user + defined value" must not conflict with any of the + [builtin runtime program errors](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/sdk/program/src/program_error.rs#L87). + Programs typically use enumeration types to define error codes starting at + zero so they won't conflict. + +In the case of `VirtualMachineFailedToRunProgram` errors, more information about +the specifics of what failed are written to the +[program's execution logs](/docs/programs/debugging.md#logging). + +For example, an access violation involving the stack will look something like +this: + +```text +SBF program 4uQeVj5tqViQh7yWWGStvkEG1Zmhx6uasJtWCJziofM failed: out of bounds memory store (insn #615), addr 0x200001e38/8 +``` + +## Monitoring Compute Budget Consumption + +The program can log the remaining number of compute units it will be allowed +before program execution is halted. Programs can use these logs to wrap +operations they wish to profile. + +- [Log the remaining compute units from a Rust program](/docs/programs/lang-rust.md#compute-budget) +- [Log the remaining compute units from a C program](/docs/programs/lang-c.md#compute-budget) + +See [compute budget](/docs/core/runtime.md#compute-budget) for more information. + +## ELF Dump + +The SBF shared object internals can be dumped to a text file to gain more +insight into a program's composition and what it may be doing at runtime. + +- [Create a dump file of a Rust program](/docs/programs/lang-rust.md#elf-dump) +- [Create a dump file of a C program](/docs/programs/lang-c.md#elf-dump) + +## Instruction Tracing + +During execution the runtime SBF interpreter can be configured to log a trace +message for each SBF instruction executed. This can be very helpful for things +like pin-pointing the runtime context leading up to a memory access violation. + +The trace logs together with the [ELF dump](#elf-dump) can provide a lot of +insight (though the traces produce a lot of information). + +To turn on SBF interpreter trace messages in a local cluster configure the +`solana_rbpf` level in `RUST_LOG` to `trace`. For example: + +`export RUST_LOG=solana_rbpf=trace` + +## Source level debugging + +Source level debugging of on-chain programs written in Rust or C can be done +using the `program run` subcommand of `solana-ledger-tool`, and lldb, +distributed with Solana Rust and Clang compiler binary package platform-tools. + +The `solana-ledger-tool program run` subcommand loads a compiled on-chain +program, executes it in RBPF virtual machine and runs a gdb server that accepts +incoming connections from LLDB or GDB. Once lldb is connected to +`solana-ledger-tool` gdbserver, it can control execution of an on-chain program. +Run `solana-ledger-tool program run --help` for an example of specifying input +data for parameters of the program entrypoint function. + +To compile a program for debugging use cargo-build-sbf build utility with the +command line option `--debug`. The utility will generate two loadable files, one +a usual loadable module with the extension `.so`, and another the same loadable +module but containing Dwarf debug information, a file with extension `.debug`. + +To execute a program in debugger, run `solana-ledger-tool program run` with +`-e debugger` command line option. For example, a crate named 'helloworld' is +compiled and an executable program is built in `target/deploy` directory. 
+There should be three files in that directory:
+
+- helloworld-keypair.json -- a keypair for deploying the program,
+- helloworld.debug -- a binary file containing debug information,
+- helloworld.so -- an executable file loadable into the virtual machine.
+
+The command line for running `solana-ledger-tool` would be something like this:
+
+```bash
+solana-ledger-tool program run -l test-ledger -e debugger target/deploy/helloworld.so
+```
+
+Note that `solana-ledger-tool` always loads a ledger database. Most on-chain
+programs interact with a ledger in some manner. Even if a ledger is not needed
+for debugging purposes, it has to be provided to `solana-ledger-tool`. A minimal
+ledger database can be created by running `solana-test-validator`, which creates
+a ledger in the `test-ledger` subdirectory.
+
+In debugger mode, `solana-ledger-tool program run` loads an `.so` file and
+starts listening for an incoming connection from a debugger:
+
+```text
+Waiting for a Debugger connection on "127.0.0.1:9001"...
+```
+
+To connect to `solana-ledger-tool` and execute the program, run lldb. For
+debugging Rust programs it may be beneficial to run the `solana-lldb` wrapper
+around lldb, i.e. at a new shell prompt (other than the one used to start
+`solana-ledger-tool`) run the command:
+
+```bash
+solana-lldb
+```
+
+This script is installed in the platform-tools path. If that path is not added
+to the `PATH` environment variable, it may be necessary to specify the full
+path, e.g.
+
+```text
+~/.cache/solana/v1.35/platform-tools/llvm/bin/solana-lldb
+```
+
+After starting the debugger, load the `.debug` file by entering the following
+command at the debugger prompt:
+
+```text
+(lldb) file target/deploy/helloworld.debug
+```
+
+If the debugger finds the file, it will print something like this:
+
+```text
+Current executable set to '/path/helloworld.debug' (bpf).
+```
+
+Now, connect to the gdb server that `solana-ledger-tool` implements, and debug
+the program as usual. Enter the following command at the lldb prompt:
+
+```text
+(lldb) gdb-remote 127.0.0.1:9001
+```
+
+If the debugger and the gdb server establish a connection, the execution of the
+program will be stopped at the entrypoint function, and lldb should print
+several lines of the source code around the entrypoint function signature. From
+this point on, normal lldb commands can be used to control execution of the
+program being debugged.
+
+### Debugging in an IDE
+
+To debug on-chain programs in the Visual Studio Code IDE, install the CodeLLDB
+extension. Open CodeLLDB Extension Settings. In Advanced settings, change the
+value of the `Lldb: Library` field to the path of `liblldb.so` (or
+`liblldb.dylib` on macOS). For example, on Linux a possible path to the
+Solana-customized lldb is
+`/home/<username>/.cache/solana/v1.33/platform-tools/llvm/lib/liblldb.so`, where
+`<username>` is your Linux system username. This can also be added directly to
+the `~/.config/Code/User/settings.json` file, e.g.
+ +```json +{ + "lldb.library": "/home//.cache/solana/v1.35/platform-tools/llvm/lib/liblldb.so" +} +``` + +In `.vscode` subdirectory of your on-chain project, create two files + +First file is `tasks.json` with the following content + +```json +{ + "version": "2.0.0", + "tasks": [ + { + "label": "build", + "type": "shell", + "command": "cargo build-sbf --debug", + "problemMatcher": [], + "group": { + "kind": "build", + "isDefault": true + } + }, + { + "label": "solana-debugger", + "type": "shell", + "command": "solana-ledger-tool program run -l test-ledger -e debugger ${workspaceFolder}/target/deploy/helloworld.so" + } + ] +} +``` + +The first task is to build the on-chain program using cargo-build-sbf utility. +The second task is to run `solana-ledger-tool program run` in debugger mode. + +Another file is `launch.json` with the following content + +```json +{ + "version": "0.2.0", + "configurations": [ + { + "type": "lldb", + "request": "custom", + "name": "Debug", + "targetCreateCommands": [ + "target create ${workspaceFolder}/target/deploy/helloworld.debug" + ], + "processCreateCommands": ["gdb-remote 127.0.0.1:9001"] + } + ] +} +``` + +This file specifies how to run debugger and to connect it to the gdb server +implemented by `solana-ledger-tool`. + +To start debugging a program, first build it by running the build task. The next +step is to run `solana-debugger` task. The tasks specified in `tasks.json` file +are started from `Terminal >> Run Task...` menu of VSCode. When +`solana-ledger-tool` is running and listening from incoming connections, it's +time to start the debugger. Launch it from VSCode `Run and Debug` menu. If +everything is set up correctly, VSCode will start a debugging session and the +program execution should stop on the entrance into the `entrypoint` function. diff --git a/docs/programs/deploying.md b/docs/programs/deploying.md new file mode 100644 index 000000000..a4bb5ace8 --- /dev/null +++ b/docs/programs/deploying.md @@ -0,0 +1,254 @@ +--- +title: "Deploying Programs" +description: + "Deploying on-chain programs can be done using the Solana CLI using the + Upgradable BPF loader to upload the compiled byte-code to the Solana + blockchain." +--- + +Solana on-chain programs (otherwise known as "smart contracts") are stored in +"executable" accounts on Solana. These accounts are identical to any other +account but with the exception of: + +- having the "executable" flag enabled, and +- the owner being assigned to a BPF loader + +Besides those exceptions, they are governed by the same runtime rules as +non-executable accounts, hold SOL tokens for rent fees, and store a data buffer +which is managed by the BPF loader program. The latest BPF loader is called the +"Upgradeable BPF Loader". + +## Overview of the Upgradeable BPF Loader + +### State accounts + +The Upgradeable BPF loader program supports three different types of state +accounts: + +1. [Program account](https://github.com/solana-labs/solana/blob/master/sdk/program/src/bpf_loader_upgradeable.rs#L34): + This is the main account of an on-chain program and its address is commonly + referred to as a "program id." Program id's are what transaction instructions + reference in order to invoke a program. Program accounts are immutable once + deployed, so you can think of them as a proxy account to the byte-code and + state stored in other accounts. +2. 
[Program data account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L39): + This account is what stores the executable byte-code of an on-chain program. + When a program is upgraded, this account's data is updated with new + byte-code. In addition to byte-code, program data accounts are also + responsible for storing the slot when it was last modified and the address of + the sole account authorized to modify the account (this address can be + cleared to make a program immutable). +3. [Buffer accounts](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/bpf_loader_upgradeable.rs#L27): + These accounts temporarily store byte-code while a program is being actively + deployed through a series of transactions. They also each store the address + of the sole account which is authorized to do writes. + +### Instructions + +The state accounts listed above can only be modified with one of the following +instructions supported by the Upgradeable BPF Loader program: + +1. [Initialize buffer](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L21): + Creates a buffer account and stores an authority address which is allowed to + modify the buffer. +2. [Write](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L28): + Writes byte-code at a specified byte offset inside a buffer account. Writes + are processed in small chunks due to a limitation of Solana transactions + having a maximum serialized size of 1232 bytes. +3. [Deploy](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L77): + Creates both a program account and a program data account. It fills the + program data account by copying the byte-code stored in a buffer account. If + the byte-code is valid, the program account will be set as executable, + allowing it to be invoked. If the byte-code is invalid, the instruction will + fail and all changes are reverted. +4. [Upgrade](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L102): + Fills an existing program data account by copying executable byte-code from a + buffer account. Similar to the deploy instruction, it will only succeed if + the byte-code is valid. +5. [Set authority](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L114): + Updates the authority of a program data or buffer account if the account's + current authority has signed the transaction being processed. If the + authority is deleted without replacement, it can never be set to a new + address and the account can never be closed. +6. [Close](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/sdk/program/src/loader_upgradeable_instruction.rs#L127): + Clears the data of a program data account or buffer account and reclaims the + SOL used for the rent exemption deposit. + +## How `solana program deploy` works + +Deploying a program on Solana requires hundreds, if not thousands of +transactions, due to the max size limit of 1232 bytes for Solana transactions. 
+The Solana CLI takes care of this rapid firing of transactions with the +`solana program deploy` subcommand. The process can be broken down into the +following 3 phases: + +1. [Buffer initialization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2113): + First, the CLI sends a transaction which + [creates a buffer account](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1903) + large enough for the byte-code being deployed. It also invokes the + [initialize buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L320) + to set the buffer authority to restrict writes to the deployer's chosen + address. +2. [Buffer writes](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2129): + Once the buffer account is initialized, the CLI + [breaks up the program byte-code](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1940) + into ~1KB chunks and + [sends transactions at a rate of 100 transactions per second](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L133) + to write each chunk with + [the write buffer instruction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L334). + These transactions are sent directly to the current leader's transaction + processing (TPU) port and are processed in parallel with each other. Once all + transactions have been sent, the CLI + [polls the RPC API with batches of transaction signatures](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/client/src/tpu_client.rs#L216) + to ensure that every write was successful and confirmed. +3. [Finalization](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L1807): + Once writes are completed, the CLI + [sends a final transaction](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/cli/src/program.rs#L2150) + to either + [deploy a new program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L362) + or + [upgrade an existing program](https://github.com/solana-labs/solana/blob/7409d9d2687fba21078a745842c25df805cdf105/programs/bpf_loader/src/lib.rs#L513). + In either case, the byte-code written to the buffer account will be copied + into a program data account and verified. + +## Reclaim rent from program accounts + +The storage of data on the Solana blockchain requires the payment of +[rent](/docs/core/rent.md), including for the byte-code for on-chain programs. +Therefore as you deploy more or larger programs, the amount of rent paid to +remain rent-exempt will also become larger. + +Using the current rent cost model configuration, a rent-exempt account requires +a deposit of ~0.7 SOL per 100KB stored. These costs can have an outsized impact +on developers who deploy their own programs since +[program accounts](/docs/core/accounts.md#executable) are among the largest we +typically see on Solana. 
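+
+You can query the exact deposit required for a given account size from the
+cluster itself; the figure depends on the current rent configuration. Below is a
+minimal sketch, assuming the `@solana/web3.js` SDK and an account holding
+roughly 100KB of data:
+
+```typescript
+import { Connection, LAMPORTS_PER_SOL } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("https://api.mainnet-beta.solana.com");
+
+  // Rent-exempt deposit required for an account holding ~100KB of data.
+  const lamports = await connection.getMinimumBalanceForRentExemption(100 * 1024);
+  console.log(`${(lamports / LAMPORTS_PER_SOL).toFixed(3)} SOL per 100KB of account data`);
+})();
+```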
+ +#### Example of how much data is used for programs + +As a data point of the number of accounts and potential data stored on-chain, +below is the distribution of the largest accounts (at least 100KB) at slot +`103,089,804` on `mainnet-beta` by assigned on-chain program: + +1. **Serum Dex v3**: 1798 accounts +2. **Metaplex Candy Machine**: 1089 accounts +3. **Serum Dex v2**: 864 accounts +4. **Upgradeable BPF Program Loader**: 824 accounts +5. **BPF Program Loader v2**: 191 accounts +6. **BPF Program Loader v1**: 150 accounts + +### Reclaiming buffer accounts + +Buffer accounts are used by the Upgradeable BPF loader to temporarily store +byte-code that is in the process of being deployed on-chain. This temporary +buffer is required when upgrading programs because the currently deployed +program's byte-code cannot be affected by an in-progress upgrade. + +Unfortunately, deploys fail occasionally and instead of reusing the buffer +account, developers might retry their deployment with a new buffer and not +realize that they stored a good chunk of SOL in a forgotten buffer account from +an earlier deploy. + +> As of slot `103,089,804` on `mainnet-beta` there are 276 abandoned buffer +> accounts that could be reclaimed! + +Developers can check if they own any abandoned buffer accounts by using the +Solana CLI: + +```bash +solana program show --buffers --keypair ~/.config/solana/MY_KEYPAIR.json + +Buffer Address | Authority | Balance +9vXW2c3qo6DrLHa1Pkya4Mw2BWZSRYs9aoyoP3g85wCA | 2nr1bHFT86W9tGnyvmYW4vcHKsQB3sVQfnddasz4kExM | 3.41076888 SOL +``` + +And they can close those buffers to reclaim the SOL balance with the following +command: + +```bash +solana program close --buffers --keypair ~/.config/solana/MY_KEYPAIR.json +``` + +#### Fetch the owners of buffer accounts via RPC API + +The owners of all abandoned program deploy buffer accounts can be fetched via +the RPC API: + +```bash +curl http://api.mainnet-beta.solana.com -H "Content-Type: application/json" \ +--data-binary @- << EOF | jq --raw-output '.result | .[] | .account.data[0]' +{ + "jsonrpc":"2.0", "id":1, "method":"getProgramAccounts", + "params":[ + "BPFLoaderUpgradeab1e11111111111111111111111", + { + "dataSlice": {"offset": 5, "length": 32}, + "filters": [{"memcmp": {"offset": 0, "bytes": "2UzHM"}}], + "encoding": "base64" + } + ] +} +EOF +``` + +After re-encoding the base64 encoded keys into base58 and grouping by key, we +see some accounts have over 10 buffer accounts they could close, yikes! + +```bash +'BE3G2F5jKygsSNbPFKHHTxvKpuFXSumASeGweLcei6G3' => 10 buffer accounts +'EsQ179Q8ESroBnnmTDmWEV4rZLkRc3yck32PqMxypE5z' => 10 buffer accounts +'6KXtB89kAgzW7ApFzqhBg5tgnVinzP4NSXVqMAWnXcHs' => 12 buffer accounts +'FinVobfi4tbdMdfN9jhzUuDVqGXfcFnRGX57xHcTWLfW' => 15 buffer accounts +'TESAinbTL2eBLkWqyGA82y1RS6kArHvuYWfkL9dKkbs' => 42 buffer accounts +``` + +### Reclaiming program data accounts + +You may now realize that program data accounts (the accounts that store the +executable byte-code for an on-chain program) can also be closed. + +> **Note:** This does _not_ mean that _program accounts_ can be closed (those +> are immutable and can never be reclaimed, but it's fine they're pretty small). +> It's also important to keep in mind that once program data accounts are +> deleted, they can never be recreated for an existing program. 
Therefore, the +> corresponding program (and its program id) for any closed program data account +> is effectively disabled forever and may not be re-deployed + +While it would be uncommon for developers to need to close program data accounts +since they can be rewritten during upgrades, one potential scenario is that +since program data accounts can't be _resized_. You may wish to deploy your +program at a new address to accommodate larger executables. + +The ability to reclaim program data account rent deposits also makes testing and +experimentation on the `mainnet-beta` cluster a lot less costly since you could +reclaim everything except the transaction fees and a small amount of rent for +the program account. Lastly, this could help developers recover most of their +funds if they mistakenly deploy a program at an unintended address or on the +wrong cluster. + +To view the programs which are owned by your wallet address, you can run: + +```bash +solana -V # must be 1.7.11 or higher! +solana program show --programs --keypair ~/.config/solana/MY_KEYPAIR.json + +Program Id | Slot | Authority | Balance +CN5x9WEusU6pNH66G22SnspVx4cogWLqMfmb85Z3GW7N | 53796672 | 2nr1bHFT86W9tGnyvmYW4vcHKsQB3sVQfnddasz4kExM | 0.54397272 SOL +``` + +To close those program data accounts and reclaim their SOL balance, you can run: + +```bash +solana program close --programs --keypair ~/.config/solana/MY_KEYPAIR.json +``` + +You might be concerned about this feature allowing malicious actors to close a +program in a way that negatively impacts end users. While this is a valid +concern in general, closing program data accounts doesn't make this any more +exploitable than was already possible. + +Even without the ability to close a program data account, any upgradeable +program could be upgraded to a no-op implementation and then have its upgrade +authority cleared to make it immutable forever. This new feature for closing +program data accounts merely adds the ability to reclaim the rent deposit, +disabling a program was already technically possible. diff --git a/docs/programs/examples.md b/docs/programs/examples.md new file mode 100644 index 000000000..7c65bf6c9 --- /dev/null +++ b/docs/programs/examples.md @@ -0,0 +1,39 @@ +--- +title: "Program Examples" +--- + +## Break + +[Break](https://break.solana.com/) is a React app that gives users a visceral +feeling for just how fast and high-performance the Solana network really is. Can +you _break_ the Solana blockchain? During a 15 second play-though, each click of +a button or keystroke sends a new transaction to the cluster. Smash the keyboard +as fast as you can and watch your transactions get finalized in real time while +the network takes it all in stride! + +Break can be played on our Devnet, Testnet and Mainnet Beta networks. Plays are +free on Devnet and Testnet, where the session is funded by a network faucet. On +Mainnet Beta, users pay to play 0.08 SOL per game. The session account can be +funded by a local keystore wallet or by scanning a QR code from Trust Wallet to +transfer the tokens. + +[Click here to play Break](https://break.solana.com/) + +### Build and Run + +First fetch the latest version of the example code: + +```bash +$ git clone https://github.com/solana-labs/break.git +$ cd break +``` + +Next, follow the steps in the git repository's +[README](https://github.com/solana-labs/break/blob/main/README.md). 
+ +## Language Specific + +You can find some language specific example programs at the following locations: + +- [Rust](/docs/programs/lang-rust.md#examples) +- [C](/docs/programs/lang-c.md#examples) diff --git a/docs/programs/faq.md b/docs/programs/faq.md new file mode 100644 index 000000000..dfc5f4cd8 --- /dev/null +++ b/docs/programs/faq.md @@ -0,0 +1,225 @@ +--- +title: "FAQ" +--- + +When writing or interacting with Solana programs, there are common questions or +challenges that often come up. Below are resources to help answer these +questions. + +If not addressed here, ask on +[StackExchange](https://solana.stackexchange.com/questions/ask?tags=solana-program) +with the `solana-program` tag. + +## Limitations + +Developing programs on the Solana blockchain have some inherent limitation +associated with them. Below is a list of common limitation that you may run +into. + +See [limitations of developing programs](/docs/programs/limitations.md) for more +details + +## Berkeley Packet Filter (BPF) + +Solana on-chain programs are compiled via the +[LLVM compiler infrastructure](https://llvm.org/) to an +[Executable and Linkable Format (ELF)](https://en.wikipedia.org/wiki/Executable_and_Linkable_Format) +containing a variation of the +[Berkeley Packet Filter (BPF)](https://en.wikipedia.org/wiki/Berkeley_Packet_Filter) +bytecode. + +Because Solana uses the LLVM compiler infrastructure, a program may be written +in any programming language that can target the LLVM's BPF backend. + +BPF provides an efficient +[instruction set](https://github.com/iovisor/bpf-docs/blob/master/eBPF.md) that +can be executed in an interpreted virtual machine or as efficient just-in-time +compiled native instructions. + +## Memory map + +The virtual address memory map used by Solana SBF programs is fixed and laid out +as follows + +- Program code starts at 0x100000000 +- Stack data starts at 0x200000000 +- Heap data starts at 0x300000000 +- Program input parameters start at 0x400000000 + +The above virtual addresses are start addresses but programs are given access to +a subset of the memory map. The program will panic if it attempts to read or +write to a virtual address that it was not granted access to, and an +`AccessViolation` error will be returned that contains the address and size of +the attempted violation. + +## InvalidAccountData + +This program error can happen for a lot of reasons. Usually, it's caused by +passing an account to the program that the program is not expecting, either in +the wrong position in the instruction or an account not compatible with the +instruction being executed. + +An implementation of a program might also cause this error when performing a +cross-program instruction and forgetting to provide the account for the program +that you are calling. + +## InvalidInstructionData + +This program error can occur while trying to deserialize the instruction, check +that the structure passed in matches exactly the instruction. There may be some +padding between fields. If the program implements the Rust `Pack` trait then try +packing and unpacking the instruction type `T` to determine the exact encoding +the program expects. + +## MissingRequiredSignature + +Some instructions require the account to be a signer; this error is returned if +an account is expected to be signed but is not. 
+ +An implementation of a program might also cause this error when performing a +cross-program invocation that requires a signed program address, but the passed +signer seeds passed to [`invoke_signed`](/docs/core/cpi.md) don't match the +signer seeds used to create the program address +[`create_program_address`](/docs/core/cpi.md#program-derived-addresses). + +## `rand` Rust dependency causes compilation failure + +See +[Rust Project Dependencies](/docs/programs/lang-rust.md#project-dependencies) + +## Rust restrictions + +See [Rust restrictions](/docs/programs/lang-rust.md#restrictions) + +## Stack + +SBF uses stack frames instead of a variable stack pointer. Each stack frame is +4KB in size. + +If a program violates that stack frame size, the compiler will report the +overrun as a warning. + +For example: + +```text +Error: Function _ZN16curve25519_dalek7edwards21EdwardsBasepointTable6create17h178b3d2411f7f082E Stack offset of -30728 exceeded max offset of -4096 by 26632 bytes, please minimize large stack variables +``` + +The message identifies which symbol is exceeding its stack frame, but the name +might be mangled if it is a Rust or C++ symbol. + +> To demangle a Rust symbol use [rustfilt](https://github.com/luser/rustfilt). + +The above warning came from a Rust program, so the demangled symbol name is: + +```bash +rustfilt _ZN16curve25519_dalek7edwards21EdwardsBasepointTable6create17h178b3d2411f7f082E +curve25519_dalek::edwards::EdwardsBasepointTable::create +``` + +To demangle a C++ symbol use `c++filt` from binutils. + +The reason a warning is reported rather than an error is because some dependent +crates may include functionality that violates the stack frame restrictions even +if the program doesn't use that functionality. If the program violates the stack +size at runtime, an `AccessViolation` error will be reported. + +SBF stack frames occupy a virtual address range starting at `0x200000000`. + +## Heap size + +Programs have access to a runtime heap either directly in C or via the Rust +`alloc` APIs. To facilitate fast allocations, a simple 32KB bump heap is +utilized. The heap does not support `free` or `realloc` so use it wisely. + +Internally, programs have access to the 32KB memory region starting at virtual +address 0x300000000 and may implement a custom heap based on the program's +specific needs. + +- [Rust program heap usage](/docs/programs/lang-rust.md#heap) +- [C program heap usage](/docs/programs/lang-c.md#heap) + +## Loaders + +Programs are deployed with and executed by runtime loaders, currently there are +two supported loaders +[BPF Loader](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader.rs#L17) +and +[BPF loader deprecated](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader_deprecated.rs#L14) + +Loaders may support different application binary interfaces so developers must +write their programs for and deploy them to the same loader. If a program +written for one loader is deployed to a different one the result is usually a +`AccessViolation` error due to mismatched deserialization of the program's input +parameters. + +For all practical purposes program should always be written to target the latest +BPF loader and the latest loader is the default for the command-line interface +and the javascript APIs. 
+ +For language specific information about implementing a program for a particular +loader see: + +- [Rust program entrypoints](/docs/programs/lang-rust.md#program-entrypoint) +- [C program entrypoints](/docs/programs/lang-c.md#program-entrypoint) + +### Deployment + +SBF program deployment is the process of uploading a BPF shared object into a +program account's data and marking the account executable. A client breaks the +SBF shared object into smaller pieces and sends them as the instruction data of +[`Write`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/sdk/program/src/loader_instruction.rs#L13) +instructions to the loader where loader writes that data into the program's +account data. Once all the pieces are received the client sends a +[`Finalize`](https://github.com/solana-labs/solana/blob/bc7133d7526a041d1aaee807b80922baa89b6f90/sdk/program/src/loader_instruction.rs#L30) +instruction to the loader, the loader then validates that the SBF data is valid +and marks the program account as _executable_. Once the program account is +marked executable, subsequent transactions may issue instructions for that +program to process. + +When an instruction is directed at an executable SBF program the loader +configures the program's execution environment, serializes the program's input +parameters, calls the program's entrypoint, and reports any errors encountered. + +For further information, see [deploying programs](/docs/programs/deploying.md). + +### Input Parameter Serialization + +SBF loaders serialize the program input parameters into a byte array that is +then passed to the program's entrypoint, where the program is responsible for +deserializing it on-chain. One of the changes between the deprecated loader and +the current loader is that the input parameters are serialized in a way that +results in various parameters falling on aligned offsets within the aligned byte +array. This allows deserialization implementations to directly reference the +byte array and provide aligned pointers to the program. + +For language specific information about serialization see: + +- [Rust program parameter deserialization](/docs/programs/lang-rust.md#parameter-deserialization) +- [C program parameter deserialization](/docs/programs/lang-c.md#parameter-deserialization) + +The latest loader serializes the program input parameters as follows (all +encoding is little endian): + +- 8 bytes unsigned number of accounts +- For each account + - 1 byte indicating if this is a duplicate account, if not a duplicate then + the value is 0xff, otherwise the value is the index of the account it is a + duplicate of. + - If duplicate: 7 bytes of padding + - If not duplicate: + - 1 byte boolean, true if account is a signer + - 1 byte boolean, true if account is writable + - 1 byte boolean, true if account is executable + - 4 bytes of padding + - 32 bytes of the account public key + - 32 bytes of the account's owner public key + - 8 bytes unsigned number of lamports owned by the account + - 8 bytes unsigned number of bytes of account data + - x bytes of account data + - 10k bytes of padding, used for realloc + - enough padding to align the offset to 8 bytes. 
+    - 8 bytes rent epoch
+- 8 bytes of unsigned number of instruction data
+- x bytes of instruction data
+- 32 bytes of the program id
diff --git a/docs/programs/index.md b/docs/programs/index.md
new file mode 100644
index 000000000..680e54b82
--- /dev/null
+++ b/docs/programs/index.md
@@ -0,0 +1,94 @@
+---
+title: Developing on-chain programs
+sidebarLabel: Developing Programs
+---
+
+Developers can write and deploy their own programs to the Solana blockchain.
+While developing these "on-chain" programs can seem cumbersome, the entire
+process can be broadly summarized into a few key steps.
+
+## On-chain program development lifecycle
+
+1. Set up your development environment
+2. Write your program
+3. Compile the program
+4. Generate the program's public address
+5. Deploy the program
+
+### 1. Set up your development environment
+
+The most robust way of getting started with Solana development is to
+[install the Solana CLI](https://docs.solanalabs.com/cli/install) tools on
+your local computer. This will give you the most powerful development
+environment.
+
+Some developers may also opt to use [Solana Playground](https://beta.solpg.io/),
+a browser-based IDE that lets you write, build, and deploy on-chain programs,
+all from your browser, with no installation needed.
+
+### 2. Write your program
+
+Writing Solana programs is most commonly done using the Rust language. These
+Rust programs are effectively the same as creating a traditional
+[Rust library](https://doc.rust-lang.org/rust-by-example/crates/lib.html).
+
+> You can read more about other [supported languages](#supported-languages)
+> below.
+
+### 3. Compile the program
+
+Once the program is written, it must be compiled down to
+[Berkeley Packet Filter](/docs/programs/faq.md#berkeley-packet-filter-bpf)
+byte-code that will then be deployed to the blockchain.
+
+### 4. Generate the program's public address
+
+Using the [Solana CLI](https://docs.solanalabs.com/cli/install), the developer
+will generate a new unique [Keypair](/docs/terminology.md#keypair) for the new
+program. The public address (aka
+[Pubkey](/docs/terminology.md#public-key-pubkey)) from this Keypair will be used
+on-chain as the program's public address (aka
+[`programId`](/docs/terminology.md#program-id)).
+
+### 5. Deploy the program
+
+Then, again using the CLI, the compiled program can be deployed to the selected
+blockchain cluster by creating many transactions containing the program's
+byte-code. Due to the transaction memory size limitations, each transaction
+effectively sends small chunks of the program to the blockchain in a rapid-fire
+manner.
+
+Once the entire program has been sent to the blockchain, a final transaction is
+sent to write all of the buffered byte-code to the program's data account. This
+either marks the new program as
+[`executable`](/docs/core/accounts.md#executable) or completes the process of
+upgrading an existing program (if it already existed).
+
+## Supported languages
+
+Solana programs are typically written in the
+[Rust language](/docs/programs/lang-rust.md), but
+[C/C++](/docs/programs/lang-c.md) are also supported.
+
+There are also various community-driven efforts to enable writing on-chain
+programs using other languages, including:
+
+- Python via [Seahorse](https://seahorse.dev/) (which acts as a wrapper around
+  the Rust-based Anchor framework)
+
+## Example programs
+
+You can also explore the [Program Examples](/docs/programs/examples.md) for
+examples of on-chain programs.
+ +## Limitations + +As you dive deeper into program development, it is important to understand some +of the important limitations associated with on-chain programs. + +Read more details on the [Limitations](/docs/programs/limitations.md) page + +## Frequently asked questions + +Discover many of the [frequently asked questions](/docs/programs/faq.md) other +developers have about writing/understanding Solana programs. diff --git a/docs/programs/lang-c.md b/docs/programs/lang-c.md new file mode 100644 index 000000000..d8b745d7b --- /dev/null +++ b/docs/programs/lang-c.md @@ -0,0 +1,192 @@ +--- +title: "Developing with C" +--- + +Solana supports writing on-chain programs using the C and C++ programming +languages. + +## Project Layout + +C projects are laid out as follows: + +```text +/src/ +/makefile +``` + +The `makefile` should contain the following: + +```bash +OUT_DIR := +include ~/.local/share/solana/install/active_release/bin/sdk/sbf/c/sbf.mk +``` + +The sbf-sdk may not be in the exact place specified above but if you setup your +environment per [How to Build](#how-to-build) then it should be. + +## How to Build + +First setup the environment: + +- Install the latest Rust stable from https://rustup.rs +- Install the latest + [Solana command-line tools](https://docs.solanalabs.com/cli/install) + +Then build using make: + +```bash +make -C +``` + +## How to Test + +Solana uses the [Criterion](https://github.com/Snaipe/Criterion) test framework +and tests are executed each time the program is built +[How to Build](#how-to-build). + +To add tests, create a new file next to your source file named +`test_.c` and populate it with criterion test cases. See the +[Criterion docs](https://criterion.readthedocs.io/en/master) for information on +how to write a test case. + +## Program Entrypoint + +Programs export a known entrypoint symbol which the Solana runtime looks up and +calls when invoking a program. Solana supports multiple versions of the SBF +loader and the entrypoints may vary between them. Programs must be written for +and deployed to the same loader. For more details see the +[FAQ section on Loaders](/docs/programs/faq.md#loaders). + +Currently there are two supported loaders +[SBF Loader](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader.rs#L17) +and +[SBF loader deprecated](https://github.com/solana-labs/solana/blob/7ddf10e602d2ed87a9e3737aa8c32f1db9f909d8/sdk/program/src/bpf_loader_deprecated.rs#L14). + +They both have the same raw entrypoint definition, the following is the raw +symbol that the runtime looks up and calls: + +```c +extern uint64_t entrypoint(const uint8_t *input) +``` + +This entrypoint takes a generic byte array which contains the serialized program +parameters (program id, accounts, instruction data, etc...). To deserialize the +parameters each loader contains its own [helper function](#serialization). + +### Serialization + +Each loader provides a helper function that deserializes the program's input +parameters into C types: + +- [SBF Loader deserialization](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/sbf/c/inc/solana_sdk.h#L304) +- [SBF Loader deprecated deserialization](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/sbf/c/inc/deserialize_deprecated.h#L25) + +Some programs may want to perform deserialization themselves, and they can by +providing their own implementation of the [raw entrypoint](#program-entrypoint). 
+Take note that the provided deserialization functions retain references back to
+the serialized byte array for variables that the program is allowed to modify
+(lamports, account data). The reason for this is that upon return the loader
+will read those modifications so they may be committed. If a program implements
+its own deserialization function, it needs to ensure that any modifications the
+program wishes to commit are written back into the input byte array.
+
+Details on how the loader serializes the program inputs can be found in the
+[Input Parameter Serialization](/docs/programs/faq.md#input-parameter-serialization)
+docs.
+
+## Data Types
+
+The loader's deserialization helper function populates the
+[SolParameters](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/sbf/c/inc/solana_sdk.h#L276)
+structure:
+
+```c
+/**
+ * Structure that the program's entrypoint input data is deserialized into.
+ */
+typedef struct {
+  SolAccountInfo* ka; /** Pointer to an array of SolAccountInfo, must already
+                          point to an array of SolAccountInfos */
+  uint64_t ka_num; /** Number of SolAccountInfo entries in `ka` */
+  const uint8_t *data; /** pointer to the instruction data */
+  uint64_t data_len; /** Length in bytes of the instruction data */
+  const SolPubkey *program_id; /** program_id of the currently executing program */
+} SolParameters;
+```
+
+`ka` is an ordered array of the accounts referenced by the instruction,
+represented as
+[SolAccountInfo](https://github.com/solana-labs/solana/blob/8415c22b593f164020adc7afe782e8041d756ddf/sdk/sbf/c/inc/solana_sdk.h#L173)
+structures. An account's place in the array signifies its meaning; for example,
+when transferring lamports an instruction may define the first account as the
+source and the second as the destination.
+
+The members of the `SolAccountInfo` structure are read-only except for
+`lamports` and `data`. Both may be modified by the program in accordance with
+the [runtime enforcement policy](/docs/core/runtime.md#policy). When an
+instruction references the same account multiple times there may be duplicate
+`SolAccountInfo` entries in the array, but they all point back to the original
+input byte array. A program should handle these cases carefully to avoid
+overlapping read/writes to the same buffer. If a program implements its own
+deserialization function, care should be taken to handle duplicate accounts
+appropriately.
+
+`data` is the general purpose byte array from the
+[instruction's instruction data](/docs/core/transactions.md#instruction-data)
+being processed.
+
+`program_id` is the public key of the currently executing program.
+
+## Heap
+
+C programs can allocate memory via the system call
+[`calloc`](https://github.com/solana-labs/solana/blob/c3d2d2134c93001566e1e56f691582f379b5ae55/sdk/sbf/c/inc/solana_sdk.h#L245)
+or implement their own heap on top of the 32KB heap region starting at virtual
+address 0x300000000. The heap region is also used by `calloc`, so if a program
+implements its own heap it should not also call `calloc`.
+
+## Logging
+
+The runtime provides two system calls that take data and log it to the program
+logs.
+ +- [`sol_log(const char*)`](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/sbf/c/inc/solana_sdk.h#L128) +- [`sol_log_64(uint64_t, uint64_t, uint64_t, uint64_t, uint64_t)`](https://github.com/solana-labs/solana/blob/d2ee9db2143859fa5dc26b15ee6da9c25cc0429c/sdk/sbf/c/inc/solana_sdk.h#L134) + +The [debugging](/docs/programs/debugging.md#logging) section has more +information about working with program logs. + +## Compute Budget + +Use the system call `sol_remaining_compute_units()` to return a `u64` indicating +the number of compute units remaining for this transaction. + +Use the system call +[`sol_log_compute_units()`](https://github.com/solana-labs/solana/blob/d3a3a7548c857f26ec2cb10e270da72d373020ec/sdk/sbf/c/inc/solana_sdk.h#L140) +to log a message containing the remaining number of compute units the program +may consume before execution is halted + +See [compute budget](/docs/core/runtime.md#compute-budget) for more information. + +## ELF Dump + +The SBF shared object internals can be dumped to a text file to gain more +insight into a program's composition and what it may be doing at runtime. The +dump will contain both the ELF information as well as a list of all the symbols +and the instructions that implement them. Some of the SBF loader's error log +messages will reference specific instruction numbers where the error occurred. +These references can be looked up in the ELF dump to identify the offending +instruction and its context. + +To create a dump file: + +```bash +$ cd +$ make dump_ +``` + +## Examples + +The +[Solana Program Library github](https://github.com/solana-labs/solana-program-library/tree/master/examples/c) +repo contains a collection of C examples diff --git a/docs/programs/lang-rust.md b/docs/programs/lang-rust.md new file mode 100644 index 000000000..a5e02fcf6 --- /dev/null +++ b/docs/programs/lang-rust.md @@ -0,0 +1,389 @@ +--- +title: "Developing with Rust" +--- + +Solana supports writing on-chain programs using the +[Rust](https://www.rust-lang.org/) programming language. + +## Project Layout + +Solana Rust programs follow the typical +[Rust project layout](https://doc.rust-lang.org/cargo/guide/project-layout.html): + +```text +/inc/ +/src/ +/Cargo.toml +``` + +Solana Rust programs may depend directly on each other in order to gain access +to instruction helpers when making +[cross-program invocations](/docs/core/cpi.md). When doing so it's important to +not pull in the dependent program's entrypoint symbols because they may conflict +with the program's own. To avoid this, programs should define an `no-entrypoint` +feature in `Cargo.toml` and use to exclude the entrypoint. + +- [Define the feature](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token/program/Cargo.toml#L12) +- [Exclude the entrypoint](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token/program/src/lib.rs#L12) + +Then when other programs include this program as a dependency, they should do so +using the `no-entrypoint` feature. + +- [Include without entrypoint](https://github.com/solana-labs/solana-program-library/blob/fca9836a2c8e18fc7e3595287484e9acd60a8f64/token-swap/program/Cargo.toml#L22) + +## Project Dependencies + +At a minimum, Solana Rust programs must pull in the +[solana-program](https://crates.io/crates/solana-program) crate. 
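+
+Putting the layout notes above together, a minimal sketch of a program crate
+that depends only on `solana-program` and gates its entrypoint behind the
+`no-entrypoint` feature described earlier might look like this (the feature
+wiring in `Cargo.toml` is assumed to follow the links above):
+
+```rust
+use solana_program::{
+    account_info::AccountInfo, entrypoint::ProgramResult, msg, pubkey::Pubkey,
+};
+
+// Only register the entrypoint when this crate is built as the on-chain
+// program itself; dependents build with the `no-entrypoint` feature to skip it.
+#[cfg(not(feature = "no-entrypoint"))]
+solana_program::entrypoint!(process_instruction);
+
+pub fn process_instruction(
+    program_id: &Pubkey,
+    _accounts: &[AccountInfo],
+    _instruction_data: &[u8],
+) -> ProgramResult {
+    msg!("{} invoked", program_id);
+    Ok(())
+}
+```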
+
+Solana SBF programs have some [restrictions](#restrictions) that may prevent the
+inclusion of some crates as dependencies or require special handling.
+
+For example:
+
+- Crates that require the architecture be a subset of the ones supported by the
+  official toolchain. There is no workaround for this unless that crate is
+  forked and SBF added to its architecture checks.
+- Crates may depend on `rand`, which is not supported in Solana's deterministic
+  program environment. To include a `rand` dependent crate refer to
+  [Depending on Rand](#depending-on-rand).
+- Crates may overflow the stack even if the stack overflowing code isn't
+  included in the program itself. For more information refer to
+  [Stack](/docs/programs/faq.md#stack).
+
+## How to Build
+
+First, set up the environment:
+
+- Install the latest Rust stable from https://rustup.rs/
+- Install the latest
+  [Solana command-line tools](https://docs.solanalabs.com/cli/install)
+
+The normal cargo build is available for building programs against your host
+machine, which can be used for unit testing:
+
+```bash
+$ cargo build
+```
+
+To build a specific program, such as SPL Token, for the Solana SBF target, which
+can be deployed to the cluster:
+
+```bash
+$ cd <the program directory>
+$ cargo build-bpf
+```
+
+## How to Test
+
+Solana programs can be unit tested via the traditional `cargo test` mechanism by
+exercising program functions directly.
+
+To help facilitate testing in an environment that more closely matches a live
+cluster, developers can use the
+[`program-test`](https://crates.io/crates/solana-program-test) crate. The
+`program-test` crate starts up a local instance of the runtime and allows tests
+to send multiple transactions while keeping state for the duration of the test.
+
+For more information, the
+[test in sysvar example](https://github.com/solana-labs/solana-program-library/blob/master/examples/rust/sysvar/tests/functional.rs)
+shows how an instruction containing a sysvar account is sent and processed by
+the program.
+
+## Program Entrypoint
+
+Programs export a known entrypoint symbol which the Solana runtime looks up and
+calls when invoking a program. Solana supports multiple versions of the BPF
+loader and the entrypoints may vary between them. Programs must be written for
+and deployed to the same loader. For more details see the
+[FAQ section on Loaders](/docs/programs/faq.md#loaders).
+
+Currently there are two supported loaders:
+[BPF Loader](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/bpf_loader.rs#L17)
+and
+[BPF loader deprecated](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/bpf_loader_deprecated.rs#L14).
+
+They both have the same raw entrypoint definition; the following is the raw
+symbol that the runtime looks up and calls:
+
+```rust
+#[no_mangle]
+pub unsafe extern "C" fn entrypoint(input: *mut u8) -> u64;
+```
+
+This entrypoint takes a generic byte array which contains the serialized program
+parameters (program id, accounts, instruction data, etc.). To deserialize the
+parameters, each loader contains its own wrapper macro that exports the raw
+entrypoint, deserializes the parameters, calls a user-defined instruction
+processing function, and returns the results.
+ +You can find the entrypoint macros here: + +- [BPF Loader's entrypoint macro](https://github.com/solana-labs/solana/blob/9b1199cdb1b391b00d510ed7fc4866bdf6ee4eb3/sdk/program/src/entrypoint.rs#L42) +- [BPF Loader deprecated's entrypoint macro](https://github.com/solana-labs/solana/blob/9b1199cdb1b391b00d510ed7fc4866bdf6ee4eb3/sdk/program/src/entrypoint_deprecated.rs#L38) + +The program defined instruction processing function that the entrypoint macros +call must be of this form: + +```rust +pub type ProcessInstruction = + fn(program_id: &Pubkey, accounts: &[AccountInfo], instruction_data: &[u8]) -> ProgramResult; +``` + +### Parameter Deserialization + +Each loader provides a helper function that deserializes the program's input +parameters into Rust types. The entrypoint macros automatically calls the +deserialization helper: + +- [BPF Loader deserialization](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint.rs#L146) +- [BPF Loader deprecated deserialization](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint_deprecated.rs#L57) + +Some programs may want to perform deserialization themselves and they can by +providing their own implementation of the [raw entrypoint](#program-entrypoint). +Take note that the provided deserialization functions retain references back to +the serialized byte array for variables that the program is allowed to modify +(lamports, account data). The reason for this is that upon return the loader +will read those modifications so they may be committed. If a program implements +their own deserialization function they need to ensure that any modifications +the program wishes to commit be written back into the input byte array. + +Details on how the loader serializes the program inputs can be found in the +[Input Parameter Serialization](/docs/programs/faq.md#input-parameter-serialization) +docs. + +### Data Types + +The loader's entrypoint macros call the program defined instruction processor +function with the following parameters: + +```rust +program_id: &Pubkey, +accounts: &[AccountInfo], +instruction_data: &[u8] +``` + +The program id is the public key of the currently executing program. + +The accounts is an ordered slice of the accounts referenced by the instruction +and represented as an +[AccountInfo](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/account_info.rs#L12) +structures. An account's place in the array signifies its meaning, for example, +when transferring lamports an instruction may define the first account as the +source and the second as the destination. + +The members of the `AccountInfo` structure are read-only except for `lamports` +and `data`. Both may be modified by the program in accordance with the +[runtime enforcement policy](/docs/core/runtime.md#policy). Both of these +members are protected by the Rust `RefCell` construct, so they must be borrowed +to read or write to them. The reason for this is they both point back to the +original input byte array, but there may be multiple entries in the accounts +slice that point to the same account. Using `RefCell` ensures that the program +does not accidentally perform overlapping read/writes to the same underlying +data via multiple `AccountInfo` structures. If a program implements their own +deserialization function care should be taken to handle duplicate accounts +appropriately. 
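+
+To make the `RefCell` behavior concrete, here is a hedged sketch of a handler
+that borrows `lamports` and `data` before touching them. The account roles and
+the hard-coded amount are assumptions for illustration only; a real program
+would also validate owners, signers, and data sizes:
+
+```rust
+use solana_program::{
+    account_info::{next_account_info, AccountInfo},
+    entrypoint::ProgramResult,
+    program_error::ProgramError,
+    pubkey::Pubkey,
+};
+
+pub fn process_instruction(
+    _program_id: &Pubkey,
+    accounts: &[AccountInfo],
+    _instruction_data: &[u8],
+) -> ProgramResult {
+    let accounts_iter = &mut accounts.iter();
+    let source = next_account_info(accounts_iter)?;
+    let destination = next_account_info(accounts_iter)?;
+
+    let amount: u64 = 1_000;
+
+    // `lamports` is wrapped in a `RefCell`: borrow mutably, then dereference.
+    **source.try_borrow_mut_lamports()? -= amount;
+    **destination.try_borrow_mut_lamports()? += amount;
+
+    // `data` is borrowed the same way before reading or writing.
+    let mut data = source.try_borrow_mut_data()?;
+    if data.is_empty() {
+        return Err(ProgramError::InvalidAccountData);
+    }
+    data[0] = 1; // e.g. record that the account was touched (illustrative)
+
+    Ok(())
+}
+```
+
+Because both borrows go through `RefCell`, attempting to hold two mutable
+borrows of the same underlying account at once fails at runtime instead of
+silently producing overlapping writes.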
+ +The instruction data is the general purpose byte array from the +[instruction's instruction data](/docs/core/transactions.md#instruction-data) +being processed. + +## Heap + +Rust programs implement the heap directly by defining a custom +[`global_allocator`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/entrypoint.rs#L72) + +Programs may implement their own `global_allocator` based on its specific needs. +Refer to the [custom heap example](#examples) for more information. + +## Restrictions + +On-chain Rust programs support most of Rust's libstd, libcore, and liballoc, as +well as many 3rd party crates. + +There are some limitations since these programs run in a resource-constrained, +single-threaded environment, as well as being deterministic: + +- No access to + - `rand` + - `std::fs` + - `std::net` + - `std::future` + - `std::process` + - `std::sync` + - `std::task` + - `std::thread` + - `std::time` +- Limited access to: + - `std::hash` + - `std::os` +- Bincode is extremely computationally expensive in both cycles and call depth + and should be avoided +- String formatting should be avoided since it is also computationally + expensive. +- No support for `println!`, `print!`, the Solana [logging helpers](#logging) + should be used instead. +- The runtime enforces a limit on the number of instructions a program can + execute during the processing of one instruction. See + [computation budget](/docs/core/runtime.md#compute-budget) for more + information. + +## Depending on Rand + +Programs are constrained to run deterministically, so random numbers are not +available. Sometimes a program may depend on a crate that depends itself on +`rand` even if the program does not use any of the random number functionality. +If a program depends on `rand`, the compilation will fail because there is no +`get-random` support for Solana. The error will typically look like this: + +```bash +error: target is not supported, for more information see: https://docs.rs/getrandom/#unsupported-targets + --> /Users/jack/.cargo/registry/src/github.com-1ecc6299db9ec823/getrandom-0.1.14/src/lib.rs:257:9 + | +257 | / compile_error!("\ +258 | | target is not supported, for more information see: \ +259 | | https://docs.rs/getrandom/#unsupported-targets\ +260 | | "); + | |___________^ +``` + +To work around this dependency issue, add the following dependency to the +program's `Cargo.toml`: + +```rust +getrandom = { version = "0.1.14", features = ["dummy"] } +``` + +or if the dependency is on getrandom v0.2 add: + +```rust +getrandom = { version = "0.2.2", features = ["custom"] } +``` + +## Logging + +Rust's `println!` macro is computationally expensive and not supported. Instead +the helper macro +[`msg!`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/log.rs#L33) +is provided. + +`msg!` has two forms: + +```rust +msg!("A string"); +``` + +or + +```rust +msg!(0_64, 1_64, 2_64, 3_64, 4_64); +``` + +Both forms output the results to the program logs. If a program so wishes they +can emulate `println!` by using `format!`: + +```rust +msg!("Some variable: {:?}", variable); +``` + +The [debugging](/docs/programs/debugging.md#logging) section has more +information about working with program logs the [Rust examples](#examples) +contains a logging example. + +## Panicking + +Rust's `panic!`, `assert!`, and internal panic results are printed to the +[program logs](/docs/programs/debugging.md#logging) by default. 
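+
+For example, an instruction processor that trips an assertion, such as this
+minimal sketch (not necessarily the exact program that produced the log below):
+
+```rust
+use solana_program::{
+    account_info::AccountInfo, entrypoint, entrypoint::ProgramResult, pubkey::Pubkey,
+};
+
+entrypoint!(process_instruction);
+
+// Deliberately fails an assertion so the default panic handler fires.
+fn process_instruction(
+    _program_id: &Pubkey,
+    _accounts: &[AccountInfo],
+    _instruction_data: &[u8],
+) -> ProgramResult {
+    assert_eq!(1, 2);
+    Ok(())
+}
+```
+
+will produce log output similar to the following: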
+ +```bash +INFO solana_runtime::message_processor] Finalized account CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ +INFO solana_runtime::message_processor] Call SBF program CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ +INFO solana_runtime::message_processor] Program log: Panicked at: 'assertion failed: `(left == right)` + left: `1`, + right: `2`', rust/panic/src/lib.rs:22:5 +INFO solana_runtime::message_processor] SBF program consumed 5453 of 200000 units +INFO solana_runtime::message_processor] SBF program CGLhHSuWsp1gT4B7MY2KACqp9RUwQRhcUFfVSuxpSajZ failed: BPF program panicked +``` + +### Custom Panic Handler + +Programs can override the default panic handler by providing their own +implementation. + +First define the `custom-panic` feature in the program's `Cargo.toml` + +```rust +[features] +default = ["custom-panic"] +custom-panic = [] +``` + +Then provide a custom implementation of the panic handler: + +```rust +#[cfg(all(feature = "custom-panic", target_os = "solana"))] +#[no_mangle] +fn custom_panic(info: &core::panic::PanicInfo<'_>) { + solana_program::msg!("program custom panic enabled"); + solana_program::msg!("{}", info); +} +``` + +In the above snippit, the default implementation is shown, but developers may +replace that with something that better suits their needs. + +One of the side effects of supporting full panic messages by default is that +programs incur the cost of pulling in more of Rust's `libstd` implementation +into program's shared object. Typical programs will already be pulling in a fair +amount of `libstd` and may not notice much of an increase in the shared object +size. But programs that explicitly attempt to be very small by avoiding `libstd` +may take a significant impact (~25kb). To eliminate that impact, programs can +provide their own custom panic handler with an empty implementation. + +```rust +#[cfg(all(feature = "custom-panic", target_os = "solana"))] +#[no_mangle] +fn custom_panic(info: &core::panic::PanicInfo<'_>) { + // Do nothing to save space +} +``` + +## Compute Budget + +Use the system call `sol_remaining_compute_units()` to return a `u64` indicating +the number of compute units remaining for this transaction. + +Use the system call +[`sol_log_compute_units()`](https://github.com/solana-labs/solana/blob/d9b0fc0e3eec67dfe4a97d9298b15969b2804fab/sdk/program/src/log.rs#L141) +to log a message containing the remaining number of compute units the program +may consume before execution is halted + +See [compute budget](/docs/core/runtime.md#compute-budget) for more information. + +## ELF Dump + +The SBF shared object internals can be dumped to a text file to gain more +insight into a program's composition and what it may be doing at runtime. The +dump will contain both the ELF information as well as a list of all the symbols +and the instructions that implement them. Some of the BPF loader's error log +messages will reference specific instruction numbers where the error occurred. +These references can be looked up in the ELF dump to identify the offending +instruction and its context. + +To create a dump file: + +```bash +$ cd +$ cargo build-bpf --dump +``` + +## Examples + +The +[Solana Program Library GitHub](https://github.com/solana-labs/solana-program-library/tree/master/examples/rust) +repo contains a collection of Rust examples. + +The +[Solana Developers Program Examples GitHub](https://github.com/solana-developers/program-examples) +repo also contains a collection of beginner to intermediate Rust program +examples. 
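+
+As a complement to the examples above, the following sketch shows one way a
+program might bracket a potentially expensive section with the compute-unit
+logging described in the Compute Budget section (the "work" in the middle is a
+placeholder, not part of any Solana API):
+
+```rust
+use solana_program::{
+    account_info::AccountInfo, entrypoint::ProgramResult, log::sol_log_compute_units, msg,
+    pubkey::Pubkey,
+};
+
+pub fn process_instruction(
+    _program_id: &Pubkey,
+    _accounts: &[AccountInfo],
+    instruction_data: &[u8],
+) -> ProgramResult {
+    sol_log_compute_units(); // log the budget remaining before the work
+
+    // Placeholder for the interesting part of the instruction.
+    let checksum: u64 = instruction_data.iter().map(|b| *b as u64).sum();
+    msg!("checksum: {}", checksum);
+
+    sol_log_compute_units(); // log the budget remaining after the work
+    Ok(())
+}
+```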
diff --git a/docs/programs/limitations.md b/docs/programs/limitations.md
new file mode 100644
index 000000000..54c830b38
--- /dev/null
+++ b/docs/programs/limitations.md
@@ -0,0 +1,87 @@
+---
+title: "Limitations"
+---
+
+Developing programs on the Solana blockchain has some inherent limitations
+associated with it. Below is a list of common limitations that you may run
+into.
+
+## Rust libraries
+
+Since Rust-based on-chain programs must be deterministic while running in a
+resource-constrained, single-threaded environment, they have some limitations on
+various libraries.
+
+See
+[Developing with Rust - Restrictions](/docs/programs/lang-rust.md#restrictions)
+for a detailed breakdown of these restrictions and limitations.
+
+## Compute budget
+
+To prevent abuse of the blockchain's computational resources, each transaction
+is allocated a [compute budget](/docs/terminology.md#compute-budget). Exceeding
+this compute budget will result in the transaction failing.
+
+See [computational constraints](/docs/core/runtime.md#compute-budget) in the
+Runtime for more specific details.
+
+## Call stack depth - `CallDepthExceeded` error
+
+Solana programs are constrained to run quickly, and to facilitate this, the
+program's call stack is limited to a max depth of **64 frames**.
+
+When a program exceeds the allowed call stack depth limit, it will receive the
+`CallDepthExceeded` error.
+
+## CPI call depth - `CallDepth` error
+
+Cross-program invocations allow programs to invoke other programs directly, but
+the depth is currently constrained to `4`.
+
+When a program exceeds the allowed
+[cross-program invocation call depth](/docs/core/cpi.md#call-depth), it will
+receive a `CallDepth` error.
+
+## Float Rust types support
+
+Programs support a limited subset of Rust's float operations. If a program
+attempts to use a float operation that is not supported, the runtime will report
+an unresolved symbol error.
+
+Float operations are performed via software libraries, specifically LLVM's float
+built-ins. Because they are emulated in software, they consume more compute
+units than integer operations. In general, fixed-point operations are
+recommended where possible (see the sketch at the end of this page).
+
+The
+[Solana Program Library math](https://github.com/solana-labs/solana-program-library/tree/master/libraries/math)
+tests will report the performance of some math operations. To run the test, sync
+the repo and run:
+
+```sh
+cargo test-sbf -- --nocapture --test-threads=1
+```
+
+Recent results show that float operations take more instructions than their
+integer equivalents. Fixed-point implementations may vary, but will also cost
+less than the float equivalents:
+
+```text
+          u64   f32
+Multiply    8   176
+Divide      9   219
+```
+
+## Static writable data
+
+Program shared objects do not support writable shared data. Programs are shared
+between multiple parallel executions using the same shared read-only code and
+data. This means that developers should not include any static writable or
+global variables in programs. In the future a copy-on-write mechanism could be
+added to support writable data.
+
+## Signed division
+
+The SBF instruction set does not support
+[signed division](https://www.kernel.org/doc/html/latest/bpf/bpf_design_QA.html#q-why-there-is-no-bpf-sdiv-for-signed-divide-operation).
+Adding a signed division instruction is under consideration.
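+
+To illustrate the fixed-point approach recommended in the float section above,
+here is a small sketch using plain `u64` arithmetic with an assumed scale factor
+of one million (the scale, function name, and test values are illustrative
+assumptions, not part of any Solana API):
+
+```rust
+/// Scale factor for a simple fixed-point representation (6 decimal places).
+const SCALE: u128 = 1_000_000;
+
+/// Multiply two fixed-point `u64` values, keeping the result in the same
+/// scale. The intermediate product uses `u128` to avoid overflow; `None`
+/// signals that the final result does not fit in a `u64`.
+pub fn fixed_mul(a: u64, b: u64) -> Option<u64> {
+    let product = (a as u128).checked_mul(b as u128)? / SCALE;
+    u64::try_from(product).ok()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn multiplies_in_fixed_point() {
+        // 1.5 * 2.25 = 3.375, all values expressed in millionths.
+        assert_eq!(fixed_mul(1_500_000, 2_250_000), Some(3_375_000));
+    }
+}
+```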
diff --git a/docs/rpc.md b/docs/rpc.md new file mode 100644 index 000000000..75a50157f --- /dev/null +++ b/docs/rpc.md @@ -0,0 +1,10 @@ +--- +title: JSON RPC Methods +sidebarSortOrder: 0 +--- + +This file should not be edited since it is not intended to be displayed. This +file only exist to create a link within the master documentation sidebar. + +The Solana JSON RPC documentation is available within the +[`rpc` directory](./rpc/) diff --git a/docs/rpc/deprecated/getConfirmedBlock.mdx b/docs/rpc/deprecated/getConfirmedBlock.mdx new file mode 100644 index 000000000..7ce3c57e4 --- /dev/null +++ b/docs/rpc/deprecated/getConfirmedBlock.mdx @@ -0,0 +1,201 @@ +--- +sidebarLabel: getConfirmedBlock +title: getConfirmedBlock RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getConfirmedBlock +--- + +Returns identity and transaction information about a confirmed block in the +ledger + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getBlock](/docs/rpc/http/getBlock) instead. + + + + + + +### Parameters + + + slot number, as u64 integer + + + + +Configuration object containing the following fields: + + + + + level of transaction detail to return, either "full", "signatures", or "none" + + + + whether to populate the `rewards` array. + + + + +Encoding format for Account data + + + +
+ +- `jsonParsed` encoding attempts to use program-specific instruction parsers to + return more human-readable and explicit data in the + `transaction.message.instructions` list. +- If `jsonParsed` is requested but a parser cannot be found, the instruction + falls back to regular JSON encoding (`accounts`, `data`, and `programIdIndex` + fields). + +
+ +
+ +
+ +### Result + +The result field will be an object with the following fields: + +- `` - if specified block is not confirmed +- `` - if block is confirmed, an object with the following fields: + - `blockhash: ` - the blockhash of this block, as base-58 encoded + string + - `previousBlockhash: ` - the blockhash of this block's parent, as + base-58 encoded string; if the parent block is not available due to ledger + cleanup, this field will return "11111111111111111111111111111111" + - `parentSlot: ` - the slot index of this block's parent + - `transactions: ` - present if "full" transaction details are + requested; an array of JSON objects containing: + - `transaction: ` - + [Transaction](/docs/rpc/json-structures#transactions) object, either in + JSON format or encoded binary data, depending on encoding parameter + - `meta: ` - transaction status metadata object, containing `null` + or: + - `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) + - `fee: ` - fee this transaction was charged, as u64 integer + - `preBalances: ` - array of u64 account balances from before the + transaction was processed + - `postBalances: ` - array of u64 account balances after the + transaction was processed + - `innerInstructions: ` - List of + [inner instructions](/docs/rpc/json-structures#inner-instructions) or + `null` if inner instruction recording was not enabled during this + transaction + - `preTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from before + the transaction was processed or omitted if token balance recording was + not yet enabled during this transaction + - `postTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from after + the transaction was processed or omitted if token balance recording was + not yet enabled during this transaction + - `logMessages: ` - array of string log messages or `null` if + log message recording was not enabled during this transaction + - DEPRECATED: `status: ` - Transaction status + - `"Ok": ` - Transaction was successful + - `"Err": ` - Transaction failed with TransactionError + - `signatures: ` - present if "signatures" are requested for + transaction details; an array of signatures strings, corresponding to the + transaction order in the block + - `rewards: ` - present if rewards are requested; an array of JSON + objects containing: + - `pubkey: ` - The public key, as base-58 encoded string, of the + account that received the reward + - `lamports: `- number of reward lamports credited or debited by the + account, as a i64 + - `postBalance: ` - account balance in lamports after the reward was + applied + - `rewardType: ` - type of reward: "fee", "rent", + "voting", "staking" + - `commission: ` - vote account commission when the reward was + credited, only present for voting and staking rewards + - `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch). 
null if not available + +#### For more details on returned data: + +- [Transaction Structure](/docs/rpc/json-structures#transactions) +- [Inner Instructions Structure](/docs/rpc/json-structures#inner-instructions) +- [Token Balances Structure](/docs/rpc/json-structures#token-balances) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getConfirmedBlock", + "params": [430, "base64"] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "blockTime": null, + "blockhash": "3Eq21vXNB5s86c62bVuUfTeaMif1N2kUqRPBmGRJhyTA", + "parentSlot": 429, + "previousBlockhash": "mfcyqEXB3DnHXki6KjjmZck6YjmZLvpAByy2fj4nh6B", + "rewards": [], + "transactions": [ + { + "meta": { + "err": null, + "fee": 5000, + "innerInstructions": [], + "logMessages": [], + "postBalances": [499998932500, 26858640, 1, 1, 1], + "postTokenBalances": [], + "preBalances": [499998937500, 26858640, 1, 1, 1], + "preTokenBalances": [], + "status": { + "Ok": null + } + }, + "transaction": [ + "AVj7dxHlQ9IrvdYVIjuiRFs1jLaDMHixgrv+qtHBwz51L4/ImLZhszwiyEJDIp7xeBSpm/TX5B7mYzxa+fPOMw0BAAMFJMJVqLw+hJYheizSoYlLm53KzgT82cDVmazarqQKG2GQsLgiqktA+a+FDR4/7xnDX7rsusMwryYVUdixfz1B1Qan1RcZLwqvxvJl4/t3zHragsUp0L47E24tAFUgAAAABqfVFxjHdMkoVmOYaR1etoteuKObS21cc1VbIQAAAAAHYUgdNXR0u3xNdiTr072z2DVec9EQQ/wNo1OAAAAAAAtxOUhPBp2WSjUNJEgfvy70BbxI00fZyEPvFHNfxrtEAQQEAQIDADUCAAAAAQAAAAAAAACtAQAAAAAAAAdUE18R96XTJCe+YfRfUp6WP+YKCy/72ucOL8AoBFSpAA==", + "base64" + ] + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getConfirmedBlocks.mdx b/docs/rpc/deprecated/getConfirmedBlocks.mdx new file mode 100644 index 000000000..767eaea33 --- /dev/null +++ b/docs/rpc/deprecated/getConfirmedBlocks.mdx @@ -0,0 +1,64 @@ +--- +sidebarLabel: getConfirmedBlocks +title: getConfirmedBlocks RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getConfirmedBlocks +--- + +Returns a list of confirmed blocks between two slots + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getBlocks](/docs/rpc/http/getblocks) instead. + + + + + + +### Parameters + + + start_slot, as u64 integer + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result field will be an array of u64 integers listing confirmed blocks +between `start_slot` and either `end_slot` - if provided, or latest confirmed +block, inclusive. Max range allowed is 500,000 slots. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc": "2.0","id":1,"method":"getConfirmedBlocks","params":[5, 10]} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": [5, 6, 7, 8, 9, 10], "id": 1 } +``` + + + diff --git a/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx b/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx new file mode 100644 index 000000000..8a46687f8 --- /dev/null +++ b/docs/rpc/deprecated/getConfirmedBlocksWithLimit.mdx @@ -0,0 +1,71 @@ +--- +sidebarLabel: getConfirmedBlocksWithLimit +title: getConfirmedBlocksWithLimit RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getConfirmedBlocksWithLimit +--- + +Returns a list of confirmed blocks starting at the given slot + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getBlocksWithLimit](/docs/rpc/http/getblockswithlimit) instead. 
+ + + + + + +### Parameters + + + start_slot, as u64 integer + + + + limit, as u64 integer + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result field will be an array of u64 integers listing confirmed blocks +starting at `start_slot` for up to `limit` blocks, inclusive. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getConfirmedBlocksWithLimit", + "params": [5, 3] + } +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": [5, 6, 7], "id": 1 } +``` + + + diff --git a/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx b/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx new file mode 100644 index 000000000..4efde28dd --- /dev/null +++ b/docs/rpc/deprecated/getConfirmedSignaturesForAddress2.mdx @@ -0,0 +1,113 @@ +--- +sidebarLabel: getConfirmedSignaturesForAddress2 +title: getConfirmedSignaturesForAddress2 RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getConfirmedSignaturesForAddress2 +--- + +Returns signatures for confirmed transactions that include the given address in +their `accountKeys` list. Returns signatures backwards in time from the provided +signature or most recent confirmed block + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getSignaturesForAddress](/docs/rpc/http/getSignaturesForAddress) instead. + + + + + + +### Parameters + + + account address, as base-58 encoded string + + + +Configuration object containing the following fields: + + + + + maximum transaction signatures to return (between 1 and 1,000, default: + 1,000). + + + + start searching backwards from this transaction signature. (If not provided + the search starts from the top of the highest max confirmed block.) + + + + search until this transaction signature, if found before limit reached. + + + + +### Result + +The result field will be an array of ``, ordered from newest to oldest +transaction, containing transaction signature information with the following +fields: + +- `signature: ` - transaction signature as base-58 encoded string +- `slot: ` - The slot that contains the block with the transaction +- `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) +- `memo: ` - Memo associated with the transaction, null if no memo + is present +- `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch) of when transaction was processed. null if not + available. 
+ + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getConfirmedSignaturesForAddress2", + "params": [ + "Vote111111111111111111111111111111111111111", + { + "limit": 1 + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "err": null, + "memo": null, + "signature": "5h6xBEauJ3PK6SWCZ1PGjBvj8vDdWG3KpwATGy1ARAXFSDwt8GFXM7W5Ncn16wmqokgpiKRLuS83KUxyZyv2sUYv", + "slot": 114, + "blockTime": null + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getConfirmedTransaction.mdx b/docs/rpc/deprecated/getConfirmedTransaction.mdx new file mode 100644 index 000000000..c850bb375 --- /dev/null +++ b/docs/rpc/deprecated/getConfirmedTransaction.mdx @@ -0,0 +1,152 @@ +--- +sidebarLabel: getConfirmedTransaction +title: getConfirmedTransaction RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getConfirmedTransaction +--- + +Returns transaction details for a confirmed transaction + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getTransaction](/docs/rpc/http/getTransaction) instead. + + + + + + +### Parameters + + + transaction signature, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +Encoding format for Account data + + + +
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `jsonParsed` encoding attempts to use program-specific instruction parsers to + return more human-readable and explicit data in the + `transaction.message.instructions` list. +- If `jsonParsed` is requested but a parser cannot be found, the instruction + falls back to regular `json` encoding (`accounts`, `data`, and + `programIdIndex` fields). + +
+ +
+ +
+ +### Result + +- `` - if transaction is not found or not confirmed +- `` - if transaction is confirmed, an object with the following fields: + - `slot: ` - the slot this transaction was processed in + - `transaction: ` - + [Transaction](/docs/rpc/json-structures#transactions) object, either in JSON + format or encoded binary data, depending on encoding parameter + - `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch) of when the transaction was processed. null + if not available + - `meta: ` - transaction status metadata object: + - `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://docs.rs/solana-sdk/latest/solana_sdk/transaction/enum.TransactionError.html) + - `fee: ` - fee this transaction was charged, as u64 integer + - `preBalances: ` - array of u64 account balances from before the + transaction was processed + - `postBalances: ` - array of u64 account balances after the + transaction was processed + - `innerInstructions: ` - List of + [inner instructions](/docs/rpc/json-structures#inner-instructions) or + `null` if inner instruction recording was not enabled during this + transaction + - `preTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from before the + transaction was processed or omitted if token balance recording was not + yet enabled during this transaction + - `postTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from after the + transaction was processed or omitted if token balance recording was not + yet enabled during this transaction + - `logMessages: ` - array of string log messages or `null` if + log message recording was not enabled during this transaction + - DEPRECATED: `status: ` - Transaction status + - `"Ok": ` - Transaction was successful + - `"Err": ` - Transaction failed with TransactionError + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getConfirmedTransaction", + "params": [ + "2nBhEBYYvfaAe16UMNqRHre4YNSskvuYgx3M6E4JP1oDYvZEJHvoPzyUidNgNX5r9sTyN1J9UxtbCXy2rqYcuyuv", + "base64" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "meta": { + "err": null, + "fee": 5000, + "innerInstructions": [], + "postBalances": [499998932500, 26858640, 1, 1, 1], + "postTokenBalances": [], + "preBalances": [499998937500, 26858640, 1, 1, 1], + "preTokenBalances": [], + "status": { + "Ok": null + } + }, + "slot": 430, + "transaction": [ + "AVj7dxHlQ9IrvdYVIjuiRFs1jLaDMHixgrv+qtHBwz51L4/ImLZhszwiyEJDIp7xeBSpm/TX5B7mYzxa+fPOMw0BAAMFJMJVqLw+hJYheizSoYlLm53KzgT82cDVmazarqQKG2GQsLgiqktA+a+FDR4/7xnDX7rsusMwryYVUdixfz1B1Qan1RcZLwqvxvJl4/t3zHragsUp0L47E24tAFUgAAAABqfVFxjHdMkoVmOYaR1etoteuKObS21cc1VbIQAAAAAHYUgdNXR0u3xNdiTr072z2DVec9EQQ/wNo1OAAAAAAAtxOUhPBp2WSjUNJEgfvy70BbxI00fZyEPvFHNfxrtEAQQEAQIDADUCAAAAAQAAAAAAAACtAQAAAAAAAAdUE18R96XTJCe+YfRfUp6WP+YKCy/72ucOL8AoBFSpAA==", + "base64" + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx b/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx new file mode 100644 index 000000000..458690e67 --- /dev/null +++ b/docs/rpc/deprecated/getFeeCalculatorForBlockhash.mdx @@ -0,0 +1,93 @@ +--- +sidebarLabel: getFeeCalculatorForBlockhash +title: getFeeCalculatorForBlockhash RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getFeeCalculatorForBlockhash +--- + 
+Returns the fee calculator associated with the query blockhash, or `null` if the +blockhash has expired + + + This method is expected to be removed in `solana-core` v2.0. Please use + [isBlockhashValid](/docs/rpc/http/isBlockhashValid) or + [getFeeForMessage](/docs/rpc/http/getFeeForMessage) instead. + + + + + + +### Parameters + + + query blockhash, as a base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to: + +- `` - if the query blockhash has expired; or +- `` - otherwise, a JSON object containing: + - `feeCalculator: ` - `FeeCalculator` object describing the cluster + fee rate at the queried blockhash + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getFeeCalculatorForBlockhash", + "params": [ + "GJxqhuxcgfn5Tcj6y3f8X4FeCDd2RQ6SnEMo1AAxrPRZ" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 221 + }, + "value": { + "feeCalculator": { + "lamportsPerSignature": 5000 + } + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getFeeRateGovernor.mdx b/docs/rpc/deprecated/getFeeRateGovernor.mdx new file mode 100644 index 000000000..c7d64e35d --- /dev/null +++ b/docs/rpc/deprecated/getFeeRateGovernor.mdx @@ -0,0 +1,72 @@ +--- +sidebarLabel: getFeeRateGovernor +title: getFeeRateGovernor RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getFeeRateGovernor +--- + +Returns the fee rate governor information from the root bank + + + This method is expected to be removed in `solana-core` v2.0. + + + + + + +### Parameters + +**None** + +### Result + +The result will be an RpcResponse JSON object with `value` equal to an `object` +with the following fields: + +- `burnPercent: ` - Percentage of fees collected to be destroyed +- `maxLamportsPerSignature: ` - Largest value `lamportsPerSignature` can + attain for the next slot +- `minLamportsPerSignature: ` - Smallest value `lamportsPerSignature` can + attain for the next slot +- `targetLamportsPerSignature: ` - Desired fee rate for the cluster +- `targetSignaturesPerSlot: ` - Desired signature rate for the cluster + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getFeeRateGovernor"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 54 + }, + "value": { + "feeRateGovernor": { + "burnPercent": 50, + "maxLamportsPerSignature": 100000, + "minLamportsPerSignature": 5000, + "targetLamportsPerSignature": 10000, + "targetSignaturesPerSlot": 20000 + } + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getFees.mdx b/docs/rpc/deprecated/getFees.mdx new file mode 100644 index 000000000..dd673e28e --- /dev/null +++ b/docs/rpc/deprecated/getFees.mdx @@ -0,0 +1,91 @@ +--- +sidebarLabel: getFees +title: getFees RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getFees + - /docs/rpc/http/getFees +--- + +Returns a recent block hash from the ledger, a fee schedule that can be used to +compute the cost of submitting a transaction using it, and the last slot in +which the blockhash will be valid. + + + This method is expected to be removed in `solana-core` v2.0. 
Please use + [getFeeForMessage](/docs/rpc/http/getFeeForMessage) instead. + + + + + + +### Parameters + + + Pubkey of account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` set to a JSON object +with the following fields: + +- `blockhash: ` - a Hash as base-58 encoded string +- `feeCalculator: ` - FeeCalculator object, the fee schedule for this + block hash +- `lastValidSlot: ` - DEPRECATED - this value is inaccurate and should not + be relied upon +- `lastValidBlockHeight: ` - last + [block height](/docs/terminology.md#block-height) at which the blockhash will + be valid + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { "jsonrpc":"2.0", "id": 1, "method":"getFees"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1 + }, + "value": { + "blockhash": "CSymwgTNX1j3E4qhKfJAUE41nBWEwXufoYryPbkde5RR", + "feeCalculator": { + "lamportsPerSignature": 5000 + }, + "lastValidSlot": 297, + "lastValidBlockHeight": 296 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getRecentBlockhash.mdx b/docs/rpc/deprecated/getRecentBlockhash.mdx new file mode 100644 index 000000000..987b67c79 --- /dev/null +++ b/docs/rpc/deprecated/getRecentBlockhash.mdx @@ -0,0 +1,84 @@ +--- +sidebarLabel: getRecentBlockhash +title: getRecentBlockhash RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getRecentBlockhash +--- + +Returns a recent block hash from the ledger, and a fee schedule that can be used +to compute the cost of submitting a transaction using it. + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getLatestBlockhash](/docs/rpc/http/getlatestblockhash) instead. + + + + + + +### Parameters + + + Pubkey of account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +### Result + +An RpcResponse containing a JSON object consisting of a string blockhash and +FeeCalculator JSON object. + +- `RpcResponse` - RpcResponse JSON object with `value` field set to a + JSON object including: +- `blockhash: ` - a Hash as base-58 encoded string +- `feeCalculator: ` - FeeCalculator object, the fee schedule for this + block hash + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getRecentBlockhash"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1 + }, + "value": { + "blockhash": "CSymwgTNX1j3E4qhKfJAUE41nBWEwXufoYryPbkde5RR", + "feeCalculator": { + "lamportsPerSignature": 5000 + } + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/deprecated/getSnapshotSlot.mdx b/docs/rpc/deprecated/getSnapshotSlot.mdx new file mode 100644 index 000000000..a6eef8370 --- /dev/null +++ b/docs/rpc/deprecated/getSnapshotSlot.mdx @@ -0,0 +1,57 @@ +--- +sidebarLabel: getSnapshotSlot +title: getSnapshotSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSnapshotSlot +--- + +Returns the highest slot that the node has a snapshot for + + + This method is expected to be removed in `solana-core` v2.0. Please use + [getHighestSnapshotSlot](/docs/rpc/http/getHighestSnapshotSlot) instead. 
+ + + + + + +### Parameters + +**None** + +### Result + +`` - Snapshot slot + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getSnapshotSlot"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 100, "id": 1 } +``` + +Result when the node has no snapshot: + +```json +{ + "jsonrpc": "2.0", + "error": { "code": -32008, "message": "No snapshot" }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getAccountInfo.mdx b/docs/rpc/http/getAccountInfo.mdx new file mode 100644 index 000000000..8d85a7782 --- /dev/null +++ b/docs/rpc/http/getAccountInfo.mdx @@ -0,0 +1,143 @@ +--- +sidebarLabel: getAccountInfo +title: getAccountInfo RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getAccountInfo +--- + +Returns all information associated with the account of provided Pubkey + + + + + +### Parameters + + + Pubkey of account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +Encoding format for Account data + + + +
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `base64` will return base64 encoded data for Account data of any size. +- `base64+zstd` compresses the Account data using + [Zstandard](https://facebook.github.io/zstd/) and base64-encodes the result. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type `string`. + +
+ +
+ + + Request a slice of the account's data. + +- `length: ` - number of bytes to return +- `offset: ` - byte offset from which to start reading + + + Data slicing is only available for `base58`, `base64`, or `base64+zstd` + encodings. + + + + + + The minimum slot that the request can be evaluated at + + +
+ +### Result + +The result will be an RpcResponse JSON object with `value` equal to: + +- `` - if the requested account doesn't exist +- `` - otherwise, a JSON object containing: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: <[string, encoding]|object>` - data associated with the account, + either as encoded binary data or JSON format `{: }` - + depending on encoding parameter + - `executable: ` - boolean indicating if the account contains a program + \(and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, as + u64 + - `size: ` - the data size of the account + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getAccountInfo", + "params": [ + "vines1vzrYbzLMRdu58ou5XTby4qAqVRLmqo36NKPTg", + { + "encoding": "base58" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1 + }, + "value": { + "data": [ + "11116bv5nS2h3y12kD1yUKeMZvGcKLSjQgX6BeV7u1FrjeJcKfsHRTPuR3oZ1EioKtYGiYxpxMG5vpbZLsbcBYBEmZZcMKaSoGx9JZeAuWf", + "base58" + ], + "executable": false, + "lamports": 1000000000, + "owner": "11111111111111111111111111111111", + "rentEpoch": 2, + "space": 80 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBalance.mdx b/docs/rpc/http/getBalance.mdx new file mode 100644 index 000000000..dc56f7edb --- /dev/null +++ b/docs/rpc/http/getBalance.mdx @@ -0,0 +1,73 @@ +--- +sidebarLabel: getBalance +title: getBalance RPC Method +sidebarSortOrder: 10 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBalance +--- + +Returns the lamport balance of the account of provided Pubkey + + + + + +### Parameters + + + Pubkey of account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`RpcResponse` - RpcResponse JSON object with `value` field set to the +balance + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getBalance", + "params": [ + "83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { "context": { "slot": 1 }, "value": 0 }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlock.mdx b/docs/rpc/http/getBlock.mdx new file mode 100644 index 000000000..6a44a6a2b --- /dev/null +++ b/docs/rpc/http/getBlock.mdx @@ -0,0 +1,287 @@ +--- +sidebarLabel: getBlock +title: getBlock RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlock +--- + +Returns identity and transaction information about a confirmed block in the +ledger + + + + + +### Parameters + + + slot number, as `u64` integer + + + + +Configuration object containing the following fields: + + + +- `processed` is not supported. + + + + + +encoding format for each returned Transaction + + + +
+ +- `jsonParsed` attempts to use program-specific instruction parsers to return + more human-readable and explicit data in the + `transaction.message.instructions` list. +- If `jsonParsed` is requested but a parser cannot be found, the instruction + falls back to regular JSON encoding (`accounts`, `data`, and `programIdIndex` + fields). + +
+ +
+ + + +level of transaction detail to return + + + +
+ +- If `accounts` are requested, transaction details only include signatures and + an annotated list of accounts in each transaction. +- Transaction metadata is limited to only: fee, err, pre_balances, + post_balances, pre_token_balances, and post_token_balances. + +
+ +
+ + + +the max transaction version to return in responses. + +
+ +- If the requested block contains a transaction with a higher version, an error + will be returned. +- If this parameter is omitted, only legacy transactions will be returned, and a + block containing any versioned transaction will prompt the error. + +
+ +
+ + + whether to populate the `rewards` array. If parameter not provided, the + default includes rewards. + + +
+ +### Result + +The result field will be an object with the following fields: + +- `` - if specified block is not confirmed +- `` - if block is confirmed, an object with the following fields: + - `blockhash: ` - the blockhash of this block, as base-58 encoded + string + - `previousBlockhash: ` - the blockhash of this block's parent, as + base-58 encoded string; if the parent block is not available due to ledger + cleanup, this field will return "11111111111111111111111111111111" + - `parentSlot: ` - the slot index of this block's parent + - `transactions: ` - present if "full" transaction details are + requested; an array of JSON objects containing: + - `transaction: ` - + [Transaction](/docs/rpc/json-structures#transactions) object, either in + JSON format or encoded binary data, depending on encoding parameter + - `meta: ` - transaction status metadata object, containing `null` + or: + - `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) + - `fee: ` - fee this transaction was charged, as u64 integer + - `preBalances: ` - array of u64 account balances from before the + transaction was processed + - `postBalances: ` - array of u64 account balances after the + transaction was processed + - `innerInstructions: ` - List of + [inner instructions](/docs/rpc/json-structures#inner-instructions) or + `null` if inner instruction recording was not enabled during this + transaction + - `preTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from before + the transaction was processed or omitted if token balance recording was + not yet enabled during this transaction + - `postTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from after + the transaction was processed or omitted if token balance recording was + not yet enabled during this transaction + - `logMessages: ` - array of string log messages or `null` if + log message recording was not enabled during this transaction + - `rewards: ` - transaction-level rewards, populated if + rewards are requested; an array of JSON objects containing: + - `pubkey: ` - The public key, as base-58 encoded string, of the + account that received the reward + - `lamports: `- number of reward lamports credited or debited by + the account, as a i64 + - `postBalance: ` - account balance in lamports after the reward + was applied + - `rewardType: ` - type of reward: "fee", "rent", + "voting", "staking" + - `commission: ` - vote account commission when the reward + was credited, only present for voting and staking rewards + - DEPRECATED: `status: ` - Transaction status + - `"Ok": ` - Transaction was successful + - `"Err": ` - Transaction failed with TransactionError + - `loadedAddresses: ` - Transaction addresses loaded + from address lookup tables. Undefined if + `maxSupportedTransactionVersion` is not set in request params, or if + `jsonParsed` encoding is set in request params. 
+ - `writable: ` - Ordered list of base-58 encoded + addresses for writable loaded accounts + - `readonly: ` - Ordered list of base-58 encoded + addresses for readonly loaded accounts + - `returnData: ` - the most-recent return data generated + by an instruction in the transaction, with the following fields: + - `programId: ` - the program that generated the return data, as + base-58 encoded Pubkey + - `data: <[string, encoding]>` - the return data itself, as base-64 + encoded binary data + - `computeUnitsConsumed: ` - number of + [compute units](/docs/core/runtime.md#compute-budget) consumed by the + transaction + - `version: <"legacy"|number|undefined>` - Transaction version. Undefined if + `maxSupportedTransactionVersion` is not set in request params. + - `signatures: ` - present if "signatures" are requested for + transaction details; an array of signatures strings, corresponding to the + transaction order in the block + - `rewards: ` - block-level rewards, present if rewards are + requested; an array of JSON objects containing: + - `pubkey: ` - The public key, as base-58 encoded string, of the + account that received the reward + - `lamports: `- number of reward lamports credited or debited by the + account, as a i64 + - `postBalance: ` - account balance in lamports after the reward was + applied + - `rewardType: ` - type of reward: "fee", "rent", + "voting", "staking" + - `commission: ` - vote account commission when the reward was + credited, only present for voting and staking rewards + - `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch). null if not available + - `blockHeight: ` - the number of blocks beneath this block + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0","id":1, + "method":"getBlock", + "params": [ + 430, + { + "encoding": "json", + "maxSupportedTransactionVersion":0, + "transactionDetails":"full", + "rewards":false + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "blockHeight": 428, + "blockTime": null, + "blockhash": "3Eq21vXNB5s86c62bVuUfTeaMif1N2kUqRPBmGRJhyTA", + "parentSlot": 429, + "previousBlockhash": "mfcyqEXB3DnHXki6KjjmZck6YjmZLvpAByy2fj4nh6B", + "transactions": [ + { + "meta": { + "err": null, + "fee": 5000, + "innerInstructions": [], + "logMessages": [], + "postBalances": [499998932500, 26858640, 1, 1, 1], + "postTokenBalances": [], + "preBalances": [499998937500, 26858640, 1, 1, 1], + "preTokenBalances": [], + "rewards": null, + "status": { + "Ok": null + } + }, + "transaction": { + "message": { + "accountKeys": [ + "3UVYmECPPMZSCqWKfENfuoTv51fTDTWicX9xmBD2euKe", + "AjozzgE83A3x1sHNUR64hfH7zaEBWeMaFuAN9kQgujrc", + "SysvarS1otHashes111111111111111111111111111", + "SysvarC1ock11111111111111111111111111111111", + "Vote111111111111111111111111111111111111111" + ], + "header": { + "numReadonlySignedAccounts": 0, + "numReadonlyUnsignedAccounts": 3, + "numRequiredSignatures": 1 + }, + "instructions": [ + { + "accounts": [1, 2, 3, 0], + "data": "37u9WtQpcm6ULa3WRQHmj49EPs4if7o9f1jSRVZpm2dvihR9C8jY4NqEwXUbLwx15HBSNcP1", + "programIdIndex": 4 + } + ], + "recentBlockhash": "mfcyqEXB3DnHXki6KjjmZck6YjmZLvpAByy2fj4nh6B" + }, + "signatures": [ + "2nBhEBYYvfaAe16UMNqRHre4YNSskvuYgx3M6E4JP1oDYvZEJHvoPzyUidNgNX5r9sTyN1J9UxtbCXy2rqYcuyuv" + ] + } + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlockCommitment.mdx b/docs/rpc/http/getBlockCommitment.mdx new file mode 100644 index 
000000000..3e4789f78 --- /dev/null +++ b/docs/rpc/http/getBlockCommitment.mdx @@ -0,0 +1,65 @@ +--- +sidebarLabel: getBlockCommitment +title: getBlockCommitment RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlockCommitment +--- + +Returns commitment for particular block + + + + + +### Parameters + + + block number, identified by Slot + + +### Result + +The result field will be a JSON object containing: + +- `commitment` - commitment, comprising either: + - `` - Unknown block + - `` - commitment, array of u64 integers logging the amount of cluster + stake in lamports that has voted on the block at each depth from 0 to + `MAX_LOCKOUT_HISTORY` + 1 +- `totalStake` - total active stake, in lamports, of the current epoch + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getBlockCommitment", + "params":[5] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "commitment": [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 10, 32 + ], + "totalStake": 42 + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlockHeight.mdx b/docs/rpc/http/getBlockHeight.mdx new file mode 100644 index 000000000..c9d3270a4 --- /dev/null +++ b/docs/rpc/http/getBlockHeight.mdx @@ -0,0 +1,66 @@ +--- +sidebarLabel: getBlockHeight +title: getBlockHeight RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlockHeight +--- + +Returns the current block height of the node + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +- `` - Current block height + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0","id":1, + "method":"getBlockHeight" + } +' +``` + +Result: + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": 1233, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlockProduction.mdx b/docs/rpc/http/getBlockProduction.mdx new file mode 100644 index 000000000..eb51b4285 --- /dev/null +++ b/docs/rpc/http/getBlockProduction.mdx @@ -0,0 +1,94 @@ +--- +sidebarLabel: getBlockProduction +title: getBlockProduction RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlockProduction +--- + +Returns recent block production information from the current or previous epoch. + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + Only return results for this validator identity (base-58 encoded) + + + +Slot range to return block production for. If parameter not provided, defaults to current epoch. + +- `firstSlot: ` - first slot to return block production information for + (inclusive) +- (optional) `lastSlot: ` - last slot to return block production + information for (inclusive). If parameter not provided, defaults to the + highest slot + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to: + +- `` + - `byIdentity: ` - a dictionary of validator identities, as base-58 + encoded strings. Value is a two element array containing the number of + leader slots and the number of blocks produced. 
+ - `range: ` - Block production slot range + - `firstSlot: ` - first slot of the block production information + (inclusive) + - `lastSlot: ` - last slot of block production information (inclusive) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getBlockProduction"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 9887 + }, + "value": { + "byIdentity": { + "85iYT5RuzRTDgjyRa3cP8SYhM2j21fj7NhfJ3peu1DPr": [9888, 9886] + }, + "range": { + "firstSlot": 0, + "lastSlot": 9887 + } + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlockTime.mdx b/docs/rpc/http/getBlockTime.mdx new file mode 100644 index 000000000..b92a32bd9 --- /dev/null +++ b/docs/rpc/http/getBlockTime.mdx @@ -0,0 +1,75 @@ +--- +sidebarLabel: getBlockTime +title: getBlockTime RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlockTime +--- + +Returns the estimated production time of a block. + + + Each validator reports their UTC time to the ledger on a regular interval by + intermittently adding a timestamp to a Vote for a particular block. A + requested block's time is calculated from the stake-weighted mean of the Vote + timestamps in a set of recent blocks recorded on the ledger. + + + + + + +### Parameters + + + block number, identified by Slot + + +### Result + +- `` - estimated production time, as Unix timestamp (seconds since the Unix + epoch) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0", "id":1, + "method": "getBlockTime", + "params":[5] + } +' +``` + +### Response + +When a block time is available: + +```json +{ + "jsonrpc": "2.0", + "result": 1574721591, + "id": 1 +} +``` + +When a block time is not available: + +```json +{ + "jsonrpc": "2.0", + "error": { + "code": -32004, + "message": "Block not available for slot 150" + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlocks.mdx b/docs/rpc/http/getBlocks.mdx new file mode 100644 index 000000000..225785bb8 --- /dev/null +++ b/docs/rpc/http/getBlocks.mdx @@ -0,0 +1,79 @@ +--- +sidebarLabel: getBlocks +title: getBlocks RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlocks +--- + +Returns a list of confirmed blocks between two slots + + + + + +### Parameters + + + start_slot, as `u64` integer + + + + end_slot, as `u64` integer (must be no more than 500,000 blocks higher than + the `start_slot`) + + + + +Configuration object containing the following fields: + + + +- "processed" is not supported + + + + + +### Result + +The result field will be an array of u64 integers listing confirmed blocks +between `start_slot` and either `end_slot` - if provided, or latest confirmed +block, inclusive. Max range allowed is 500,000 slots. 
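As a client-side illustration of the same range query, the sketch below assumes `@solana/web3.js` and a node at `http://localhost:8899`; the slot bounds match the curl sample that follows.

```typescript
import { Connection } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  // Confirmed blocks between slots 5 and 10, both bounds inclusive.
  const blocks = await connection.getBlocks(5, 10);
  console.log(blocks); // e.g. [5, 6, 7, 8, 9, 10]
})();
```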
+ + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getBlocks", + "params": [ + 5, 10 + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [5, 6, 7, 8, 9, 10], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getBlocksWithLimit.mdx b/docs/rpc/http/getBlocksWithLimit.mdx new file mode 100644 index 000000000..4a5666e81 --- /dev/null +++ b/docs/rpc/http/getBlocksWithLimit.mdx @@ -0,0 +1,77 @@ +--- +sidebarLabel: getBlocksWithLimit +title: getBlocksWithLimit RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getBlocksWithLimit +--- + +Returns a list of confirmed blocks starting at the given slot + + + + + +### Parameters + + + start_slot, as `u64` integer + + + + limit, as `u64` integer (must be no more than 500,000 blocks higher than the + `start_slot`) + + + + +Configuration object containing the following field: + + + +- "processed" is not supported + + + + + +### Result + +The result field will be an array of u64 integers listing confirmed blocks +starting at `start_slot` for up to `limit` blocks, inclusive. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id":1, + "method":"getBlocksWithLimit", + "params":[5, 3] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [5, 6, 7], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getClusterNodes.mdx b/docs/rpc/http/getClusterNodes.mdx new file mode 100644 index 000000000..38b15726f --- /dev/null +++ b/docs/rpc/http/getClusterNodes.mdx @@ -0,0 +1,69 @@ +--- +sidebarLabel: getClusterNodes +title: getClusterNodes RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getClusterNodes +--- + +Returns information about all the nodes participating in the cluster + + + + + +### Parameters + +**None** + +### Result + +The result field will be an array of JSON objects, each with the following sub +fields: + +- `pubkey: ` - Node public key, as base-58 encoded string +- `gossip: ` - Gossip network address for the node +- `tpu: ` - TPU network address for the node +- `rpc: ` - JSON RPC network address for the node, or `null` if the + JSON RPC service is not enabled +- `version: ` - The software version of the node, or `null` if the + version information is not available +- `featureSet: ` - The unique identifier of the node's feature set +- `shredVersion: ` - The shred version the node has been configured to + use + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getClusterNodes" + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "gossip": "10.239.6.48:8001", + "pubkey": "9QzsJf7LPLj8GkXbYT3LFDKqsj2hHG7TA3xinJHu8epQ", + "rpc": "10.239.6.48:8899", + "tpu": "10.239.6.48:8856", + "version": "1.0.0 c375ce1f" + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getEpochInfo.mdx b/docs/rpc/http/getEpochInfo.mdx new file mode 100644 index 000000000..4d46885e3 --- /dev/null +++ b/docs/rpc/http/getEpochInfo.mdx @@ -0,0 +1,76 @@ +--- +sidebarLabel: getEpochInfo +title: getEpochInfo RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getEpochInfo +--- + +Returns information about the current epoch + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated 
at + + + + +### Result + +The result field will be an object with the following fields: + +- `absoluteSlot: ` - the current slot +- `blockHeight: ` - the current block height +- `epoch: ` - the current epoch +- `slotIndex: ` - the current slot relative to the start of the current + epoch +- `slotsInEpoch: ` - the number of slots in this epoch +- `transactionCount: ` - total number of transactions processed + without error since genesis + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getEpochInfo"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "absoluteSlot": 166598, + "blockHeight": 166500, + "epoch": 27, + "slotIndex": 2790, + "slotsInEpoch": 8192, + "transactionCount": 22661093 + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getEpochSchedule.mdx b/docs/rpc/http/getEpochSchedule.mdx new file mode 100644 index 000000000..aac0b42c3 --- /dev/null +++ b/docs/rpc/http/getEpochSchedule.mdx @@ -0,0 +1,64 @@ +--- +sidebarLabel: getEpochSchedule +title: getEpochSchedule RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getEpochSchedule +--- + +Returns the epoch schedule information from this cluster's genesis config + + + + + +### Parameters + +**None** + +### Result + +The result field will be an object with the following fields: + +- `slotsPerEpoch: ` - the maximum number of slots in each epoch +- `leaderScheduleSlotOffset: ` - the number of slots before beginning of an + epoch to calculate a leader schedule for that epoch +- `warmup: ` - whether epochs start short and grow +- `firstNormalEpoch: ` - first normal-length epoch, log2(slotsPerEpoch) - + log2(MINIMUM_SLOTS_PER_EPOCH) +- `firstNormalSlot: ` - MINIMUM_SLOTS_PER_EPOCH \* + (2.pow(firstNormalEpoch) - 1) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0","id":1, + "method":"getEpochSchedule" + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "firstNormalEpoch": 8, + "firstNormalSlot": 8160, + "leaderScheduleSlotOffset": 8192, + "slotsPerEpoch": 8192, + "warmup": true + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getFeeForMessage.mdx b/docs/rpc/http/getFeeForMessage.mdx new file mode 100644 index 000000000..76df0a96f --- /dev/null +++ b/docs/rpc/http/getFeeForMessage.mdx @@ -0,0 +1,80 @@ +--- +sidebarLabel: getFeeForMessage +title: getFeeForMessage RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getFeeForMessage +--- + +Get the fee the network will charge for a particular Message + + + This method is only available in `solana-core` v1.9 or newer. Please use + [getFees](/docs/rpc/deprecated/getFees.mdx) for `solana-core` v1.8 and below. 
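For a client-side view of this method, the sketch below compiles a simple transfer message and asks the cluster what it would cost. It assumes a recent `@solana/web3.js`, a node at `http://localhost:8899`, and throwaway keypairs generated purely for illustration.

```typescript
import {
  Connection,
  Keypair,
  SystemProgram,
  Transaction,
} from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  // Throwaway keys, only used to build an example message.
  const payer = Keypair.generate();
  const recipient = Keypair.generate();

  const tx = new Transaction().add(
    SystemProgram.transfer({
      fromPubkey: payer.publicKey,
      toPubkey: recipient.publicKey,
      lamports: 1_000,
    }),
  );
  tx.feePayer = payer.publicKey;
  tx.recentBlockhash = (await connection.getLatestBlockhash()).blockhash;

  // Ask the cluster for the fee of this message, in lamports.
  const { value: fee } = await connection.getFeeForMessage(tx.compileMessage());
  console.log("fee (lamports):", fee);
})();
```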
+ + + + + + +### Parameters + + + Base-64 encoded Message + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +- `` - Fee corresponding to the message at the specified blockhash + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' +{ + "id":1, + "jsonrpc":"2.0", + "method":"getFeeForMessage", + "params":[ + "AQABAgIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQAA", + { + "commitment":"processed" + } + ] +} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { "context": { "slot": 5068 }, "value": 5000 }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getFirstAvailableBlock.mdx b/docs/rpc/http/getFirstAvailableBlock.mdx new file mode 100644 index 000000000..0bd896171 --- /dev/null +++ b/docs/rpc/http/getFirstAvailableBlock.mdx @@ -0,0 +1,45 @@ +--- +sidebarLabel: getFirstAvailableBlock +title: getFirstAvailableBlock RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getFirstAvailableBlock +--- + +Returns the slot of the lowest confirmed block that has not been purged from the +ledger + + + + + +### Parameters + +**None** + +### Result + +- `` - Slot + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0","id":1, + "method":"getFirstAvailableBlock" + } +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 250000, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getGenesisHash.mdx b/docs/rpc/http/getGenesisHash.mdx new file mode 100644 index 000000000..054f105ad --- /dev/null +++ b/docs/rpc/http/getGenesisHash.mdx @@ -0,0 +1,45 @@ +--- +sidebarLabel: getGenesisHash +title: getGenesisHash RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getGenesisHash +--- + +Returns the genesis hash + + + + + +### Parameters + +**None** + +### Result + +- `` - a Hash as base-58 encoded string + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getGenesisHash"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": "GH7ome3EiwEr7tu9JuTh2dpYWBJK3z69Xm1ZE3MEE6JC", + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getHealth.mdx b/docs/rpc/http/getHealth.mdx new file mode 100644 index 000000000..cc6dbcec5 --- /dev/null +++ b/docs/rpc/http/getHealth.mdx @@ -0,0 +1,78 @@ +--- +sidebarLabel: getHealth +title: getHealth RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getHealth +--- + +Returns the current health of the node. A healthy node is one that is within +`HEALTH_CHECK_SLOT_DISTANCE` slots of the latest cluster confirmed slot. + + + + + +### Parameters + +**None** + +### Result + +If the node is healthy: "ok" + +If the node is unhealthy, a JSON RPC error response is returned. 
The specifics +of the error response are **UNSTABLE** and may change in the future + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getHealth"} +' +``` + +### Response + +Healthy Result: + +```json +{ "jsonrpc": "2.0", "result": "ok", "id": 1 } +``` + +Unhealthy Result (generic): + +```json +{ + "jsonrpc": "2.0", + "error": { + "code": -32005, + "message": "Node is unhealthy", + "data": {} + }, + "id": 1 +} +``` + +Unhealthy Result (if additional information is available) + +```json +{ + "jsonrpc": "2.0", + "error": { + "code": -32005, + "message": "Node is behind by 42 slots", + "data": { + "numSlotsBehind": 42 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getHighestSnapshotSlot.mdx b/docs/rpc/http/getHighestSnapshotSlot.mdx new file mode 100644 index 000000000..75f9a0f4a --- /dev/null +++ b/docs/rpc/http/getHighestSnapshotSlot.mdx @@ -0,0 +1,75 @@ +--- +sidebarLabel: getHighestSnapshotSlot +title: getHighestSnapshotSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getHighestSnapshotSlot +--- + +Returns the highest slot information that the node has snapshots for. + +This will find the highest full snapshot slot, and the highest incremental +snapshot slot _based on_ the full snapshot slot, if there is one. + + + This method is only available in `solana-core` v1.9 or newer. Please use + [getSnapshotSlot](/docs/rpc/http/getSnapshotSlot) for `solana-core` v1.8 and + below. + + + + + + +### Parameters + +**None** + +### Result + +When the node has a snapshot, this returns a JSON object with the following +fields: + +- `full: ` - Highest full snapshot slot +- `incremental: ` - Highest incremental snapshot slot _based on_ + `full` + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1,"method":"getHighestSnapshotSlot"} +' +``` + +### Response + +Result when the node has a snapshot: + +```json +{ + "jsonrpc": "2.0", + "result": { + "full": 100, + "incremental": 110 + }, + "id": 1 +} +``` + +Result when the node has no snapshot: + +```json +{ + "jsonrpc": "2.0", + "error": { "code": -32008, "message": "No snapshot" }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getIdentity.mdx b/docs/rpc/http/getIdentity.mdx new file mode 100644 index 000000000..969ddaafb --- /dev/null +++ b/docs/rpc/http/getIdentity.mdx @@ -0,0 +1,50 @@ +--- +sidebarLabel: getIdentity +title: getIdentity RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getIdentity +--- + +Returns the identity pubkey for the current node + + + + + +### Parameters + +**None** + +### Result + +The result field will be a JSON object with the following fields: + +- `identity` - the identity pubkey of the current node \(as a base-58 encoded + string\) + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getIdentity"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "identity": "2r1F4iWqVcb8M1DbAjQuFpebkQHY9hcVU4WuW2DJBppN" + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getInflationGovernor.mdx b/docs/rpc/http/getInflationGovernor.mdx new file mode 100644 index 000000000..b2e64d238 --- /dev/null +++ b/docs/rpc/http/getInflationGovernor.mdx @@ -0,0 +1,71 @@ +--- +sidebarLabel: getInflationGovernor +title: getInflationGovernor RPC Method +hideTableOfContents: true +altRoutes: + - 
/docs/rpc/getInflationGovernor +--- + +Returns the current inflation governor + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + +### Result + +The result field will be a JSON object with the following fields: + +- `initial: ` - the initial inflation percentage from time 0 +- `terminal: ` - terminal inflation percentage +- `taper: ` - rate per year at which inflation is lowered. (Rate reduction + is derived using the target slot time in genesis config) +- `foundation: ` - percentage of total inflation allocated to the + foundation +- `foundationTerm: ` - duration of foundation pool inflation in years + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getInflationGovernor"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "foundation": 0.05, + "foundationTerm": 7, + "initial": 0.15, + "taper": 0.15, + "terminal": 0.015 + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getInflationRate.mdx b/docs/rpc/http/getInflationRate.mdx new file mode 100644 index 000000000..312e44aaf --- /dev/null +++ b/docs/rpc/http/getInflationRate.mdx @@ -0,0 +1,56 @@ +--- +sidebarLabel: getInflationRate +title: getInflationRate RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getInflationRate +--- + +Returns the specific inflation values for the current epoch + + + + + +### Parameters + +**None** + +### Result + +The result field will be a JSON object with the following fields: + +- `total: ` - total inflation +- `validator: ` -inflation allocated to validators +- `foundation: ` - inflation allocated to the foundation +- `epoch: ` - epoch for which these values are valid + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getInflationRate"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "epoch": 100, + "foundation": 0.001, + "total": 0.149, + "validator": 0.148 + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getInflationReward.mdx b/docs/rpc/http/getInflationReward.mdx new file mode 100644 index 000000000..3db6d9d1a --- /dev/null +++ b/docs/rpc/http/getInflationReward.mdx @@ -0,0 +1,96 @@ +--- +sidebarLabel: getInflationReward +title: getInflationReward RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getInflationReward +--- + +Returns the inflation / staking reward for a list of addresses for an epoch + + + + + +### Parameters + + + An array of addresses to query, as base-58 encoded strings + + + + +Configuration object containing the following fields: + + + + + An epoch for which the reward occurs. 
If omitted, the previous epoch will be + used + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +The result field will be a JSON array with the following fields: + +- `epoch: ` - epoch for which reward occured +- `effectiveSlot: ` - the slot in which the rewards are effective +- `amount: ` - reward amount in lamports +- `postBalance: ` - post balance of the account in lamports +- `commission: ` - vote account commission when the reward was + credited + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getInflationReward", + "params": [ + [ + "6dmNQ5jwLeLk5REvio1JcMshcbvkYMwy26sJ8pbkvStu", + "BGsqMegLpV6n6Ve146sSX2dTjUMj3M92HnU8BbNRMhF2" + ], + {"epoch": 2} + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "amount": 2500, + "effectiveSlot": 224, + "epoch": 2, + "postBalance": 499999442500 + }, + null + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getLargestAccounts.mdx b/docs/rpc/http/getLargestAccounts.mdx new file mode 100644 index 000000000..27f08f4f3 --- /dev/null +++ b/docs/rpc/http/getLargestAccounts.mdx @@ -0,0 +1,146 @@ +--- +sidebarLabel: getLargestAccounts +title: getLargestAccounts RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getLargestAccounts +--- + +Returns the 20 largest accounts, by lamport balance (results may be cached up to +two hours) + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + filter results by account type + + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to an array of +`` containing: + +- `address: ` - base-58 encoded address of the account +- `lamports: ` - number of lamports in the account, as a u64 + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getLargestAccounts"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 54 + }, + "value": [ + { + "lamports": 999974, + "address": "99P8ZgtJYe1buSK8JXkvpLh8xPsCFuLYhz9hQFNw93WJ" + }, + { + "lamports": 42, + "address": "uPwWLo16MVehpyWqsLkK3Ka8nLowWvAHbBChqv2FZeL" + }, + { + "lamports": 42, + "address": "aYJCgU7REfu3XF8b3QhkqgqQvLizx8zxuLBHA25PzDS" + }, + { + "lamports": 42, + "address": "CTvHVtQ4gd4gUcw3bdVgZJJqApXE9nCbbbP4VTS5wE1D" + }, + { + "lamports": 20, + "address": "4fq3xJ6kfrh9RkJQsmVd5gNMvJbuSHfErywvEjNQDPxu" + }, + { + "lamports": 4, + "address": "AXJADheGVp9cruP8WYu46oNkRbeASngN5fPCMVGQqNHa" + }, + { + "lamports": 2, + "address": "8NT8yS6LiwNprgW4yM1jPPow7CwRUotddBVkrkWgYp24" + }, + { + "lamports": 1, + "address": "SysvarEpochSchedu1e111111111111111111111111" + }, + { + "lamports": 1, + "address": "11111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "Stake11111111111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "SysvarC1ock11111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "StakeConfig11111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "SysvarRent111111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "Config1111111111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "SysvarStakeHistory1111111111111111111111111" + }, + { + "lamports": 1, + "address": "SysvarRecentB1ockHashes11111111111111111111" + }, + { + "lamports": 1, + "address": 
"SysvarFees111111111111111111111111111111111" + }, + { + "lamports": 1, + "address": "Vote111111111111111111111111111111111111111" + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getLatestBlockhash.mdx b/docs/rpc/http/getLatestBlockhash.mdx new file mode 100644 index 000000000..de5adffae --- /dev/null +++ b/docs/rpc/http/getLatestBlockhash.mdx @@ -0,0 +1,91 @@ +--- +sidebarLabel: getLatestBlockhash +title: getLatestBlockhash RPC Method +sidebarSortOrder: 0 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getLatestBlockhash +--- + +Returns the latest blockhash + + + This method is only available in `solana-core` v1.9 or newer. Please use + [getRecentBlockhash](/docs/rpc/http/getRecentBlockhash) for `solana-core` v1.8 + and below. + + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`RpcResponse` - RpcResponse JSON object with `value` field set to a JSON +object including: + +- `blockhash: ` - a Hash as base-58 encoded string +- `lastValidBlockHeight: ` - last + [block height](/docs/terminology.md#block-height) at which the blockhash will + be valid + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "id":1, + "jsonrpc":"2.0", + "method":"getLatestBlockhash", + "params":[ + { + "commitment":"processed" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 2792 + }, + "value": { + "blockhash": "EkSnNWid2cvwEVnVx9aBqawnmiCNiDgp3gUdkDPTKN1N", + "lastValidBlockHeight": 3090 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getLeaderSchedule.mdx b/docs/rpc/http/getLeaderSchedule.mdx new file mode 100644 index 000000000..a9cfc232e --- /dev/null +++ b/docs/rpc/http/getLeaderSchedule.mdx @@ -0,0 +1,93 @@ +--- +sidebarLabel: getLeaderSchedule +title: getLeaderSchedule RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getLeaderSchedule +--- + +Returns the leader schedule for an epoch + + + + + +### Parameters + + + +Fetch the leader schedule for the epoch that corresponds to the provided slot. 
+ + + If unspecified, the leader schedule for the current epoch is fetched + + + + + + +Configuration object containing the following fields: + + + + + Only return results for this validator identity (base-58 encoded) + + + + +### Result + +Returns a result with one of the two following values: + +- `` - if requested epoch is not found, or +- `` - the result field will be a dictionary of validator identities, as + base-58 encoded strings, and their corresponding leader slot indices as values + (indices are relative to the first slot in the requested epoch) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getLeaderSchedule", + "params": [ + null, + { + "identity": "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F": [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, + 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, + 57, 58, 59, 60, 61, 62, 63 + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getMaxRetransmitSlot.mdx b/docs/rpc/http/getMaxRetransmitSlot.mdx new file mode 100644 index 000000000..9def26990 --- /dev/null +++ b/docs/rpc/http/getMaxRetransmitSlot.mdx @@ -0,0 +1,42 @@ +--- +sidebarLabel: getMaxRetransmitSlot +title: getMaxRetransmitSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getMaxRetransmitSlot +--- + +Get the max slot seen from retransmit stage. + + + + + +### Parameters + +**None** + +### Result + +`` - Slot number + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getMaxRetransmitSlot"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 1234, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getMaxShredInsertSlot.mdx b/docs/rpc/http/getMaxShredInsertSlot.mdx new file mode 100644 index 000000000..efa971b39 --- /dev/null +++ b/docs/rpc/http/getMaxShredInsertSlot.mdx @@ -0,0 +1,42 @@ +--- +sidebarLabel: getMaxShredInsertSlot +title: getMaxShredInsertSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getMaxShredInsertSlot +--- + +Get the max slot seen from after shred insert. + + + + + +### Parameters + +**None** + +### Result + +`` - Slot number + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getMaxShredInsertSlot"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 1234, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getMinimumBalanceForRentExemption.mdx b/docs/rpc/http/getMinimumBalanceForRentExemption.mdx new file mode 100644 index 000000000..445d3f011 --- /dev/null +++ b/docs/rpc/http/getMinimumBalanceForRentExemption.mdx @@ -0,0 +1,62 @@ +--- +sidebarLabel: getMinimumBalanceForRentExemption +title: getMinimumBalanceForRentExemption RPC Method +sidebarSortOrder: 0 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getMinimumBalanceForRentExemption +--- + +Returns minimum balance required to make account rent exempt. 
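A minimal TypeScript sketch of the same lookup, assuming `@solana/web3.js` and a node at `http://localhost:8899`; the 50-byte data length matches the curl sample below.

```typescript
import { Connection } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  // Minimum lamports an account holding 50 bytes of data needs to stay rent exempt.
  const lamports = await connection.getMinimumBalanceForRentExemption(50);
  console.log(lamports);
})();
```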
+ + + + + +### Parameters + + + the Account's data length + + + + +Configuration object containing the following fields: + + + + + +### Result + +`` - minimum lamports required in the Account to remain rent free + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getMinimumBalanceForRentExemption", + "params": [50] + } +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 500, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getMultipleAccounts.mdx b/docs/rpc/http/getMultipleAccounts.mdx new file mode 100644 index 000000000..3ecc5e58c --- /dev/null +++ b/docs/rpc/http/getMultipleAccounts.mdx @@ -0,0 +1,156 @@ +--- +sidebarLabel: getMultipleAccounts +title: getMultipleAccounts RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getMultipleAccounts +--- + +Returns the account information for a list of Pubkeys. + + + + + +### Parameters + + + An array of Pubkeys to query, as base-58 encoded strings (up to a maximum of + 100) + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + Request a slice of the account's data. + +- `length: ` - number of bytes to return +- `offset: ` - byte offset from which to start reading + + + Data slicing is only available for `base58`, `base64`, or `base64+zstd` + encodings. + + + + + + +encoding format for the returned Account data + + + +
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `base64` will return base64 encoded data for Account data of any size. +- `base64+zstd` compresses the Account data using + [Zstandard](https://facebook.github.io/zstd/) and base64-encodes the result. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type + `<string>`. +
+ +
+ +
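A client-side sketch of the same query, assuming `@solana/web3.js` and a node at `http://localhost:8899`; the two addresses are the ones used in the curl sample below.

```typescript
import { Connection, PublicKey } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  // Up to 100 pubkeys can be queried in a single call.
  const keys = [
    new PublicKey("vines1vzrYbzLMRdu58ou5XTby4qAqVRLmqo36NKPTg"),
    new PublicKey("4fYNw3dojWmQ4dXtSGE9epjRGy9pFSx62YypT7avPYvA"),
  ];

  // Entries are `null` for accounts that do not exist.
  const accounts = await connection.getMultipleAccountsInfo(keys);
  accounts.forEach((account, i) => {
    console.log(keys[i].toBase58(), account?.lamports, account?.owner.toBase58());
  });
})();
```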
+ +### Result + +The result will be a JSON object with `value` equal to an array of: + +- `` - if the account at that Pubkey doesn't exist, or +- `` - a JSON object containing: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: <[string, encoding]|object>` - data associated with the account, + either as encoded binary data or JSON format `{: }` - + depending on encoding parameter + - `executable: ` - boolean indicating if the account contains a program + \(and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, as + u64 + - `size: ` - the data size of the account + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getMultipleAccounts", + "params": [ + [ + "vines1vzrYbzLMRdu58ou5XTby4qAqVRLmqo36NKPTg", + "4fYNw3dojWmQ4dXtSGE9epjRGy9pFSx62YypT7avPYvA" + ], + { + "encoding": "base58" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1 + }, + "value": [ + { + "data": ["", "base64"], + "executable": false, + "lamports": 1000000000, + "owner": "11111111111111111111111111111111", + "rentEpoch": 2, + "space": 16 + }, + { + "data": ["", "base64"], + "executable": false, + "lamports": 5000000000, + "owner": "11111111111111111111111111111111", + "rentEpoch": 2, + "space": 0 + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getProgramAccounts.mdx b/docs/rpc/http/getProgramAccounts.mdx new file mode 100644 index 000000000..cce47ba3d --- /dev/null +++ b/docs/rpc/http/getProgramAccounts.mdx @@ -0,0 +1,180 @@ +--- +sidebarLabel: getProgramAccounts +title: getProgramAccounts RPC Method +sidebarSortOrder: 10 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getProgramAccounts +--- + +Returns all accounts owned by the provided program Pubkey + + + + + +### Parameters + + + Pubkey of program, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + wrap the result in an RpcResponse JSON object + + + + +encoding format for the returned Account data + + + +
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `base64` will return base64 encoded data for Account data of any size. +- `base64+zstd` compresses the Account data using + [Zstandard](https://facebook.github.io/zstd/) and base64-encodes the result. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type + `<string>`. +
+ +
+ + + Request a slice of the account's data. + +- `length: ` - number of bytes to return +- `offset: ` - byte offset from which to start reading + + + Data slicing is only available for `base58`, `base64`, or `base64+zstd` + encodings. + + + + + + +filter results using up to 4 filter objects + + + The resultant account(s) must meet **ALL** filter criteria to be included in + the returned results + + + + +
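The same filtered query from TypeScript, as a sketch assuming `@solana/web3.js` and a node at `http://localhost:8899`; the program id and filter values mirror the curl sample below.

```typescript
import { Connection, PublicKey } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");
  const programId = new PublicKey(
    "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T",
  );

  // Accounts exactly 17 bytes long whose data matches the base-58
  // bytes "3Mc6vR" at offset 4 (same filters as the curl sample).
  const accounts = await connection.getProgramAccounts(programId, {
    filters: [{ dataSize: 17 }, { memcmp: { offset: 4, bytes: "3Mc6vR" } }],
  });

  for (const { pubkey, account } of accounts) {
    console.log(pubkey.toBase58(), account.lamports);
  }
})();
```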
+ +### Result + +By default, the result field will be an array of JSON objects. + + + If the `withContext` flag is set, the array will be wrapped in an + `RpcResponse` JSON object. + + +The resultant response array will contain: + +- `pubkey: ` - the account Pubkey as base-58 encoded string +- `account: ` - a JSON object, with the following sub fields: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: <[string,encoding]|object>` - data associated with the account, + either as encoded binary data or JSON format `{: }` - + depending on encoding parameter + - `executable: ` - boolean indicating if the account contains a program + \(and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, as + u64 + - `size: ` - the data size of the account + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getProgramAccounts", + "params": [ + "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", + { + "filters": [ + { + "dataSize": 17 + }, + { + "memcmp": { + "offset": 4, + "bytes": "3Mc6vR" + } + } + ] + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "account": { + "data": "2R9jLfiAQ9bgdcw6h8s44439", + "executable": false, + "lamports": 15298080, + "owner": "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", + "rentEpoch": 28, + "space": 42 + }, + "pubkey": "CxELquR1gPP8wHe33gZ4QxqGB3sZ9RSwsJ2KshVewkFY" + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getRecentPerformanceSamples.mdx b/docs/rpc/http/getRecentPerformanceSamples.mdx new file mode 100644 index 000000000..88bb4b06d --- /dev/null +++ b/docs/rpc/http/getRecentPerformanceSamples.mdx @@ -0,0 +1,99 @@ +--- +sidebarLabel: getRecentPerformanceSamples +title: getRecentPerformanceSamples RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getRecentPerformanceSamples +--- + +Returns a list of recent performance samples, in reverse slot order. Performance +samples are taken every 60 seconds and include the number of transactions and +slots that occur in a given time window. + + + + + +### Parameters + + + +number of samples to return (maximum 720) + + + +### Result + +An array of `RpcPerfSample` with the following fields: + +- `slot: ` - Slot in which sample was taken at +- `numTransactions: ` - Number of transactions processed during the sample + period +- `numSlots: ` - Number of slots completed during the sample period +- `samplePeriodSecs: ` - Number of seconds in a sample window +- `numNonVoteTransaction: ` - Number of non-vote transactions processed + during the sample period. + + + `numNonVoteTransaction` is present starting with v1.15. To get a number of + voting transactions compute: +
+ `numTransactions - numNonVoteTransaction` +
+ + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0", "id":1, + "method": "getRecentPerformanceSamples", + "params": [4]} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "numSlots": 126, + "numTransactions": 126, + "numNonVoteTransaction": 1, + "samplePeriodSecs": 60, + "slot": 348125 + }, + { + "numSlots": 126, + "numTransactions": 126, + "numNonVoteTransaction": 1, + "samplePeriodSecs": 60, + "slot": 347999 + }, + { + "numSlots": 125, + "numTransactions": 125, + "numNonVoteTransaction": 0, + "samplePeriodSecs": 60, + "slot": 347873 + }, + { + "numSlots": 125, + "numTransactions": 125, + "numNonVoteTransaction": 0, + "samplePeriodSecs": 60, + "slot": 347748 + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getRecentPrioritizationFees.mdx b/docs/rpc/http/getRecentPrioritizationFees.mdx new file mode 100644 index 000000000..0392bcda9 --- /dev/null +++ b/docs/rpc/http/getRecentPrioritizationFees.mdx @@ -0,0 +1,93 @@ +--- +sidebarLabel: getRecentPrioritizationFees +title: getRecentPrioritizationFees RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getRecentPrioritizationFees +--- + +Returns a list of prioritization fees from recent blocks. + + + Currently, a node's prioritization-fee cache stores data from up to 150 + blocks. + + + + + + +### Parameters + + + +An array of Account addresses (up to a maximum of 128 addresses), as base-58 +encoded strings + + + If this parameter is provided, the response will reflect a fee to land a + transaction locking all of the provided accounts as writable. + + + + +### Result + +An array of `RpcPrioritizationFee` with the following fields: + +- `slot: ` - slot in which the fee was observed +- `prioritizationFee: ` - the per-compute-unit fee paid by at least one + successfully landed transaction, specified in increments of micro-lamports + (0.000001 lamports) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0", "id":1, + "method": "getRecentPrioritizationFees", + "params": [ + ["CxELquR1gPP8wHe33gZ4QxqGB3sZ9RSwsJ2KshVewkFY"] + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "slot": 348125, + "prioritizationFee": 0 + }, + { + "slot": 348126, + "prioritizationFee": 1000 + }, + { + "slot": 348127, + "prioritizationFee": 500 + }, + { + "slot": 348128, + "prioritizationFee": 0 + }, + { + "slot": 348129, + "prioritizationFee": 1234 + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getSignatureStatuses.mdx b/docs/rpc/http/getSignatureStatuses.mdx new file mode 100644 index 000000000..3adc2a6e3 --- /dev/null +++ b/docs/rpc/http/getSignatureStatuses.mdx @@ -0,0 +1,115 @@ +--- +sidebarLabel: getSignatureStatuses +title: getSignatureStatuses RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSignatureStatuses +--- + +Returns the statuses of a list of signatures. Each signature must be a +[txid](/docs/terminology.md#transaction-id), the first signature of a +transaction. + + + Unless the `searchTransactionHistory` configuration parameter is included, + this method only searches the recent status cache of signatures, which retains + statuses for all active slots plus `MAX_RECENT_BLOCKHASHES` rooted slots. 
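For reference, the same status lookup from TypeScript, sketched under the assumption of `@solana/web3.js` and a node at `http://localhost:8899`; the signature is the one used in the curl sample below.

```typescript
import { Connection } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  const signatures = [
    "5VERv8NMvzbJMEkV8xnrLkEaWRtSz9CosKDYjCJjBRnbJLgp8uirBgmQpjKhoR4tjF3ZpRzrFmBV6UjKdiSZkQUW",
  ];

  // Opt in to searching the full transaction history, not just the recent cache.
  const { value: statuses } = await connection.getSignatureStatuses(signatures, {
    searchTransactionHistory: true,
  });
  console.log(statuses[0]?.confirmationStatus, statuses[0]?.err);
})();
```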
+ + + + + + +### Parameters + + + An array of transaction signatures to confirm, as base-58 encoded strings (up + to a maximum of 256) + + + + +Configuration object containing the following fields: + + + +if `true` - a Solana node will search its ledger cache for any signatures not +found in the recent status cache + + + + + +### Result + +An array of `RpcResponse` consisting of either: + +- `` - Unknown transaction, or +- `` + - `slot: ` - The slot the transaction was processed + - `confirmations: ` - Number of blocks since signature + confirmation, null if rooted, as well as finalized by a supermajority of the + cluster + - `err: ` - Error if transaction failed, null if transaction + succeeded. See + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) + - `confirmationStatus: ` - The transaction's cluster confirmation + status; Either `processed`, `confirmed`, or `finalized`. See + [Commitment](/docs/rpc/index.mdx#configuring-state-commitment) for more on + optimistic confirmation. + - DEPRECATED: `status: ` - Transaction status + - `"Ok": ` - Transaction was successful + - `"Err": ` - Transaction failed with TransactionError + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getSignatureStatuses", + "params": [ + [ + "5VERv8NMvzbJMEkV8xnrLkEaWRtSz9CosKDYjCJjBRnbJLgp8uirBgmQpjKhoR4tjF3ZpRzrFmBV6UjKdiSZkQUW" + ], + { + "searchTransactionHistory": true + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 82 + }, + "value": [ + { + "slot": 48, + "confirmations": null, + "err": null, + "status": { + "Ok": null + }, + "confirmationStatus": "finalized" + }, + null + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getSignaturesForAddress.mdx b/docs/rpc/http/getSignaturesForAddress.mdx new file mode 100644 index 000000000..c50e22364 --- /dev/null +++ b/docs/rpc/http/getSignaturesForAddress.mdx @@ -0,0 +1,116 @@ +--- +sidebarLabel: getSignaturesForAddress +title: getSignaturesForAddress RPC Method +sidebarSortOrder: 10 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSignaturesForAddress +--- + +Returns signatures for confirmed transactions that include the given address in +their `accountKeys` list. Returns signatures backwards in time from the provided +signature or most recent confirmed block + + + + + +### Parameters + + + Account address as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + maximum transaction signatures to return (between 1 and 1,000). + + + + start searching backwards from this transaction signature. If not provided the + search starts from the top of the highest max confirmed block. + + + + search until this transaction signature, if found before limit reached + + + + +### Result + +An array of ``, ordered from **newest** to **oldest** transaction, +containing transaction signature information with the following fields: + +- `signature: ` - transaction signature as base-58 encoded string +- `slot: ` - The slot that contains the block with the transaction +- `err: ` - Error if transaction failed, null if transaction + succeeded. 
See + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) + for more info. +- `memo: ` - Memo associated with the transaction, null if no memo + is present +- `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch) of when transaction was processed. null if not + available. +- `confirmationStatus: ` - The transaction's cluster confirmation + status; Either `processed`, `confirmed`, or `finalized`. See + [Commitment](/docs/rpc/index.mdx#configuring-state-commitment) for more on + optimistic confirmation. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getSignaturesForAddress", + "params": [ + "Vote111111111111111111111111111111111111111", + { + "limit": 1 + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": [ + { + "err": null, + "memo": null, + "signature": "5h6xBEauJ3PK6SWCZ1PGjBvj8vDdWG3KpwATGy1ARAXFSDwt8GFXM7W5Ncn16wmqokgpiKRLuS83KUxyZyv2sUYv", + "slot": 114, + "blockTime": null + } + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getSlot.mdx b/docs/rpc/http/getSlot.mdx new file mode 100644 index 000000000..4439f4ad5 --- /dev/null +++ b/docs/rpc/http/getSlot.mdx @@ -0,0 +1,58 @@ +--- +sidebarLabel: getSlot +title: getSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSlot +--- + +Returns the slot that has reached the +[given or default commitment level](/docs/rpc/index.mdx#configuring-state-commitment) + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`` - Current slot + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getSlot"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 1234, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getSlotLeader.mdx b/docs/rpc/http/getSlotLeader.mdx new file mode 100644 index 000000000..bac7b1a49 --- /dev/null +++ b/docs/rpc/http/getSlotLeader.mdx @@ -0,0 +1,61 @@ +--- +sidebarLabel: getSlotLeader +title: getSlotLeader RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSlotLeader +--- + +Returns the current slot leader + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`` - Node identity Pubkey as base-58 encoded string + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getSlotLeader"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": "ENvAW7JScgYq6o4zKZwewtkzzJgDzuJAFxYasvmEQdpS", + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getSlotLeaders.mdx b/docs/rpc/http/getSlotLeaders.mdx new file mode 100644 index 000000000..9e13db0dd --- /dev/null +++ b/docs/rpc/http/getSlotLeaders.mdx @@ -0,0 +1,73 @@ +--- +sidebarLabel: getSlotLeaders +title: getSlotLeaders RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSlotLeaders +--- + +Returns the slot leaders for a given slot range + + + + + +### Parameters + + + Start slot, as u64 integer + + + + Limit, as u64 integer (between 1 and 5,000) + + +### Result + +`` - array of Node identity public keys as 
base-58 encoded +strings + + + + + +### Code sample + +If the current slot is `#99` - query the next `10` leaders with the following +request: + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0", "id": 1, + "method": "getSlotLeaders", + "params": [100, 10] + } +' +``` + +### Response + +The first leader returned is the leader for slot `#100`: + +```json +{ + "jsonrpc": "2.0", + "result": [ + "ChorusmmK7i1AxXeiTtQgQZhQNiXYU84ULeaYF1EH15n", + "ChorusmmK7i1AxXeiTtQgQZhQNiXYU84ULeaYF1EH15n", + "ChorusmmK7i1AxXeiTtQgQZhQNiXYU84ULeaYF1EH15n", + "ChorusmmK7i1AxXeiTtQgQZhQNiXYU84ULeaYF1EH15n", + "Awes4Tr6TX8JDzEhCZY2QVNimT6iD1zWHzf1vNyGvpLM", + "Awes4Tr6TX8JDzEhCZY2QVNimT6iD1zWHzf1vNyGvpLM", + "Awes4Tr6TX8JDzEhCZY2QVNimT6iD1zWHzf1vNyGvpLM", + "Awes4Tr6TX8JDzEhCZY2QVNimT6iD1zWHzf1vNyGvpLM", + "DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP", + "DWvDTSh3qfn88UoQTEKRV2JnLt5jtJAVoiCo3ivtMwXP" + ], + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getStakeActivation.mdx b/docs/rpc/http/getStakeActivation.mdx new file mode 100644 index 000000000..93e47f04e --- /dev/null +++ b/docs/rpc/http/getStakeActivation.mdx @@ -0,0 +1,90 @@ +--- +sidebarLabel: getStakeActivation +title: getStakeActivation RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getStakeActivation +--- + +Returns epoch activation information for a stake account + + + + + +### Parameters + + + Pubkey of stake Account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + epoch for which to calculate activation details. If parameter not provided, + defaults to current epoch. **DEPRECATED**, inputs other than the current epoch + return an error. + + + + +### Result + +The result will be a JSON object with the following fields: + +- `state: ` - the stake account's activation state, either: `active`, + `inactive`, `activating`, or `deactivating` +- `active: ` - stake active during the epoch +- `inactive: ` - stake inactive during the epoch + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getStakeActivation", + "params": [ + "CYRJWqiSjLitBAcRxPvWpgX3s5TvmN2SuRY3eEYypFvT", + { + "epoch": 4 + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "active": 124429280, + "inactive": 73287840, + "state": "activating" + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getStakeMinimumDelegation.mdx b/docs/rpc/http/getStakeMinimumDelegation.mdx new file mode 100644 index 000000000..40b447e2c --- /dev/null +++ b/docs/rpc/http/getStakeMinimumDelegation.mdx @@ -0,0 +1,67 @@ +--- +sidebarLabel: getStakeMinimumDelegation +title: getStakeMinimumDelegation RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getStakeMinimumDelegation +--- + +Returns the stake minimum delegation, in lamports. 
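A one-call TypeScript sketch of this method, assuming a recent `@solana/web3.js` (which exposes `Connection.getStakeMinimumDelegation`) and a node at `http://localhost:8899`.

```typescript
import { Connection } from "@solana/web3.js";

(async () => {
  const connection = new Connection("http://localhost:8899");

  // The response is an RpcResponse wrapper; `value` is the minimum delegation in lamports.
  const { value: minimumDelegation } = await connection.getStakeMinimumDelegation();
  console.log(minimumDelegation);
})();
```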
+ + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to: + +- `` - The stake minimum delegation, in lamports + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc":"2.0", "id":1, + "method": "getStakeMinimumDelegation" + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 501 + }, + "value": 1000000000 + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getSupply.mdx b/docs/rpc/http/getSupply.mdx new file mode 100644 index 000000000..79b81f13c --- /dev/null +++ b/docs/rpc/http/getSupply.mdx @@ -0,0 +1,84 @@ +--- +sidebarLabel: getSupply +title: getSupply RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getSupply +--- + +Returns information about the current supply. + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + exclude non circulating accounts list from response + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to a JSON +object containing: + +- `total: ` - Total supply in lamports +- `circulating: ` - Circulating supply in lamports +- `nonCirculating: ` - Non-circulating supply in lamports +- `nonCirculatingAccounts: ` - an array of account addresses of + non-circulating accounts, as strings. If `excludeNonCirculatingAccountsList` + is enabled, the returned array will be empty. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0", "id":1, "method":"getSupply"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1114 + }, + "value": { + "circulating": 16000, + "nonCirculating": 1000000, + "nonCirculatingAccounts": [ + "FEy8pTbP5fEoqMV1GdTz83byuA8EKByqYat1PKDgVAq5", + "9huDUZfxoJ7wGMTffUE7vh1xePqef7gyrLJu9NApncqA", + "3mi1GmwEE3zo2jmfDuzvjSX9ovRXsDUKHvsntpkhuLJ9", + "BYxEJTDerkaRWBem3XgnVcdhppktBXa2HbkHPKj2Ui4Z" + ], + "total": 1016000 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTokenAccountBalance.mdx b/docs/rpc/http/getTokenAccountBalance.mdx new file mode 100644 index 000000000..a427e208c --- /dev/null +++ b/docs/rpc/http/getTokenAccountBalance.mdx @@ -0,0 +1,90 @@ +--- +sidebarLabel: getTokenAccountBalance +title: getTokenAccountBalance RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTokenAccountBalance +--- + +Returns the token balance of an SPL Token account. + + + + + +### Parameters + + + Pubkey of Token account to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to a JSON +object containing: + +- `amount: ` - the raw balance without decimals, a string representation + of u64 +- `decimals: ` - number of base 10 digits to the right of the decimal place +- `uiAmount: ` - the balance, using mint-prescribed decimals + **DEPRECATED** +- `uiAmountString: ` - the balance as a string, using mint-prescribed + decimals + +For more details on returned data, the +[Token Balances Structure](/docs/rpc/json-structures#token-balances) response +from [getBlock](/docs/rpc/http/getblock) follows a similar structure. 
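+
+For comparison with the curl request below, a minimal
+[@solana/web3.js](https://github.com/solana-labs/solana-web3.js) sketch
+(assuming a local node; the token account address is simply reused from the
+example request):
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+
+  // Token account address reused from the example request below
+  const tokenAccount = new PublicKey(
+    "7fUAJdStEuGbc3sM84cKRL6yYaaSstyLSU4ve5oovLS7",
+  );
+
+  const { value } = await connection.getTokenAccountBalance(tokenAccount);
+  // `amount` is the raw u64 string; `uiAmountString` applies the mint decimals
+  console.log(
+    `${value.uiAmountString} (raw: ${value.amount}, decimals: ${value.decimals})`,
+  );
+})();
+```
+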
+ 
+
+
+
+### Code sample
+
+```bash
+curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d '
+  {
+    "jsonrpc": "2.0", "id": 1,
+    "method": "getTokenAccountBalance",
+    "params": [
+      "7fUAJdStEuGbc3sM84cKRL6yYaaSstyLSU4ve5oovLS7"
+    ]
+  }
+'
+```
+
+### Response
+
+```json
+{
+  "jsonrpc": "2.0",
+  "result": {
+    "context": {
+      "slot": 1114
+    },
+    "value": {
+      "amount": "9864",
+      "decimals": 2,
+      "uiAmount": 98.64,
+      "uiAmountString": "98.64"
+    }
+  },
+  "id": 1
+}
+```
+
+
+
diff --git a/docs/rpc/http/getTokenAccountsByDelegate.mdx b/docs/rpc/http/getTokenAccountsByDelegate.mdx
new file mode 100644
index 000000000..766ecdbcd
--- /dev/null
+++ b/docs/rpc/http/getTokenAccountsByDelegate.mdx
@@ -0,0 +1,189 @@
+---
+sidebarLabel: getTokenAccountsByDelegate
+title: getTokenAccountsByDelegate RPC Method
+hideTableOfContents: true
+altRoutes:
+  - /docs/rpc/getTokenAccountsByDelegate
+---
+
+Returns all SPL Token accounts by approved Delegate.
+
+
+
+
+
+### Parameters
+
+
+  Pubkey of account delegate to query, as base-58 encoded string
+
+
+
+
+A JSON object with one of the following fields:
+
+- `mint: ` - Pubkey of the specific token Mint to limit accounts to, as
+  base-58 encoded string; or
+- `programId: ` - Pubkey of the Token program that owns the accounts, as
+  base-58 encoded string
+
+
+
+
+
+Configuration object containing the following fields:
+
+
+
+
+  The minimum slot that the request can be evaluated at
+
+
+
+  Request a slice of the account's data.
+
+- `length: ` - number of bytes to return
+- `offset: ` - byte offset from which to start reading
+
+
+  Data slicing is only available for `base58`, `base64`, or `base64+zstd`
+  encodings.
+
+
+
+
+
+
+Encoding format for Account data
+
+
+
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `base64` will return base64 encoded data for Account data of any size. +- `base64+zstd` compresses the Account data using + [Zstandard](https://facebook.github.io/zstd/) and base64-encodes the result. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type `string`. + +
+ +
+ +
+ +### Result + +The result will be an RpcResponse JSON object with `value` equal to an array of +JSON objects, which will contain: + +- `pubkey: ` - the account Pubkey as base-58 encoded string +- `account: ` - a JSON object, with the following sub fields: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: ` - Token state data associated with the account, either as + encoded binary data or in JSON format `{: }` + - `executable: ` - boolean indicating if the account contains a program + (and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, as + u64 + - `size: ` - the data size of the account + +When the data is requested with the `jsonParsed` encoding a format similar to +that of the [Token Balances Structure](/docs/rpc/json-structures#token-balances) +can be expected inside the structure, both for the `tokenAmount` and the +`delegatedAmount` - with the latter being an optional object. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getTokenAccountsByDelegate", + "params": [ + "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", + { + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + }, + { + "encoding": "jsonParsed" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1114 + }, + "value": [ + { + "account": { + "data": { + "program": "spl-token", + "parsed": { + "info": { + "tokenAmount": { + "amount": "1", + "decimals": 1, + "uiAmount": 0.1, + "uiAmountString": "0.1" + }, + "delegate": "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", + "delegatedAmount": { + "amount": "1", + "decimals": 1, + "uiAmount": 0.1, + "uiAmountString": "0.1" + }, + "state": "initialized", + "isNative": false, + "mint": "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E", + "owner": "CnPoSPKXu7wJqxe59Fs72tkBeALovhsCxYeFwPCQH9TD" + }, + "type": "account" + }, + "space": 165 + }, + "executable": false, + "lamports": 1726080, + "owner": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + "rentEpoch": 4, + "space": 165 + }, + "pubkey": "28YTZEwqtMHWrhWcvv34se7pjS7wctgqzCPB3gReCFKp" + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTokenAccountsByOwner.mdx b/docs/rpc/http/getTokenAccountsByOwner.mdx new file mode 100644 index 000000000..b5aae7281 --- /dev/null +++ b/docs/rpc/http/getTokenAccountsByOwner.mdx @@ -0,0 +1,190 @@ +--- +sidebarLabel: getTokenAccountsByOwner +title: getTokenAccountsByOwner RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTokenAccountsByOwner +--- + +Returns all SPL Token accounts by token owner. + + + + + +### Parameters + + + Pubkey of account delegate to query, as base-58 encoded string + + + + +A JSON object with one of the following fields: + +- `mint: ` - Pubkey of the specific token Mint to limit accounts to, as + base-58 encoded string; or +- `programId: ` - Pubkey of the Token program that owns the accounts, as + base-58 encoded string + + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + Request a slice of the account's data. + +- `length: ` - number of bytes to return +- `offset: ` - byte offset from which to start reading + + + Data slicing is only available for `base58`, `base64`, or `base64+zstd` + encodings. 
+ + + + + + +Encoding format for Account data + + + +
+ +- `base58` is slow and limited to less than 129 bytes of Account data. +- `base64` will return base64 encoded data for Account data of any size. +- `base64+zstd` compresses the Account data using + [Zstandard](https://facebook.github.io/zstd/) and base64-encodes the result. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type `string`. + +
+ +
+ +
+ +### Result + +The result will be an RpcResponse JSON object with `value` equal to an array of +JSON objects, which will contain: + +- `pubkey: ` - the account Pubkey as base-58 encoded string +- `account: ` - a JSON object, with the following sub fields: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: ` - Token state data associated with the account, either as + encoded binary data or in JSON format `{: }` + - `executable: ` - boolean indicating if the account contains a program + \(and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, as + u64 + - `size: ` - the data size of the account + +When the data is requested with the `jsonParsed` encoding a format similar to +that of the [Token Balances Structure](/docs/rpc/json-structures#token-balances) +can be expected inside the structure, both for the `tokenAmount` and the +`delegatedAmount` - with the latter being an optional object. + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getTokenAccountsByOwner", + "params": [ + "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F", + { + "mint": "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E" + }, + { + "encoding": "jsonParsed" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1114 + }, + "value": [ + { + "account": { + "data": { + "program": "spl-token", + "parsed": { + "accountType": "account", + "info": { + "tokenAmount": { + "amount": "1", + "decimals": 1, + "uiAmount": 0.1, + "uiAmountString": "0.1" + }, + "delegate": "4Nd1mBQtrMJVYVfKf2PJy9NZUZdTAsp7D4xWLs4gDB4T", + "delegatedAmount": { + "amount": "1", + "decimals": 1, + "uiAmount": 0.1, + "uiAmountString": "0.1" + }, + "state": "initialized", + "isNative": false, + "mint": "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E", + "owner": "4Qkev8aNZcqFNSRhQzwyLMFSsi94jHqE8WNVTJzTP99F" + }, + "type": "account" + }, + "space": 165 + }, + "executable": false, + "lamports": 1726080, + "owner": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA", + "rentEpoch": 4, + "space": 165 + }, + "pubkey": "C2gJg6tKpQs41PRS1nC8aw3ZKNZK3HQQZGVrDFDup5nx" + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTokenLargestAccounts.mdx b/docs/rpc/http/getTokenLargestAccounts.mdx new file mode 100644 index 000000000..434bb924e --- /dev/null +++ b/docs/rpc/http/getTokenLargestAccounts.mdx @@ -0,0 +1,97 @@ +--- +sidebarLabel: getTokenLargestAccounts +title: getTokenLargestAccounts RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTokenLargestAccounts +--- + +Returns the 20 largest accounts of a particular SPL Token type. 
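+
+As a rough illustration, the same call via
+[@solana/web3.js](https://github.com/solana-labs/solana-web3.js) (a sketch that
+assumes a local node; the mint address is reused from the example request
+further below):
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+
+  // Mint address reused from the example request below
+  const mint = new PublicKey("3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E");
+
+  const { value: largestAccounts } =
+    await connection.getTokenLargestAccounts(mint);
+
+  // Each entry carries the token account address and its balance
+  for (const { address, uiAmountString } of largestAccounts) {
+    console.log(`${address.toBase58()}: ${uiAmountString}`);
+  }
+})();
+```
+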
+ + + + + +### Parameters + + + Pubkey of the token Mint to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to an array of +JSON objects containing: + +- `address: ` - the address of the token account +- `amount: ` - the raw token account balance without decimals, a string + representation of u64 +- `decimals: ` - number of base 10 digits to the right of the decimal place +- `uiAmount: ` - the token account balance, using mint-prescribed + decimals **DEPRECATED** +- `uiAmountString: ` - the token account balance as a string, using + mint-prescribed decimals + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getTokenLargestAccounts", + "params": [ + "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1114 + }, + "value": [ + { + "address": "FYjHNoFtSQ5uijKrZFyYAxvEr87hsKXkXcxkcmkBAf4r", + "amount": "771", + "decimals": 2, + "uiAmount": 7.71, + "uiAmountString": "7.71" + }, + { + "address": "BnsywxTcaYeNUtzrPxQUvzAWxfzZe3ZLUJ4wMMuLESnu", + "amount": "229", + "decimals": 2, + "uiAmount": 2.29, + "uiAmountString": "2.29" + } + ] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTokenSupply.mdx b/docs/rpc/http/getTokenSupply.mdx new file mode 100644 index 000000000..353433f15 --- /dev/null +++ b/docs/rpc/http/getTokenSupply.mdx @@ -0,0 +1,86 @@ +--- +sidebarLabel: getTokenSupply +title: getTokenSupply RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTokenSupply +--- + +Returns the total supply of an SPL Token type. 
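+
+A corresponding [@solana/web3.js](https://github.com/solana-labs/solana-web3.js)
+sketch (assuming a local node; the mint address is reused from the example
+request further below):
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+
+  // Mint address reused from the example request below
+  const mint = new PublicKey("3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E");
+
+  const supply = await connection.getTokenSupply(mint);
+  console.log(
+    `${supply.value.uiAmountString} total ` +
+      `(${supply.value.amount} raw units, ${supply.value.decimals} decimals)`,
+  );
+})();
+```
+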
+ + + + + +### Parameters + + + Pubkey of the token Mint to query, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +### Result + +The result will be an RpcResponse JSON object with `value` equal to a JSON +object containing: + +- `amount: ` - the raw total token supply without decimals, a string + representation of u64 +- `decimals: ` - number of base 10 digits to the right of the decimal place +- `uiAmount: ` - the total token supply, using mint-prescribed + decimals **DEPRECATED** +- `uiAmountString: ` - the total token supply as a string, using + mint-prescribed decimals + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "getTokenSupply", + "params": [ + "3wyAj7Rt1TWVPZVteFJPLa26JmLvdb1CAKEFZm3NY75E" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 1114 + }, + "value": { + "amount": "100000", + "decimals": 2, + "uiAmount": 1000, + "uiAmountString": "1000" + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTransaction.mdx b/docs/rpc/http/getTransaction.mdx new file mode 100644 index 000000000..b176d9710 --- /dev/null +++ b/docs/rpc/http/getTransaction.mdx @@ -0,0 +1,210 @@ +--- +sidebarLabel: getTransaction +title: getTransaction RPC Method +sidebarSortOrder: 10 +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTransaction +--- + +Returns transaction details for a confirmed transaction + + + + + +### Parameters + + + Transaction signature, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + Set the max transaction version to return in responses. If the requested + transaction is a higher version, an error will be returned. If this parameter + is omitted, only legacy transactions will be returned, and any versioned + transaction will prompt the error. + + + + +Encoding for the returned Transaction + + + +
+ +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit data in the + `transaction.message.instructions` list. +- If `jsonParsed` is requested but a parser cannot be found, the instruction + falls back to regular JSON encoding (`accounts`, `data`, and `programIdIndex` + fields). + +
+ +
+ +
+ +### Result + +- `` - if transaction is not found or not confirmed +- `` - if transaction is confirmed, an object with the following fields: + - `slot: ` - the slot this transaction was processed in + - `transaction: ` - + [Transaction](/docs/rpc/json-structures#transactions) object, either in JSON + format or encoded binary data, depending on encoding parameter + - `blockTime: ` - estimated production time, as Unix timestamp + (seconds since the Unix epoch) of when the transaction was processed. null + if not available + - `meta: ` - transaction status metadata object: + - `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://docs.rs/solana-sdk/latest/solana_sdk/transaction/enum.TransactionError.html) + - `fee: ` - fee this transaction was charged, as u64 integer + - `preBalances: ` - array of u64 account balances from before the + transaction was processed + - `postBalances: ` - array of u64 account balances after the + transaction was processed + - `innerInstructions: ` - List of + [inner instructions](/docs/rpc/json-structures#inner-instructions) or + `null` if inner instruction recording was not enabled during this + transaction + - `preTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from before the + transaction was processed or omitted if token balance recording was not + yet enabled during this transaction + - `postTokenBalances: ` - List of + [token balances](/docs/rpc/json-structures#token-balances) from after the + transaction was processed or omitted if token balance recording was not + yet enabled during this transaction + - `logMessages: ` - array of string log messages or `null` if + log message recording was not enabled during this transaction + - DEPRECATED: `status: ` - Transaction status + - `"Ok": ` - Transaction was successful + - `"Err": ` - Transaction failed with TransactionError + - `rewards: ` - transaction-level rewards, populated if rewards + are requested; an array of JSON objects containing: + - `pubkey: ` - The public key, as base-58 encoded string, of the + account that received the reward + - `lamports: `- number of reward lamports credited or debited by the + account, as a i64 + - `postBalance: ` - account balance in lamports after the reward was + applied + - `rewardType: ` - type of reward: currently only "rent", other + types may be added in the future + - `commission: ` - vote account commission when the reward + was credited, only present for voting and staking rewards + - `loadedAddresses: ` - Transaction addresses loaded from + address lookup tables. Undefined if `maxSupportedTransactionVersion` is + not set in request params, or if `jsonParsed` encoding is set in request + params. + - `writable: ` - Ordered list of base-58 encoded addresses + for writable loaded accounts + - `readonly: ` - Ordered list of base-58 encoded addresses + for readonly loaded accounts + - `returnData: ` - the most-recent return data generated + by an instruction in the transaction, with the following fields: + - `programId: ` - the program that generated the return data, as + base-58 encoded Pubkey + - `data: <[string, encoding]>` - the return data itself, as base-64 + encoded binary data + - `computeUnitsConsumed: ` - number of + [compute units](/docs/core/runtime.md#compute-budget) consumed by the + transaction + - `version: <"legacy"|number|undefined>` - Transaction version. Undefined if + `maxSupportedTransactionVersion` is not set in request params. 
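+
+The same lookup can be made with
+[@solana/web3.js](https://github.com/solana-labs/solana-web3.js). A minimal
+sketch (assuming a local node; the signature is reused from the curl example
+below, and `maxSupportedTransactionVersion` is set only to opt in to versioned
+transactions):
+
+```typescript
+import { Connection } from "@solana/web3.js";
+
+(async () => {
+  const connection = new Connection("http://localhost:8899", "confirmed");
+
+  // Signature reused from the example request below
+  const signature =
+    "2nBhEBYYvfaAe16UMNqRHre4YNSskvuYgx3M6E4JP1oDYvZEJHvoPzyUidNgNX5r9sTyN1J9UxtbCXy2rqYcuyuv";
+
+  // Omitting maxSupportedTransactionVersion returns legacy transactions only
+  const tx = await connection.getTransaction(signature, {
+    maxSupportedTransactionVersion: 0,
+  });
+
+  if (tx === null) {
+    console.log("Transaction not found or not confirmed");
+  } else {
+    console.log(`Processed in slot ${tx.slot}; fee: ${tx.meta?.fee} lamports`);
+  }
+})();
+```
+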
+ + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getTransaction", + "params": [ + "2nBhEBYYvfaAe16UMNqRHre4YNSskvuYgx3M6E4JP1oDYvZEJHvoPzyUidNgNX5r9sTyN1J9UxtbCXy2rqYcuyuv", + "json" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "meta": { + "err": null, + "fee": 5000, + "innerInstructions": [], + "postBalances": [499998932500, 26858640, 1, 1, 1], + "postTokenBalances": [], + "preBalances": [499998937500, 26858640, 1, 1, 1], + "preTokenBalances": [], + "rewards": [], + "status": { + "Ok": null + } + }, + "slot": 430, + "transaction": { + "message": { + "accountKeys": [ + "3UVYmECPPMZSCqWKfENfuoTv51fTDTWicX9xmBD2euKe", + "AjozzgE83A3x1sHNUR64hfH7zaEBWeMaFuAN9kQgujrc", + "SysvarS1otHashes111111111111111111111111111", + "SysvarC1ock11111111111111111111111111111111", + "Vote111111111111111111111111111111111111111" + ], + "header": { + "numReadonlySignedAccounts": 0, + "numReadonlyUnsignedAccounts": 3, + "numRequiredSignatures": 1 + }, + "instructions": [ + { + "accounts": [1, 2, 3, 0], + "data": "37u9WtQpcm6ULa3WRQHmj49EPs4if7o9f1jSRVZpm2dvihR9C8jY4NqEwXUbLwx15HBSNcP1", + "programIdIndex": 4 + } + ], + "recentBlockhash": "mfcyqEXB3DnHXki6KjjmZck6YjmZLvpAByy2fj4nh6B" + }, + "signatures": [ + "2nBhEBYYvfaAe16UMNqRHre4YNSskvuYgx3M6E4JP1oDYvZEJHvoPzyUidNgNX5r9sTyN1J9UxtbCXy2rqYcuyuv" + ] + } + }, + "blockTime": null, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getTransactionCount.mdx b/docs/rpc/http/getTransactionCount.mdx new file mode 100644 index 000000000..3ad9ccc72 --- /dev/null +++ b/docs/rpc/http/getTransactionCount.mdx @@ -0,0 +1,57 @@ +--- +sidebarLabel: getTransactionCount +title: getTransactionCount RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getTransactionCount +--- + +Returns the current Transaction count from the ledger + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`` - the current Transaction count from the ledger + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getTransactionCount"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 268, "id": 1 } +``` + + + diff --git a/docs/rpc/http/getVersion.mdx b/docs/rpc/http/getVersion.mdx new file mode 100644 index 000000000..766cbbf24 --- /dev/null +++ b/docs/rpc/http/getVersion.mdx @@ -0,0 +1,50 @@ +--- +sidebarLabel: getVersion +title: getVersion RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getVersion +--- + +Returns the current Solana version running on the node + + + + + +### Parameters + +**None** + +### Result + +The result field will be a JSON object with the following fields: + +- `solana-core` - software version of solana-core as a `string` +- `feature-set` - unique identifier of the current software's feature set as a + `u32` + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"getVersion"} +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { "feature-set": 2891131721, "solana-core": "1.16.7" }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/getVoteAccounts.mdx b/docs/rpc/http/getVoteAccounts.mdx new file mode 100644 index 000000000..864200408 --- /dev/null +++ 
b/docs/rpc/http/getVoteAccounts.mdx @@ -0,0 +1,113 @@ +--- +sidebarLabel: getVoteAccounts +title: getVoteAccounts RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/getVoteAccounts +--- + +Returns the account info and associated stake for all the voting accounts in the +current bank. + + + + + +### Parameters + + + +Configuration object containing the following fields: + + + + + Only return results for this validator vote address (base-58 encoded) + + + + Do not filter out delinquent validators with no stake + + + + Specify the number of slots behind the tip that a validator must fall to be + considered delinquent. **NOTE:** For the sake of consistency between ecosystem + products, _it is **not** recommended that this argument be specified._ + + + + +### Result + +The result field will be a JSON object of `current` and `delinquent` accounts, +each containing an array of JSON objects with the following sub fields: + +- `votePubkey: ` - Vote account address, as base-58 encoded string +- `nodePubkey: ` - Validator identity, as base-58 encoded string +- `activatedStake: ` - the stake, in lamports, delegated to this vote + account and active in this epoch +- `epochVoteAccount: ` - bool, whether the vote account is staked for this + epoch +- `commission: ` - percentage (0-100) of rewards payout owed to the vote + account +- `lastVote: ` - Most recent slot voted on by this vote account +- `epochCredits: ` - Latest history of earned credits for up to five + epochs, as an array of arrays containing: `[epoch, credits, previousCredits]`. +- `rootSlot: ` - Current root slot for this vote account + + + + + +### Code sample + +Restrict results to a single validator vote account: + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getVoteAccounts", + "params": [ + { + "votePubkey": "3ZT31jkAGhUaw8jsy4bTknwBMP8i4Eueh52By4zXcsVw" + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "current": [ + { + "commission": 0, + "epochVoteAccount": true, + "epochCredits": [ + [1, 64, 0], + [2, 192, 64] + ], + "nodePubkey": "B97CCUW3AEZFGy6uUg6zUdnNYvnVq5VG8PUtb2HayTDD", + "lastVote": 147, + "activatedStake": 42, + "votePubkey": "3ZT31jkAGhUaw8jsy4bTknwBMP8i4Eueh52By4zXcsVw" + } + ], + "delinquent": [] + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/index.mdx b/docs/rpc/http/index.mdx new file mode 100644 index 000000000..9182e8a89 --- /dev/null +++ b/docs/rpc/http/index.mdx @@ -0,0 +1,103 @@ +--- +title: Solana RPC HTTP Methods +seoTitle: Solana RPC HTTP Methods +sidebarLabel: HTTP Methods +sidebarSortOrder: 0 +hideTableOfContents: false +--- + +Solana nodes accept HTTP requests using the +[JSON-RPC 2.0](https://www.jsonrpc.org/specification) specification. + +> For JavaScript applications, use the +> [@solana/web3.js](https://github.com/solana-labs/solana-web3.js) library as a +> convenient interface for the RPC methods to interact with a Solana node. For +> an PubSub connection to a Solana node, use the +> [Websocket API](/docs/rpc/websocket/index.mdx). + +## RPC HTTP Endpoint + +Default port: `8899` + +- http://localhost:8899 +- http://192.168.1.88:8899 + +## Request Formatting + +To make a JSON-RPC request, send an HTTP POST request with a +`Content-Type: application/json` header. 
The JSON request data should contain 4 +fields: + +- `jsonrpc: ` - set to `"2.0"` +- `id: ` - a unique client-generated identifying integer +- `method: ` - a string containing the method to be invoked +- `params: ` - a JSON array of ordered parameter values + +Example using curl: + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getBalance", + "params": [ + "83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri" + ] + } +' +``` + +The response output will be a JSON object with the following fields: + +- `jsonrpc: ` - matching the request specification +- `id: ` - matching the request identifier +- `result: ` - requested data or success + confirmation + +Requests can be sent in batches by sending an array of JSON-RPC request objects +as the data for a single POST. + +### Example Request + +The commitment parameter should be included as the last element in the `params` +array: + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "getBalance", + "params": [ + "83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri", + { + "commitment": "finalized" + } + ] + } +' +``` + +## Definitions + +- Hash: A SHA-256 hash of a chunk of data. +- Pubkey: The public key of a Ed25519 key-pair. +- Transaction: A list of Solana instructions signed by a client keypair to + authorize those actions. +- Signature: An Ed25519 signature of transaction's payload data including + instructions. This can be used to identify transactions. + +## Health Check + +Although not a JSON RPC API, a `GET /health` at the RPC HTTP Endpoint provides a +health-check mechanism for use by load balancers or other network +infrastructure. This request will always return a HTTP 200 OK response with a +body of "ok", "behind" or "unknown": + +- `ok`: The node is within `HEALTH_CHECK_SLOT_DISTANCE` slots from the latest + cluster confirmed slot +- `behind { distance }`: The node is behind `distance` slots from the latest + cluster confirmed slot where `distance > HEALTH_CHECK_SLOT_DISTANCE` +- `unknown`: The node is unable to determine where it stands in relation to the + cluster diff --git a/docs/rpc/http/isBlockhashValid.mdx b/docs/rpc/http/isBlockhashValid.mdx new file mode 100644 index 000000000..da74a1562 --- /dev/null +++ b/docs/rpc/http/isBlockhashValid.mdx @@ -0,0 +1,84 @@ +--- +sidebarLabel: isBlockhashValid +title: isBlockhashValid RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/isBlockhashValid +--- + +Returns whether a blockhash is still valid or not + + + This method is only available in `solana-core` v1.9 or newer. Please use + [getFeeCalculatorForBlockhash](/docs/rpc/http/getFeeCalculatorForBlockhash) + for `solana-core` v1.8 and below. 
+ + + + + + +### Parameters + + + the blockhash of the block to evauluate, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + The minimum slot that the request can be evaluated at + + + + +### Result + +`` - `true` if the blockhash is still valid + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "id":45, + "jsonrpc":"2.0", + "method":"isBlockhashValid", + "params":[ + "J7rBdM6AecPDEZp8aPq5iPSNKVkU5Q76F3oAV4eW5wsW", + {"commitment":"processed"} + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 2483 + }, + "value": false + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/minimumLedgerSlot.mdx b/docs/rpc/http/minimumLedgerSlot.mdx new file mode 100644 index 000000000..86e234458 --- /dev/null +++ b/docs/rpc/http/minimumLedgerSlot.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: minimumLedgerSlot +title: minimumLedgerSlot RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/minimumLedgerSlot +--- + +Returns the lowest slot that the node has information about in its ledger. + + + This value may increase over time if the node is configured to purge older + ledger data + + + + + + +### Parameters + +**None** + +### Result + +`u64` - Minimum ledger slot number + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + {"jsonrpc":"2.0","id":1, "method":"minimumLedgerSlot"} +' +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 1234, "id": 1 } +``` + + + diff --git a/docs/rpc/http/requestAirdrop.mdx b/docs/rpc/http/requestAirdrop.mdx new file mode 100644 index 000000000..9c80219a0 --- /dev/null +++ b/docs/rpc/http/requestAirdrop.mdx @@ -0,0 +1,72 @@ +--- +sidebarLabel: requestAirdrop +title: requestAirdrop RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/requestAirdrop +--- + +Requests an airdrop of lamports to a Pubkey + + + + + +### Parameters + + + Pubkey of account to receive lamports, as a base-58 encoded string + + + + lamports to airdrop, as a "u64" + + + + +Configuration object containing the following fields: + + + + + +### Result + +`` - Transaction Signature of the airdrop, as a base-58 encoded string + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", "id": 1, + "method": "requestAirdrop", + "params": [ + "83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri", + 1000000000 + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": "5VERv8NMvzbJMEkV8xnrLkEaWRtSz9CosKDYjCJjBRnbJLgp8uirBgmQpjKhoR4tjF3ZpRzrFmBV6UjKdiSZkQUW", + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/sendTransaction.mdx b/docs/rpc/http/sendTransaction.mdx new file mode 100644 index 000000000..30dbecf80 --- /dev/null +++ b/docs/rpc/http/sendTransaction.mdx @@ -0,0 +1,126 @@ +--- +sidebarLabel: sendTransaction +title: sendTransaction RPC Method +sidebarSortOrder: 0 +hideTableOfContents: true +altRoutes: + - /docs/rpc/sendTransaction +--- + +Submits a signed transaction to the cluster for processing. + +This method does not alter the transaction in any way; it relays the transaction +created by clients to the node as-is. + +If the node's rpc service receives the transaction, this method immediately +succeeds, without waiting for any confirmations. A successful response from this +method does not guarantee the transaction is processed or confirmed by the +cluster. 
+ +While the rpc service will reasonably retry to submit it, the transaction could +be rejected if transaction's `recent_blockhash` expires before it lands. + +Use [`getSignatureStatuses`](#getsignaturestatuses) to ensure a transaction is +processed and confirmed. + +Before submitting, the following preflight checks are performed: + +1. The transaction signatures are verified +2. The transaction is simulated against the bank slot specified by the preflight + commitment. On failure an error will be returned. Preflight checks may be + disabled if desired. It is recommended to specify the same commitment and + preflight commitment to avoid confusing behavior. + +The returned signature is the first signature in the transaction, which is used +to identify the transaction +([transaction id](/docs/terminology.md#transaction-id)). This identifier can be +easily extracted from the transaction data before submission. + + + + + +### Parameters + + + Fully-signed Transaction, as encoded string. + + + + +Configuration object containing the following optional fields: + + + +Encoding used for the transaction data. + +Values: `base58` (_slow_, **DEPRECATED**), or `base64`. + + + + + when `true`, skip the preflight transaction checks + + + + Commitment level to use for preflight. + + + + Maximum number of times for the RPC node to retry sending the transaction to + the leader. If this parameter not provided, the RPC node will retry the + transaction until it is finalized or until the blockhash expires. + + + + set the minimum slot at which to perform preflight transaction checks + + + + +### Result + +`` - First Transaction Signature embedded in the transaction, as base-58 +encoded string ([transaction id](/docs/terminology.md#transaction-id)) + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "sendTransaction", + "params": [ + "4hXTCkRzt9WyecNzV1XPgCDfGAZzQKNxLXgynz5QDuWWPSAZBZSHptvWRL3BjCvzUXRdKvHL2b7yGrRQcWyaqsaBCncVG7BFggS8w9snUts67BSh3EqKpXLUm5UMHfD7ZBe9GhARjbNQMLJ1QD3Spr6oMTBU6EhdB4RD8CP2xUxr2u3d6fos36PD98XS6oX8TQjLpsMwncs5DAMiD4nNnR8NBfyghGCWvCVifVwvA8B8TJxE1aiyiv2L429BCWfyzAme5sZW8rDb14NeCQHhZbtNqfXhcp2tAnaAT" + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": "2id3YC2jK9G5Wo2phDx4gJVAew8DcY5NAojnVuao8rkxwPYPe8cSwE5GzhEgJA2y8fVjDEo6iR6ykBvDxrTQrtpb", + "id": 1 +} +``` + + + diff --git a/docs/rpc/http/simulateTransaction.mdx b/docs/rpc/http/simulateTransaction.mdx new file mode 100644 index 000000000..c58269be7 --- /dev/null +++ b/docs/rpc/http/simulateTransaction.mdx @@ -0,0 +1,187 @@ +--- +sidebarLabel: simulateTransaction +title: simulateTransaction RPC Method +sidebarSortOrder: 0 +hideTableOfContents: true +altRoutes: + - /docs/rpc/simulateTransaction +--- + +Simulate sending a transaction + + + + + +### Parameters + + + +Transaction, as an encoded string. + + + The transaction must have a valid blockhash, but is not required to be signed. + + + + + + +Configuration object containing the following fields: + + + Commitment level to simulate the transaction at + + + + if `true` the transaction signatures will be verified (conflicts with + `replaceRecentBlockhash`) + + + + if `true` the transaction recent blockhash will be replaced with the most + recent blockhash. (conflicts with `sigVerify`) + + + + the minimum slot that the request can be evaluated at + + + + +Encoding used for the transaction data. + +Values: `base58` (_slow_, **DEPRECATED**), or `base64`. 
+ + + + + +Accounts configuration object containing the following fields: + + + An `array` of accounts to return, as base-58 encoded strings + + + + +encoding for returned Account data + + + +
+ +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a + [parser cannot be found](https://github.com/solana-labs/solana/blob/cfd0a00ae2ba85a6d76757df8b4fa38ed242d185/account-decoder/src/parse_account_data.rs#L98-L100), + the field falls back to `base64` encoding, detectable when the returned + `accounts.data` field is type `string`. + +
+ +
+ +
+ +
+ +### Result + +The result will be an RpcResponse JSON object with `value` set to a JSON object +with the following fields: + +- `err: ` - Error if transaction failed, null if transaction + succeeded. + [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) +- `logs: ` - Array of log messages the transaction instructions + output during execution, null if simulation failed before the transaction was + able to execute (for example due to an invalid blockhash or signature + verification failure) +- `accounts: ` - array of accounts with the same length as the + `accounts.addresses` array in the request + - `` - if the account doesn't exist or if `err` is not null + - `` - otherwise, a JSON object containing: + - `lamports: ` - number of lamports assigned to this account, as a u64 + - `owner: ` - base-58 encoded Pubkey of the program this account has + been assigned to + - `data: <[string, encoding]|object>` - data associated with the account, + either as encoded binary data or JSON format `{: }` - + depending on encoding parameter + - `executable: ` - boolean indicating if the account contains a + program \(and is strictly read-only\) + - `rentEpoch: ` - the epoch at which this account will next owe rent, + as u64 +- `unitsConsumed: ` - The number of compute budget units consumed + during the processing of this transaction +- `returnData: ` - the most-recent return data generated by an + instruction in the transaction, with the following fields: + - `programId: ` - the program that generated the return data, as + base-58 encoded Pubkey + - `data: <[string, encoding]>` - the return data itself, as base-64 encoded + binary data + + + + + +### Code sample + +```bash +curl http://localhost:8899 -X POST -H "Content-Type: application/json" -d ' + { + "jsonrpc": "2.0", + "id": 1, + "method": "simulateTransaction", + "params": [ + "AQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAEDArczbMia1tLmq7zz4DinMNN0pJ1JtLdqIJPUw3YrGCzYAMHBsgN27lcgB6H2WQvFgyZuJYHa46puOQo9yQ8CVQbd9uHXZaGT2cvhRs7reawctIXtX1s3kTqM9YV+/wCp20C7Wj2aiuk5TReAXo+VTVg8QTHjs0UjNMMKCvpzZ+ABAgEBARU=", + { + "encoding":"base64", + } + ] + } +' +``` + +### Response + +```json +{ + "jsonrpc": "2.0", + "result": { + "context": { + "slot": 218 + }, + "value": { + "err": null, + "accounts": null, + "logs": [ + "Program 83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri invoke [1]", + "Program 83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri consumed 2366 of 1400000 compute units", + "Program return: 83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri KgAAAAAAAAA=", + "Program 83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri success" + ], + "returnData": { + "data": ["Kg==", "base64"], + "programId": "83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri" + }, + "unitsConsumed": 2366 + } + }, + "id": 1 +} +``` + + + diff --git a/docs/rpc/index.mdx b/docs/rpc/index.mdx new file mode 100644 index 000000000..4c9d9b604 --- /dev/null +++ b/docs/rpc/index.mdx @@ -0,0 +1,99 @@ +--- +title: Solana RPC Methods & Documentation +seoTitle: "Solana RPC Methods: HTTP & Websockets" +sidebarLabel: Solana RPC Methods +sidebarSortOrder: 0 +hideTableOfContents: false +--- + +Interact with Solana nodes directly with the JSON RPC API via the HTTP and +Websocket methods. + +## Configuring State Commitment + +For preflight checks and transaction processing, Solana nodes choose which bank +state to query based on a commitment requirement set by the client. 
The +commitment describes how finalized a block is at that point in time. When +querying the ledger state, it's recommended to use lower levels of commitment to +report progress and higher levels to ensure the state will not be rolled back. + +In descending order of commitment (most finalized to least finalized), clients +may specify: + +- `finalized` - the node will query the most recent block confirmed by + supermajority of the cluster as having reached maximum lockout, meaning the + cluster has recognized this block as finalized +- `confirmed` - the node will query the most recent block that has been voted on + by supermajority of the cluster. + - It incorporates votes from gossip and replay. + - It does not count votes on descendants of a block, only direct votes on that + block. + - This confirmation level also upholds "optimistic confirmation" guarantees in + release 1.3 and onwards. +- `processed` - the node will query its most recent block. Note that the block + may still be skipped by the cluster. + +For processing many dependent transactions in series, it's recommended to use +`confirmed` commitment, which balances speed with rollback safety. For total +safety, it's recommended to use `finalized` commitment. + +### Default Commitment + +If commitment configuration is not provided, the node will default to +`finalized` commitment + +Only methods that query bank state accept the commitment parameter. They are +indicated in the API Reference below. + +## RpcResponse Structure + +Many methods that take a commitment parameter return an RpcResponse JSON object +comprised of two parts: + +- `context` : An RpcResponseContext JSON structure including a `slot` field at + which the operation was evaluated. +- `value` : The value returned by the operation itself. + +## Parsed Responses + +Some methods support an `encoding` parameter, and can return account or +instruction data in parsed JSON format if `"encoding":"jsonParsed"` is requested +and the node has a parser for the owning program. Solana nodes currently support +JSON parsing for the following native and SPL programs: + +| Program | Account State | Instructions | +| ---------------------------- | ------------- | ------------ | +| Address Lookup | v1.15.0 | v1.15.0 | +| BPF Loader | n/a | stable | +| BPF Upgradeable Loader | stable | stable | +| Config | stable | | +| SPL Associated Token Account | n/a | stable | +| SPL Memo | n/a | stable | +| SPL Token | stable | stable | +| SPL Token 2022 | stable | stable | +| Stake | stable | stable | +| Vote | stable | stable | + +The list of account parsers can be found +[here](https://github.com/solana-labs/solana/blob/master/account-decoder/src/parse_account_data.rs), +and instruction parsers +[here](https://github.com/solana-labs/solana/blob/master/transaction-status/src/parse_instruction.rs). + +## Filter criteria + +Some methods support providing a `filters` object to enable pre-filtering the +data returned within the RpcResponse JSON object. The following filters exist: + +- `memcmp: object` - compares a provided series of bytes with program account + data at a particular offset. Fields: + + - `offset: usize` - offset into program account data to start comparison + - `bytes: string` - data to match, as encoded string + - `encoding: string` - encoding for filter `bytes` data, either "base58" or + "base64". Data is limited in size to 128 or fewer decoded bytes.
+ **NEW: This field, and base64 support generally, is only available in + solana-core v1.14.0 or newer. Please omit when querying nodes on earlier + versions** + +- `dataSize: u64` - compares the program account data length with the provided + data size diff --git a/docs/rpc/json-structures.mdx b/docs/rpc/json-structures.mdx new file mode 100644 index 000000000..0855dc189 --- /dev/null +++ b/docs/rpc/json-structures.mdx @@ -0,0 +1,110 @@ +--- +title: Common JSON Data Structures for Solana RPC Methods +sidebarLabel: Data Structures as JSON +sidebarSortOrder: -1 +hideTableOfContents: false +--- + +Various Solana RPC methods will return more complex responses as structured JSON +objects, filled with specific keyed values. + +The most common of these JSON data structures include: + +- [transactions](#transactions) +- [inner instructions](#inner-instructions) +- [token balances](#token-balances) + +## Transactions + +Transactions are quite different from those on other blockchains. Be sure to +review [Anatomy of a Transaction](/docs/core/transactions.md) to learn about +transactions on Solana. + +The JSON structure of a transaction is defined as follows: + +- `signatures: ` - A list of base-58 encoded signatures applied + to the transaction. The list is always of length + `message.header.numRequiredSignatures` and not empty. The signature at index + `i` corresponds to the public key at index `i` in `message.accountKeys`. The + first one is used as the + [transaction id](/docs/terminology.md#transaction-id). +- `message: ` - Defines the content of the transaction. + - `accountKeys: ` - List of base-58 encoded public keys used by + the transaction, including by the instructions and for signatures. The first + `message.header.numRequiredSignatures` public keys must sign the + transaction. + - `header: ` - Details the account types and signatures required by + the transaction. + - `numRequiredSignatures: ` - The total number of signatures + required to make the transaction valid. The signatures must match the + first `numRequiredSignatures` of `message.accountKeys`. + - `numReadonlySignedAccounts: ` - The last + `numReadonlySignedAccounts` of the signed keys are read-only accounts. + Programs may process multiple transactions that load read-only accounts + within a single PoH entry, but are not permitted to credit or debit + lamports or modify account data. Transactions targeting the same + read-write account are evaluated sequentially. + - `numReadonlyUnsignedAccounts: ` - The last + `numReadonlyUnsignedAccounts` of the unsigned keys are read-only accounts. + - `recentBlockhash: ` - A base-58 encoded hash of a recent block in + the ledger used to prevent transaction duplication and to give transactions + lifetimes. + - `instructions: ` - List of program instructions that will be + executed in sequence and committed in one atomic transaction if all succeed. + - `programIdIndex: ` - Index into the `message.accountKeys` array + indicating the program account that executes this instruction. + - `accounts: ` - List of ordered indices into the + `message.accountKeys` array indicating which accounts to pass to the + program. + - `data: ` - The program input data encoded in a base-58 string. + - `addressTableLookups: ` - List of address table + lookups used by a transaction to dynamically load addresses from on-chain + address lookup tables. Undefined if `maxSupportedTransactionVersion` is not + set. + - `accountKey: ` - base-58 encoded public key for an address lookup + table account. 
+ - `writableIndexes: ` - List of indices used to load + addresses of writable accounts from a lookup table. + - `readonlyIndexes: ` - List of indices used to load + addresses of readonly accounts from a lookup table. + +## Inner Instructions + +The Solana runtime records the cross-program instructions that are invoked +during transaction processing and makes these available for greater transparency +of what was executed on-chain per transaction instruction. Invoked instructions +are grouped by the originating transaction instruction and are listed in order +of processing. + +The JSON structure of inner instructions is defined as a list of objects in the +following structure: + +- `index: number` - Index of the transaction instruction from which the inner + instruction(s) originated +- `instructions: ` - Ordered list of inner program instructions + that were invoked during a single transaction instruction. + - `programIdIndex: ` - Index into the `message.accountKeys` array + indicating the program account that executes this instruction. + - `accounts: ` - List of ordered indices into the + `message.accountKeys` array indicating which accounts to pass to the + program. + - `data: ` - The program input data encoded in a base-58 string. + +## Token Balances + +The JSON structure of token balances is defined as a list of objects in the +following structure: + +- `accountIndex: ` - Index of the account in which the token balance is + provided for. +- `mint: ` - Pubkey of the token's mint. +- `owner: ` - Pubkey of token balance's owner. +- `programId: ` - Pubkey of the Token program that owns the + account. +- `uiTokenAmount: ` - + - `amount: ` - Raw amount of tokens as a string, ignoring decimals. + - `decimals: ` - Number of decimals configured for token's mint. + - `uiAmount: ` - Token amount as a float, accounting for + decimals. **DEPRECATED** + - `uiAmountString: ` - Token amount as a string, accounting for + decimals. diff --git a/docs/rpc/websocket/accountSubscribe.mdx b/docs/rpc/websocket/accountSubscribe.mdx new file mode 100644 index 000000000..57bbc5057 --- /dev/null +++ b/docs/rpc/websocket/accountSubscribe.mdx @@ -0,0 +1,160 @@ +--- +sidebarLabel: accountSubscribe +title: accountSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/accountSubscribe +--- + +Subscribe to an account to receive notifications when the lamports or data for a +given account public key changes + + + + + +### Parameters + + + Account Pubkey, as base-58 encoded string + + + + +Configuration object containing the following fields: + + + + + +Encoding format for Account data + + + +
+ 
+
+- `base58` is slow.
+- `jsonParsed` encoding attempts to use program-specific state parsers to return
+  more human-readable and explicit account state data.
+- If `jsonParsed` is requested but a parser cannot be found, the field falls
+  back to binary encoding, detectable when the `data` field is type `string`.
+
+ +
+ +
+ +### Result + +`` - Subscription id \(needed to unsubscribe\) + +
+ + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "accountSubscribe", + "params": [ + "CM78CPUeXjn8o3yroDHxUtKsZZgoy4GPkPPXfouKNH12", + { + "encoding": "jsonParsed", + "commitment": "finalized" + } + ] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 23784, "id": 1 } +``` + + +
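+
+In JavaScript, [@solana/web3.js](https://github.com/solana-labs/solana-web3.js)
+wraps this subscription as `Connection.onAccountChange`. A minimal sketch
+(assuming a local node; the websocket endpoint is derived from the HTTP
+endpoint by the library, and the account address is reused from the example
+request above):
+
+```typescript
+import { Connection, PublicKey } from "@solana/web3.js";
+
+const connection = new Connection("http://localhost:8899", "finalized");
+
+// Account address reused from the example request above
+const account = new PublicKey("CM78CPUeXjn8o3yroDHxUtKsZZgoy4GPkPPXfouKNH12");
+
+// The returned id can later be passed to removeAccountChangeListener
+const subscriptionId = connection.onAccountChange(
+  account,
+  (accountInfo, context) => {
+    console.log(`Slot ${context.slot}: ${accountInfo.lamports} lamports`);
+  },
+);
+
+console.log("Subscription id:", subscriptionId);
+```
+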
+ +#### Notification Format: + +The notification format is the same as seen in the +[getAccountInfo](/docs/rpc/http/getAccountInfo) RPC HTTP method. + +Base58 encoding: + +```json +{ + "jsonrpc": "2.0", + "method": "accountNotification", + "params": { + "result": { + "context": { + "slot": 5199307 + }, + "value": { + "data": [ + "11116bv5nS2h3y12kD1yUKeMZvGcKLSjQgX6BeV7u1FrjeJcKfsHPXHRDEHrBesJhZyqnnq9qJeUuF7WHxiuLuL5twc38w2TXNLxnDbjmuR", + "base58" + ], + "executable": false, + "lamports": 33594, + "owner": "11111111111111111111111111111111", + "rentEpoch": 635, + "space": 80 + } + }, + "subscription": 23784 + } +} +``` + +Parsed-JSON encoding: + +```json +{ + "jsonrpc": "2.0", + "method": "accountNotification", + "params": { + "result": { + "context": { + "slot": 5199307 + }, + "value": { + "data": { + "program": "nonce", + "parsed": { + "type": "initialized", + "info": { + "authority": "Bbqg1M4YVVfbhEzwA9SpC9FhsaG83YMTYoR4a8oTDLX", + "blockhash": "LUaQTmM7WbMRiATdMMHaRGakPtCkc2GHtH57STKXs6k", + "feeCalculator": { + "lamportsPerSignature": 5000 + } + } + } + }, + "executable": false, + "lamports": 33594, + "owner": "11111111111111111111111111111111", + "rentEpoch": 635, + "space": 80 + } + }, + "subscription": 23784 + } +} +``` diff --git a/docs/rpc/websocket/accountUnsubscribe.mdx b/docs/rpc/websocket/accountUnsubscribe.mdx new file mode 100644 index 000000000..f46399941 --- /dev/null +++ b/docs/rpc/websocket/accountUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: accountUnsubscribe +title: accountUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/accountUnsubscribe +--- + +Unsubscribe from account change notifications + + + + + +### Parameters + + + id of the account Subscription to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "accountUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/blockSubscribe.mdx b/docs/rpc/websocket/blockSubscribe.mdx new file mode 100644 index 000000000..f40659ffa --- /dev/null +++ b/docs/rpc/websocket/blockSubscribe.mdx @@ -0,0 +1,386 @@ +--- +sidebarLabel: blockSubscribe +title: blockSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/blockSubscribe +--- + +Subscribe to receive notification anytime a new block is `confirmed` or +`finalized`. + + + This subscription is considered **unstable** and is only available if the + validator was started with the `--rpc-pubsub-enable-block-subscription` flag. + The format of this subscription may change in the future. + + + + + + +### Parameters + + + +filter criteria for the logs to receive results by account type; currently +supported: + + + `all` - include all transactions in block + + + + +A JSON object with the following field: + +- `mentionsAccountOrProgram: ` - return only transactions that mention + the provided public key (as base-58 encoded string). If no mentions in a given + block, then no notification will be sent. + + + + + + + +Configuration object containing the following fields: + + + +- `processed` is not supported. + + + + + +encoding format for each returned Transaction + + + +
+ +- `jsonParsed` attempts to use program-specific instruction parsers to return + more human-readable and explicit data in the + `transaction.message.instructions` list. +- If `jsonParsed` is requested but a parser cannot be found, the instruction + falls back to regular JSON encoding (`accounts`, `data`, and `programIdIndex` + fields). + +
+ +
+ + + +level of transaction detail to return + + + +
+ +- If `accounts` are requested, transaction details only include signatures and + an annotated list of accounts in each transaction. +- Transaction metadata is limited to only: fee, err, pre_balances, + post_balances, pre_token_balances, and post_token_balances. + +
+ +
+ + + +the max transaction version to return in responses. + +
+ +- If the requested block contains a transaction with a higher version, an error + will be returned. +- If this parameter is omitted, only legacy transactions will be returned, and a + block containing any versioned transaction will prompt the error. + +
+ +
+ + + whether to populate the `rewards` array. If parameter not provided, the + default includes rewards. + + +
+ +### Result + +`integer` - subscription id \(needed to unsubscribe\) + +
+ + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": "1", + "method": "blockSubscribe", + "params": ["all"] +} +``` + +```json +{ + "jsonrpc": "2.0", + "id": "1", + "method": "blockSubscribe", + "params": [ + { + "mentionsAccountOrProgram": "LieKvPRE8XeX3Y2xVNHjKlpAScD12lYySBVQ4HqoJ5op" + }, + { + "commitment": "confirmed", + "encoding": "base64", + "showRewards": true, + "transactionDetails": "full" + } + ] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + +
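+
+Because this subscription is unstable and gated behind a validator flag, the
+sketch below uses a raw websocket connection rather than a client library. It
+assumes the third-party [`ws`](https://www.npmjs.com/package/ws) npm package
+and reuses the request body from the second example above:
+
+```typescript
+import WebSocket from "ws";
+
+// Assumption: the validator was started with
+// --rpc-pubsub-enable-block-subscription and is reachable locally
+const ws = new WebSocket("ws://localhost:8900");
+
+ws.on("open", () => {
+  ws.send(
+    JSON.stringify({
+      jsonrpc: "2.0",
+      id: 1,
+      method: "blockSubscribe",
+      params: [
+        {
+          mentionsAccountOrProgram:
+            "LieKvPRE8XeX3Y2xVNHjKlpAScD12lYySBVQ4HqoJ5op",
+        },
+        {
+          commitment: "confirmed",
+          encoding: "base64",
+          showRewards: true,
+          transactionDetails: "full",
+        },
+      ],
+    }),
+  );
+});
+
+// The first message confirms the subscription id; subsequent messages are
+// blockNotification payloads in the format shown below
+ws.on("message", (data) => {
+  console.log(JSON.parse(data.toString()));
+});
+```
+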
+ +#### Notification Format: + +The notification will be an object with the following fields: + +- `slot: ` - The corresponding slot. +- `err: ` - Error if something went wrong publishing the + notification otherwise null. +- `block: ` - A block object as seen in the + [getBlock](/docs/rpc/http/getblock) RPC HTTP method. + +```json +{ + "jsonrpc": "2.0", + "method": "blockNotification", + "params": { + "result": { + "context": { + "slot": 112301554 + }, + "value": { + "slot": 112301554, + "block": { + "previousBlockhash": "GJp125YAN4ufCSUvZJVdCyWQJ7RPWMmwxoyUQySydZA", + "blockhash": "6ojMHjctdqfB55JDpEpqfHnP96fiaHEcvzEQ2NNcxzHP", + "parentSlot": 112301553, + "transactions": [ + { + "transaction": [ + "OpltwoUvWxYi1P2U8vbIdE/aPntjYo5Aa0VQ2JJyeJE2g9Vvxk8dDGgFMruYfDu8/IfUWb0REppTe7IpAuuLRgIBAAkWnj4KHRpEWWW7gvO1c0BHy06wZi2g7/DLqpEtkRsThAXIdBbhXCLvltw50ZnjDx2hzw74NVn49kmpYj2VZHQJoeJoYJqaKcvuxCi/2i4yywedcVNDWkM84Iuw+cEn9/ROCrXY4qBFI9dveEERQ1c4kdU46xjxj9Vi+QXkb2Kx45QFVkG4Y7HHsoS6WNUiw2m4ffnMNnOVdF9tJht7oeuEfDMuUEaO7l9JeUxppCvrGk3CP45saO51gkwVYEgKzhpKjCx3rgsYxNR81fY4hnUQXSbbc2Y55FkwgRBpVvQK7/+clR4Gjhd3L4y+OtPl7QF93Akg1LaU9wRMs5nvfDFlggqI9PqJl+IvVWrNRdBbPS8LIIhcwbRTkSbqlJQWxYg3Bo2CTVbw7rt1ZubuHWWp0mD/UJpLXGm2JprWTePNULzHu67sfqaWF99LwmwjTyYEkqkRt1T0Je5VzHgJs0N5jY4iIU9K3lMqvrKOIn/2zEMZ+ol2gdgjshx+sphIyhw65F3J/Dbzk04LLkK+CULmN571Y+hFlXF2ke0BIuUG6AUF+4214Cu7FXnqo3rkxEHDZAk0lRrAJ8X/Z+iwuwI5cgbd9uHXZaGT2cvhRs7reawctIXtX1s3kTqM9YV+/wCpDLAp8axcEkaQkLDKRoWxqp8XLNZSKial7Rk+ELAVVKWoWLRXRZ+OIggu0OzMExvVLE5VHqy71FNHq4gGitkiKYNFWSLIE4qGfdFLZXy/6hwS+wq9ewjikCpd//C9BcCL7Wl0iQdUslxNVCBZHnCoPYih9JXvGefOb9WWnjGy14sG9j70+RSVx6BlkFELWwFvIlWR/tHn3EhHAuL0inS2pwX7ZQTAU6gDVaoqbR2EiJ47cKoPycBNvHLoKxoY9AZaBjPl6q8SKQJSFyFd9n44opAgI6zMTjYF/8Ok4VpXEESp3QaoUyTI9sOJ6oFP6f4dwnvQelgXS+AEfAsHsKXxGAIUDQENAgMEBQAGBwgIDg8IBJCER3QXl1AVDBADCQoOAAQLERITDAjb7ugh3gOuTy==", + "base64" + ], + "meta": { + "err": null, + "status": { + "Ok": null + }, + "fee": 5000, + "preBalances": [ + 1758510880, 2067120, 1566000, 1461600, 2039280, 2039280, + 1900080, 1865280, 0, 3680844220, 2039280 + ], + "postBalances": [ + 1758505880, 2067120, 1566000, 1461600, 2039280, 2039280, + 1900080, 1865280, 0, 3680844220, 2039280 + ], + "innerInstructions": [ + { + "index": 0, + "instructions": [ + { + "programIdIndex": 13, + "accounts": [1, 15, 3, 4, 2, 14], + "data": "21TeLgZXNbtHXVBzCaiRmH" + }, + { + "programIdIndex": 14, + "accounts": [3, 4, 1], + "data": "6qfC8ic7Aq99" + }, + { + "programIdIndex": 13, + "accounts": [1, 15, 3, 5, 2, 14], + "data": "21TeLgZXNbsn4QEpaSEr3q" + }, + { + "programIdIndex": 14, + "accounts": [3, 5, 1], + "data": "6LC7BYyxhFRh" + } + ] + }, + { + "index": 1, + "instructions": [ + { + "programIdIndex": 14, + "accounts": [4, 3, 0], + "data": "7aUiLHFjSVdZ" + }, + { + "programIdIndex": 19, + "accounts": [17, 18, 16, 9, 11, 12, 14], + "data": "8kvZyjATKQWYxaKR1qD53V" + }, + { + "programIdIndex": 14, + "accounts": [9, 11, 18], + "data": "6qfC8ic7Aq99" + } + ] + } + ], + "logMessages": [ + "Program QMNeHCGYnLVDn1icRAfQZpjPLBNkfGbSKRB83G5d8KB invoke [1]", + "Program QMWoBmAyJLAsA1Lh9ugMTw2gciTihncciphzdNzdZYV invoke [2]" + ], + "preTokenBalances": [ + { + "accountIndex": 4, + "mint": "iouQcQBAiEXe6cKLS85zmZxUqaCqBdeHFpqKoSz615u", + "uiTokenAmount": { + "uiAmount": null, + "decimals": 6, + "amount": "0", + "uiAmountString": "0" + }, + "owner": "LieKvPRE8XeX3Y2xVNHjKlpAScD12lYySBVQ4HqoJ5op", + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + }, + { + "accountIndex": 5, + "mint": "iouQcQBAiEXe6cKLS85zmZxUqaCqBdeHFpqKoSz615u", + 
"uiTokenAmount": { + "uiAmount": 11513.0679, + "decimals": 6, + "amount": "11513067900", + "uiAmountString": "11513.0679" + }, + "owner": "rXhAofQCT7NN9TUqigyEAUzV1uLL4boeD8CRkNBSkYk", + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + }, + { + "accountIndex": 10, + "mint": "Saber2gLauYim4Mvftnrasomsv6NvAuncvMEZwcLpD1", + "uiTokenAmount": { + "uiAmount": null, + "decimals": 6, + "amount": "0", + "uiAmountString": "0" + }, + "owner": "CL9wkGFT3SZRRNa9dgaovuRV7jrVVigBUZ6DjcgySsCU", + "programId": "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + }, + { + "accountIndex": 11, + "mint": "Saber2gLauYim4Mvftnrasomsv6NvAuncvMEZwcLpD1", + "uiTokenAmount": { + "uiAmount": 15138.514093, + "decimals": 6, + "amount": "15138514093", + "uiAmountString": "15138.514093" + }, + "owner": "LieKvPRE8XeX3Y2xVNHjKlpAScD12lYySBVQ4HqoJ5op", + "programId": "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + } + ], + "postTokenBalances": [ + { + "accountIndex": 4, + "mint": "iouQcQBAiEXe6cKLS85zmZxUqaCqBdeHFpqKoSz615u", + "uiTokenAmount": { + "uiAmount": null, + "decimals": 6, + "amount": "0", + "uiAmountString": "0" + }, + "owner": "LieKvPRE8XeX3Y2xVNHjKlpAScD12lYySBVQ4HqoJ5op", + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + }, + { + "accountIndex": 5, + "mint": "iouQcQBAiEXe6cKLS85zmZxUqaCqBdeHFpqKoSz615u", + "uiTokenAmount": { + "uiAmount": 11513.103028, + "decimals": 6, + "amount": "11513103028", + "uiAmountString": "11513.103028" + }, + "owner": "rXhAofQCT7NN9TUqigyEAUzV1uLL4boeD8CRkNBSkYk", + "programId": "TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA" + }, + { + "accountIndex": 10, + "mint": "Saber2gLauYim4Mvftnrasomsv6NvAuncvMEZwcLpD1", + "uiTokenAmount": { + "uiAmount": null, + "decimals": 6, + "amount": "0", + "uiAmountString": "0" + }, + "owner": "CL9wkGFT3SZRRNa9dgaovuRV7jrVVigBUZ6DjcgySsCU", + "programId": "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + }, + { + "accountIndex": 11, + "mint": "Saber2gLauYim4Mvftnrasomsv6NvAuncvMEZwcLpD1", + "uiTokenAmount": { + "uiAmount": 15489.767829, + "decimals": 6, + "amount": "15489767829", + "uiAmountString": "15489.767829" + }, + "owner": "BeiHVPRE8XeX3Y2xVNrSsTpAScH94nYySBVQ4HqgN9at", + "programId": "TokenzQdBNbLqP5VEhdkAS6EPFLC1PHnBqCXEpPxuEb" + } + ], + "rewards": [] + } + } + ], + "blockTime": 1639926816, + "blockHeight": 101210751 + }, + "err": null + } + }, + "subscription": 14 + } +} +``` diff --git a/docs/rpc/websocket/blockUnsubscribe.mdx b/docs/rpc/websocket/blockUnsubscribe.mdx new file mode 100644 index 000000000..a1dd8b893 --- /dev/null +++ b/docs/rpc/websocket/blockUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: blockUnsubscribe +title: blockUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/blockUnsubscribe +--- + +Unsubscribe from block notifications + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "blockUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/index.mdx b/docs/rpc/websocket/index.mdx new file mode 100644 index 000000000..bfee36c14 --- /dev/null +++ b/docs/rpc/websocket/index.mdx @@ -0,0 +1,23 @@ +--- +title: Solana RPC Websocket Methods +seoTitle: Solana RPC Websocket Methods +sidebarLabel: Websocket Methods +sidebarSortOrder: 2 +hideTableOfContents: false +--- + +After connecting to the RPC PubSub websocket at `ws://
/`: + +- Submit subscription requests to the websocket using the methods below +- Multiple subscriptions may be active at once +- Many subscriptions take the optional + [`commitment` parameter](/docs/rpc/index.mdx#configuring-state-commitment), + defining how finalized a change should be to trigger a notification. For + subscriptions, if commitment is unspecified, the default value is `finalized`. + +## RPC PubSub WebSocket Endpoint + +Default port: `8900` + +- ws://localhost:8900 +- http://192.168.1.88:8900 diff --git a/docs/rpc/websocket/logsSubscribe.mdx b/docs/rpc/websocket/logsSubscribe.mdx new file mode 100644 index 000000000..77e439e5e --- /dev/null +++ b/docs/rpc/websocket/logsSubscribe.mdx @@ -0,0 +1,138 @@ +--- +sidebarLabel: logsSubscribe +title: logsSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/logsSubscribe +--- + +Subscribe to transaction logging + + + + + +### Parameters + + + filter criteria for the logs to receive results by account type. The following filters types are currently supported: + + + +A string with one of the following values: + +- `all` - subscribe to all transactions except for simple vote transactions +- `allWithVotes` - subscribe to all transactions, including simple vote + transactions + + + + + +An object with the following field: + +- `mentions: [ ]` - array containing a single Pubkey (as base-58 + encoded string); if present, subscribe to only transactions mentioning this + address + + + The `mentions` field currently [only supports + one](https://github.com/solana-labs/solana/blob/master/rpc/src/rpc_pubsub.rs#L481) + Pubkey string per method call. Listing additional addresses will result in an + error. + + + + + + + + +Configuration object containing the following fields: + + + + + +### Result + +`` - Subscription id \(needed to unsubscribe\) + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "logsSubscribe", + "params": [ + { + "mentions": [ "11111111111111111111111111111111" ] + }, + { + "commitment": "finalized" + } + ] +} +{ + "jsonrpc": "2.0", + "id": 1, + "method": "logsSubscribe", + "params": [ "all" ] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 24040, "id": 1 } +``` + + + + +#### Notification Format: + +The notification will be an RpcResponse JSON object with value equal to: + +- `signature: ` - The transaction signature base58 encoded. +- `err: ` - Error if transaction failed, null if transaction + succeeded. 
+ [TransactionError definitions](https://github.com/solana-labs/solana/blob/c0c60386544ec9a9ec7119229f37386d9f070523/sdk/src/transaction/error.rs#L13) +- `logs: ` - Array of log messages the transaction instructions + output during execution, null if simulation failed before the transaction was + able to execute (for example due to an invalid blockhash or signature + verification failure) + +Example: + +```json +{ + "jsonrpc": "2.0", + "method": "logsNotification", + "params": { + "result": { + "context": { + "slot": 5208469 + }, + "value": { + "signature": "5h6xBEauJ3PK6SWCZ1PGjBvj8vDdWG3KpwATGy1ARAXFSDwt8GFXM7W5Ncn16wmqokgpiKRLuS83KUxyZyv2sUYv", + "err": null, + "logs": [ + "SBF program 83astBRguLMdt2h5U1Tpdq5tjFoJ6noeGwaY3mDLVcri success" + ] + } + }, + "subscription": 24040 + } +} +``` diff --git a/docs/rpc/websocket/logsUnsubscribe.mdx b/docs/rpc/websocket/logsUnsubscribe.mdx new file mode 100644 index 000000000..a98d9da0f --- /dev/null +++ b/docs/rpc/websocket/logsUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: logsUnsubscribe +title: logsUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/logsUnsubscribe +--- + +Unsubscribe from transaction logging + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "logsUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/programSubscribe.mdx b/docs/rpc/websocket/programSubscribe.mdx new file mode 100644 index 000000000..f5847112d --- /dev/null +++ b/docs/rpc/websocket/programSubscribe.mdx @@ -0,0 +1,211 @@ +--- +sidebarLabel: programSubscribe +title: programSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/programSubscribe +--- + +Subscribe to a program to receive notifications when the lamports or data for an +account owned by the given program changes + + + + + +### Parameters + + + +Pubkey of the `program_id`, as base-58 encoded string + + + + + +Configuration object containing the following fields: + + + + + +filter results using various filter objects + + + The resultant account must meet **ALL** filter criteria to be included in the + returned results + + + + + + +Encoding format for Account data + + + +
+ +- `base58` encoding is slow and limited to less than 129 bytes of Account data. +- `jsonParsed` encoding attempts to use program-specific state parsers to return + more human-readable and explicit account state data. +- If `jsonParsed` is requested but a parser cannot be found, the field falls + back to `base64` encoding, detectable when the `data` field is type `string`. +
+ +
+ +
+ +### Result + +`<integer>` - Subscription id (needed to unsubscribe) +
+ + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "programSubscribe", + "params": [ + "11111111111111111111111111111111", + { + "encoding": "base64", + "commitment": "finalized" + } + ] +} +{ + "jsonrpc": "2.0", + "id": 1, + "method": "programSubscribe", + "params": [ + "11111111111111111111111111111111", + { + "encoding": "jsonParsed" + } + ] +} +{ + "jsonrpc": "2.0", + "id": 1, + "method": "programSubscribe", + "params": [ + "11111111111111111111111111111111", + { + "encoding": "base64", + "filters": [ + { + "dataSize": 80 + } + ] + } + ] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 24040, "id": 1 } +``` + + +
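+
+In practice, a client library usually wraps this subscription. As a rough
+sketch (assuming `@solana/web3.js` is installed and an RPC node is running at
+`http://localhost:8899`; the library derives the websocket endpoint and handles
+the JSON-RPC framing for you):
+
+```typescript
+// Sketch: subscribe to System Program account changes with a dataSize filter,
+// mirroring the third request in the code sample above.
+import { Connection, PublicKey } from "@solana/web3.js";
+
+const connection = new Connection("http://localhost:8899", "confirmed");
+const programId = new PublicKey("11111111111111111111111111111111");
+
+const subscriptionId = connection.onProgramAccountChange(
+  programId,
+  (keyedAccountInfo, context) => {
+    console.log("slot:", context.slot);
+    console.log("pubkey:", keyedAccountInfo.accountId.toBase58());
+    console.log("lamports:", keyedAccountInfo.accountInfo.lamports);
+  },
+  "confirmed",
+  [{ dataSize: 80 }],
+);
+console.log("subscription id:", subscriptionId);
+
+// Later, pass the id back to cancel the subscription (programUnsubscribe
+// under the hood):
+// await connection.removeProgramAccountChangeListener(subscriptionId);
+```
+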
+ +#### Notification format + +The notification format is a single program account object as seen in the +[getProgramAccounts](/docs/rpc/http/getprogramaccounts) RPC HTTP method. + +Base58 encoding: + +```json +{ + "jsonrpc": "2.0", + "method": "programNotification", + "params": { + "result": { + "context": { + "slot": 5208469 + }, + "value": { + "pubkey": "H4vnBqifaSACnKa7acsxstsY1iV1bvJNxsCY7enrd1hq", + "account": { + "data": [ + "11116bv5nS2h3y12kD1yUKeMZvGcKLSjQgX6BeV7u1FrjeJcKfsHPXHRDEHrBesJhZyqnnq9qJeUuF7WHxiuLuL5twc38w2TXNLxnDbjmuR", + "base58" + ], + "executable": false, + "lamports": 33594, + "owner": "11111111111111111111111111111111", + "rentEpoch": 636, + "space": 80 + } + } + }, + "subscription": 24040 + } +} +``` + +Parsed-JSON encoding: + +```json +{ + "jsonrpc": "2.0", + "method": "programNotification", + "params": { + "result": { + "context": { + "slot": 5208469 + }, + "value": { + "pubkey": "H4vnBqifaSACnKa7acsxstsY1iV1bvJNxsCY7enrd1hq", + "account": { + "data": { + "program": "nonce", + "parsed": { + "type": "initialized", + "info": { + "authority": "Bbqg1M4YVVfbhEzwA9SpC9FhsaG83YMTYoR4a8oTDLX", + "blockhash": "LUaQTmM7WbMRiATdMMHaRGakPtCkc2GHtH57STKXs6k", + "feeCalculator": { + "lamportsPerSignature": 5000 + } + } + } + }, + "executable": false, + "lamports": 33594, + "owner": "11111111111111111111111111111111", + "rentEpoch": 636, + "space": 80 + } + } + }, + "subscription": 24040 + } +} +``` diff --git a/docs/rpc/websocket/programUnsubscribe.mdx b/docs/rpc/websocket/programUnsubscribe.mdx new file mode 100644 index 000000000..e548ddf88 --- /dev/null +++ b/docs/rpc/websocket/programUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: programUnsubscribe +title: programUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/programUnsubscribe +--- + +Unsubscribe from program-owned account change notifications + + + + + +### Parameters + + + id of account Subscription to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "programUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/rootSubscribe.mdx b/docs/rpc/websocket/rootSubscribe.mdx new file mode 100644 index 000000000..16bf8b689 --- /dev/null +++ b/docs/rpc/websocket/rootSubscribe.mdx @@ -0,0 +1,55 @@ +--- +sidebarLabel: rootSubscribe +title: rootSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/rootSubscribe +--- + +Subscribe to receive notification anytime a new root is set by the validator. + + + + + +### Parameters + +**None** + +### Result + +`integer` - subscription id \(needed to unsubscribe\) + + + + + +### Code sample + +```json +{ "jsonrpc": "2.0", "id": 1, "method": "rootSubscribe" } +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + + + +#### Notification Format: + +The result is the latest root slot number. 
+ +```json +{ + "jsonrpc": "2.0", + "method": "rootNotification", + "params": { + "result": 42, + "subscription": 0 + } +} +``` diff --git a/docs/rpc/websocket/rootUnsubscribe.mdx b/docs/rpc/websocket/rootUnsubscribe.mdx new file mode 100644 index 000000000..4f8068929 --- /dev/null +++ b/docs/rpc/websocket/rootUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: rootUnsubscribe +title: rootUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/rootUnsubscribe +--- + +Unsubscribe from root notifications + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "rootUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/signatureSubscribe.mdx b/docs/rpc/websocket/signatureSubscribe.mdx new file mode 100644 index 000000000..3ad3f9315 --- /dev/null +++ b/docs/rpc/websocket/signatureSubscribe.mdx @@ -0,0 +1,149 @@ +--- +sidebarLabel: signatureSubscribe +title: signatureSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/signatureSubscribe +--- + +Subscribe to receive a notification when the transaction with the given +signature reaches the specified commitment level. + + + This is a subscription to a single notification. It is automatically cancelled + by the server once the notification, `signatureNotification`, is sent by the + RPC. + + + + + + +### Parameters + + + +transaction signature, as base-58 encoded string + + + The transaction signature must be the first signature from the transaction + (see [transaction id](/docs/terminology.md#transaction-id) for more details). + + + + + + +Configuration object containing the following fields: + + + + + +Whether or not to subscribe for notifications when signatures are received by +the RPC, in addition to when they are processed. + + + + + +### Result + +`` - subscription id (needed to unsubscribe) + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "signatureSubscribe", + "params": [ + "2EBVM6cB8vAAD93Ktr6Vd8p67XPbQzCJX47MpReuiCXJAtcjaxpvWpcg9Ege1Nr5Tk3a2GFrByT7WPBjdsTycY9b", + { + "commitment": "finalized", + "enableReceivedNotification": false + } + ] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + + + +#### Notification Format: + +The notification will be an RpcResponse JSON object with value containing an +object with: + +- `slot: ` - The corresponding slot. 
+- `value: ` - a notification value of + [`RpcSignatureResult`](https://github.com/solana-labs/solana/blob/6d28fd455b07e3557fc6c0c3ddf3ba03e3fe8482/rpc-client-api/src/response.rs#L265-L268), + resulting in either: + - when `enableReceivedNotification` is `true` and the signature is received: + the literal string + [`"receivedSignature"`](https://github.com/solana-labs/solana/blob/6d28fd455b07e3557fc6c0c3ddf3ba03e3fe8482/rpc-client-api/src/response.rs#L286-L288), + or + - when the signature is processed: `err: `: + - `null` if the transaction succeeded in being processed at the specified + commitment level, or + - a + [`TransactionError`](https://github.com/solana-labs/solana/blob/6d28fd455b07e3557fc6c0c3ddf3ba03e3fe8482/sdk/src/transaction/error.rs#L15-L164), + if the transaction failed + +#### Example responses: + +The following is an example response of a notification from a successfully +**processed** transactions: + +```json +{ + "jsonrpc": "2.0", + "method": "signatureNotification", + "params": { + "result": { + "context": { + "slot": 5207624 + }, + "value": { + "err": null + } + }, + "subscription": 24006 + } +} +``` + +The following is an example response of a notification from a successfully +**recieved** transaction signature: + +```json +{ + "jsonrpc": "2.0", + "method": "signatureNotification", + "params": { + "result": { + "context": { + "slot": 5207624 + }, + "value": "receivedSignature" + }, + "subscription": 24006 + } +} +``` diff --git a/docs/rpc/websocket/signatureUnsubscribe.mdx b/docs/rpc/websocket/signatureUnsubscribe.mdx new file mode 100644 index 000000000..f34ced2dd --- /dev/null +++ b/docs/rpc/websocket/signatureUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: signatureUnsubscribe +title: signatureUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/signatureUnsubscribe +--- + +Unsubscribe from signature confirmation notification + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "signatureUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/slotSubscribe.mdx b/docs/rpc/websocket/slotSubscribe.mdx new file mode 100644 index 000000000..e90dff657 --- /dev/null +++ b/docs/rpc/websocket/slotSubscribe.mdx @@ -0,0 +1,65 @@ +--- +sidebarLabel: slotSubscribe +title: slotSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/slotSubscribe +--- + +Subscribe to receive notification anytime a slot is processed by the validator + + + + + +### Parameters + +**None** + +### Result + +`` - Subscription id \(needed to unsubscribe\) + + + + + +### Code sample + +```json +{ "jsonrpc": "2.0", "id": 1, "method": "slotSubscribe" } +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + + + +#### Notification Format: + +The notification will be an object with the following fields: + +- `parent: ` - The parent slot +- `root: ` - The current root slot +- `slot: ` - The newly set slot value + +Example: + +```json +{ + "jsonrpc": "2.0", + "method": "slotNotification", + "params": { + "result": { + "parent": 75, + "root": 44, + "slot": 76 + }, + "subscription": 0 + } +} +``` diff --git a/docs/rpc/websocket/slotUnsubscribe.mdx b/docs/rpc/websocket/slotUnsubscribe.mdx new file mode 100644 index 000000000..450afc276 --- /dev/null +++ b/docs/rpc/websocket/slotUnsubscribe.mdx @@ -0,0 
+1,47 @@ +--- +sidebarLabel: slotUnsubscribe +title: slotUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/slotUnsubscribe +--- + +Unsubscribe from slot notifications + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "slotUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/slotsUpdatesSubscribe.mdx b/docs/rpc/websocket/slotsUpdatesSubscribe.mdx new file mode 100644 index 000000000..086aa1d48 --- /dev/null +++ b/docs/rpc/websocket/slotsUpdatesSubscribe.mdx @@ -0,0 +1,87 @@ +--- +sidebarLabel: slotsUpdatesSubscribe +title: slotsUpdatesSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/slotsUpdatesSubscribe +--- + +Subscribe to receive a notification from the validator on a variety of updates +on every slot + + + This subscription is unstable. The format of this subscription may change in + the future, and may not always be supported. + + + + + + +### Parameters + +**None** + +### Result + +`` - Subscription id (needed to unsubscribe) + + + + + +### Code sample + +```json +{ "jsonrpc": "2.0", "id": 1, "method": "slotsUpdatesSubscribe" } +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + + + +### Notification Format + +The notification will be an object with the following fields: + +- `err: ` - The error message. Only present if the update is + of type "dead". +- `parent: ` - The parent slot. Only present if the update is of + type "createdBank". +- `slot: ` - The newly updated slot +- `stats: ` - The error message. Only present if the update is + of type "frozen". 
An object with the following fields: + - `maxTransactionsPerEntry: `, + - `numFailedTransactions: `, + - `numSuccessfulTransactions: `, + - `numTransactionEntries: `, +- `timestamp: ` - The Unix timestamp of the update +- `type: ` - The update type, one of: + - "firstShredReceived" + - "completed" + - "createdBank" + - "frozen" + - "dead" + - "optimisticConfirmation" + - "root" + +```bash +{ + "jsonrpc": "2.0", + "method": "slotsUpdatesNotification", + "params": { + "result": { + "parent": 75, + "slot": 76, + "timestamp": 1625081266243, + "type": "optimisticConfirmation" + }, + "subscription": 0 + } +} +``` diff --git a/docs/rpc/websocket/slotsUpdatesUnsubscribe.mdx b/docs/rpc/websocket/slotsUpdatesUnsubscribe.mdx new file mode 100644 index 000000000..61f25e196 --- /dev/null +++ b/docs/rpc/websocket/slotsUpdatesUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: slotsUpdatesUnsubscribe +title: slotsUpdatesUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/slotsUpdatesUnsubscribe +--- + +Unsubscribe from slot-update notifications + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "slotsUpdatesUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/rpc/websocket/voteSubscribe.mdx b/docs/rpc/websocket/voteSubscribe.mdx new file mode 100644 index 000000000..77384856a --- /dev/null +++ b/docs/rpc/websocket/voteSubscribe.mdx @@ -0,0 +1,75 @@ +--- +sidebarLabel: voteSubscribe +title: voteSubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/voteSubscribe +--- + +Subscribe to receive notification anytime a new vote is observed in gossip. +These votes are pre-consensus therefore there is no guarantee these votes will +enter the ledger. + + + This subscription is unstable and only available if the validator was started + with the `--rpc-pubsub-enable-vote-subscription` flag. The format of this + subscription may change in the future. 
+ + + + + + +### Parameters + +**None** + +### Result + +`` - subscription id (needed to unsubscribe) + + + + + +### Code sample + +```json +{ "jsonrpc": "2.0", "id": 1, "method": "voteSubscribe" } +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": 0, "id": 1 } +``` + + + + +#### Notification Format: + +The notification will be an object with the following fields: + +- `hash: ` - The vote hash +- `slots: ` - The slots covered by the vote, as an array of u64 integers +- `timestamp: ` - The timestamp of the vote +- `signature: ` - The signature of the transaction that contained this + vote +- `votePubkey: ` - The public key of the vote account, as base-58 + encoded string + +```json +{ + "jsonrpc": "2.0", + "method": "voteNotification", + "params": { + "result": { + "hash": "8Rshv2oMkPu5E4opXTRyuyBeZBqQ4S477VG26wUTFxUM", + "slots": [1, 2], + "timestamp": null + }, + "subscription": 0 + } +} +``` diff --git a/docs/rpc/websocket/voteUnsubscribe.mdx b/docs/rpc/websocket/voteUnsubscribe.mdx new file mode 100644 index 000000000..abcfe1354 --- /dev/null +++ b/docs/rpc/websocket/voteUnsubscribe.mdx @@ -0,0 +1,47 @@ +--- +sidebarLabel: voteUnsubscribe +title: voteUnsubscribe RPC Method +hideTableOfContents: true +altRoutes: + - /docs/rpc/voteUnsubscribe +--- + +Unsubscribe from vote notifications + + + + + +### Parameters + + + subscription id to cancel + + +### Result + +`` - unsubscribe success message + + + + + +### Code sample + +```json +{ + "jsonrpc": "2.0", + "id": 1, + "method": "voteUnsubscribe", + "params": [0] +} +``` + +### Response + +```json +{ "jsonrpc": "2.0", "result": true, "id": 1 } +``` + + + diff --git a/docs/terminology.md b/docs/terminology.md index c57cc65e9..fdbfdcab5 100644 --- a/docs/terminology.md +++ b/docs/terminology.md @@ -38,29 +38,12 @@ capable of modifying the account. A front-end application that interacts with a Solana cluster. -## associated token account - -An account that stores tokens belonging to a particular `mint`. When sending -tokens, the sender can create associated token accounts for the recipient on -demand, so the recipient can recieve the token in the receipent's associated -token account. - ## bank state The result of interpreting all programs on the ledger at a given [tick height](#tick-height). It includes at least the set of all [accounts](#account) holding nonzero [native tokens](#native-token). -## base58 - -A way of representing binary values as strings. Unlike similar systems like -base64, base58 omits similar-appearing characters to avoid confusion. Solana -commonly uses base58 to refer to [wallet addresses](#wallet-address). - -## basis point - -A hundredth of one percent, or %0.01. Sometimes pronounced as 'bip'. Solana uses basis points to set royalties on [NFTs](#nft). - ## block A contiguous set of [entries](#entry) on the ledger covered by a @@ -82,15 +65,11 @@ the [genesis block](#genesis-block) has height one. The [validator](#validator) that produces the genesis (first) [block](#block) of a block chain. -## bip - -A pronunciation of [basis points](#basis-point). - ## BPF loader The Solana program that owns and loads -[BPF](developing/on-chain-programs/faq#berkeley-packet-filter-bpf) on-chain -programs, allowing the program to interface with the runtime. +[BPF](/docs/programs/faq.md#berkeley-packet-filter-bpf) smart contract programs, +allowing the program to interface with the runtime. ## client @@ -132,7 +111,7 @@ A gossip network connecting all [nodes](#node) of a [cluster](#cluster). 
Some number of [epochs](#epoch) after [stake](#stake) has been deactivated while it progressively becomes available for withdrawal. During this period, the stake is considered to be "deactivating". More info about: -[warmup and cooldown](implemented-proposals/staking-rewards.md#stake-warmup-cooldown-withdrawal) +[warmup and cooldown](https://docs.solanalabs.com/implemented-proposals/staking-rewards#stake-warmup-cooldown-withdrawal) ## credit @@ -140,9 +119,8 @@ See [vote credit](#vote-credit). ## cross-program invocation (CPI) -A call from one on-chain [program](@program) to another. For more information, -see -[calling between programs](developing/programming-model/calling-between-programs.md). +A call from one smart contract program to another. For more information, see +[calling between programs](/docs/core/cpi.md). ## data plane @@ -151,7 +129,7 @@ consensus. ## drone -An off-chain service that acts as a custodian for a user's secret key. It +An off-chain service that acts as a custodian for a user's private key. It typically serves to validate and sign transactions. ## entry @@ -223,13 +201,9 @@ program. A [client](#client) can include one or multiple instructions in a [transaction](#transaction). An instruction may contain one or more [cross-program invocations](#cross-program-invocation-cpi). -## ix - -Short for [instruction](#instruction). - ## keypair -A [public key](#public-key-pubkey) and corresponding [secret key](#private-key) +A [public key](#public-key-pubkey) and corresponding [private key](#private-key) for accessing an account. ## lamport @@ -237,9 +211,9 @@ for accessing an account. A fractional [native token](#native-token) with the value of 0.000000001 [sol](#sol). -:::info Within the compute budget, a quantity of -_[micro-lamports](https://github.com/solana-labs/solana/blob/ced8f6a512c61e0dd5308095ae8457add4a39e94/program-runtime/src/prioritization_fee.rs#L1-L2)_ -is used in the calculation of [prioritization fees](#prioritization-fee). ::: +> Within the compute budget, a quantity of +> _[micro-lamports](https://github.com/solana-labs/solana/blob/ced8f6a512c61e0dd5308095ae8457add4a39e94/program-runtime/src/prioritization_fee.rs#L1-L2)_ +> is used in the calculation of [prioritization fees](#prioritization-fee). ## leader @@ -284,15 +258,6 @@ on-chain programs. The duration of time for which a [validator](#validator) is unable to [vote](#ledger-vote) on another [fork](#fork). -## major unit - -The 'main' unit of a currency. For example, among fiat currencies, the dollar -(for USD), pound (for GBP) and Euro (for EUR) are major units. Solana's SOL -token has a major unit called the [Sol](#sol). Each Sol is worth 1 billion of -[lamports](#lamport). Since computers aren't good at decimal maths, transactions -usually use [minor-units](#minor-unit), converting to major units only for user -display. - ## message The structured contents of a [transaction](#transaction). Generally containing a @@ -300,35 +265,9 @@ header, array of account addresses, recent [blockhash](#blockhash), and an array of [instructions](#instruction). Learn more about the -[message formatting inside of transactions](./developing/programming-model/transactions.md#message-format) +[message formatting inside of transactions](/docs/core/transactions.md#message-format) here. -## memo - -The [memo program](https://spl.solana.com/memo) provides a way to write text to -the blockchain. 
The Memo program is often added as an instruction to transfer -transactions as a text description by the sender of what the transfer is for. -For example, 'Thanks!❤️ '. - -## minor unit - -The lesser unit of a currency. Well known minor units include US cents (for -USD), pence (for GBP) and Eurocents (for EUR). Solana's Sol -[cryptocurrency](#cryptocurrency) has a minor unit called the -[Lamport](#lamport). - -## mint - -Verb. Minting tokens increases the supply of the tokens and transfers the new -tokens to a specific [token account](#token-account). - -## mint account - -An [account](#account) used to [mint](#mint) tokens to store in a separate -[token account](#token-account). Each token mint is unique for that network. For -example, [USDC on Solana mainnet](https://www.circle.com/en/usdc/developers) has -the mint address `EPjFWdd5AufqSSqeM2qN1xzybapC8G4wEGGkZwyTDt1v`. - ## native token The [token](#token) used to track work done by [nodes](#node) in a cluster. @@ -341,10 +280,9 @@ A computer participating in a [cluster](#cluster). The number of [validators](#validator) participating in a [cluster](#cluster). -## on-chain program +## onchain program -A programthat runs on a blockchain that can read and modify accounts over which -the on-chain program has control. +A program on a blockchain that can read and modify accounts over which it has control. ## PoH @@ -353,27 +291,25 @@ See [Proof of History](#proof-of-history-poh). ## point A weighted [credit](#credit) in a rewards regime. In the [validator](#validator) -[rewards regime](cluster/stake-delegation-and-rewards.md), the number of points -owed to a [stake](#stake) during redemption is the product of the -[vote credits](#vote-credit) earned and the number of lamports staked. +[rewards regime](https://docs.solanalabs.com/consensus/stake-delegation-and-rewards), +the number of points owed to a [stake](#stake) during redemption is the product +of the [vote credits](#vote-credit) earned and the number of lamports staked. ## private key -Another word for the [#secret-key]. Solana tools and documentation generally use -the term 'secret key' instead. +The private key of a [keypair](#keypair). -## program (or on-chain program) +## program The executable code that interprets the [instructions](#instruction) sent inside -of each [transaction](#transaction) on the Solana. - -These programs are often referred to as -"[_smart contracts_](./developing//intro/programs.md)" on other blockchains. +of each [transaction](#transaction) on the Solana. These programs are often +referred to as "[_smart contracts_](/docs/core/programs.md)" on other +blockchains. ## program derived account (PDA) An account whose signing authority is a program and thus is not controlled by a -secret key like other accounts. +private key like other accounts. ## program id @@ -409,13 +345,13 @@ blockchain. When accounts do not have enough balance to pay rent, they may be Garbage Collected. See also [rent exempt](#rent-exempt) below. Learn more about rent here: -[What is rent?](../src/developing/intro/rent.md). +[What is rent?](/docs/core/rent.md). ## rent exempt Accounts that maintain more than 2 years with of rent payments in their account are considered "_rent exempt_" and will not incur the -[collection of rent](../src/developing/intro/rent.md#collecting-rent). +[collection of rent](/docs/core/rent.md#collecting-rent). ## root @@ -430,13 +366,9 @@ root are excluded from consideration for consensus and can be discarded. 
The component of a [validator](#validator) responsible for [program](#program) execution. -## secret key - -The secret key of a [keypair](#keypair). - ## Sealevel -Solana's parallel runtime for [on-chain programs](#on-chain-program). +Solana's parallel smart contracts run-time. ## shred @@ -483,8 +415,7 @@ non-overlapping, comprising roughly equal real-world time as per ## smart contract -Another term for [on chain programs](#programs). Solana programs should use the -term ["on-chain programs"](#program) or simply ["programs"](#program). +See [onchain program](#onchain-program). ## sol @@ -504,14 +435,10 @@ behavior can be proven. 2/3 of a [cluster](#cluster). -## sx - -Short for [signature](#signature). - ## sysvar A system [account](#account). -[Sysvars](developing/runtime-facilities/sysvars.md) provide cluster state +[Sysvars](https://docs.solanalabs.com/runtime/sysvars) provide cluster state information such as current tick height, rewards [points](#point) values, etc. Programs can access Sysvars via a Sysvar account (pubkey) or by querying via a syscall. @@ -533,19 +460,13 @@ The Nth [tick](#tick) in the [ledger](#ledger). A digitally transferable asset. -## token account - -An [account](#account) that holds the balance for newly [mint](#mint)ed -[token](#token)s. Each token account is associated with a single -[mint account](#mint-account). - ## tps [Transactions](#transaction) per second. ## tpu -[Transaction processing unit](validator/tpu.md). +[Transaction processing unit](https://docs.solanalabs.com/validator/tpu). ## transaction @@ -570,11 +491,7 @@ A set of [transactions](#transaction) that may be executed in parallel. ## tvu -[Transaction validation unit](validator/tvu.md). - -## tx - -Short for [transaction](#transaction). +[Transaction validation unit](https://docs.solanalabs.com/validator/tvu). ## validator @@ -604,26 +521,9 @@ validator in its vote account when the validator reaches a [root](#root). A collection of [keypairs](#keypair) that allows users to manage their funds. -## wallet address - -The unique [public key](#public-key-pubkey) of a wallet. These are usually -displayed to programs or user interfaces as a [base58](#base58) string. - -## wallet app - -An end-user program with a user interface (UI) to interact with a blockchain. A -wallet typically includes a UI to send and recieve tokens and NFTs, check -balances of diffferent tokens, view NFTs, stake, and on/off ramp into fiat and -back. See the -[wallet apps on a Solana ecosytstem page](https://solana.com/ecosystem/explore?categories=wallet). - -## wallet name - -A unique memorable name used to refer to a [wallet address](#wallet-address). - ## warmup period Some number of [epochs](#epoch) after [stake](#stake) has been delegated while it progressively becomes effective. During this period, the stake is considered to be "activating". 
More info about: -[warmup and cooldown](cluster/stake-delegation-and-rewards.md#stake-warmup-cooldown-withdrawal) +[warmup and cooldown](https://docs.solanalabs.com/consensus/stake-delegation-and-rewards#stake-warmup-cooldown-withdrawal) diff --git a/next.config.cjs b/next.config.js similarity index 50% rename from next.config.cjs rename to next.config.js index e2322d9c9..751efc725 100644 --- a/next.config.cjs +++ b/next.config.js @@ -1,6 +1,8 @@ +// @ts-check /** @type {import('next').NextConfig} */ const { withContentlayer } = require("next-contentlayer"); +const redirectsJson = require("./redirects.json"); module.exports = withContentlayer({ reactStrictMode: true, @@ -27,4 +29,26 @@ module.exports = withContentlayer({ }, ], }, + async redirects() { + return [ + // common solana docs + ...redirectFormatter("https://solana.com/", redirectsJson["common-docs"]), + ...redirectFormatter( + "https://docs.solanalabs.com/", + redirectsJson["solana-client"], + ), + ]; + }, }); + +/** + * Format the NextJS redirects + */ +function redirectFormatter(basePath, redirects, permanent = true) { + return redirects.map(item => { + item.basePath = false; + item.permanent = item.permanent ?? permanent; + item.destination = `${basePath}${item.destination}`; + return item; + }); +} diff --git a/package.json b/package.json index cc8c24be5..5913f5340 100644 --- a/package.json +++ b/package.json @@ -10,11 +10,13 @@ "scripts": { "runner": "npx ts-node -r tsconfig-paths/register", "contentlayer:build": "npx contentlayer build --clearCache", - "test": "yarn contentlayer:build", "dev": "yarn contentlayer:build && next dev -p 3001", "build": "yarn contentlayer:build && next build", "start": "next start", - "lint": "next lint" + "test": "yarn contentlayer:build", + "lint": "next lint", + "prettier:check": "prettier -c .", + "prettier:fix": "prettier -cw ." 
}, "dependencies": { "@types/node": "20.4.2", diff --git a/public/assets/docs/economics/example_adjusted_staking_yields.png b/public/assets/docs/economics/example_adjusted_staking_yields.png new file mode 100644 index 000000000..db68e4b0a Binary files /dev/null and b/public/assets/docs/economics/example_adjusted_staking_yields.png differ diff --git a/public/assets/docs/economics/example_relative_dilution.png b/public/assets/docs/economics/example_relative_dilution.png new file mode 100644 index 000000000..ba59a78bd Binary files /dev/null and b/public/assets/docs/economics/example_relative_dilution.png differ diff --git a/public/assets/docs/economics/example_staked_supply_w_range_initial_stake.png b/public/assets/docs/economics/example_staked_supply_w_range_initial_stake.png new file mode 100644 index 000000000..89b893d0e Binary files /dev/null and b/public/assets/docs/economics/example_staked_supply_w_range_initial_stake.png differ diff --git a/public/assets/docs/economics/example_staked_yields.png b/public/assets/docs/economics/example_staked_yields.png new file mode 100644 index 000000000..32311c396 Binary files /dev/null and b/public/assets/docs/economics/example_staked_yields.png differ diff --git a/public/assets/docs/economics/example_unstaked_dilution.png b/public/assets/docs/economics/example_unstaked_dilution.png new file mode 100644 index 000000000..a283011ac Binary files /dev/null and b/public/assets/docs/economics/example_unstaked_dilution.png differ diff --git a/public/assets/docs/economics/proposed_inflation_schedule.png b/public/assets/docs/economics/proposed_inflation_schedule.png new file mode 100644 index 000000000..f515efa12 Binary files /dev/null and b/public/assets/docs/economics/proposed_inflation_schedule.png differ diff --git a/public/assets/docs/economics/proposed_total_supply.png b/public/assets/docs/economics/proposed_total_supply.png new file mode 100644 index 000000000..e2562ffde Binary files /dev/null and b/public/assets/docs/economics/proposed_total_supply.png differ diff --git a/public/assets/docs/quickstarts/solana-get-started-build-and-deploy.png b/public/assets/docs/quickstarts/solana-get-started-build-and-deploy.png new file mode 100644 index 000000000..59bb3ef52 Binary files /dev/null and b/public/assets/docs/quickstarts/solana-get-started-build-and-deploy.png differ diff --git a/public/assets/docs/quickstarts/solana-get-started-import-on-playground.png b/public/assets/docs/quickstarts/solana-get-started-import-on-playground.png new file mode 100644 index 000000000..cd90b00cb Binary files /dev/null and b/public/assets/docs/quickstarts/solana-get-started-import-on-playground.png differ diff --git a/public/assets/docs/quickstarts/solana-get-started-successful-build.png b/public/assets/docs/quickstarts/solana-get-started-successful-build.png new file mode 100644 index 000000000..82b0a5df0 Binary files /dev/null and b/public/assets/docs/quickstarts/solana-get-started-successful-build.png differ diff --git a/public/assets/docs/quickstarts/solana-overview-client-program.png b/public/assets/docs/quickstarts/solana-overview-client-program.png new file mode 100644 index 000000000..42b80cee5 Binary files /dev/null and b/public/assets/docs/quickstarts/solana-overview-client-program.png differ diff --git a/public/assets/docs/rt-dropped-minority-fork-post-process.png b/public/assets/docs/rt-dropped-minority-fork-post-process.png new file mode 100644 index 000000000..ba56d1ccc Binary files /dev/null and b/public/assets/docs/rt-dropped-minority-fork-post-process.png 
differ diff --git a/public/assets/docs/rt-dropped-minority-fork-pre-process.png b/public/assets/docs/rt-dropped-minority-fork-pre-process.png new file mode 100644 index 000000000..10f08dfed Binary files /dev/null and b/public/assets/docs/rt-dropped-minority-fork-pre-process.png differ diff --git a/public/assets/docs/rt-dropped-via-rpc-pool.png b/public/assets/docs/rt-dropped-via-rpc-pool.png new file mode 100644 index 000000000..8020ebd87 Binary files /dev/null and b/public/assets/docs/rt-dropped-via-rpc-pool.png differ diff --git a/public/assets/docs/rt-tpu-jito-labs.png b/public/assets/docs/rt-tpu-jito-labs.png new file mode 100644 index 000000000..934e1dcdc Binary files /dev/null and b/public/assets/docs/rt-tpu-jito-labs.png differ diff --git a/public/assets/docs/rt-tx-journey.png b/public/assets/docs/rt-tx-journey.png new file mode 100644 index 000000000..faa8b6e22 Binary files /dev/null and b/public/assets/docs/rt-tx-journey.png differ diff --git a/public/assets/docs/transaction.svg b/public/assets/docs/transaction.svg new file mode 100644 index 000000000..49fb14b96 --- /dev/null +++ b/public/assets/docs/transaction.svg @@ -0,0 +1,75 @@ + + + + + + + + + + + + + + + + + + + + + + Neighborhood + 3 + + Neighborhood + 4 + + Neighborhood + 5 + + Neighborhood + 6 + Neighborhood + 0 + Neighborhood + 1 + Neighborhood + 2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/redirects.json b/redirects.json new file mode 100644 index 000000000..fd446b6bc --- /dev/null +++ b/redirects.json @@ -0,0 +1,394 @@ +{ + "solana-client": [ + { + "source": "/apps/sysvars", + "destination": "/developing/runtime-facilities/sysvars" + }, + { + "source": "/apps/builtins", + "destination": "/developing/runtime-facilities/programs" + }, + { + "source": "/apps/backwards-compatibility", + "destination": "/developing/backwards-compatibility" + }, + { + "source": "/implemented-proposals/secp256k1_instruction", + "destination": "/developing/runtime-facilities/programs#secp256k1-program" + }, + + { + "source": "/implemented-proposals/implemented-proposals", + "destination": "/implemented-proposals" + }, + { + "source": "/cli/install-solana-cli-tools", + "destination": "/cli/install" + }, + { "source": "/cli/conventions", "destination": "/cli/intro" }, + { + "source": "/cli/choose-a-cluster", + "destination": "/cli/examples/choose-a-cluster" + }, + { + "source": "/cli/delegate-stake", + "destination": "/cli/examples/delegate-stake" + }, + { + "source": "/delegate-stake", + "destination": "/cli/examples/delegate-stake" + }, + { + "source": "/cli/sign-offchain-message", + "destination": "/cli/examples/sign-offchain-message" + }, + { + "source": "/cli/deploy-a-program", + "destination": "/cli/examples/deploy-a-program" + }, + { + "source": "/cli/transfer-tokens", + "destination": "/cli/examples/transfer-tokens" + }, + { + "source": "/offline-signing/durable-nonce", + "destination": "/cli/examples/durable-nonce" + }, + { + "source": "/offline-signing", + "destination": "/cli/examples/offline-signing" + }, + { + "source": "/developing/test-validator", + "destination": "/cli/examples/test-validator" + }, + { "source": "/wallet-guide/cli", "destination": "/cli/wallets" }, + { + "source": "/wallet-guide/paper-wallet", + "destination": "/cli/wallets/paper" + }, + { + "source": "/wallet-guide/file-system-wallet", + "destination": "/cli/wallets/file-system" + }, + { + "source": "/wallet-guide/hardware-wallet", + "destination": "/cli/wallets/hardware-wallet" + }, + { + 
"source": "/wallet-guide/hardware-wallet/ledger", + "destination": "/cli/wallets/hardware-wallet/ledger" + }, + + { "source": "/cluster/overview", "destination": "/clusters/index" }, + { "source": "/cluster/bench-tps", "destination": "/clusters/benchmark" }, + { + "source": "/cluster/performance-metrics", + "destination": "/clusters/metrics" + }, + + { "source": "/running-validator", "destination": "/operations" }, + { + "source": "/validator/get-started/setup-a-validator", + "destination": "/operations/setup-a-validator" + }, + { + "source": "/validator/get-started/setup-an-rpc-node", + "destination": "/operations/setup-an-rpc-node" + }, + { + "source": "/validator/best-practices/operations", + "destination": "/operations/best-practices/general" + }, + { + "source": "/validator/best-practices/monitoring", + "destination": "/operations/best-practices/monitoring" + }, + { + "source": "/validator/best-practices/security", + "destination": "/operations/best-practices/security" + }, + { + "source": "/validator/overview/running-validator-or-rpc-node", + "destination": "/operations/validator-or-rpc-node" + }, + { + "source": "/validator/overview/validator-prerequisites", + "destination": "/operations/prerequisites" + }, + { + "source": "/validator/overview/validator-initiatives", + "destination": "/operations/validator-initiatives" + }, + { + "source": "/running-validator/validator-reqs", + "destination": "/operations/requirements" + }, + + { + "source": "/running-validator/validator-troubleshoot", + "destination": "/operations/guides/validator-troubleshoot" + }, + { + "source": "/running-validator/validator-start", + "destination": "/operations/guides/validator-start" + }, + { + "source": "/running-validator/vote-accounts", + "destination": "/operations/guides/vote-accounts" + }, + { + "source": "/running-validator/validator-stake", + "destination": "/operations/guides/validator-stake" + }, + { + "source": "/running-validator/validator-monitor", + "destination": "/operations/guides/validator-monitor" + }, + { + "source": "/running-validator/validator-info", + "destination": "/operations/guides/validator-info" + }, + { + "source": "/running-validator/validator-failover", + "destination": "/operations/guides/validator-failover" + }, + { + "source": "/running-validator/restart-cluster", + "destination": "/operations/guides/restart-cluster" + }, + + { + "source": "/cluster/synchronization", + "destination": "/consensus/synchronization" + }, + { + "source": "/cluster/leader-rotation", + "destination": "/consensus/leader-rotation" + }, + { + "source": "/cluster/fork-generation", + "destination": "/consensus/fork-generation" + }, + { + "source": "/cluster/managing-forks", + "destination": "/consensus/managing-forks" + }, + { + "source": "/cluster/turbine-block-propagation", + "destination": "/consensus/turbine-block-propagation" + }, + { + "source": "/cluster/commitments", + "destination": "/consensus/commitments" + }, + { + "source": "/cluster/vote-signing", + "destination": "/consensus/vote-signing" + }, + { + "source": "/cluster/stake-delegation-and-rewards", + "destination": "/consensus/stake-delegation-and-rewards" + }, + + { + "source": "/developing/backwards-compatibility", + "destination": "/backwards-compatibility" + }, + { "source": "/validator/faq", "destination": "/faq" }, + { + "source": "/developing/plugins/geyser-plugins", + "destination": "/validator/geyser" + }, + { + "source": "/validator/overview/what-is-an-rpc-node", + "destination": "/what-is-an-rpc-node" + }, + { + "source": 
"/validator/overview/what-is-a-validator", + "destination": "/what-is-a-validator" + }, + + { + "source": "/developing/runtime-facilities/:path*", + "destination": "/runtime/:path*" + } + ], + "common-docs": [ + { "source": "/apps", "destination": "/developers" }, + { + "source": "/developing/programming-model/overview", + "destination": "/docs/programs" + }, + { "source": "/apps/break", "destination": "/docs/programs/examples" }, + { "source": "/apps/drones", "destination": "/docs/programs/examples" }, + { "source": "/apps/hello-world", "destination": "/docs/programs/examples" }, + { + "source": "/apps/javascript-api", + "destination": "/docs/clients/javascript" + }, + { "source": "/apps/programming-faq", "destination": "/docs/programs/faq" }, + { "source": "/apps/rent", "destination": "/docs/core/rent" }, + { "source": "/apps/webwallet", "destination": "/docs/intro/wallets" }, + { + "source": "/implemented-proposals/cross-program-invocation", + "destination": "/docs/core/cpi" + }, + { + "source": "/implemented-proposals/program-derived-addresses", + "destination": "/docs/core/cpi#program-derived-addresses" + }, + + { "destination": "/docs/rpc/http:path*", "source": "/api/http/:path*" }, + { + "destination": "/docs/rpc/websocket/:path*", + "source": "/api/websocket/:path*" + }, + { + "destination": "/docs/rpc/:path*", + "source": "/developing/clients/jsonrpc-api" + }, + { "destination": "/docs/rpc/:path*", "source": "/apps/jsonrpc-api" }, + { "destination": "/docs/terminology", "source": "/terminology" }, + { "destination": "/docs/core/rent", "source": "/developing/intro/rent" }, + { + "destination": "/docs/core/programs", + "source": "/developing/intro/programs" + }, + { + "destination": "/docs/core/accounts", + "source": "/developing/programming-model/accounts" + }, + { + "destination": "/docs/core/cpi", + "source": "/developing/programming-model/calling-between-programs" + }, + { + "destination": "/docs/core/runtime", + "source": "/developing/programming-model/runtime" + }, + { + "destination": "/docs/core/transactions", + "source": "/developing/programming-model/transactions" + }, + { + "destination": "/docs/core/transactions/fees", + "source": "/developing/intro/transaction_fees" + }, + { + "destination": "/docs/core/transactions/confirmation", + "source": "/developing/transaction_confirmation" + }, + { + "destination": "/docs/core/transactions/versions", + "source": "/developing/versioned-transactions" + }, + { + "destination": "/docs/core/transactions/retry", + "source": "/integrations/retrying-transactions" + }, + { + "destination": "/docs/intro/dev", + "source": "/developing/programming-model/overview" + }, + { + "destination": "/docs/advanced/lookup-tables", + "source": "/developing/lookup-tables" + }, + { + "destination": "/docs/advanced/state-compression", + "source": "/learn/state-compression" + }, + { + "destination": "/developers/guides/javascript/compressed-nfts", + "source": "/developing/guides/compressed-nfts" + }, + { + "destination": "/docs/programs", + "source": "/developing/on-chain-programs/overview" + }, + { + "destination": "/docs/programs/debugging", + "source": "/developing/on-chain-programs/debugging" + }, + { + "destination": "/docs/programs/deploying", + "source": "/developing/on-chain-programs/deploying" + }, + { + "destination": "/docs/programs/examples", + "source": "/developing/on-chain-programs/examples" + }, + { + "destination": "/docs/programs/faq", + "source": "/developing/on-chain-programs/faq" + }, + { + "destination": "/docs/programs/limitations", 
+ "source": "/developing/on-chain-programs/limitations" + }, + { + "destination": "/docs/programs/lang-rust", + "source": "/developing/on-chain-programs/developing-rust" + }, + { + "destination": "/docs/programs/lang-c", + "source": "/developing/on-chain-programs/developing-c" + }, + { + "destination": "/docs/clients/javascript-reference", + "source": "/developing/clients/javascript-reference" + }, + { + "destination": "/docs/clients/javascript", + "source": "/developing/clients/javascript-api" + }, + { + "destination": "/docs/clients/rust", + "source": "/developing/clients/rust-api" + }, + { "destination": "/docs/intro/dev", "source": "/getstarted/overview" }, + { + "destination": "/developers/guides/getstarted/hello-world-in-your-browser", + "source": "/getstarted/hello-world" + }, + { + "destination": "/developers/guides/getstarted/setup-local-development", + "source": "/getstarted/local" + }, + { + "destination": "/developers/guides/getstarted/local-rust-hello-world", + "source": "/getstarted/rust" + }, + { + "destination": "/docs/core/clusters", + "source": "/clusters/rpc-endpoints" + }, + { "destination": "/docs/economics/staking", "source": "/staking" }, + { + "destination": "/docs/economics/staking/:path*", + "source": "/staking/:path*" + }, + { + "destination": "/docs/economics/inflation/:path*", + "source": "/inflation/:path*" + }, + { + "destination": "/docs/more/exchange", + "source": "/integrations/exchange" + }, + { + "destination": "/docs/intro/transaction_fees", + "source": "/transaction_fees" + }, + { + "destination": "/docs/intro/economics", + "source": "/storage_rent_economics" + }, + { "destination": "/docs/intro/economics", "source": "/economics_overview" }, + { "destination": "/docs/intro/history", "source": "/history" }, + { "destination": "/docs/intro/wallets", "source": "/wallet-guide/support" }, + { "destination": "/docs/intro/wallets", "source": "/wallet-guide" }, + { "destination": "/docs/intro", "source": "/introduction" } + ] +} diff --git a/src/pages/api/content/[[...slug]].ts b/src/pages/api/content/[[...slug]].ts index caac21cc0..fa026a2fe 100644 --- a/src/pages/api/content/[[...slug]].ts +++ b/src/pages/api/content/[[...slug]].ts @@ -3,12 +3,18 @@ * based on the provided url `slug` */ -import { SimpleRecordGroupName } from "@/types"; -import { computeNavItem } from "@/utils/navItem"; +import { NavItem, SimpleRecordGroupName } from "@/types"; +import { + generateFlatNavItemListing, + generateNavItemListing, +} from "@/utils/navItem"; import { allDeveloperGuides, allDeveloperResources, allSolanaDocs, + allDeveloperWorkshops, + allSolanaRPCDocs, + DocumentTypes, } from "contentlayer/generated"; import type { NextApiRequest, NextApiResponse } from "next"; @@ -27,63 +33,78 @@ export default function handler( // retrieve the correct group's records by its simple group name const records = ((group: SimpleRecordGroupName) => { switch (group) { - case "docs": + case "docs": { + if (slug[1] == "rpc") return allSolanaRPCDocs; return allSolanaDocs; + } case "guides": return allDeveloperGuides; case "resources": return allDeveloperResources; + case "workshops": + return allDeveloperWorkshops; } })(group); if (!records) return res.status(404).json({ notFound: true }); // define the formatted href value to search for - const href = `${ - slug[0].toLocaleLowerCase() == "docs" ? 
"" : "/developers" - }/${slug.join("/")}`; - // note: this effectively enforces that only href's that start with "/developers" are supported - - // init the record to be returned - let record; - - // locate the correct record requested (via the url param) - for (let i = 0; i < records.length; i++) { - // @ts-ignore - const navItem = computeNavItem(records[i]); - + const href = `${ + slug[0].toLocaleLowerCase() == "docs" || + slug[0].toLocaleLowerCase() == "rpc" + ? "" + : "/developers" + }/${slug.join("/")}`.toLowerCase(); + + // create a flat listing of all the nav items in order to locate the next, current, and prev records + const flatNavItems = generateFlatNavItemListing( + generateNavItemListing(records), + ); + + // initialize the NavItem record trackers + let current: NavItem | null = null; + let next: NavItem | null = null; + let prev: NavItem | null = null; + + for (let i = 0; i < flatNavItems.length; i++) { // skip incorrect routes if ( - navItem.href != href && - navItem.href != `/${href}` && - records[i]?.altRoutes?.filter(route => route == href)?.[0] != href + flatNavItems[i].href != href && + flatNavItems[i].href != `/${href}` && + flatNavItems[i]?.altRoutes?.filter(route => route == href)?.[0] != href ) { continue; } - // set the requested record's data (weaving in the computed nav item data) - record = Object.assign(navItem, records[i]); - - /** - * todo: support next/prev type records - * note: this will likely require processing the nav records? - */ + current = flatNavItems[i]; + if (flatNavItems.length >= i - 1) prev = flatNavItems[i - 1]; + if (flatNavItems.length >= i + 1) next = flatNavItems[i + 1]; // break out of the loop and stop processing break; } + if (!current) return res.status(404).json({ notFound: true }); + + // locate full content record + + let record = (records as DocumentTypes[]).filter( + (item: DocumentTypes) => + item._raw.sourceFilePath.toLowerCase() == current?.path?.toLowerCase(), + )?.[0]; if (!record) return res.status(404).json({ notFound: true }); // remove the html formatted content (since it is undesired data to send over the wire) - // @ts-ignore - record.body = record.body.raw.trim(); + if (typeof record.body.raw !== "undefined") { + // @ts-ignore + record.body = record.body.raw.trim(); + } // todo: preprocess the body content? 
(if desired in the future) // todo: support sending related content records back to the client - // finally, return the json formatted listing of NavItems - return res.status(200).json(record); + // finally, return the json formatted listing of NavItems (with the next and prev records) + return res.status(200).json(Object.assign(current, record, { next, prev })); } diff --git a/src/pages/api/nav/[group].ts b/src/pages/api/nav/[[...group]].ts similarity index 85% rename from src/pages/api/nav/[group].ts rename to src/pages/api/nav/[[...group]].ts index 623b459bf..c5f1303fd 100644 --- a/src/pages/api/nav/[group].ts +++ b/src/pages/api/nav/[[...group]].ts @@ -8,8 +8,10 @@ import { NavItem, SimpleRecordGroupName } from "@/types"; import { generateNavItemListing } from "@/utils/navItem"; import { allDeveloperGuides, - allDeveloperResources, + // allDeveloperResources, allSolanaDocs, + allDeveloperWorkshops, + allSolanaRPCDocs, } from "contentlayer/generated"; export default function handler( @@ -23,12 +25,17 @@ export default function handler( // retrieve the correct group's records by its simple group name const records = ((group: SimpleRecordGroupName) => { switch (group) { + case "rpc": + case "docs,rpc": + return allSolanaRPCDocs; case "docs": return allSolanaDocs; case "guides": return allDeveloperGuides; // case "resources": // return allDeveloperResources; + case "workshops": + return allDeveloperWorkshops; } })(group); diff --git a/src/pages/api/overview.ts b/src/pages/api/overview.ts index 289bc772e..d17456ecd 100644 --- a/src/pages/api/overview.ts +++ b/src/pages/api/overview.ts @@ -8,6 +8,7 @@ import type { NextApiRequest, NextApiResponse } from "next"; import { allDeveloperGuides, allDeveloperResources, + allDeveloperWorkshops, } from "contentlayer/generated"; import { extractFeaturedRecords, simplifyRecords } from "@/utils/parsers"; @@ -28,5 +29,11 @@ export default function handler( limit: 6, callback: simplifyRecords, }), + // featured workshops + workshops: extractFeaturedRecords({ + records: allDeveloperWorkshops, + limit: 6, + callback: simplifyRecords, + }), }); } diff --git a/src/pages/api/paths/[group].ts b/src/pages/api/paths/[[...group]].ts similarity index 91% rename from src/pages/api/paths/[group].ts rename to src/pages/api/paths/[[...group]].ts index 6aa5512a3..3b9fcf449 100644 --- a/src/pages/api/paths/[group].ts +++ b/src/pages/api/paths/[[...group]].ts @@ -10,6 +10,8 @@ import { allDeveloperGuides, allDeveloperResources, allSolanaDocs, + allDeveloperWorkshops, + allSolanaRPCDocs, } from "contentlayer/generated"; export default function handler( @@ -23,12 +25,17 @@ export default function handler( // retrieve the correct group's records by its simple group name const records = ((group: SimpleRecordGroupName) => { switch (group) { + case "rpc": + case "docs,rpc": + return allSolanaRPCDocs; case "docs": return allSolanaDocs; case "guides": return allDeveloperGuides; case "resources": return allDeveloperResources; + case "workshops": + return allDeveloperWorkshops; } })(group); diff --git a/src/pages/api/records/[group].ts b/src/pages/api/records/[group].ts index 116650d84..20ca08e35 100644 --- a/src/pages/api/records/[group].ts +++ b/src/pages/api/records/[group].ts @@ -8,6 +8,8 @@ import { allDeveloperGuides, allDeveloperResources, allSolanaDocs, + allDeveloperWorkshops, + allSolanaRPCDocs, } from "contentlayer/generated"; import { simplifyRecords } from "@/utils/parsers"; @@ -22,12 +24,17 @@ export default function handler( // retrieve the correct group's records by 
its simple group name let records: SupportedDocTypes[] = ((group: SimpleRecordGroupName) => { switch (group) { + case "rpc": + case "docs,rpc": + return allSolanaRPCDocs; case "docs": return allSolanaDocs; case "guides": return allDeveloperGuides; case "resources": return allDeveloperResources; + case "workshops": + return allDeveloperWorkshops; } })(group); diff --git a/src/pages/index.tsx b/src/pages/index.tsx index 54582cd50..39ff627d4 100644 --- a/src/pages/index.tsx +++ b/src/pages/index.tsx @@ -23,7 +23,7 @@ export default function Page() { Solana Developer Content -
nothing to see here
+
nothing to see here
); } diff --git a/src/types/index.ts b/src/types/index.ts index 7254b47ad..4903b47c8 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -15,7 +15,13 @@ export type SupportedDocTypes = Exclude; * @dev when adding new group names, ensure the desired support is added in all * other places the type `SimpleRecordGroupName` is used (e.g. api routes) */ -export type SimpleRecordGroupName = "docs" | "guides" | "resources"; +export type SimpleRecordGroupName = + | "docs" + | "rpc" + | "docs,rpc" // note: this is to support stringify-ing the route via the url + | "guides" + | "resources" + | "workshops"; type NavItemBase = { id: String; @@ -28,6 +34,7 @@ type NavItemBase = { * */ items?: Array; + altRoutes?: string[] | undefined; }; export type NavItem = NavItemBase & { diff --git a/src/utils/navItem.ts b/src/utils/navItem.ts index ba8b35309..4188a7050 100644 --- a/src/utils/navItem.ts +++ b/src/utils/navItem.ts @@ -24,7 +24,10 @@ export function generateNavItemListing( grouping[record._raw.sourceFileDir] = { items: [] } as unknown as NavItem; // process the index file as the root of the NavItem - if (record._raw.sourceFileName == "index.md") { + if ( + record._raw.sourceFileName == "index.md" || + record._raw.sourceFileName == "index.mdx" + ) { grouping[record._raw.sourceFileDir] = Object.assign( grouping[record._raw.sourceFileDir], // @ts-ignore @@ -36,33 +39,120 @@ export function generateNavItemListing( } }); + // regroup all the items for multi-nested relationships + for (const [key, data] of Object.entries(grouping)) { + const currentItem = data as NavItem; + + // handle category items that do not have metadata pulled from a file (i.e. no `path`) + if (!currentItem.path) { + Object.assign(currentItem, computeDetailsFromKey(key)); + // currentItem.label = ucFirst(key.split("/").reverse()[0]); + // currentItem.id = key.replaceAll("/", "-"); + } + + const parentKey = key.slice(0, key.lastIndexOf("/")); + if ( + key.lastIndexOf("/") > 0 && + key != parentKey && + parentKey != key.slice(0, key.indexOf("/")) + ) { + // handle the `parentKey` already existing + if (Object.hasOwn(grouping, parentKey)) { + // + const parentItems: NavItem[] = + (grouping[parentKey] as NavItem)?.items || []; + + const siblingIndex = parentItems.findIndex(s => s.id == currentItem.id); + + // update an existing sibling category if it already exists + if (siblingIndex >= 0) { + // join the existing items listing with the `currentItem` being manipulated + if (Array.isArray(parentItems[siblingIndex]?.items)) { + // the sibling already exists + parentItems[siblingIndex].items?.push(...(currentItem.items as [])); + } else { + // the sibling did not already exist + parentItems[siblingIndex].items = currentItem.items; + } + } else { + // add the new sibling record since it did not already exist + parentItems.push(currentItem); + } + } else { + (grouping[parentKey] as NavItem) = { + ...computeDetailsFromKey(parentKey), + items: [currentItem], + }; + } + + // finally delete the `currentItem`'s data from the master grouping + delete grouping[key]; + } else { + grouping[key] = currentItem; + } + } + // init the response NavItem listing const navItems: NavItem[] = []; // massage the dir based grouping into a valid NavItem[] - for (const entry of Object.entries(grouping)) { - const item = entry[1] as NavItem; + for (const [_key, data] of Object.entries(grouping)) { + navItems.push(data as NavItem); + } - // handle category items that do not have metadata pulled from a file (i.e. 
no `path`) - if (!item.path) { - item.label = ucFirst(entry[0].split("/").reverse()[0]); - item.id = entry[0].replaceAll("/", "-"); + // finally, return the NavItem array (sorted, of course) + return sortNavItems(navItems); +} + +/** + * Create a flat listing of all nav items provided + * + * note: normally, the provided `navItems` should be preprocessed by `generateNavItemListing` + */ +export function generateFlatNavItemListing( + navItems: Array, +): Array { + return navItems.flatMap(({ items, ...node }: NavItem) => { + if (typeof items !== "undefined") { + return [node as NavItem] + .concat(items) + .flatMap(children => generateFlatNavItemListing([children])); } + return node; + }); +} - navItems.push(item); - } +/** + * + */ +export function computeDetailsFromKey(key: string) { + return { + label: ucFirst(key.split("/").reverse()[0]), + id: key.replaceAll("/", "-"), + }; +} - /** - * finally, return the NavItem array (sorted, of course) - * --- - * note on sorting: final sorting on the full navItems listing is different than category items - * sort here will actually sort using the `sidebarSortOrder=0` value - */ - return navItems.sort( - (a, b) => - (typeof a?.sidebarSortOrder == "undefined" ? 999 : a.sidebarSortOrder) - - (typeof b?.sidebarSortOrder == "undefined" ? 999 : b.sidebarSortOrder), - ); +/** + * Sort the listing of NavItems based on their `sidebarSortOrder`, + * including recursively sorting all child items + * --- + * note on sorting: final sorting on the full navItems listing is different than category items + * sort here will actually sort using the `sidebarSortOrder=0` value + */ +export function sortNavItems(navItems: NavItem[]) { + return navItems + .map(record => { + // sort the child items + if (Array.isArray(record.items)) { + record.items = sortNavItems(record.items); + } + return record; + }) + .sort( + (a, b) => + (typeof a?.sidebarSortOrder == "undefined" ? 999 : a.sidebarSortOrder) - + (typeof b?.sidebarSortOrder == "undefined" ? 999 : b.sidebarSortOrder), + ); } /** @@ -70,7 +160,7 @@ export function generateNavItemListing( */ export function shouldIgnoreRecord({ fileName, - allowedExtensions = ["md"], + allowedExtensions = ["md", "mdx"], }: { fileName: string; allowedExtensions?: Array; @@ -106,6 +196,7 @@ export function computeNavItem( label: doc?.sidebarLabel || doc?.title, sidebarSortOrder: doc?.sidebarSortOrder, metaOnly: doc?.metaOnly, + altRoutes: doc.altRoutes, }; // compute an id based on the doc's path @@ -124,6 +215,10 @@ export function computeNavItem( ); } + // always lowercase certain specific values + record.href = record.href.toLowerCase(); + record.id = record.id.toLowerCase(); + /** * when the record is only storing metadata, remove it as a linked item * --- diff --git a/tsconfig.json b/tsconfig.json index ac56221f0..0c5fa3575 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -19,13 +19,19 @@ "contentlayer/generated": ["./.contentlayer/generated"], "@/*": ["./src/*"], "@@/*": ["./*"] - } + }, + "plugins": [ + { + "name": "next" + } + ] }, "include": [ ".contentlayer/generated", "next-env.d.ts", "**/*.ts", - "**/*.tsx" + "**/*.tsx", + ".next/types/**/*.ts" ], "exclude": ["node_modules"] }
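
Note on the redirect entries added earlier in this patch: under Next.js/Vercel-style redirect matching, the `:path*` wildcard carries any nested segments over to the destination, so (to use a hypothetical old URL) `/staking/stake-accounts` would resolve to `/docs/economics/staking/stake-accounts`, while the separate bare `/staking` rule handles the exact path with no trailing segments.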
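
Taken together, the changes to `src/utils/navItem.ts` and `src/pages/api/content/[[...slug]].ts` mean the content API now answers a request by flattening the generated nav tree (depth first, each parent emitted just before its children) and reading the records on either side of the matched item as `prev` and `next`. The following is a minimal standalone sketch of that behavior, using simplified types and hypothetical paths rather than the repo's generated content, and matching only on `href` (the real route also checks `/${href}` and `altRoutes`):

```ts
// Simplified stand-in for the repo's NavItem type (only the fields used here).
type NavItem = {
  id: string;
  href?: string;
  items?: NavItem[];
};

// Depth-first flattening, mirroring `generateFlatNavItemListing`:
// each parent is emitted immediately before its children.
function flatten(navItems: NavItem[]): NavItem[] {
  return navItems.flatMap(({ items, ...node }) =>
    typeof items !== "undefined" ? [node, ...flatten(items)] : [node],
  );
}

// Next/current/prev lookup, mirroring the content API route's loop.
function locate(flat: NavItem[], href: string) {
  const i = flat.findIndex(item => item.href === href.toLowerCase());
  if (i < 0) return null;
  return {
    prev: i > 0 ? flat[i - 1] : null,
    current: flat[i],
    next: i + 1 < flat.length ? flat[i + 1] : null,
  };
}

// Hypothetical tree: a "docs/core" section with two child pages.
const tree: NavItem[] = [
  {
    id: "docs-core",
    href: "/docs/core",
    items: [
      { id: "docs-core-accounts", href: "/docs/core/accounts" },
      { id: "docs-core-transactions", href: "/docs/core/transactions" },
    ],
  },
];

console.log(locate(flatten(tree), "/docs/core/accounts"));
// -> prev: /docs/core, current: /docs/core/accounts, next: /docs/core/transactions
```

The bounds checks above are written as the intended edge behavior (no `prev` before the first item, no `next` after the last). The guards in the patched route (`flatNavItems.length >= i - 1` and `>= i + 1`) appear to always hold inside the loop, so at the edges `prev`/`next` simply come back `undefined` rather than `null`; the end result over the wire is equivalent.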
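
The new `sortNavItems` helper sorts recursively and treats a missing `sidebarSortOrder` as 999, so children are ordered within their parent and anything without an explicit order sinks to the bottom. A tiny illustration with made-up ids, using an equivalent comparator (the patch spells out the `typeof ... == "undefined"` check instead of `??`):

```ts
type Sortable = { id: string; sidebarSortOrder?: number; items?: Sortable[] };

// Undefined sidebarSortOrder is treated as 999, matching the patch's comparator.
const bySidebarOrder = (a: Sortable, b: Sortable) =>
  (a.sidebarSortOrder ?? 999) - (b.sidebarSortOrder ?? 999);

const pages: Sortable[] = [
  { id: "faq" }, // no sidebarSortOrder -> sorts last
  { id: "clusters", sidebarSortOrder: 2 },
  { id: "intro", sidebarSortOrder: 0 },
];

console.log(pages.sort(bySidebarOrder).map(item => item.id));
// -> ["intro", "clusters", "faq"]
```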