Jelajahi Sumber

Merge pull request #65 from 0xPratik/main

cNft Burn anchor example
Jonas Hahn 1 tahun lalu
induk
melakukan
43666698d4

+ 8 - 0
compression/cnft-burn/.gitignore

@@ -0,0 +1,8 @@
+
+.anchor
+.DS_Store
+target
+**/*.rs.bk
+node_modules
+test-ledger
+.yarn

+ 8 - 0
compression/cnft-burn/.prettierignore

@@ -0,0 +1,8 @@
+
+.anchor
+.DS_Store
+target
+node_modules
+dist
+build
+test-ledger

+ 18 - 0
compression/cnft-burn/Anchor.toml

@@ -0,0 +1,18 @@
+[toolchain]
+
+[features]
+seeds = false
+skip-lint = false
+
+[programs.devnet]
+cnft_burn = "FbeHkUEevbhKmdk5FE5orcTaJkCYn5drwZoZXaxQXXNn"
+
+[registry]
+url = "https://api.apr.dev"
+
+[provider]
+cluster = "devnet"
+wallet = "~/.config/solana/id.json"
+
+[scripts]
+test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/cnft-burn.ts"

+ 13 - 0
compression/cnft-burn/Cargo.toml

@@ -0,0 +1,13 @@
+[workspace]
+members = [
+    "programs/*"
+]
+
+[profile.release]
+overflow-checks = true
+lto = "fat"
+codegen-units = 1
+[profile.release.build-override]
+opt-level = 3
+incremental = false
+codegen-units = 1

+ 26 - 0
compression/cnft-burn/README.md

@@ -0,0 +1,26 @@
+# cnft-burn
+
+This repository contains the cnft-burn program, a Solana Anchor program that allows you to burn compressed NFTs (cNFTs) in your collection. The program interacts with the Metaplex Bubblegum program through CPI to burn cNFTs.
+
+## Components
+
+- programs: Contains the anchor program
+- tests: Contains the tests for the anchor program
+
+## Deployment
+
+The program is deployed on devnet at `FbeHkUEevbhKmdk5FE5orcTaJkCYn5drwZoZXaxQXXNn`. You can deploy it yourself by changing the respective values in lib.rs and Anchor.toml.
+
+## How to run
+
+1. Configure RPC path in cnft-burn.ts. Personal preference: Helius RPCs.
+2. run `anchor build` at the root of the project, i.e. cnft-burn in this case.
+3. run `anchor deploy` to deploy and test the program on your own cluster.
+4. run `anchor test` to run the tests.
+
+## Acknowledgements
+
+This example program would not have been possible without the work of:
+
+- [Metaplex](https://github.com/metaplex-foundation/) for providing the Bubblegum program with ix builders.
+- [@nickfrosty](https://twitter.com/nickfrosty) for providing the sample code for fetching and creating cNFTs.

+ 12 - 0
compression/cnft-burn/migrations/deploy.ts

@@ -0,0 +1,12 @@
+// Migrations are an early feature. Currently, they're nothing more than this
+// single deploy script that's invoked from the CLI, injecting a provider
+// configured from the workspace's Anchor.toml.
+
+const anchor = require("@coral-xyz/anchor");
+
+module.exports = async function (provider) {
+  // Configure client to use the provider.
+  anchor.setProvider(provider);
+
+  // Add your deploy script here.
+};

+ 26 - 0
compression/cnft-burn/package.json

@@ -0,0 +1,26 @@
+{
+  "scripts": {
+    "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w",
+    "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check"
+  },
+  "dependencies": {
+    "@coral-xyz/anchor": "^0.29.0",
+    "@metaplex-foundation/js": "^0.19.4",
+    "@metaplex-foundation/mpl-bubblegum": "^0.7.0",
+    "@metaplex-foundation/mpl-token-metadata": "^2.12.0",
+    "@metaplex-foundation/umi": "^0.9.0",
+    "@solana/spl-account-compression": "^0.2.0",
+    "@solana/web3.js": "^1.89.0",
+    "axios": "^1.6.5"
+  },
+  "devDependencies": {
+    "@types/bn.js": "^5.1.0",
+    "@types/chai": "^4.3.0",
+    "@types/mocha": "^9.0.0",
+    "chai": "^4.3.4",
+    "mocha": "^9.0.3",
+    "prettier": "^2.6.2",
+    "ts-mocha": "^10.0.0",
+    "typescript": "^4.3.5"
+  }
+}

+ 22 - 0
compression/cnft-burn/programs/cnft-burn/Cargo.toml

@@ -0,0 +1,22 @@
+[package]
+name = "cnft-burn"
+version = "0.1.0"
+description = "Created with Anchor"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib", "lib"]
+name = "cnft_burn"
+
+[features]
+no-entrypoint = []
+no-idl = []
+no-log-ix-name = []
+cpi = ["no-entrypoint"]
+default = []
+
+[dependencies]
+anchor-lang = "0.29.0"
+mpl-bubblegum = {version="1.1.0" }
+spl-account-compression = { version="0.3.0",features = ["no-entrypoint","cpi"] }
+ahash = "=0.8.6"

+ 2 - 0
compression/cnft-burn/programs/cnft-burn/Xargo.toml

@@ -0,0 +1,2 @@
+[target.bpfel-unknown-unknown.dependencies.std]
+features = []

+ 86 - 0
compression/cnft-burn/programs/cnft-burn/src/lib.rs

@@ -0,0 +1,86 @@
+use anchor_lang::prelude::*;
+
+declare_id!("FbeHkUEevbhKmdk5FE5orcTaJkCYn5drwZoZXaxQXXNn");
+
+#[derive(Clone)]
+pub struct SPLCompression;
+
+impl anchor_lang::Id for SPLCompression {
+    fn id() -> Pubkey {
+        spl_account_compression::id()
+    }
+}
+
+#[program]
+pub mod cnft_burn {
+    use super::*;
+
+    pub fn burn_cnft<'info>(
+        ctx: Context<'_, '_, '_, 'info, BurnCnft<'info>>,
+        root: [u8; 32],
+        data_hash: [u8; 32],
+        creator_hash: [u8; 32],
+        nonce: u64,
+        index: u32,
+    ) -> Result<()> {
+        let tree_config = ctx.accounts.tree_authority.to_account_info();
+        let leaf_owner = ctx.accounts.leaf_owner.to_account_info();
+        let merkle_tree = ctx.accounts.merkle_tree.to_account_info();
+        let log_wrapper = ctx.accounts.log_wrapper.to_account_info();
+        let compression_program = ctx.accounts.compression_program.to_account_info();
+        let system_program = ctx.accounts.system_program.to_account_info();
+
+        let cnft_burn_cpi = mpl_bubblegum::instructions::BurnCpi::new(
+            &ctx.accounts.bubblegum_program,
+            mpl_bubblegum::instructions::BurnCpiAccounts {
+                tree_config: &tree_config,
+                leaf_owner: (&leaf_owner, true),
+                leaf_delegate: (&leaf_owner, false),
+                merkle_tree: &merkle_tree,
+                log_wrapper: &log_wrapper,
+                compression_program: &compression_program,
+                system_program: &system_program,
+            },
+            mpl_bubblegum::instructions::BurnInstructionArgs {
+                root,
+                data_hash,
+                creator_hash,
+                nonce,
+                index,
+            },
+        );
+
+        cnft_burn_cpi.invoke_with_remaining_accounts(
+            ctx.remaining_accounts
+                .iter()
+                .map(|account| (account, false, false))
+                .collect::<Vec<_>>()
+                .as_slice(),
+        )?;
+
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct BurnCnft<'info> {
+    #[account(mut)]
+    pub leaf_owner: Signer<'info>,
+    #[account(mut)]
+    #[account(
+        seeds = [merkle_tree.key().as_ref()],
+        bump,
+        seeds::program = bubblegum_program.key()
+    )]
+    /// CHECK: This account is modified in the downstream program
+    pub tree_authority: UncheckedAccount<'info>,
+    #[account(mut)]
+    /// CHECK: This account is neither written to nor read from.
+    pub merkle_tree: UncheckedAccount<'info>,
+    /// CHECK: This account is neither written to nor read from.
+    pub log_wrapper: UncheckedAccount<'info>,
+    pub compression_program: Program<'info, SPLCompression>,
+    /// CHECK: This account is neither written to nor read from.
+    pub bubblegum_program: UncheckedAccount<'info>,
+    pub system_program: Program<'info, System>,
+}

+ 258 - 0
compression/cnft-burn/tests/ReadApi/WrapperConnection.ts

@@ -0,0 +1,258 @@
+import { Commitment, Connection, ConnectionConfig, PublicKey } from "@solana/web3.js";
+// local imports for the ReadApi types
+import type {
+  GetAssetProofRpcInput,
+  GetAssetProofRpcResponse,
+  GetAssetRpcInput,
+  GetAssetsByGroupRpcInput,
+  GetAssetsByOwnerRpcInput,
+  ReadApiAsset,
+  ReadApiAssetList,
+} from "@/ReadApi/types";
+import type { Metadata, Mint, NftOriginalEdition, SplTokenCurrency } from "@metaplex-foundation/js";
+// import from the `@metaplex-foundation/js`
+import { MetaplexError, Pda, amount, toBigNumber } from "@metaplex-foundation/js";
+
+import BN from "bn.js";
+import { PROGRAM_ID as BUBBLEGUM_PROGRAM_ID } from "@metaplex-foundation/mpl-bubblegum";
+import { TokenStandard } from "@metaplex-foundation/mpl-token-metadata";
+
+type JsonRpcParams<ReadApiMethodParams> = {
+  method: string;
+  id?: string;
+  params: ReadApiMethodParams;
+};
+
+type JsonRpcOutput<ReadApiJsonOutput> = {
+  result: ReadApiJsonOutput;
+};
+
+/** @group Errors */
+export class ReadApiError extends MetaplexError {
+  readonly name: string = "ReadApiError";
+  constructor(message: string, cause?: Error) {
+    super(message, "rpc", undefined, cause);
+  }
+}
+
+/**
+ * Convert a ReadApi asset (e.g. compressed NFT) into an NftEdition
+ */
+export const toNftEditionFromReadApiAsset = (input: ReadApiAsset): NftOriginalEdition => {
+  return {
+    model: "nftEdition",
+    isOriginal: true,
+    address: new PublicKey(input.id),
+    supply: toBigNumber(input.supply.print_current_supply),
+    maxSupply: toBigNumber(input.supply.print_max_supply),
+  };
+};
+
+/**
+ * Convert a ReadApi asset (e.g. compressed NFT) into an NFT mint
+ */
+export const toMintFromReadApiAsset = (input: ReadApiAsset): Mint => {
+  const currency: SplTokenCurrency = {
+    symbol: "Token",
+    decimals: 0,
+    namespace: "spl-token",
+  };
+
+  return {
+    model: "mint",
+    address: new PublicKey(input.id),
+    mintAuthorityAddress: new PublicKey(input.id),
+    freezeAuthorityAddress: new PublicKey(input.id),
+    decimals: 0,
+    supply: amount(1, currency),
+    isWrappedSol: false,
+    currency,
+  };
+};
+
+/**
+ * Convert a ReadApi asset's data into standard Metaplex `Metadata`
+ */
+export const toMetadataFromReadApiAsset = (input: ReadApiAsset): Metadata => {
+  const updateAuthority = input.authorities?.find(authority => authority.scopes.includes("full"));
+
+  const collection = input.grouping.find(({ group_key }) => group_key === "collection");
+
+  return {
+    model: "metadata",
+    /**
+     * We technically don't have a metadata address anymore.
+     * So we are using the asset's id as the address
+     */
+    address: Pda.find(BUBBLEGUM_PROGRAM_ID, [
+      Buffer.from("asset", "utf-8"),
+      new PublicKey(input.compression.tree).toBuffer(),
+      Uint8Array.from(new BN(input.compression.leaf_id).toArray("le", 8)),
+    ]),
+    mintAddress: new PublicKey(input.id),
+    updateAuthorityAddress: new PublicKey(updateAuthority!.address),
+
+    name: input.content.metadata?.name ?? "",
+    symbol: input.content.metadata?.symbol ?? "",
+
+    json: input.content.metadata,
+    jsonLoaded: true,
+    uri: input.content.json_uri,
+    isMutable: input.mutable,
+
+    primarySaleHappened: input.royalty.primary_sale_happened,
+    sellerFeeBasisPoints: input.royalty.basis_points,
+    creators: input.creators,
+
+    editionNonce: input.supply.edition_nonce,
+    tokenStandard: TokenStandard.NonFungible,
+
+    collection: collection
+      ? { address: new PublicKey(collection.group_value), verified: false }
+      : null,
+
+    // Current regular `Metadata` does not currently have a `compression` value
+    // @ts-ignore
+    compression: input.compression,
+
+    // Read API doesn't return this info, yet
+    collectionDetails: null,
+    // Read API doesn't return this info, yet
+    uses: null,
+    // Read API doesn't return this info, yet
+    programmableConfig: null,
+  };
+};
+
+/**
+ * Wrapper class to add additional methods on top the standard Connection from `@solana/web3.js`
+ * Specifically, adding the RPC methods used by the Digital Asset Standards (DAS) ReadApi
+ * for state compression and compressed NFTs
+ */
+export class WrapperConnection extends Connection {
+  constructor(endpoint: string, commitmentOrConfig?: Commitment | ConnectionConfig) {
+    super(endpoint, commitmentOrConfig);
+  }
+
+  private callReadApi = async <ReadApiMethodParams, ReadApiJsonOutput>(
+    jsonRpcParams: JsonRpcParams<ReadApiMethodParams>,
+  ): Promise<JsonRpcOutput<ReadApiJsonOutput>> => {
+    const response = await fetch(this.rpcEndpoint, {
+      method: "POST",
+      headers: {
+        "Content-Type": "application/json",
+      },
+      body: JSON.stringify({
+        jsonrpc: "2.0",
+        method: jsonRpcParams.method,
+        id: jsonRpcParams.id ?? "rpd-op-123",
+        params: jsonRpcParams.params,
+      }),
+    });
+
+    return await response.json() as JsonRpcOutput<ReadApiJsonOutput>;
+  };
+
+  // Asset id can be calculated via Bubblegum#getLeafAssetId
+  // It is a PDA with the following seeds: ["asset", tree, leafIndex]
+  async getAsset(assetId: PublicKey): Promise<ReadApiAsset> {
+    const { result: asset } = await this.callReadApi<GetAssetRpcInput, ReadApiAsset>({
+      method: "getAsset",
+      params: {
+        id: assetId.toBase58(),
+      },
+    });
+
+    if (!asset) throw new ReadApiError("No asset returned");
+
+    return asset;
+  }
+
+  // Asset id can be calculated via Bubblegum#getLeafAssetId
+  // It is a PDA with the following seeds: ["asset", tree, leafIndex]
+  async getAssetProof(assetId: PublicKey): Promise<GetAssetProofRpcResponse> {
+    const { result: proof } = await this.callReadApi<
+      GetAssetProofRpcInput,
+      GetAssetProofRpcResponse
+    >({
+      method: "getAssetProof",
+      params: {
+        id: assetId.toBase58(),
+      },
+    });
+
+    if (!proof) throw new ReadApiError("No asset proof returned");
+
+    return proof;
+  }
+
+  //
+  async getAssetsByGroup({
+    groupKey,
+    groupValue,
+    page,
+    limit,
+    sortBy,
+    before,
+    after,
+  }: GetAssetsByGroupRpcInput): Promise<ReadApiAssetList> {
+    // `page` cannot be supplied with `before` or `after`
+    if (typeof page == "number" && (before || after))
+      throw new ReadApiError(
+        "Pagination Error. Only one pagination parameter supported per query.",
+      );
+
+    // a pagination method MUST be selected, but we are defaulting to using `page=1`
+
+    const { result } = await this.callReadApi<GetAssetsByGroupRpcInput, ReadApiAssetList>({
+      method: "getAssetsByGroup",
+      params: {
+        groupKey,
+        groupValue,
+        after: after ?? null,
+        before: before ?? null,
+        limit: limit ?? null,
+        page: page ?? 1,
+        sortBy: sortBy ?? null,
+      },
+    });
+
+    if (!result) throw new ReadApiError("No results returned");
+
+    return result;
+  }
+
+  //
+  async getAssetsByOwner({
+    ownerAddress,
+    page,
+    limit,
+    sortBy,
+    before,
+    after,
+  }: GetAssetsByOwnerRpcInput): Promise<ReadApiAssetList> {
+    // `page` cannot be supplied with `before` or `after`
+    if (typeof page == "number" && (before || after))
+      throw new ReadApiError(
+        "Pagination Error. Only one pagination parameter supported per query.",
+      );
+
+    // a pagination method MUST be selected, but we are defaulting to using `page=1`
+
+    const { result } = await this.callReadApi<GetAssetsByOwnerRpcInput, ReadApiAssetList>({
+      method: "getAssetsByOwner",
+      params: {
+        ownerAddress,
+        after: after ?? null,
+        before: before ?? null,
+        limit: limit ?? null,
+        page: page ?? 1,
+        sortBy: sortBy ?? null,
+      },
+    });
+
+    if (!result) throw new ReadApiError("No results returned");
+
+    return result;
+  }
+}

+ 175 - 0
compression/cnft-burn/tests/ReadApi/types.ts

@@ -0,0 +1,175 @@
+/*
+  Types specific to the ReadApi
+*/
+
+import type { Metadata, Option } from "@metaplex-foundation/js";
+import { ConcurrentMerkleTreeAccount } from "@solana/spl-account-compression";
+
+export type ReadApiAssetInterface =
+  | "V1_NFT"
+  | "V1_PRINT"
+  | "LEGACY_NFT"
+  | "V2_NFT"
+  | "FungibleAsset"
+  | "Custom"
+  | "Identity"
+  | "Executable"
+  | "ProgrammableNFT";
+
+export type ReadApiPropGroupKey = "collection";
+
+export type ReadApiPropSortBy = "created" | "updated" | "recent_action";
+
+export type ReadApiPropSortDirection = "asc" | "desc";
+
+export type TransferNftCompressionParam = {
+  ownership?: ReadApiOwnershipMetadata;
+  data?: ReadApiCompressionMetadata;
+  assetProof?: GetAssetProofRpcResponse;
+  merkleTree?: ConcurrentMerkleTreeAccount;
+};
+
+export type ReadApiParamAssetSortBy = {
+  sortBy: ReadApiPropSortBy;
+  sortDirection: ReadApiPropSortDirection;
+};
+
+export type ReadApiAssetContent = {
+  json_uri: string;
+  metadata: Metadata["json"];
+};
+
+export type ReadApiCompressionMetadata = {
+  eligible: boolean;
+  compressed: boolean;
+  data_hash: string;
+  creator_hash: string;
+  asset_hash: string;
+  tree: string;
+  seq: number;
+  leaf_id: number;
+};
+
+export type ReadApiOwnershipMetadata = {
+  frozen: boolean;
+  delegated: boolean;
+  delegate: string | null;
+  owner: string;
+  ownership_model: "single" | "token";
+};
+
+export type ReadApiAssetSupplyMetadata = {
+  edition_nonce: number;
+  print_current_supply: number;
+  print_max_supply: number;
+};
+
+export type ReadApiAssetRoyaltyMetadata = {
+  primary_sale_happened: boolean;
+  basis_points: number;
+};
+
+export type ReadApiAssetGrouping = {
+  group_key: ReadApiPropGroupKey;
+  group_value: string;
+};
+
+export type ReadApiAuthorityScope = "full";
+
+export type ReadApiAssetAuthority = {
+  address: string;
+  scopes: ReadApiAuthorityScope[];
+};
+
+export type GetAssetRpcInput = {
+  id: string;
+};
+
+export type GetAssetProofRpcInput = {
+  id: string;
+};
+
+export type GetAssetProofRpcResponse = {
+  root: string;
+  proof: string[];
+  node_index: number;
+  leaf: string;
+  tree_id: string;
+};
+
+export type GetAssetsByGroupRpcInput = {
+  groupKey: ReadApiPropGroupKey;
+  groupValue: string;
+  page?: Option<number>;
+  limit?: Option<number>;
+  /* assetId to search before */
+  before?: Option<string>;
+  /* assetId to search after */
+  after?: Option<string>;
+  sortBy?: Option<ReadApiParamAssetSortBy>;
+};
+
+export type GetAssetsByOwnerRpcInput = {
+  /**
+   * String of the owner's PublicKey address
+   */
+  ownerAddress: string;
+  page?: Option<number>;
+  limit?: Option<number>;
+  before?: Option<string>;
+  after?: Option<string>;
+  sortBy?: Option<ReadApiParamAssetSortBy>;
+};
+
+export type ReadApiAsset = {
+  /**
+   * The asset Id
+   */
+  id: string;
+  interface: ReadApiAssetInterface;
+  ownership: ReadApiOwnershipMetadata;
+  mutable: boolean;
+  authorities: Array<ReadApiAssetAuthority>;
+  content: ReadApiAssetContent;
+  royalty: ReadApiAssetRoyaltyMetadata;
+  supply: ReadApiAssetSupplyMetadata;
+  creators: Metadata["creators"];
+  grouping: Array<ReadApiAssetGrouping>;
+  compression: ReadApiCompressionMetadata;
+};
+
+export type ReadApiAssetList = {
+  total: number;
+  limit: number;
+
+  /**
+   * listing of individual assets, and their associated metadata
+   */
+  items: Array<ReadApiAsset>;
+
+  /**
+   * `page` is only provided when using page-based pagination, as opposed
+   * to asset id before/after based pagination
+   */
+  page: Option<number>;
+
+  /**
+   * asset Id searching before
+   */
+  before: Option<string>;
+
+  /**
+   * asset Id searching after
+   */
+  after: Option<string>;
+
+  /**
+   * listing of errors provided by the ReadApi RPC
+   */
+  errors: Option<ReadApiRpcResponseError[]>;
+};
+
+export type ReadApiRpcResponseError = {
+  error: string;
+  id: string;
+};

+ 81 - 0
compression/cnft-burn/tests/cnft-burn.ts

@@ -0,0 +1,81 @@
+import * as anchor from "@coral-xyz/anchor";
+import { Program } from "@coral-xyz/anchor";
+import { CnftBurn } from "../target/types/cnft_burn";
+import { PROGRAM_ID as BUBBLEGUM_PROGRAM_ID } from "@metaplex-foundation/mpl-bubblegum";
+import { decode, mapProof } from "./utils";
+import { getAsset, getAssetProof } from "./readApi";
+import { createAndMint } from "./createAndMint";
+import { getcNFTsFromCollection } from "./fetchNFTsByCollection";
+import {
+  SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+  SPL_NOOP_PROGRAM_ID,
+} from "@solana/spl-account-compression";
+
+// Replace this with your custom RPC endpoint
+export const RPC_PATH = "https://api.devnet.solana.com";
+
+describe("cnft-burn", () => {
+  // Configure the client to use the local cluster.
+  anchor.setProvider(anchor.AnchorProvider.env());
+
+  const program = anchor.workspace.CnftBurn as Program<CnftBurn>;
+  const provider = anchor.AnchorProvider.env();
+  const payerWallet = provider.wallet as anchor.Wallet;
+
+  let treeAddress: anchor.web3.PublicKey | undefined = undefined;
+  const MPL_BUBBLEGUM_PROGRAM_ID_KEY = new anchor.web3.PublicKey(
+    BUBBLEGUM_PROGRAM_ID
+  );
+
+  // this is the assetId of the cNft you want to burn
+  let assetId: string = "";
+
+  it("Should create the tree and mint a cnft", async () => {
+    const { tree, collection } = await createAndMint();
+    if (!tree.treeAddress) {
+      throw new Error("Tree address not found");
+    }
+    treeAddress = tree.treeAddress;
+
+    const fetchcNFTs = await getcNFTsFromCollection(
+      collection.mint,
+      payerWallet.publicKey.toString()
+    );
+    console.log("fetchcNFTs", fetchcNFTs);
+    assetId = fetchcNFTs[0];
+  });
+  it("Burn cNft!", async () => {
+    const asset = await getAsset(assetId);
+
+    const proof = await getAssetProof(assetId);
+    const proofPathAsAccounts = mapProof(proof);
+    const root = decode(proof.root);
+    const dataHash = decode(asset.compression.data_hash);
+    const creatorHash = decode(asset.compression.creator_hash);
+    const nonce = new anchor.BN(asset.compression.leaf_id);
+    const index = asset.compression.leaf_id;
+    const [treeAuthority, _bump2] =
+      anchor.web3.PublicKey.findProgramAddressSync(
+        [treeAddress.toBuffer()],
+        MPL_BUBBLEGUM_PROGRAM_ID_KEY
+      );
+    const tx = await program.methods
+      .burnCnft(root, dataHash, creatorHash, nonce, index)
+      .accounts({
+        merkleTree: treeAddress,
+        leafOwner: payerWallet.publicKey,
+        treeAuthority: treeAuthority,
+        bubblegumProgram: BUBBLEGUM_PROGRAM_ID,
+        compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+        logWrapper: SPL_NOOP_PROGRAM_ID,
+        systemProgram: anchor.web3.SystemProgram.programId,
+      })
+      .remainingAccounts(proofPathAsAccounts)
+      .rpc({
+        skipPreflight: true,
+      });
+    console.log("Your transaction signature", tx);
+    // here is a sample transaction signature on devnet
+    // https://explorer.solana.com/tx/2MpeHi64pbWNY7BKBuhAp4yND5HdfQqNqkd8pu6F6meoSNUYRvxQgV5TC4w8BM8hUihB8G8TwBAaPRqS7pnN8Nu1?cluster=devnet
+  });
+});

+ 185 - 0
compression/cnft-burn/tests/createAndMint.ts

@@ -0,0 +1,185 @@
+/**
+  Overall flow of this script
+  - load or create two keypairs (named `payer` and `testWallet`)
+  - create a new tree with enough space to mint all the nft's you want for the "collection"
+  - create a new NFT Collection on chain (using the usual Metaplex methods)
+  - mint a single compressed nft into the tree to the `payer`
+  - mint a single compressed nft into the tree to the `testWallet`
+  - display the overall cost to perform all these actions
+
+  ---
+  NOTE: this script is identical to the `scripts/verboseCreateAndMint.ts` file, except THIS file has
+  less console logging and explanation of what is occurring
+*/
+
+import { Keypair, LAMPORTS_PER_SOL, clusterApiUrl } from "@solana/web3.js";
+import {
+  MetadataArgs,
+  TokenProgramVersion,
+  TokenStandard,
+} from "@metaplex-foundation/mpl-bubblegum";
+// import custom helpers to mint compressed NFTs
+import {
+  createCollection,
+  createTree,
+  mintCompressedNFT,
+} from "./utils/compression";
+// import custom helpers for demos
+import { loadKeypairFromFile, numberFormatter } from "./utils/helpers";
+
+import { CreateMetadataAccountArgsV3 } from "@metaplex-foundation/mpl-token-metadata";
+import { ValidDepthSizePair } from "@solana/spl-account-compression";
+// local import of the connection wrapper, to help with using the ReadApi
+import { WrapperConnection } from "./ReadApi/WrapperConnection";
+import { RPC_PATH } from "./cnft-burn";
+import * as anchor from "@coral-xyz/anchor";
+
+// define some reusable balance values for tracking
+let initBalance: number, balance: number;
+
+export async function createAndMint() {
+  //////////////////////////////////////////////////////////////////////////////
+  //////////////////////////////////////////////////////////////////////////////
+
+  // load it locally from the filesystem when available
+  anchor.setProvider(anchor.AnchorProvider.env());
+  const provider = anchor.AnchorProvider.env();
+  const payerWallet = provider.wallet as anchor.Wallet;
+  const payer = payerWallet.payer;
+
+  console.log("Payer address:", payer.publicKey.toBase58());
+
+  //////////////////////////////////////////////////////////////////////////////
+  //////////////////////////////////////////////////////////////////////////////
+
+  // load the env variables and store the cluster RPC url
+  const CLUSTER_URL = RPC_PATH;
+
+  // create a new rpc connection, using the ReadApi wrapper
+  const connection = new WrapperConnection(CLUSTER_URL, "confirmed");
+
+  // get the payer's starting balance (only used for demonstration purposes)
+  initBalance = await connection.getBalance(payer.publicKey);
+
+  //////////////////////////////////////////////////////////////////////////////
+  //////////////////////////////////////////////////////////////////////////////
+
+  /*
+    Define our tree size parameters
+  */
+  const maxDepthSizePair: ValidDepthSizePair = {
+    // max=16,384 nodes
+    maxDepth: 14,
+    maxBufferSize: 64,
+  };
+  const canopyDepth = maxDepthSizePair.maxDepth - 5;
+
+  /*
+    Actually allocate the tree on chain
+  */
+
+  // define the address the tree will live at
+  const treeKeypair = Keypair.generate();
+
+  // create and send the transaction to create the tree on chain
+  const tree = await createTree(
+    connection,
+    payer,
+    treeKeypair,
+    maxDepthSizePair,
+    canopyDepth
+  );
+
+  /*
+    Create the actual NFT collection (using the normal Metaplex method)
+    (nothing special about compression here)
+  */
+
+  // define the metadata to be used for creating the NFT collection
+  const collectionMetadataV3: CreateMetadataAccountArgsV3 = {
+    data: {
+      name: "Test Burn",
+      symbol: "TB",
+      // specific json metadata for the collection
+      uri: "https://supersweetcollection.notarealurl/collection.json",
+      sellerFeeBasisPoints: 100,
+      creators: [
+        {
+          address: payer.publicKey,
+          verified: false,
+          share: 100,
+        },
+      ],
+      collection: null,
+      uses: null,
+    },
+    isMutable: false,
+    collectionDetails: null,
+  };
+
+  // create a full token mint and initialize the collection (with the `payer` as the authority)
+  const collection = await createCollection(
+    connection,
+    payer,
+    collectionMetadataV3
+  );
+
+  /*
+    Mint a single compressed NFT
+  */
+
+  const compressedNFTMetadata: MetadataArgs = {
+    name: "Pratik test",
+    symbol: collectionMetadataV3.data.symbol,
+    // specific json metadata for each NFT
+    uri: "https://bafkreies5r7b5eszpq5dgnw2brhjtlw7xtdtmsmoniebqehf37nv5rxajy.ipfs.nftstorage.link/",
+    creators: [
+      {
+        address: payer.publicKey,
+        verified: false,
+        share: 100,
+      },
+    ],
+    editionNonce: 0,
+    uses: null,
+    collection: null,
+    primarySaleHappened: false,
+    sellerFeeBasisPoints: 0,
+    isMutable: false,
+    // these values are taken from the Bubblegum package
+    tokenProgramVersion: TokenProgramVersion.Original,
+    tokenStandard: TokenStandard.NonFungible,
+  };
+
+  // fully mint a single compressed NFT to the payer
+  console.log(
+    `Minting a single compressed NFT to ${payer.publicKey.toBase58()}...`
+  );
+
+  await mintCompressedNFT(
+    connection,
+    payer,
+    treeKeypair.publicKey,
+    collection.mint,
+    collection.metadataAccount,
+    collection.masterEditionAccount,
+    compressedNFTMetadata,
+    // mint to this specific wallet (in this case, the tree owner aka `payer`)
+    payer.publicKey
+  );
+
+  //////////////////////////////////////////////////////////////////////////////
+  //////////////////////////////////////////////////////////////////////////////
+
+  // fetch the payer's final balance
+  balance = await connection.getBalance(payer.publicKey);
+
+  console.log(`===============================`);
+  console.log(
+    "Total cost:",
+    numberFormatter((initBalance - balance) / LAMPORTS_PER_SOL, true),
+    "SOL\n"
+  );
+
+  return { tree, collection };
+}

+ 77 - 0
compression/cnft-burn/tests/fetchNFTsByCollection.ts

@@ -0,0 +1,77 @@
+/**
+ * Demonstrate the use of a few of the Metaplex Read API methods,
+ * (needed to fetch compressed NFTs)
+ */
+
+// imports from other libraries
+import { PublicKey } from "@solana/web3.js";
+// import custom helpers for demos
+import { printConsoleSeparator } from "./utils/helpers";
+
+// local import of the connection wrapper, to help with using the ReadApi
+import { WrapperConnection } from "./ReadApi/WrapperConnection";
+import { RPC_PATH } from "./cnft-burn";
+
+export async function getcNFTsFromCollection(
+  collectionMint: PublicKey,
+  owner: string
+) {
+  // load the stored PublicKeys for ease of use
+  // let keys = loadPublicKeysFromFile();
+
+  // ensure the primary script was already run
+  // if (!keys?.collectionMint)
+  //   return console.warn("No local keys were found, specifically `collectionMint`");
+
+  // convert the locally saved keys to PublicKeys
+  // const collectionMint: PublicKey = keys.collectionMint;
+
+  console.log("Collection mint:", collectionMint.toBase58());
+
+  //////////////////////////////////////////////////////////////////////////////
+  //////////////////////////////////////////////////////////////////////////////
+
+  // load the env variables and store the cluster RPC url
+  const CLUSTER_URL = RPC_PATH;
+
+  // create a new rpc connection, using the ReadApi wrapper
+  const connection = new WrapperConnection(CLUSTER_URL);
+
+  printConsoleSeparator("Getting all assets by the 'collection' group...");
+
+  const assets = await connection
+    .getAssetsByGroup({
+      groupKey: "collection",
+      groupValue: collectionMint.toBase58(),
+      sortBy: {
+        sortBy: "recent_action",
+        sortDirection: "asc",
+      },
+    })
+    .then((res) => {
+      console.log("Total assets returned:", res.total);
+
+      // loop over each of the asset items in the collection
+      const assetsIds = res.items?.map((asset) => {
+        // display a spacer between each of the assets
+        console.log("\n===============================================");
+
+        // print the entire asset record to the console
+        // console.log(asset);
+
+        // print some useful info
+        console.log("assetId:", asset.id);
+        console.log("ownership:", asset.ownership);
+        console.log("compression:", asset.compression);
+
+        if (asset.ownership?.owner === owner) {
+          console.log("assetId:", asset.id);
+          return asset.id;
+        }
+      });
+
+      return assetsIds;
+    });
+
+  return assets;
+}

+ 49 - 0
compression/cnft-burn/tests/readApi.ts

@@ -0,0 +1,49 @@
+// I recommend using a WrappedConnection for production
+// as it supports more readAPI functionality
+// this is just a subset of functions for quick availability
+
+import axios from "axios";
+import { RPC_PATH } from "./cnft-burn";
+
+// NOTE: point RPC_PATH at a DAS-capable RPC endpoint — standard public RPC endpoints do not support the Read API methods used here
+
+// Fetch a single asset record via the DAS `getAsset` JSON-RPC method.
+// Returns the RPC `result` payload on success; on failure the error is only
+// logged, so the promise resolves to `undefined` — callers must handle that.
+export async function getAsset(assetId: any, rpcUrl = RPC_PATH): Promise<any> {
+  try {
+    const axiosInstance = axios.create({
+      baseURL: rpcUrl,
+    });
+    const response = await axiosInstance.post(rpcUrl, {
+      jsonrpc: "2.0",
+      method: "getAsset",
+      // arbitrary request id; the RPC echoes it back in the response
+      id: "rpd-op-123",
+      params: {
+        id: assetId,
+      },
+    });
+    return response.data.result;
+  } catch (error) {
+    console.error(error);
+  }
+}
+
+// Fetch the merkle proof for an asset via the DAS `getAssetProof` JSON-RPC
+// method. Returns the RPC `result` payload on success; on failure the error
+// is only logged, so the promise resolves to `undefined`.
+export async function getAssetProof(
+  assetId: any,
+  rpcUrl = RPC_PATH
+): Promise<any> {
+  try {
+    const axiosInstance = axios.create({
+      baseURL: rpcUrl,
+    });
+    const response = await axiosInstance.post(rpcUrl, {
+      jsonrpc: "2.0",
+      method: "getAssetProof",
+      // arbitrary request id; the RPC echoes it back in the response
+      id: "rpd-op-123",
+      params: {
+        id: assetId,
+      },
+    });
+    return response.data.result;
+  } catch (error) {
+    console.error(error);
+  }
+}

+ 40 - 0
compression/cnft-burn/tests/utils.ts

@@ -0,0 +1,40 @@
+import {
+  Connection,
+  Keypair,
+  PublicKey,
+  Signer,
+  TransactionInstruction,
+  TransactionMessage,
+  VersionedTransaction,
+  AccountMeta,
+} from "@solana/web3.js";
+
+import * as bs58 from "bs58";
+
+// Load a Keypair from a JSON file containing the secret key as a byte array.
+// Throws if the file is missing or not valid JSON.
+export function loadWalletKey(keypairFile: string): Keypair {
+  // `fs` is required lazily, local to this helper
+  const fs = require("fs");
+  return Keypair.fromSecretKey(
+    new Uint8Array(JSON.parse(fs.readFileSync(keypairFile).toString()))
+  );
+}
+
+// Decode a base58-encoded string into a plain array of byte values.
+export function decode(stuff: string) {
+  return bufferToArray(bs58.decode(stuff));
+}
+// Copy a Buffer's bytes into a number[] (one entry per byte).
+function bufferToArray(buffer: Buffer): number[] {
+  const nums: number[] = [];
+  for (let i = 0; i < buffer.length; i++) {
+    nums.push(buffer[i]);
+  }
+  return nums;
+}
+// Convert a DAS asset proof into the remaining-accounts list expected by
+// Bubblegum instructions: one readonly, non-signer AccountMeta per proof node.
+// Throws when the proof is missing or empty.
+export const mapProof = (assetProof: { proof: string[] }): AccountMeta[] => {
+  if (!assetProof.proof || assetProof.proof.length === 0) {
+    throw new Error("Proof is empty");
+  }
+  return assetProof.proof.map((node) => ({
+    pubkey: new PublicKey(node),
+    isSigner: false,
+    isWritable: false,
+  }));
+};

+ 404 - 0
compression/cnft-burn/tests/utils/compression.ts

@@ -0,0 +1,404 @@
+import {
+  Keypair,
+  PublicKey,
+  Connection,
+  Transaction,
+  sendAndConfirmTransaction,
+  TransactionInstruction,
+} from "@solana/web3.js";
+import {
+  createAccount,
+  createMint,
+  mintTo,
+  TOKEN_PROGRAM_ID,
+} from "@solana/spl-token";
+import {
+  SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+  createAllocTreeIx,
+  ValidDepthSizePair,
+  SPL_NOOP_PROGRAM_ID,
+} from "@solana/spl-account-compression";
+import {
+  PROGRAM_ID as BUBBLEGUM_PROGRAM_ID,
+  MetadataArgs,
+  computeCreatorHash,
+  computeDataHash,
+  createCreateTreeInstruction,
+  createMintToCollectionV1Instruction,
+} from "@metaplex-foundation/mpl-bubblegum";
+import {
+  PROGRAM_ID as TOKEN_METADATA_PROGRAM_ID,
+  CreateMetadataAccountArgsV3,
+  createCreateMetadataAccountV3Instruction,
+  createCreateMasterEditionV3Instruction,
+  createSetCollectionSizeInstruction,
+} from "@metaplex-foundation/mpl-token-metadata";
+
+// import local helper functions
+import { explorerURL, extractSignatureFromFailedTransaction } from "./helpers";
+
+/*
+  Helper function to create a merkle tree on chain, including allocating 
+  all the space required to store all the nodes
+*/
+// Allocates the tree account and initializes it via Bubblegum; returns
+// { treeAuthority, treeAddress } on success, rethrows after logging on failure.
+export async function createTree(
+  connection: Connection,
+  payer: Keypair,
+  treeKeypair: Keypair,
+  maxDepthSizePair: ValidDepthSizePair,
+  canopyDepth: number = 0
+) {
+  console.log("Creating a new Merkle tree...");
+  console.log("treeAddress:", treeKeypair.publicKey.toBase58());
+
+  // derive the tree's authority (PDA), owned by Bubblegum
+  const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
+    [treeKeypair.publicKey.toBuffer()],
+    BUBBLEGUM_PROGRAM_ID
+  );
+  console.log("treeAuthority:", treeAuthority.toBase58());
+
+  // allocate the tree's account on chain with the `space`
+  // NOTE: this will compute the space needed to store the tree on chain (and the lamports required to store it)
+  const allocTreeIx = await createAllocTreeIx(
+    connection,
+    treeKeypair.publicKey,
+    payer.publicKey,
+    maxDepthSizePair,
+    canopyDepth
+  );
+
+  // create the instruction to actually create the tree
+  const createTreeIx = createCreateTreeInstruction(
+    {
+      payer: payer.publicKey,
+      treeCreator: payer.publicKey,
+      treeAuthority,
+      merkleTree: treeKeypair.publicKey,
+      compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+      // NOTE: this is used for some on chain logging
+      logWrapper: SPL_NOOP_PROGRAM_ID,
+    },
+    {
+      maxBufferSize: maxDepthSizePair.maxBufferSize,
+      maxDepth: maxDepthSizePair.maxDepth,
+      public: false,
+    },
+    BUBBLEGUM_PROGRAM_ID
+  );
+
+  try {
+    // create and send the transaction to initialize the tree
+    const tx = new Transaction().add(allocTreeIx).add(createTreeIx);
+    tx.feePayer = payer.publicKey;
+
+    // send the transaction
+    const txSignature = await sendAndConfirmTransaction(
+      connection,
+      tx,
+      // ensuring the `treeKeypair` PDA and the `payer` are BOTH signers
+      [treeKeypair, payer],
+      {
+        commitment: "confirmed",
+        // NOTE: preflight is skipped, so failures only surface after confirmation
+        skipPreflight: true,
+      }
+    );
+
+    console.log("\nMerkle tree created successfully!");
+    console.log(explorerURL({ txSignature }));
+
+    // return useful info
+    return { treeAuthority, treeAddress: treeKeypair.publicKey };
+  } catch (err: any) {
+    console.error("\nFailed to create merkle tree:", err);
+
+    // log a block explorer link for the failed transaction
+    await extractSignatureFromFailedTransaction(connection, err);
+
+    throw err;
+  }
+}
+
+/**
+ * Create an NFT collection on-chain, using the regular Metaplex standards
+ * with the `payer` as the authority
+ */
+// Creates a mint + token account, mints 1 token, then creates the metadata,
+// master edition, and collection-size accounts in a single transaction.
+// Returns { mint, tokenAccount, metadataAccount, masterEditionAccount };
+// rethrows after logging on transaction failure.
+export async function createCollection(
+  connection: Connection,
+  payer: Keypair,
+  metadataV3: CreateMetadataAccountArgsV3
+) {
+  // create and initialize the SPL token mint
+  console.log("Creating the collection's mint...");
+  const mint = await createMint(
+    connection,
+    payer,
+    // mint authority
+    payer.publicKey,
+    // freeze authority
+    payer.publicKey,
+    // decimals - use `0` for NFTs since they are non-fungible
+    0
+  );
+  console.log("Mint address:", mint.toBase58());
+
+  // create the token account
+  console.log("Creating a token account...");
+  const tokenAccount = await createAccount(
+    connection,
+    payer,
+    mint,
+    payer.publicKey
+    // undefined, undefined,
+  );
+  console.log("Token account:", tokenAccount.toBase58());
+
+  // mint exactly 1 token into the collection's token account
+  console.log("Minting 1 token for the collection...");
+  const mintSig = await mintTo(
+    connection,
+    payer,
+    mint,
+    tokenAccount,
+    payer,
+    // mint exactly 1 token
+    1,
+    // no `multiSigners`
+    [],
+    undefined,
+    TOKEN_PROGRAM_ID
+  );
+  // console.log(explorerURL({ txSignature: mintSig }));
+
+  // derive the PDA for the metadata account
+  const [metadataAccount, _bump] = PublicKey.findProgramAddressSync(
+    [
+      Buffer.from("metadata", "utf8"),
+      TOKEN_METADATA_PROGRAM_ID.toBuffer(),
+      mint.toBuffer(),
+    ],
+    TOKEN_METADATA_PROGRAM_ID
+  );
+  console.log("Metadata account:", metadataAccount.toBase58());
+
+  // create an instruction to create the metadata account
+  const createMetadataIx = createCreateMetadataAccountV3Instruction(
+    {
+      metadata: metadataAccount,
+      mint: mint,
+      mintAuthority: payer.publicKey,
+      payer: payer.publicKey,
+      updateAuthority: payer.publicKey,
+    },
+    {
+      createMetadataAccountArgsV3: metadataV3,
+    }
+  );
+
+  // derive the PDA for the master edition account
+  const [masterEditionAccount, _bump2] = PublicKey.findProgramAddressSync(
+    [
+      Buffer.from("metadata", "utf8"),
+      TOKEN_METADATA_PROGRAM_ID.toBuffer(),
+      mint.toBuffer(),
+      Buffer.from("edition", "utf8"),
+    ],
+    TOKEN_METADATA_PROGRAM_ID
+  );
+  console.log("Master edition account:", masterEditionAccount.toBase58());
+
+  // create an instruction to create the master edition account
+  const createMasterEditionIx = createCreateMasterEditionV3Instruction(
+    {
+      edition: masterEditionAccount,
+      mint: mint,
+      mintAuthority: payer.publicKey,
+      payer: payer.publicKey,
+      updateAuthority: payer.publicKey,
+      metadata: metadataAccount,
+    },
+    {
+      createMasterEditionArgs: {
+        maxSupply: 0,
+      },
+    }
+  );
+
+  // create the collection size instruction
+  const collectionSizeIX = createSetCollectionSizeInstruction(
+    {
+      collectionMetadata: metadataAccount,
+      collectionAuthority: payer.publicKey,
+      collectionMint: mint,
+    },
+    {
+      setCollectionSizeArgs: { size: 50 },
+    }
+  );
+
+  try {
+    // construct the transaction with our instructions, making the `payer` the `feePayer`
+    const tx = new Transaction()
+      .add(createMetadataIx)
+      .add(createMasterEditionIx)
+      .add(collectionSizeIX);
+    tx.feePayer = payer.publicKey;
+
+    // send the transaction to the cluster
+    const txSignature = await sendAndConfirmTransaction(
+      connection,
+      tx,
+      [payer],
+      {
+        commitment: "confirmed",
+        skipPreflight: true,
+      }
+    );
+
+    console.log("\nCollection successfully created!");
+    console.log(explorerURL({ txSignature }));
+  } catch (err) {
+    console.error("\nFailed to create collection:", err);
+
+    // log a block explorer link for the failed transaction
+    await extractSignatureFromFailedTransaction(connection, err);
+
+    throw err;
+  }
+
+  // return all the accounts
+  return { mint, tokenAccount, metadataAccount, masterEditionAccount };
+}
+
+/**
+ * Mint a single compressed NFTs to any address
+ */
+// Mints one compressed NFT into the given collection via Bubblegum's
+// `mintToCollectionV1`; returns the transaction signature on success,
+// rethrows after logging on failure.
+export async function mintCompressedNFT(
+  connection: Connection,
+  payer: Keypair,
+  treeAddress: PublicKey,
+  collectionMint: PublicKey,
+  collectionMetadata: PublicKey,
+  collectionMasterEditionAccount: PublicKey,
+  compressedNFTMetadata: MetadataArgs,
+  receiverAddress?: PublicKey
+) {
+  // derive the tree's authority (PDA), owned by Bubblegum
+  const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
+    [treeAddress.toBuffer()],
+    BUBBLEGUM_PROGRAM_ID
+  );
+
+  // derive a PDA (owned by Bubblegum) to act as the signer of the compressed minting
+  const [bubblegumSigner, _bump2] = PublicKey.findProgramAddressSync(
+    // `collection_cpi` is a custom prefix required by the Bubblegum program
+    [Buffer.from("collection_cpi", "utf8")],
+    BUBBLEGUM_PROGRAM_ID
+  );
+
+  // create an array of instructions, to mint multiple compressed NFTs at once
+  const mintIxs: TransactionInstruction[] = [];
+
+  /**
+   * correctly format the metadata args for the nft to mint
+   * ---
+   * note: minting an nft into a collection (via `createMintToCollectionV1Instruction`)
+   * will auto verify the collection. But, the `collection.verified` value inside the
+   * `metadataArgs` must be set to `false` in order for the instruction to succeed
+   */
+  // NOTE: Object.assign mutates `compressedNFTMetadata` in place (its
+  // `collection` field is overwritten with the one built here)
+  const metadataArgs = Object.assign(compressedNFTMetadata, {
+    collection: { key: collectionMint, verified: false },
+  });
+
+  /**
+   * compute the data and creator hash for display in the console
+   *
+   * note: this is not required to do in order to mint new compressed nfts
+   * (since it is performed on chain via the Bubblegum program)
+   * this is only for demonstration
+   */
+  const computedDataHash = new PublicKey(
+    computeDataHash(metadataArgs)
+  ).toBase58();
+  const computedCreatorHash = new PublicKey(
+    computeCreatorHash(metadataArgs.creators)
+  ).toBase58();
+  console.log("computedDataHash:", computedDataHash);
+  console.log("computedCreatorHash:", computedCreatorHash);
+
+  /*
+    Add a single mint to collection instruction 
+    ---
+    But you could add multiple in the same transaction, as long as your 
+    transaction is still within the byte size limits
+  */
+  mintIxs.push(
+    createMintToCollectionV1Instruction(
+      {
+        payer: payer.publicKey,
+
+        merkleTree: treeAddress,
+        treeAuthority,
+        treeDelegate: payer.publicKey,
+
+        // set the receiver of the NFT
+        leafOwner: receiverAddress || payer.publicKey,
+        // set a delegated authority over this NFT
+        leafDelegate: payer.publicKey,
+
+        /*
+            You can set any delegate address at mint, otherwise should 
+            normally be the same as `leafOwner`
+            NOTE: the delegate will be auto cleared upon NFT transfer
+            ---
+            in this case, we are setting the payer as the delegate
+          */
+
+        // collection details
+        collectionAuthority: payer.publicKey,
+        collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID,
+        collectionMint: collectionMint,
+        collectionMetadata: collectionMetadata,
+        editionAccount: collectionMasterEditionAccount,
+
+        // other accounts
+        compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+        logWrapper: SPL_NOOP_PROGRAM_ID,
+        bubblegumSigner: bubblegumSigner,
+        tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
+      },
+      {
+        metadataArgs,
+      }
+    )
+  );
+
+  try {
+    // construct the transaction with our instructions, making the `payer` the `feePayer`
+    const tx = new Transaction().add(...mintIxs);
+    tx.feePayer = payer.publicKey;
+
+    // send the transaction to the cluster
+    const txSignature = await sendAndConfirmTransaction(
+      connection,
+      tx,
+      [payer],
+      {
+        commitment: "confirmed",
+        skipPreflight: true,
+      }
+    );
+
+    console.log("\nSuccessfully minted the compressed NFT!");
+    console.log(explorerURL({ txSignature }));
+
+    return txSignature;
+  } catch (err) {
+    console.error("\nFailed to mint compressed NFT:", err);
+
+    // log a block explorer link for the failed transaction
+    await extractSignatureFromFailedTransaction(connection, err);
+
+    throw err;
+  }
+}

+ 302 - 0
compression/cnft-burn/tests/utils/helpers.ts

@@ -0,0 +1,302 @@
+import fs from "fs";
+import path from "path";
+import {
+  Connection,
+  Keypair,
+  LAMPORTS_PER_SOL,
+  PublicKey,
+} from "@solana/web3.js";
+
+// define some default locations
+const DEFAULT_KEY_DIR_NAME = ".local_keys";
+const DEFAULT_PUBLIC_KEY_FILE = "keys.json";
+const DEFAULT_DEMO_DATA_FILE = "demo.json";
+
+/*
+  Load locally stored PublicKey addresses
+*/
+// Load the saved keys file and convert every value to a PublicKey.
+// Returns {} (and stays silent) on any error, including a missing file.
+export function loadPublicKeysFromFile(
+  absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_PUBLIC_KEY_FILE}`
+) {
+  try {
+    if (!absPath) throw Error("No path provided");
+    if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+    // load the public keys from the file
+    const data =
+      JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" })) || {};
+
+    // convert all loaded keyed values into valid public keys
+    for (const [key, value] of Object.entries(data)) {
+      // NOTE(review): the `?? ""` fallback is dead code — the PublicKey
+      // constructor throws on bad input rather than returning null
+      data[key] = new PublicKey(value as string) ?? "";
+    }
+
+    return data;
+  } catch (err) {
+    // console.warn("Unable to load local file");
+  }
+  // always return an object
+  return {};
+}
+
+/*
+  Locally save a demo data to the filesystem for later retrieval
+*/
+// Merge `newData` under the `name` key into the demo JSON file on disk.
+// Returns the merged object on success, or {} if the write fails.
+export function saveDemoDataToFile(
+  name: string,
+  newData: any,
+  absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_DEMO_DATA_FILE}`
+) {
+  try {
+    let data: object = {};
+
+    // fetch all the current values, when the storage file exists
+    if (fs.existsSync(absPath))
+      data = JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" })) || {};
+
+    // merge in (or overwrite) the entry for `name`
+    data = { ...data, [name]: newData };
+
+    // actually save the data to the file
+    fs.writeFileSync(absPath, JSON.stringify(data), {
+      encoding: "utf-8",
+    });
+
+    return data;
+  } catch (err) {
+    console.warn("Unable to save to file");
+    // console.warn(err);
+  }
+
+  // always return an object
+  return {};
+}
+
+/*
+  Locally save a PublicKey addresses to the filesystem for later retrieval
+*/
+// Persist a single named PublicKey (as base58) into the shared keys file,
+// preserving any existing entries. Returns the reloaded key map, or {} on
+// failure.
+export function savePublicKeyToFile(
+  name: string,
+  publicKey: PublicKey,
+  absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_PUBLIC_KEY_FILE}`
+) {
+  try {
+    // if (!absPath) throw Error("No path provided");
+    // if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+    // fetch all the current values
+    let data: any = loadPublicKeysFromFile(absPath);
+
+    // convert all loaded keyed values from PublicKeys to strings
+    for (const [key, value] of Object.entries(data)) {
+      data[key as any] = (value as PublicKey).toBase58();
+    }
+    data = { ...data, [name]: publicKey.toBase58() };
+
+    // actually save the data to the file
+    fs.writeFileSync(absPath, JSON.stringify(data), {
+      encoding: "utf-8",
+    });
+
+    // reload the keys for sanity
+    data = loadPublicKeysFromFile(absPath);
+
+    return data;
+  } catch (err) {
+    console.warn("Unable to save to file");
+  }
+  // always return an object
+  return {};
+}
+
+/*
+  Load a locally stored JSON keypair file and convert it to a valid Keypair
+*/
+// Load a JSON keypair file (array of secret-key bytes) into a Keypair.
+// Unlike the other loaders here, this rethrows on any failure.
+export function loadKeypairFromFile(absPath: string) {
+  try {
+    if (!absPath) throw Error("No path provided");
+    if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+    // load the keypair from the file
+    const keyfileBytes = JSON.parse(
+      fs.readFileSync(absPath, { encoding: "utf-8" })
+    );
+    // parse the loaded secretKey into a valid keypair
+    const keypair = Keypair.fromSecretKey(new Uint8Array(keyfileBytes));
+    return keypair;
+  } catch (err) {
+    // return false;
+    throw err;
+  }
+}
+
+/*
+  Save a locally stored JSON keypair file for later importing
+*/
+// Write a keypair's secret key to `<dirName>/<fileName>.json`, creating the
+// directory and overwriting any existing file. Returns the file path written.
+export function saveKeypairToFile(
+  keypair: Keypair,
+  fileName: string,
+  dirName: string = DEFAULT_KEY_DIR_NAME
+) {
+  fileName = path.join(dirName, `${fileName}.json`);
+
+  // create the `dirName` directory, if it does not exists
+  if (!fs.existsSync(`./${dirName}/`)) fs.mkdirSync(`./${dirName}/`);
+
+  // remove the current file, if it already exists
+  if (fs.existsSync(fileName)) fs.unlinkSync(fileName);
+
+  // write the `secretKey` value as a string
+  fs.writeFileSync(fileName, `[${keypair.secretKey.toString()}]`, {
+    encoding: "utf-8",
+  });
+
+  return fileName;
+}
+
+/*
+  Attempt to load a keypair from the filesystem, or generate and save a new one
+*/
+// Load `<dirName>/<fileName>.json` as a Keypair if it exists; otherwise
+// generate a fresh keypair and persist it there. Rethrows after logging.
+export function loadOrGenerateKeypair(
+  fileName: string,
+  dirName: string = DEFAULT_KEY_DIR_NAME
+) {
+  try {
+    // compute the path to locate the file
+    const searchPath = path.join(dirName, `${fileName}.json`);
+    let keypair = Keypair.generate();
+
+    // attempt to load the keypair from the file
+    if (fs.existsSync(searchPath)) keypair = loadKeypairFromFile(searchPath);
+    // when unable to locate the keypair, save the new one
+    else saveKeypairToFile(keypair, fileName, dirName);
+
+    return keypair;
+  } catch (err) {
+    console.error("loadOrGenerateKeypair:", err);
+    throw err;
+  }
+}
+
+/*
+  Compute the Solana explorer address for the various data
+*/
+// Build a Solana Explorer URL for an address or a transaction signature,
+// defaulting the `cluster` query param to devnet. Returns "[unknown]" when
+// neither is provided; note the returned URL has a trailing "\n" appended.
+export function explorerURL({
+  address,
+  txSignature,
+  cluster,
+}: {
+  address?: string;
+  txSignature?: string;
+  cluster?: "devnet" | "testnet" | "mainnet" | "mainnet-beta";
+}) {
+  let baseUrl: string;
+  // address takes precedence when both are supplied
+  if (address) baseUrl = `https://explorer.solana.com/address/${address}`;
+  else if (txSignature)
+    baseUrl = `https://explorer.solana.com/tx/${txSignature}`;
+  else return "[unknown]";
+
+  // auto append the desired search params
+  const url = new URL(baseUrl);
+  url.searchParams.append("cluster", cluster || "devnet");
+  return url.toString() + "\n";
+}
+
+/**
+ * Auto airdrop the given wallet if it has a balance of < 0.5 SOL
+ */
+// Request a 1 SOL airdrop when the wallet holds < 0.5 SOL (or when forced).
+// NOTE: returns the balance measured BEFORE any airdrop — newly airdropped
+// lamports are not reflected in the return value.
+export async function airdropOnLowBalance(
+  connection: Connection,
+  keypair: Keypair,
+  forceAirdrop: boolean = false
+) {
+  // get the current balance
+  let balance = await connection.getBalance(keypair.publicKey);
+
+  // define the low balance threshold before airdrop
+  const MIN_BALANCE_TO_AIRDROP = LAMPORTS_PER_SOL / 2; // current: 0.5 SOL
+
+  // check the balance of the two accounts, airdrop when low
+  if (forceAirdrop === true || balance < MIN_BALANCE_TO_AIRDROP) {
+    console.log(
+      `Requesting airdrop of 1 SOL to ${keypair.publicKey.toBase58()}...`
+    );
+    // the airdrop is awaited only for its signature, not for confirmation
+    await connection
+      .requestAirdrop(keypair.publicKey, LAMPORTS_PER_SOL)
+      .then((sig) => {
+        console.log("Tx signature:", sig);
+        // balance = balance + LAMPORTS_PER_SOL;
+      });
+
+    // fetch the new balance
+    // const newBalance = await connection.getBalance(keypair.publicKey);
+    // return newBalance;
+  }
+  // else console.log("Balance of:", balance / LAMPORTS_PER_SOL, "SOL");
+
+  return balance;
+}
+
+/*
+  Helper function to extract a transaction signature from a failed transaction's error message
+*/
+// Extract a transaction signature from a failed-transaction error: use
+// `err.signature` when present, otherwise regex-match it out of the error
+// message. Optionally fetches and prints the transaction's logs. Returns the
+// signature, or undefined when none could be found.
+export async function extractSignatureFromFailedTransaction(
+  connection: Connection,
+  err: any,
+  fetchLogs?: boolean
+) {
+  if (err?.signature) return err.signature;
+
+  // extract the failed transaction's signature
+  // (capture group 4 is the base58 signature following "Transaction"/"Signature")
+  const failedSig = new RegExp(
+    /^((.*)?Error: )?(Transaction|Signature) ([A-Z0-9]{32,}) /gim
+  ).exec(err?.message?.toString())?.[4];
+
+  // ensure a signature was found
+  if (failedSig) {
+    // when desired, attempt to fetch the program logs from the cluster
+    if (fetchLogs)
+      await connection
+        .getTransaction(failedSig, {
+          maxSupportedTransactionVersion: 0,
+        })
+        .then((tx) => {
+          console.log(`\n==== Transaction logs for ${failedSig} ====`);
+          console.log(explorerURL({ txSignature: failedSig }), "");
+          console.log(
+            tx?.meta?.logMessages ?? "No log messages provided by RPC"
+          );
+          console.log(`==== END LOGS ====\n`);
+        });
+    else {
+      console.log("\n========================================");
+      console.log(explorerURL({ txSignature: failedSig }));
+      console.log("========================================\n");
+    }
+  }
+
+  // always return the failed signature value
+  return failedSig;
+}
+
+/*
+  Standard number formatter
+*/
+// Locale-aware number formatting: 10 fraction digits for values < 1 (or when
+// `forceDecimals` is set), otherwise 2.
+export function numberFormatter(num: number, forceDecimals = false) {
+  // set the significant figures
+  const minimumFractionDigits = num < 1 || forceDecimals ? 10 : 2;
+
+  // do the formatting (undefined locale => the runtime's default locale)
+  return new Intl.NumberFormat(undefined, {
+    minimumFractionDigits,
+  }).format(num);
+}
+
+/*
+  Display a separator in the console, with or without a message
+*/
+// Print a two-line console separator; the optional message is printed AFTER
+// the separator lines.
+export function printConsoleSeparator(message?: string) {
+  console.log("\n===============================================");
+  console.log("===============================================\n");
+  if (message) console.log(message);
+}

+ 11 - 0
compression/cnft-burn/tsconfig.json

@@ -0,0 +1,11 @@
+{
+  "compilerOptions": {
+    "types": ["mocha", "chai"],
+    "typeRoots": ["./node_modules/@types"],
+    "lib": ["es2015"],
+    "module": "commonjs",
+    "target": "es6",
+    "esModuleInterop": true
+  }
+}
+