Browse Source

Bob/cutils (#32)

* initial setup

* delete unused

* Update README.md

* refactor

* add account data setup
Burger Bob 2 years ago
parent
commit
32dc87439e

+ 8 - 0
compression/cutils/.gitignore

@@ -0,0 +1,8 @@
+
+.anchor
+.DS_Store
+target
+**/*.rs.bk
+node_modules
+test-ledger
+.local_keys

+ 15 - 0
compression/cutils/Anchor.toml

@@ -0,0 +1,15 @@
+[features]
+seeds = false
+skip-lint = false
+[programs.devnet]
+cutils = "burZc1SfqbrAP35XG63YZZ82C9Zd22QUwhCXoEUZWNF"
+
+[registry]
+url = "https://api.apr.dev"
+
+[provider]
+cluster = "devnet"
+wallet = "~/.config/solana/test.json"
+
+[scripts]
+test = "yarn run ts-mocha -p ./tsconfig.json -t 1000000 tests/**/*.ts"

+ 4 - 0
compression/cutils/Cargo.toml

@@ -0,0 +1,4 @@
+[workspace]
+members = [
+    "programs/*"
+]

+ 52 - 0
compression/cutils/README.md

@@ -0,0 +1,52 @@
+# Solana Program cNFT utils
+
+This repo contains example code of how you can work with Metaplex compressed NFTs inside of Solana Anchor programs.
+
+The basic idea is to allow for custom logic in your own Solana program by doing a CPI to the bubblegum minting instruction. Two instructions:
+
+1. **mint**: mints a cNFT to your collection by doing a CPI to bubblegum. You could initialise your own program-specific PDA in this instruction
+2. **verify**: verifies that the owner of the cNFT did in fact actuate the instruction. This is more of a utility function, which is to be used for future program-specific use-cases.
+
+This program can be used as an inspiration on how to work with cNFTs in Solana programs.
+
+## Components
+- **programs**: the Solana program
+  - There is a validate/actuate setup which allows you to validate some constraints through an `access_control` macro. This might be useful to use in conjunction with the cNFT verification logic.
+
+- **tests**: 
+  - `setup.ts` which is to be executed first if you don't already have a collection with merkle tree(s). 
+  - `tests.ts` for running individual minting and verification tests 
+
+## Deployment
+
+The program is deployed on devnet at `burZc1SfqbrAP35XG63YZZ82C9Zd22QUwhCXoEUZWNF`. 
+You can deploy it yourself by changing the respective values in lib.rs and Anchor.toml.
+
+## Limitations
+
+This is just an example implementation. Use at your own discretion.
+
+**This only works on anchor 0.26.0 for now due to mpl-bubblegum dependencies** 
+
+## Further resources
+A video about the creation of this code which also contains further explanations has been published on Burger Bob's YouTube channel: COMING SOON
+
+## How-to
+1. Configure RPC path in _utils/readAPI.ts_. Personal preference: Helius RPCs.
+2. `cd` into the root folder
+3. Install packages: `yarn`
+4. Optional: run `npx ts-node tests/setup.ts` to set up an NFT collection and its underlying merkle tree.
+5. Comment-out the tests you don't want to execute in `tests/tests.ts`
+6. If minting, change to your appropriate NFT uri
+7. If verifying, change to your appropriate assetId (cNFT mint address)
+8. Run `anchor test --skip-build --skip-deploy --skip-local-validator`
+9. You can check your cNFTs on devnet through the Solflare wallet (thanks [@SolPlay_jonas](https://twitter.com/SolPlay_jonas))
+10. You might want to change the wallet-path in `Anchor.toml`
+
+
+## Acknowledgements
+This repo would not have been possible without the work of:
+- [@nickfrosty](https://twitter.com/nickfrosty) for providing sample code and doing a live demo [here](https://youtu.be/LxhTxS9DexU)
+- [@HeyAndyS](https://twitter.com/HeyAndyS) for laying the groundwork with cnft-vault
+- The kind folks responding to this [thread](https://twitter.com/burger606/status/1669289672076320771?s=20)
+- [Switchboard VRF-flip](https://github.com/switchboard-xyz/vrf-flip/tree/main/client) for inspiring the validate/actuate setup.

+ 26 - 0
compression/cutils/package.json

@@ -0,0 +1,26 @@
+{
+    "scripts": {
+        "lint:fix": "prettier */*.js \"*/**/*{.js,.ts}\" -w",
+        "lint": "prettier */*.js \"*/**/*{.js,.ts}\" --check"
+    },
+    "dependencies": {
+        "@coral-xyz/anchor": "^0.27.0",
+        "@metaplex-foundation/js": "^0.19.4",
+        "@metaplex-foundation/mpl-bubblegum": "^0.7.0",
+        "@solana/spl-account-compression": "^0.1.8",
+        "@solana/spl-token": "^0.3.8",
+        "@solana/web3.js": "^1.77.3",
+        "axios": "^1.4.0"
+    },
+    "devDependencies": {
+        "@types/bn.js": "^5.1.0",
+        "@types/chai": "^4.3.0",
+        "@types/mocha": "^9.0.0",
+        "chai": "^4.3.4",
+        "mocha": "^9.0.3",
+        "prettier": "^2.6.2",
+        "ts-mocha": "^10.0.0",
+        "ts-node": "^10.9.1",
+        "typescript": "^4.3.5"
+    }
+}

+ 27 - 0
compression/cutils/programs/cutils/Cargo.toml

@@ -0,0 +1,27 @@
+[package]
+name = "cutils"
+version = "0.1.0"
+description = "Created with Anchor"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib", "lib"]
+name = "cutils"
+
+[features]
+no-entrypoint = []
+no-idl = []
+no-log-ix-name = []
+cpi = ["no-entrypoint"]
+default = []
+
+[dependencies]
+anchor-lang = "0.26.0"
+solana-program = "~1.14.18"
+spl-account-compression = { version="0.1.8", features = ["cpi"] }
+mpl-bubblegum = { version = "0.7.0", features = ["no-entrypoint", "cpi"] }
+
+# Added due to anchor and solana-cli wonkyness as of late
+getrandom = { version = "0.2.10", features = ["custom"] }
+winnow = "=0.4.1"
+toml_datetime = "=0.6.1"

+ 2 - 0
compression/cutils/programs/cutils/Xargo.toml

@@ -0,0 +1,2 @@
+[target.bpfel-unknown-unknown.dependencies.std]
+features = []

+ 143 - 0
compression/cutils/programs/cutils/src/actions/mint.rs

@@ -0,0 +1,143 @@
+use crate::*;
+use mpl_bubblegum::{
+    state::{
+        TreeConfig,
+        COLLECTION_CPI_PREFIX,
+        metaplex_adapter::{Collection, Creator, MetadataArgs, TokenProgramVersion, TokenStandard},
+        metaplex_anchor::{
+            TokenMetadata, MplTokenMetadata
+        },
+    }
+};
+
+#[derive(Accounts)]
+#[instruction(params: MintParams)]
+pub struct Mint<'info> {
+    // #[account(
+    //     init,
+    //     seeds = [
+    //         SEED_DATA,
+    //         data.tree,
+    //         data.tree_nonce
+    //         // assetId directly?
+    //     ],
+    //     bump,
+    //     payer = payer,
+    //     space = Data::LEN,
+    // )]
+    // pub data: Account<'info, Data>,
+
+    pub payer: Signer<'info>,
+
+    // Bubblegum cNFT stuff MintToCollectionV1
+    #[account(
+        mut,
+        seeds = [merkle_tree.key().as_ref()],
+        seeds::program = bubblegum_program.key(),
+        bump,
+    )]
+    pub tree_authority: Box<Account<'info, TreeConfig>>,
+
+    /// CHECK: This account is neither written to nor read from.
+    pub leaf_owner: AccountInfo<'info>,
+
+    /// CHECK: This account is neither written to nor read from.
+    pub leaf_delegate: AccountInfo<'info>,
+
+    #[account(mut)]
+    /// CHECK: unsafe
+    pub merkle_tree: UncheckedAccount<'info>,
+
+    pub tree_delegate: Signer<'info>,
+
+    pub collection_authority: Signer<'info>,
+
+    /// CHECK: Optional collection authority record PDA.
+    /// If there is no collection authority record PDA then
+    /// this must be the Bubblegum program address.
+    pub collection_authority_record_pda: UncheckedAccount<'info>,
+
+    /// CHECK: This account is checked in the instruction
+    pub collection_mint: UncheckedAccount<'info>,
+
+    #[account(mut)]
+    pub collection_metadata: Box<Account<'info, TokenMetadata>>,
+
+    /// CHECK: This account is checked in the instruction
+    pub edition_account: UncheckedAccount<'info>,
+
+    /// CHECK: This is just used as a signing PDA.
+    #[account(
+        seeds = [COLLECTION_CPI_PREFIX.as_ref()],
+        seeds::program = bubblegum_program.key(),
+        bump,
+    )]
+    pub bubblegum_signer: UncheckedAccount<'info>,
+    pub log_wrapper: Program<'info, Noop>,
+    pub compression_program: Program<'info, SplAccountCompression>,
+    pub token_metadata_program: Program<'info, MplTokenMetadata>,
+    pub bubblegum_program: Program<'info, MplBubblegum>,
+    pub system_program: Program<'info, System>,
+}
+
+#[derive(Clone, AnchorSerialize, AnchorDeserialize)]
+pub struct MintParams {
+    uri: String,
+}
+
+impl Mint<'_> {
+    pub fn validate(
+        &self,
+        _ctx: &Context<Self>,
+        _params: &MintParams,
+    ) -> Result<()> {
+        Ok(())
+    }
+
+    pub fn actuate<'info>(
+        ctx: Context<'_, '_, '_, 'info, Mint<'info>>,
+        params: MintParams
+    ) -> Result<()> {
+        mpl_bubblegum::cpi::mint_to_collection_v1(
+            CpiContext::new(
+                ctx.accounts.bubblegum_program.to_account_info(),
+                mpl_bubblegum::cpi::accounts::MintToCollectionV1 {
+                    tree_authority: ctx.accounts.tree_authority.to_account_info(),
+                    leaf_owner: ctx.accounts.leaf_owner.to_account_info(),
+                    leaf_delegate: ctx.accounts.leaf_delegate.to_account_info(),
+                    merkle_tree: ctx.accounts.merkle_tree.to_account_info(),
+                    payer: ctx.accounts.payer.to_account_info(),
+                    tree_delegate: ctx.accounts.tree_delegate.to_account_info(),
+                    collection_authority: ctx.accounts.collection_authority.to_account_info(),
+                    collection_authority_record_pda: ctx.accounts.collection_authority_record_pda.to_account_info(),
+                    collection_mint: ctx.accounts.collection_mint.to_account_info(),
+                    collection_metadata: ctx.accounts.collection_metadata.to_account_info(),
+                    edition_account: ctx.accounts.edition_account.to_account_info(),
+                    bubblegum_signer: ctx.accounts.bubblegum_signer.to_account_info(),
+                    log_wrapper: ctx.accounts.log_wrapper.to_account_info(),
+                    compression_program: ctx.accounts.compression_program.to_account_info(),
+                    token_metadata_program: ctx.accounts.token_metadata_program.to_account_info(),
+                    system_program: ctx.accounts.system_program.to_account_info(),
+                }
+            ),
+            MetadataArgs {
+                name: "BURGER".to_string(),
+                symbol: "BURG".to_string(),
+                uri: params.uri,
+                creators: vec![
+                    Creator {address: ctx.accounts.collection_authority.key(), verified: false, share: 100},
+                ],
+                seller_fee_basis_points: 0,
+                primary_sale_happened: false,
+                is_mutable: false,
+                edition_nonce: Some(0),
+                uses: None,
+                collection: Some(Collection {verified: false, key: ctx.accounts.collection_mint.key()}),
+                token_program_version: TokenProgramVersion::Original,
+                token_standard: Some(TokenStandard::NonFungible),
+            }
+        )?;
+
+        Ok(())
+    }
+}

+ 5 - 0
compression/cutils/programs/cutils/src/actions/mod.rs

@@ -0,0 +1,5 @@
+pub mod mint;
+pub use mint::*;
+
+pub mod verify;
+pub use verify::*;

+ 67 - 0
compression/cutils/programs/cutils/src/actions/verify.rs

@@ -0,0 +1,67 @@
+use crate::*;
+use mpl_bubblegum::state::leaf_schema::LeafSchema;
+use mpl_bubblegum::utils::get_asset_id;
+use spl_account_compression::{
+    program::SplAccountCompression
+};
+
+#[derive(Accounts)]
+#[instruction(params: VerifyParams)]
+pub struct Verify<'info> {
+    pub leaf_owner: Signer<'info>,
+
+    /// CHECK: This account is neither written to nor read from.
+    pub leaf_delegate: AccountInfo<'info>,
+
+    /// CHECK: unsafe
+    pub merkle_tree: UncheckedAccount<'info>,
+
+    pub compression_program: Program<'info, SplAccountCompression>,
+}
+
+#[derive(Clone, AnchorSerialize, AnchorDeserialize)]
+pub struct VerifyParams {
+    root: [u8; 32],
+    data_hash: [u8; 32],
+    creator_hash: [u8; 32],
+    nonce: u64,
+    index: u32,
+}
+
+impl Verify<'_> {
+    pub fn validate(
+        &self,
+        _ctx: &Context<Self>,
+        _params: &VerifyParams
+    ) -> Result<()> {
+        Ok(())
+    }
+
+    pub fn actuate<'info>(ctx: Context<'_, '_, '_, 'info, Verify<'info>>, params: &VerifyParams) -> Result<()> {
+        let asset_id = get_asset_id(&ctx.accounts.merkle_tree.key(), params.nonce);
+        let leaf = LeafSchema::new_v0(
+            asset_id,
+            ctx.accounts.leaf_owner.key(),
+            ctx.accounts.leaf_delegate.key(),
+            params.nonce,
+            params.data_hash,
+            params.creator_hash,
+        );
+
+        let cpi_ctx = CpiContext::new(
+            ctx.accounts.compression_program.to_account_info(),
+            spl_account_compression::cpi::accounts::VerifyLeaf {
+                merkle_tree: ctx.accounts.merkle_tree.to_account_info(),
+            },
+        ).with_remaining_accounts(ctx.remaining_accounts.to_vec());
+
+        spl_account_compression::cpi::verify_leaf(
+            cpi_ctx,
+            params.root,
+            leaf.to_node(),
+            params.index,
+        )?;
+
+        Ok(())
+    }
+}

+ 44 - 0
compression/cutils/programs/cutils/src/lib.rs

@@ -0,0 +1,44 @@
+pub mod actions;
+pub use actions::*;
+
+pub mod state;
+pub use state::*;
+
+use anchor_lang::prelude::*;
+use solana_program::{pubkey::Pubkey};
+use spl_account_compression::{
+    program::SplAccountCompression, Noop,
+};
+
+#[derive(Clone)]
+pub struct MplBubblegum;
+
+impl anchor_lang::Id for MplBubblegum {
+    fn id() -> Pubkey {
+        mpl_bubblegum::id()
+    }
+}
+
+declare_id!("burZc1SfqbrAP35XG63YZZ82C9Zd22QUwhCXoEUZWNF");
+
+#[program]
+pub mod cutils {
+    use super::*;
+
+    #[access_control(ctx.accounts.validate(&ctx, &params))]
+    pub fn mint<'info>(
+        ctx: Context<'_, '_, '_, 'info, Mint<'info>>,
+        params: MintParams
+    ) -> Result<()> {
+        Mint::actuate(ctx, params)
+    }
+
+    #[access_control(ctx.accounts.validate(&ctx, &params))]
+    pub fn verify<'info>(
+        ctx: Context<'_, '_, '_, 'info, Verify<'info>>,
+        params: VerifyParams
+    ) -> Result<()> {
+        Verify::actuate(ctx, &params)
+    }
+
+}

+ 24 - 0
compression/cutils/programs/cutils/src/state/data.rs

@@ -0,0 +1,24 @@
+use crate::*;
+
+pub const SEED_DATA: &[u8] = b"DATA";
+
+#[account]
+#[derive(Default, Debug)]
+pub struct Data {
+    /// The bump, used for PDA validation.
+    pub bump: u8,
+    pub tree: Pubkey,
+    pub tree_nonce: u64,
+}
+
+impl Data {
+    pub const LEN: usize = 8 + 1 + 32 + 8;
+
+    pub fn new(bump: u8, tree: Pubkey, tree_nonce: u64) -> Self {
+        Self {
+            bump,
+            tree,
+            tree_nonce
+        }
+    }
+}

+ 2 - 0
compression/cutils/programs/cutils/src/state/mod.rs

@@ -0,0 +1,2 @@
+pub mod data;
+pub use data::*;

+ 57 - 0
compression/cutils/tests/setup.ts

@@ -0,0 +1,57 @@
+import {loadOrGenerateKeypair, savePublicKeyToFile} from "./utils/helpers";
+import {Connection, Keypair} from "@solana/web3.js";
+import {ValidDepthSizePair} from "@solana/spl-account-compression";
+import {createCollection, createTree} from "./utils/compression";
+import {CreateMetadataAccountArgsV3} from "@metaplex-foundation/mpl-token-metadata";
+
+async function setup() {
+    const rpc = "https://api.devnet.solana.com"
+    const connection = new Connection(rpc, "confirmed")
+
+    // Collection auth and treeCreator
+    const payer = loadOrGenerateKeypair("payer");
+
+    // Airdrop
+    await connection.requestAirdrop(payer.publicKey, 1 * 10**9);
+    console.log("Payer address:", payer.publicKey.toBase58());
+
+    const treeKeypair = Keypair.generate();
+    const maxDepthSizePair: ValidDepthSizePair = {
+        maxDepth: 14,
+        maxBufferSize: 64,
+    };
+    const canopyDepth = maxDepthSizePair.maxDepth - 5;
+    const tree = await createTree(connection, payer, treeKeypair, maxDepthSizePair, canopyDepth);
+
+    // locally save the addresses for demo
+    savePublicKeyToFile("treeAddress", tree.treeAddress);
+
+    const collectionMetadataV3: CreateMetadataAccountArgsV3 = {
+        data: {
+            name: "Super Sweet NFT Collection",
+            symbol: "SSNC",
+            // specific json metadata for the collection
+            uri: "https://supersweetcollection.notarealurl/collection.json",
+            sellerFeeBasisPoints: 100,
+            creators: [
+                {
+                    address: payer.publicKey,
+                    verified: false,
+                    share: 100,
+                },
+            ],
+            collection: null,
+            uses: null,
+        },
+        isMutable: false,
+        collectionDetails: null,
+    };
+
+    // create a full token mint and initialize the collection (with the `payer` as the authority)
+    const collection = await createCollection(connection, payer, collectionMetadataV3);
+
+    // locally save the addresses for the demo
+    savePublicKeyToFile("collectionMint", collection.mint);
+}
+
+// setup()

+ 77 - 0
compression/cutils/tests/tests.ts

@@ -0,0 +1,77 @@
+import * as anchor from "@coral-xyz/anchor";
+import {decode, getAccounts, mapProof} from "./utils/utils";
+import {SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, SPL_NOOP_PROGRAM_ID} from "@solana/spl-account-compression";
+import {getAsset, getAssetProof} from "./utils/readAPI";
+import {Cutils} from "../target/types/cutils";
+import {loadOrGenerateKeypair, loadPublicKeysFromFile} from "./utils/helpers";
+import { PROGRAM_ID as BUBBLEGUM_PROGRAM_ID } from "@metaplex-foundation/mpl-bubblegum/dist/src/generated";
+
+describe("cutils", () => {
+    const provider = anchor.AnchorProvider.env();
+    anchor.setProvider(provider);
+
+    const program = anchor.workspace.Cutils as anchor.Program<Cutils>;
+
+    // NFT metadata pointer
+    // TODO change
+    const uri = "https://arweave.net/nVRvZDaOk5YAdr4ZBEeMjOVhynuv8P3vywvuN5sYSPo"
+
+    const payer = loadOrGenerateKeypair("payer");
+
+    // cNFT receiver
+    const testWallet = loadOrGenerateKeypair("testWallet")
+
+    const {collectionMint, treeAddress} = loadPublicKeysFromFile()
+
+    it("Mint!", async () => {
+        const tx = await program.methods.mint({uri})
+            .accounts({
+                payer: payer.publicKey,
+                leafOwner: testWallet.publicKey,
+                leafDelegate: testWallet.publicKey, //verify
+                treeDelegate: payer.publicKey,
+                collectionAuthority: payer.publicKey,
+                collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID,
+                ...getAccounts(collectionMint, treeAddress)
+            })
+            .transaction()
+
+        const sx = await program.provider.sendAndConfirm(tx, [payer], {skipPreflight: true});
+        console.log(`   Tx Signature: ${sx}`);
+    });
+
+
+    // it("Verify", async () => {
+    //     // TODO: replace assetId
+    //     const assetId = "HUBMRAcYpow1ZUojdSMuvhcbNuCuRSAPWnXWjjYrpAVQ";
+    //
+    //     const asset = await getAsset(assetId);
+    //     const proof = await getAssetProof(assetId);
+    //     const proofPathAsAccounts = mapProof(proof);
+    //     const root = decode(proof.root);
+    //     const dataHash = decode(asset.compression.data_hash);
+    //     const creatorHash = decode(asset.compression.creator_hash);
+    //     const nonce = new anchor.BN(asset.compression.leaf_id);
+    //     const index = asset.compression.leaf_id;
+    //
+    //     const tx = await program.methods
+    //         .verify({
+    //             root, dataHash, creatorHash, nonce, index
+    //         })
+    //         .accounts({
+    //             leafOwner: testWallet.publicKey,
+    //             leafDelegate: testWallet.publicKey,
+    //             merkleTree: treeAddress,
+    //             compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+    //         })
+    //         .remainingAccounts(proofPathAsAccounts)
+    //         .transaction();
+    //
+    //     const sx = await program.provider.sendAndConfirm(tx, [testWallet], {skipPreflight: true});
+    //
+    //     // This fails due to incorrect owner
+    //     // const sx = await program.provider.sendAndConfirm(tx, [payer], {skipPreflight: true});
+    //
+    //     console.log(`   Tx Signature: ${sx}`);
+    // });
+});

+ 382 - 0
compression/cutils/tests/utils/compression.ts

@@ -0,0 +1,382 @@
+import {
+    Keypair,
+    PublicKey,
+    Connection,
+    Transaction,
+    sendAndConfirmTransaction,
+    TransactionInstruction,
+} from "@solana/web3.js";
+import { createAccount, createMint, mintTo, TOKEN_PROGRAM_ID } from "@solana/spl-token";
+import {
+    SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+    createAllocTreeIx,
+    ValidDepthSizePair,
+    SPL_NOOP_PROGRAM_ID,
+} from "@solana/spl-account-compression";
+import {
+    PROGRAM_ID as BUBBLEGUM_PROGRAM_ID,
+    MetadataArgs,
+    computeCreatorHash,
+    computeDataHash,
+    createCreateTreeInstruction,
+    createMintToCollectionV1Instruction,
+} from "@metaplex-foundation/mpl-bubblegum";
+import {
+    PROGRAM_ID as TOKEN_METADATA_PROGRAM_ID,
+    CreateMetadataAccountArgsV3,
+    createCreateMetadataAccountV3Instruction,
+    createCreateMasterEditionV3Instruction,
+    createSetCollectionSizeInstruction,
+} from "@metaplex-foundation/mpl-token-metadata";
+
+// import local helper functions
+import { explorerURL, extractSignatureFromFailedTransaction } from "./helpers";
+
+/*
+  Helper function to create a merkle tree on chain, including allocating
+  all the space required to store all the nodes
+*/
+export async function createTree(
+    connection: Connection,
+    payer: Keypair,
+    treeKeypair: Keypair,
+    maxDepthSizePair: ValidDepthSizePair,
+    canopyDepth: number = 0,
+) {
+    console.log("Creating a new Merkle tree...");
+    console.log("treeAddress:", treeKeypair.publicKey.toBase58());
+
+    // derive the tree's authority (PDA), owned by Bubblegum
+    const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
+        [treeKeypair.publicKey.toBuffer()],
+        BUBBLEGUM_PROGRAM_ID,
+    );
+    console.log("treeAuthority:", treeAuthority.toBase58());
+
+    // allocate the tree's account on chain with the `space`
+    // NOTE: this will compute the space needed to store the tree on chain (and the lamports required to store it)
+    const allocTreeIx = await createAllocTreeIx(
+        connection,
+        treeKeypair.publicKey,
+        payer.publicKey,
+        maxDepthSizePair,
+        canopyDepth,
+    );
+
+    // create the instruction to actually create the tree
+    const createTreeIx = createCreateTreeInstruction(
+        {
+            payer: payer.publicKey,
+            treeCreator: payer.publicKey,
+            treeAuthority,
+            merkleTree: treeKeypair.publicKey,
+            compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+            // NOTE: this is used for some on chain logging
+            logWrapper: SPL_NOOP_PROGRAM_ID,
+        },
+        {
+            maxBufferSize: maxDepthSizePair.maxBufferSize,
+            maxDepth: maxDepthSizePair.maxDepth,
+            public: false,
+        },
+        BUBBLEGUM_PROGRAM_ID,
+    );
+
+    try {
+        // create and send the transaction to initialize the tree
+        const tx = new Transaction().add(allocTreeIx).add(createTreeIx);
+        tx.feePayer = payer.publicKey;
+        console.log("tx")
+
+        // send the transaction
+        const txSignature = await sendAndConfirmTransaction(
+            connection,
+            tx,
+            // ensuring the `treeKeypair` PDA and the `payer` are BOTH signers
+            [treeKeypair, payer],
+            {
+                commitment: "confirmed",
+                skipPreflight: true,
+            },
+        );
+
+        console.log("\nMerkle tree created successfully!");
+        console.log(explorerURL({ txSignature }));
+
+        // return useful info
+        return { treeAuthority, treeAddress: treeKeypair.publicKey };
+    } catch (err: any) {
+        console.error("\nFailed to create merkle tree:", err);
+
+        // log a block explorer link for the failed transaction
+        await extractSignatureFromFailedTransaction(connection, err);
+
+        throw err;
+    }
+}
+
+/**
+ * Create an NFT collection on-chain, using the regular Metaplex standards
+ * with the `payer` as the authority
+ */
+export async function createCollection(
+    connection: Connection,
+    payer: Keypair,
+    metadataV3: CreateMetadataAccountArgsV3,
+) {
+    // create and initialize the SPL token mint
+    console.log("Creating the collection's mint...");
+    const mint = await createMint(
+        connection,
+        payer,
+        // mint authority
+        payer.publicKey,
+        // freeze authority
+        payer.publicKey,
+        // decimals - use `0` for NFTs since they are non-fungible
+        0,
+    );
+    console.log("Mint address:", mint.toBase58());
+
+    // create the token account
+    console.log("Creating a token account...");
+    const tokenAccount = await createAccount(
+        connection,
+        payer,
+        mint,
+        payer.publicKey,
+        // undefined, undefined,
+    );
+    console.log("Token account:", tokenAccount.toBase58());
+
+    // mint 1 token ()
+    console.log("Minting 1 token for the collection...");
+    const mintSig = await mintTo(
+        connection,
+        payer,
+        mint,
+        tokenAccount,
+        payer,
+        // mint exactly 1 token
+        1,
+        // no `multiSigners`
+        [],
+        undefined,
+        TOKEN_PROGRAM_ID,
+    );
+    // console.log(explorerURL({ txSignature: mintSig }));
+
+    // derive the PDA for the metadata account
+    const [metadataAccount, _bump] = PublicKey.findProgramAddressSync(
+        [Buffer.from("metadata", "utf8"), TOKEN_METADATA_PROGRAM_ID.toBuffer(), mint.toBuffer()],
+        TOKEN_METADATA_PROGRAM_ID,
+    );
+    console.log("Metadata account:", metadataAccount.toBase58());
+
+    // create an instruction to create the metadata account
+    const createMetadataIx = createCreateMetadataAccountV3Instruction(
+        {
+            metadata: metadataAccount,
+            mint: mint,
+            mintAuthority: payer.publicKey,
+            payer: payer.publicKey,
+            updateAuthority: payer.publicKey,
+        },
+        {
+            createMetadataAccountArgsV3: metadataV3,
+        },
+    );
+
+    // derive the PDA for the metadata account
+    const [masterEditionAccount, _bump2] = PublicKey.findProgramAddressSync(
+        [
+            Buffer.from("metadata", "utf8"),
+            TOKEN_METADATA_PROGRAM_ID.toBuffer(),
+            mint.toBuffer(),
+            Buffer.from("edition", "utf8"),
+        ],
+        TOKEN_METADATA_PROGRAM_ID,
+    );
+    console.log("Master edition account:", masterEditionAccount.toBase58());
+
+    // create an instruction to create the metadata account
+    const createMasterEditionIx = createCreateMasterEditionV3Instruction(
+        {
+            edition: masterEditionAccount,
+            mint: mint,
+            mintAuthority: payer.publicKey,
+            payer: payer.publicKey,
+            updateAuthority: payer.publicKey,
+            metadata: metadataAccount,
+        },
+        {
+            createMasterEditionArgs: {
+                maxSupply: 0,
+            },
+        },
+    );
+
+    // create the collection size instruction
+    const collectionSizeIX = createSetCollectionSizeInstruction(
+        {
+            collectionMetadata: metadataAccount,
+            collectionAuthority: payer.publicKey,
+            collectionMint: mint,
+        },
+        {
+            setCollectionSizeArgs: { size: 50 },
+        },
+    );
+
+    try {
+        // construct the transaction with our instructions, making the `payer` the `feePayer`
+        const tx = new Transaction()
+            .add(createMetadataIx)
+            .add(createMasterEditionIx)
+            .add(collectionSizeIX);
+        tx.feePayer = payer.publicKey;
+
+        // send the transaction to the cluster
+        const txSignature = await sendAndConfirmTransaction(connection, tx, [payer], {
+            commitment: "confirmed",
+            skipPreflight: true,
+        });
+
+        console.log("\nCollection successfully created!");
+        console.log(explorerURL({ txSignature }));
+    } catch (err) {
+        console.error("\nFailed to create collection:", err);
+
+        // log a block explorer link for the failed transaction
+        await extractSignatureFromFailedTransaction(connection, err);
+
+        throw err;
+    }
+
+    // return all the accounts
+    return { mint, tokenAccount, metadataAccount, masterEditionAccount };
+}
+
+/**
+ * Mint a single compressed NFTs to any address
+ */
+export async function mintCompressedNFT(
+    connection: Connection,
+    payer: Keypair,
+    treeAddress: PublicKey,
+    collectionMint: PublicKey,
+    collectionMetadata: PublicKey,
+    collectionMasterEditionAccount: PublicKey,
+    compressedNFTMetadata: MetadataArgs,
+    receiverAddress?: PublicKey,
+) {
+    // derive the tree's authority (PDA), owned by Bubblegum
+    const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
+        [treeAddress.toBuffer()],
+        BUBBLEGUM_PROGRAM_ID,
+    );
+
+    // derive a PDA (owned by Bubblegum) to act as the signer of the compressed minting
+    const [bubblegumSigner, _bump2] = PublicKey.findProgramAddressSync(
+        // `collection_cpi` is a custom prefix required by the Bubblegum program
+        [Buffer.from("collection_cpi", "utf8")],
+        BUBBLEGUM_PROGRAM_ID,
+    );
+
+    // create an array of instruction, to mint multiple compressed NFTs at once
+    const mintIxs: TransactionInstruction[] = [];
+
+    /**
+     * correctly format the metadata args for the nft to mint
+     * ---
+     * note: minting an nft into a collection (via `createMintToCollectionV1Instruction`)
+     * will auto verify the collection. But, the `collection.verified` value inside the
+     * `metadataArgs` must be set to `false` in order for the instruction to succeed
+     */
+    const metadataArgs = Object.assign(compressedNFTMetadata, {
+        collection: { key: collectionMint, verified: false },
+    });
+
+    /**
+     * compute the data and creator hash for display in the console
+     *
+     * note: this is not required to do in order to mint new compressed nfts
+     * (since it is performed on chain via the Bubblegum program)
+     * this is only for demonstration
+     */
+    const computedDataHash = new PublicKey(computeDataHash(metadataArgs)).toBase58();
+    const computedCreatorHash = new PublicKey(computeCreatorHash(metadataArgs.creators)).toBase58();
+    console.log("computedDataHash:", computedDataHash);
+    console.log("computedCreatorHash:", computedCreatorHash);
+
+    /*
+      Add a single mint to collection instruction
+      ---
+      But you could add multiple in the same transaction, as long as your
+      transaction is still within the byte size limits
+    */
+    mintIxs.push(
+        createMintToCollectionV1Instruction(
+            {
+                payer: payer.publicKey,
+
+                merkleTree: treeAddress,
+                treeAuthority,
+                treeDelegate: payer.publicKey,
+
+                // set the receiver of the NFT
+                leafOwner: receiverAddress || payer.publicKey,
+                // set a delegated authority over this NFT
+                leafDelegate: payer.publicKey,
+
+                /*
+                    You can set any delegate address at mint, otherwise should
+                    normally be the same as `leafOwner`
+                    NOTE: the delegate will be auto cleared upon NFT transfer
+                    ---
+                    in this case, we are setting the payer as the delegate
+                  */
+
+                // collection details
+                collectionAuthority: payer.publicKey,
+                collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID,
+                collectionMint: collectionMint,
+                collectionMetadata: collectionMetadata,
+                editionAccount: collectionMasterEditionAccount,
+
+                // other accounts
+                compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+                logWrapper: SPL_NOOP_PROGRAM_ID,
+                bubblegumSigner: bubblegumSigner,
+                tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
+            },
+            {
+                metadataArgs,
+            },
+        ),
+    );
+
+    try {
+        // construct the transaction with our instructions, making the `payer` the `feePayer`
+        const tx = new Transaction().add(...mintIxs);
+        tx.feePayer = payer.publicKey;
+
+        // send the transaction to the cluster
+        const txSignature = await sendAndConfirmTransaction(connection, tx, [payer], {
+            commitment: "confirmed",
+            skipPreflight: true,
+        });
+
+        console.log("\nSuccessfully minted the compressed NFT!");
+        console.log(explorerURL({ txSignature }));
+
+        return txSignature;
+    } catch (err) {
+        console.error("\nFailed to mint compressed NFT:", err);
+
+        // log a block explorer link for the failed transaction
+        await extractSignatureFromFailedTransaction(connection, err);
+
+        throw err;
+    }
+}

+ 284 - 0
compression/cutils/tests/utils/helpers.ts

@@ -0,0 +1,284 @@
+import fs from "fs";
+import path from "path";
+import { Connection, Keypair, LAMPORTS_PER_SOL, PublicKey } from "@solana/web3.js";
+
+// define some default locations
+const DEFAULT_KEY_DIR_NAME = ".local_keys";
+const DEFAULT_PUBLIC_KEY_FILE = "keys.json";
+const DEFAULT_DEMO_DATA_FILE = "demo.json";
+
+/*
+  Load locally stored PublicKey addresses
+*/
+export function loadPublicKeysFromFile(
+    absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_PUBLIC_KEY_FILE}`,
+) {
+    try {
+        if (!absPath) throw Error("No path provided");
+        if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+        // load the public keys from the file
+        const data = JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" })) || {};
+
+        // convert all loaded keyed values into valid public keys
+        for (const [key, value] of Object.entries(data)) {
+            data[key] = new PublicKey(value as string) ?? "";
+        }
+
+        return data;
+    } catch (err) {
+        // console.warn("Unable to load local file");
+    }
+    // always return an object
+    return {};
+}
+
+/*
+  Locally save a demo data to the filesystem for later retrieval
+*/
+export function saveDemoDataToFile(
+    name: string,
+    newData: any,
+    absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_DEMO_DATA_FILE}`,
+) {
+    try {
+        let data: object = {};
+
+        // fetch all the current values, when the storage file exists
+        if (fs.existsSync(absPath))
+            data = JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" })) || {};
+
+        data = { ...data, [name]: newData };
+
+        // actually save the data to the file
+        fs.writeFileSync(absPath, JSON.stringify(data), {
+            encoding: "utf-8",
+        });
+
+        return data;
+    } catch (err) {
+        console.warn("Unable to save to file");
+        // console.warn(err);
+    }
+
+    // always return an object
+    return {};
+}
+
+/*
+  Locally save a PublicKey addresses to the filesystem for later retrieval
+*/
+export function savePublicKeyToFile(
+    name: string,
+    publicKey: PublicKey,
+    absPath: string = `${DEFAULT_KEY_DIR_NAME}/${DEFAULT_PUBLIC_KEY_FILE}`,
+) {
+    try {
+        // if (!absPath) throw Error("No path provided");
+        // if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+        // fetch all the current values
+        let data: any = loadPublicKeysFromFile(absPath);
+
+        // convert all loaded keyed values from PublicKeys to strings
+        for (const [key, value] of Object.entries(data)) {
+            data[key as any] = (value as PublicKey).toBase58();
+        }
+        data = { ...data, [name]: publicKey.toBase58() };
+
+        // actually save the data to the file
+        fs.writeFileSync(absPath, JSON.stringify(data), {
+            encoding: "utf-8",
+        });
+
+        // reload the keys for sanity
+        data = loadPublicKeysFromFile(absPath);
+
+        return data;
+    } catch (err) {
+        console.warn("Unable to save to file");
+    }
+    // always return an object
+    return {};
+}
+
+/*
+  Load a locally stored JSON keypair file and convert it to a valid Keypair
+*/
+export function loadKeypairFromFile(absPath: string) {
+    try {
+        if (!absPath) throw Error("No path provided");
+        if (!fs.existsSync(absPath)) throw Error("File does not exist.");
+
+        // load the keypair from the file
+        const keyfileBytes = JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" }));
+        // parse the loaded secretKey into a valid keypair
+        const keypair = Keypair.fromSecretKey(new Uint8Array(keyfileBytes));
+        return keypair;
+    } catch (err) {
+        // return false;
+        throw err;
+    }
+}
+
+/*
+  Save a locally stored JSON keypair file for later importing
+*/
+export function saveKeypairToFile(
+    keypair: Keypair,
+    fileName: string,
+    dirName: string = DEFAULT_KEY_DIR_NAME,
+) {
+    fileName = path.join(dirName, `${fileName}.json`);
+
+    // create the `dirName` directory, if it does not exists
+    if (!fs.existsSync(`./${dirName}/`)) fs.mkdirSync(`./${dirName}/`);
+
+    // remove the current file, if it already exists
+    if (fs.existsSync(fileName)) fs.unlinkSync(fileName);
+
+    // write the `secretKey` value as a string
+    fs.writeFileSync(fileName, `[${keypair.secretKey.toString()}]`, {
+        encoding: "utf-8",
+    });
+
+    return fileName;
+}
+
+/*
+  Attempt to load a keypair from the filesystem, or generate and save a new one
+*/
+export function loadOrGenerateKeypair(fileName: string, dirName: string = DEFAULT_KEY_DIR_NAME) {
+    try {
+        // compute the path to locate the file
+        const searchPath = path.join(dirName, `${fileName}.json`);
+        let keypair = Keypair.generate();
+
+        // attempt to load the keypair from the file
+        if (fs.existsSync(searchPath)) keypair = loadKeypairFromFile(searchPath);
+        // when unable to locate the keypair, save the new one
+        else saveKeypairToFile(keypair, fileName, dirName);
+
+        return keypair;
+    } catch (err) {
+        console.error("loadOrGenerateKeypair:", err);
+        throw err;
+    }
+}
+
+/*
+  Compute the Solana explorer address for the various data
+*/
+export function explorerURL({
+                                address,
+                                txSignature,
+                                cluster,
+                            }: {
+    address?: string;
+    txSignature?: string;
+    cluster?: "devnet" | "testnet" | "mainnet" | "mainnet-beta";
+}) {
+    let baseUrl: string;
+    //
+    if (address) baseUrl = `https://explorer.solana.com/address/${address}`;
+    else if (txSignature) baseUrl = `https://explorer.solana.com/tx/${txSignature}`;
+    else return "[unknown]";
+
+    // auto append the desired search params
+    const url = new URL(baseUrl);
+    url.searchParams.append("cluster", cluster || "devnet");
+    return url.toString() + "\n";
+}
+
+/**
+ * Auto airdrop the given wallet of of a balance of < 0.5 SOL
+ */
+export async function airdropOnLowBalance(
+    connection: Connection,
+    keypair: Keypair,
+    forceAirdrop: boolean = false,
+) {
+    // get the current balance
+    let balance = await connection.getBalance(keypair.publicKey);
+
+    // define the low balance threshold before airdrop
+    const MIN_BALANCE_TO_AIRDROP = LAMPORTS_PER_SOL / 2; // current: 0.5 SOL
+
+    // check the balance of the two accounts, airdrop when low
+    if (forceAirdrop === true || balance < MIN_BALANCE_TO_AIRDROP) {
+        console.log(`Requesting airdrop of 1 SOL to ${keypair.publicKey.toBase58()}...`);
+        await connection.requestAirdrop(keypair.publicKey, LAMPORTS_PER_SOL).then(sig => {
+            console.log("Tx signature:", sig);
+            // balance = balance + LAMPORTS_PER_SOL;
+        });
+
+        // fetch the new balance
+        // const newBalance = await connection.getBalance(keypair.publicKey);
+        // return newBalance;
+    }
+    // else console.log("Balance of:", balance / LAMPORTS_PER_SOL, "SOL");
+
+    return balance;
+}
+
+/*
+  Helper function to extract a transaction signature from a failed transaction's error message
+*/
+export async function extractSignatureFromFailedTransaction(
+    connection: Connection,
+    err: any,
+    fetchLogs?: boolean,
+) {
+    if (err?.signature) return err.signature;
+
+    // extract the failed transaction's signature
+    const failedSig = new RegExp(/^((.*)?Error: )?(Transaction|Signature) ([A-Z0-9]{32,}) /gim).exec(
+        err?.message?.toString(),
+    )?.[4];
+
+    // ensure a signature was found
+    if (failedSig) {
+        // when desired, attempt to fetch the program logs from the cluster
+        if (fetchLogs)
+            await connection
+                .getTransaction(failedSig, {
+                    maxSupportedTransactionVersion: 0,
+                })
+                .then(tx => {
+                    console.log(`\n==== Transaction logs for ${failedSig} ====`);
+                    console.log(explorerURL({ txSignature: failedSig }), "");
+                    console.log(tx?.meta?.logMessages ?? "No log messages provided by RPC");
+                    console.log(`==== END LOGS ====\n`);
+                });
+        else {
+            console.log("\n========================================");
+            console.log(explorerURL({ txSignature: failedSig }));
+            console.log("========================================\n");
+        }
+    }
+
+    // always return the failed signature value
+    return failedSig;
+}
+
+/*
+  Standard number formatter
+*/
+export function numberFormatter(num: number, forceDecimals = false) {
+    // set the significant figures
+    const minimumFractionDigits = num < 1 || forceDecimals ? 10 : 2;
+
+    // do the formatting
+    return new Intl.NumberFormat(undefined, {
+        minimumFractionDigits,
+    }).format(num);
+}
+
+/*
+  Display a separator in the console, with our without a message
+*/
+export function printConsoleSeparator(message?: string) {
+    console.log("\n===============================================");
+    console.log("===============================================\n");
+    if (message) console.log(message);
+}

+ 48 - 0
compression/cutils/tests/utils/readAPI.ts

@@ -0,0 +1,48 @@
+// I recommend using a WrappedConnection for production,
+// as it supports more Read API functionality.
+// This is just a subset of functions for quick availability.
+
+import axios from "axios";
+
+//TODO insert
+const RPC_PATH = "";
+
+export async function getAsset(assetId: any, rpcUrl = RPC_PATH): Promise<any> {
+  try {
+    const axiosInstance = axios.create({
+      baseURL: rpcUrl,
+    });
+    const response = await axiosInstance.post(rpcUrl, {
+      jsonrpc: "2.0",
+      method: "getAsset",
+      id: "rpd-op-123",
+      params: {
+        id: assetId
+      },
+    });
+    return response.data.result;
+  } catch (error) {
+    console.error(error);
+  }
+}
+
+
+export async function getAssetProof(assetId: any, rpcUrl = RPC_PATH): Promise<any> {
+  try {
+
+    const axiosInstance = axios.create({
+      baseURL: rpcUrl,
+    });
+    const response = await axiosInstance.post(rpcUrl, {
+      jsonrpc: "2.0",
+      method: "getAssetProof",
+      id: "rpd-op-123",
+      params: {
+        id: assetId
+      },
+    });
+    return response.data.result;
+  } catch (error) {
+    console.error(error);
+  }
+}

+ 73 - 0
compression/cutils/tests/utils/utils.ts

@@ -0,0 +1,73 @@
+import {AccountMeta, PublicKey, SystemProgram} from "@solana/web3.js";
+import {PROGRAM_ID as BUBBLEGUM_PROGRAM_ID} from "@metaplex-foundation/mpl-bubblegum/dist/src/generated";
+import {PROGRAM_ID as TOKEN_METADATA_PROGRAM_ID} from "@metaplex-foundation/mpl-token-metadata/dist/src/generated";
+import * as bs58 from "bs58";
+import {SPL_ACCOUNT_COMPRESSION_PROGRAM_ID, SPL_NOOP_PROGRAM_ID} from "@solana/spl-account-compression";
+
+export function decode(stuff: string) {
+  return bufferToArray(bs58.decode(stuff))
+}
+function bufferToArray(buffer: Buffer): number[] {
+  const nums: number[] = [];
+  for (let i = 0; i < buffer.length; i++) {
+    nums.push(buffer[i]);
+  }
+  return nums;
+}
+export const mapProof = (assetProof: { proof: string[] }): AccountMeta[] => {
+  if (!assetProof.proof || assetProof.proof.length === 0) {
+    throw new Error("Proof is empty");
+  }
+  return assetProof.proof.map((node) => ({
+    pubkey: new PublicKey(node),
+    isSigner: false,
+    isWritable: false,
+  }));
+};
+
+export function getAccounts(collectionMint: PublicKey, tree: PublicKey) {
+  // treeAuth
+  const [treeAuthority] = PublicKey.findProgramAddressSync(
+      [tree.toBuffer()],
+      BUBBLEGUM_PROGRAM_ID
+  );
+
+  // derive a PDA (owned by Bubblegum) to act as the signer of the compressed minting
+  const [bubblegumSigner] = PublicKey.findProgramAddressSync(
+      // `collection_cpi` is a custom prefix required by the Bubblegum program
+      [Buffer.from("collection_cpi", "utf8")],
+      BUBBLEGUM_PROGRAM_ID
+  );
+
+  // collection metadata account
+  const [metadataAccount] = PublicKey.findProgramAddressSync(
+      [Buffer.from("metadata", "utf8"), TOKEN_METADATA_PROGRAM_ID.toBuffer(), collectionMint.toBuffer()],
+      TOKEN_METADATA_PROGRAM_ID
+  );
+
+  // collection master edition
+  const [masterEditionAccount] = PublicKey.findProgramAddressSync(
+      [
+        Buffer.from("metadata", "utf8"),
+        TOKEN_METADATA_PROGRAM_ID.toBuffer(),
+        collectionMint.toBuffer(),
+        Buffer.from("edition", "utf8"),
+      ],
+      TOKEN_METADATA_PROGRAM_ID
+  );
+
+  return {
+    treeAuthority,
+    collectionMint,
+    collectionMetadata: metadataAccount,
+    editionAccount: masterEditionAccount,
+    merkleTree: tree,
+
+    bubblegumSigner,
+    logWrapper: SPL_NOOP_PROGRAM_ID,
+    compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
+    tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
+    bubblegumProgram: BUBBLEGUM_PROGRAM_ID,
+    systemProgram: SystemProgram.programId,
+  };
+}

+ 14 - 0
compression/cutils/tsconfig.json

@@ -0,0 +1,14 @@
+{
+  "compilerOptions": {
+    "types": [
+      "mocha",
+      "chai"
+    ],
+    "typeRoots": [
+      "./node_modules/@types"
+    ],
+    "module": "commonjs",
+    "target": "esnext",
+    "esModuleInterop": true
+  }
+}