浏览代码

Merge branch 'main' into sui/audit_fixes

optke3 2 年之前
父节点
当前提交
45255a3fa5
共有 100 个文件被更改,包括 4900 次插入2090 次删除
  1. 1 0
      .github/workflows/publish-js.yml
  2. 8 1
      .github/workflows/release-pyth-cosmwasm-contract.yml
  3. 22 9
      .pre-commit-config.yaml
  4. 0 297
      accumulator_updater/programs/accumulator_updater/src/lib.rs
  5. 0 25
      accumulator_updater/programs/accumulator_updater/src/macros.rs
  6. 0 20
      accumulator_updater/programs/mock-cpi-caller/Cargo.toml
  7. 0 299
      accumulator_updater/programs/mock-cpi-caller/src/lib.rs
  8. 0 195
      accumulator_updater/tests/accumulator_updater.ts
  9. 1 129
      governance/multisig_wh_message_builder/src/index.ts
  10. 1 1
      governance/xc_admin/packages/xc_admin_cli/README.md
  11. 40 3
      governance/xc_admin/packages/xc_admin_cli/src/index.ts
  12. 1 1
      governance/xc_admin/packages/xc_admin_common/package.json
  13. 44 0
      governance/xc_admin/packages/xc_admin_common/src/__tests__/UpgradeContract.test.ts
  14. 8 18
      governance/xc_admin/packages/xc_admin_common/src/cluster.ts
  15. 38 0
      governance/xc_admin/packages/xc_admin_common/src/governance_payload/UpgradeContract.ts
  16. 4 0
      governance/xc_admin/packages/xc_admin_common/src/governance_payload/index.ts
  17. 94 4
      governance/xc_admin/packages/xc_admin_common/src/propose.ts
  18. 8 2
      governance/xc_admin/packages/xc_admin_common/src/wormhole.ts
  19. 6 3
      governance/xc_admin/packages/xc_admin_frontend/components/ClusterSwitch.tsx
  20. 25 6
      governance/xc_admin/packages/xc_admin_frontend/components/tabs/General.tsx
  21. 5 13
      governance/xc_admin/packages/xc_admin_frontend/components/tabs/Proposals.tsx
  22. 10 3
      governance/xc_admin/packages/xc_admin_frontend/hooks/useMultisig.ts
  23. 11 1
      governance/xc_admin/packages/xc_admin_frontend/utils/pythClusterApiUrl.ts
  24. 5 0
      governance/xc_governance_sdk_js/src/chains.ts
  25. 1 0
      governance/xc_governance_sdk_js/src/index.ts
  26. 5 4
      hermes/Cargo.lock
  27. 4 4
      hermes/Cargo.toml
  28. 15 6
      hermes/src/config.rs
  29. 41 0
      hermes/src/macros.rs
  30. 11 3
      hermes/src/main.rs
  31. 116 117
      hermes/src/network/p2p.go
  32. 58 10
      hermes/src/network/p2p.rs
  33. 21 6
      hermes/src/network/rpc.rs
  34. 145 48
      hermes/src/network/rpc/rest.rs
  35. 76 0
      hermes/src/network/rpc/types.rs
  36. 308 0
      hermes/src/network/rpc/ws.rs
  37. 23 34
      hermes/src/store.rs
  38. 60 24
      hermes/src/store/proof/batch_vaa.rs
  39. 5 11
      hermes/src/store/storage.rs
  40. 4 0
      hermes/src/store/storage/local_cache.rs
  41. 2 0
      message_buffer/.dockerignore
  42. 0 0
      message_buffer/.gitignore
  43. 0 0
      message_buffer/.prettierignore
  44. 1 1
      message_buffer/Anchor.toml
  45. 11 9
      message_buffer/Cargo.lock
  46. 0 0
      message_buffer/Cargo.toml
  47. 36 0
      message_buffer/Dockerfile
  48. 20 3
      message_buffer/NOTES.md
  49. 0 0
      message_buffer/migrations/deploy.ts
  50. 0 0
      message_buffer/package.json
  51. 24 0
      message_buffer/programs/message_buffer/Cargo.toml
  52. 0 0
      message_buffer/programs/message_buffer/Xargo.toml
  53. 155 0
      message_buffer/programs/message_buffer/src/instructions/create_buffer.rs
  54. 64 0
      message_buffer/programs/message_buffer/src/instructions/delete_buffer.rs
  55. 27 0
      message_buffer/programs/message_buffer/src/instructions/mod.rs
  56. 54 0
      message_buffer/programs/message_buffer/src/instructions/put_all.rs
  57. 111 0
      message_buffer/programs/message_buffer/src/instructions/resize_buffer.rs
  58. 216 0
      message_buffer/programs/message_buffer/src/lib.rs
  59. 11 0
      message_buffer/programs/message_buffer/src/macros.rs
  60. 407 0
      message_buffer/programs/message_buffer/src/state/message_buffer.rs
  61. 7 0
      message_buffer/programs/message_buffer/src/state/mod.rs
  62. 65 0
      message_buffer/programs/message_buffer/src/state/whitelist.rs
  63. 4 3
      message_buffer/programs/mock-cpi-caller/Cargo.toml
  64. 0 0
      message_buffer/programs/mock-cpi-caller/Xargo.toml
  65. 147 0
      message_buffer/programs/mock-cpi-caller/src/instructions/add_price.rs
  66. 37 0
      message_buffer/programs/mock-cpi-caller/src/instructions/cpi_max_test.rs
  67. 27 0
      message_buffer/programs/mock-cpi-caller/src/instructions/mod.rs
  68. 132 0
      message_buffer/programs/mock-cpi-caller/src/instructions/update_price.rs
  69. 67 0
      message_buffer/programs/mock-cpi-caller/src/lib.rs
  70. 30 0
      message_buffer/programs/mock-cpi-caller/src/message.rs
  71. 147 0
      message_buffer/programs/mock-cpi-caller/src/message/price.rs
  72. 25 0
      message_buffer/programs/mock-cpi-caller/src/state/mod.rs
  73. 47 0
      message_buffer/programs/mock-cpi-caller/src/state/price.rs
  74. 888 0
      message_buffer/tests/message_buffer.ts
  75. 0 0
      message_buffer/tsconfig.json
  76. 0 0
      message_buffer/yarn.lock
  77. 302 434
      package-lock.json
  78. 1 0
      package.json
  79. 5 2
      price_pusher/README.md
  80. 1 1
      price_pusher/config.evm.testnet.sample.json
  81. 2 1
      price_pusher/config.injective.testnet.sample.json
  82. 3 3
      price_pusher/docker-compose.mainnet.sample.yaml
  83. 15 13
      price_pusher/docker-compose.testnet.sample.yaml
  84. 2 2
      price_pusher/package.json
  85. 15 1
      price_pusher/src/injective/command.ts
  86. 4 5
      price_pusher/src/injective/injective.ts
  87. 3 3
      price_service/client/js/package.json
  88. 2 0
      price_service/server/docker-compose.mainnet.yaml
  89. 2 0
      price_service/server/docker-compose.testnet.yaml
  90. 1 1
      price_service/server/package.json
  91. 52 0
      price_service/server/src/__tests__/rest.test.ts
  92. 47 0
      price_service/server/src/encoding.ts
  93. 10 0
      price_service/server/src/helpers.ts
  94. 24 10
      price_service/server/src/listen.ts
  95. 175 26
      price_service/server/src/rest.ts
  96. 1 1
      target_chains/aptos/sdk/js/package.json
  97. 3 0
      target_chains/cosmwasm/.gitignore
  98. 234 273
      target_chains/cosmwasm/Cargo.lock
  99. 1 1
      target_chains/cosmwasm/Cargo.toml
  100. 8 0
      target_chains/cosmwasm/README.md

+ 1 - 0
.github/workflows/publish-js.yml

@@ -16,6 +16,7 @@ jobs:
           node-version: "16"
           registry-url: "https://registry.npmjs.org"
       - run: npm ci
+      - run: npx lerna run build --no-private
       - run: npx lerna publish from-package --no-private --no-git-tag-version --yes
         env:
           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}

+ 8 - 1
.github/workflows/release-pyth-cosmwasm-contract.yml

@@ -26,6 +26,11 @@ jobs:
           npm run build-contract -- --injective
           mv ../artifacts ../injective
           zip -r injective.zip ../injective
+      - name: Build osmosis cosmwasm contract
+        run: |
+          npm run build-contract -- --osmosis
+          mv ../artifacts ../osmosis
+          zip -r osmosis.zip ../osmosis
 
       - name: Set env
         run: |
@@ -38,10 +43,12 @@ jobs:
           files: |
             target_chains/cosmwasm/tools/cosmwasm.zip
             target_chains/cosmwasm/tools/injective.zip
+            target_chains/cosmwasm/tools/osmosis.zip
           body: |
             Contracts
             - cosmwasm.zip contains the generic cosmwasm contract for most Cosmos SDK chains.
-            - injective.zip contains injectives specific contract.
+            - injective.zip contains injective specific contract.
+            - osmosis.zip contains osmosis specific contract.
           draft: false
           # Setting VERSION in set env step and hence it will be available
           name: Pyth Cosmwasm Contract ${{ env.VERSION }}

+ 22 - 9
.pre-commit-config.yaml

@@ -65,16 +65,29 @@ repos:
         entry: cargo +nightly fmt --manifest-path ./hermes/Cargo.toml --all -- --config-path rustfmt.toml
         pass_filenames: false
         files: hermes
-      # Hooks for accumulator updater contract
-      - id: cargo-fmt-accumulator-updater
-        name: Cargo format for accumulator updater contract
+      # Hooks for message buffer contract
+      - id: cargo-fmt-message-buffer
+        name: Cargo format for message buffer contract
         language: "rust"
-        entry: cargo +nightly fmt --manifest-path ./accumulator_updater/Cargo.toml --all -- --config-path rustfmt.toml
+        entry: cargo +nightly fmt --manifest-path ./message_buffer/Cargo.toml --all -- --config-path rustfmt.toml
         pass_filenames: false
-        files: accumulator_updater
-      - id: cargo-clippy-accumulator-updater
-        name: Cargo clippy for accumulator-updater contract
+        files: message_buffer
+      - id: cargo-clippy-message-buffer
+        name: Cargo clippy for message buffer contract
         language: "rust"
-        entry: cargo +nightly clippy --manifest-path ./accumulator_updater/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
+        entry: cargo +nightly clippy --manifest-path ./message_buffer/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
         pass_filenames: false
-        files: accumulator_updater
+        files: message_buffer
+      # Hooks for solana receiver contract
+      - id: cargo-fmt-solana-receiver
+        name: Cargo format for solana target chain contract
+        language: "rust"
+        entry: cargo +nightly fmt --manifest-path ./target_chains/solana/Cargo.toml --all -- --config-path rustfmt.toml
+        pass_filenames: false
+        files: target_chains/solana
+      - id: cargo-clippy-solana-receiver
+        name: Cargo clippy for solana target chain contract
+        language: "rust"
+        entry: cargo +nightly clippy --manifest-path ./target_chains/solana/Cargo.toml --tests --fix --allow-dirty --allow-staged -- -D warnings
+        pass_filenames: false
+        files: target_chains/solana

+ 0 - 297
accumulator_updater/programs/accumulator_updater/src/lib.rs

@@ -1,297 +0,0 @@
-mod macros;
-
-use anchor_lang::{
-    prelude::*,
-    solana_program::sysvar::{
-        self,
-        instructions::get_instruction_relative,
-    },
-    system_program::{
-        self,
-        CreateAccount,
-    },
-};
-
-declare_id!("Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod accumulator_updater {
-    use super::*;
-
-    pub fn initialize(ctx: Context<Initialize>) -> Result<()> {
-        let whitelist = &mut ctx.accounts.whitelist;
-        whitelist.bump = *ctx.bumps.get("whitelist").unwrap();
-        Ok(())
-    }
-
-    //TODO: add authorization mechanism for this
-    pub fn add_allowed_program(
-        ctx: Context<AddAllowedProgram>,
-        allowed_program: Pubkey,
-    ) -> Result<()> {
-        let whitelist = &mut ctx.accounts.whitelist;
-        require_keys_neq!(allowed_program, Pubkey::default());
-        require!(
-            !whitelist.allowed_programs.contains(&allowed_program),
-            AccumulatorUpdaterError::DuplicateAllowedProgram
-        );
-        whitelist.allowed_programs.push(allowed_program);
-        Ok(())
-    }
-
-    /// Add new account(s) to be included in the accumulator
-    ///
-    /// * `base_account` - Pubkey of the original account the AccumulatorInput(s) are derived from
-    /// * `data` - Vec of AccumulatorInput account data
-    /// * `account_type` - Marker to indicate base_account account_type
-    /// * `account_schemas` - Vec of markers to indicate schemas for AccumulatorInputs. In same respective
-    ///    order as data
-    pub fn create_inputs<'info>(
-        ctx: Context<'_, '_, '_, 'info, CreateInputs<'info>>,
-        base_account: Pubkey,
-        data: Vec<Vec<u8>>,
-        account_type: u32,
-        account_schemas: Vec<u8>,
-    ) -> Result<()> {
-        let cpi_caller = ctx.accounts.whitelist_verifier.is_allowed()?;
-        let accts = ctx.remaining_accounts;
-        require_eq!(accts.len(), data.len());
-        require_eq!(data.len(), account_schemas.len());
-        let mut zip = data.into_iter().zip(account_schemas.into_iter());
-
-        let rent = Rent::get()?;
-
-        for ai in accts {
-            let (account_data, account_schema) = zip.next().unwrap();
-            let seeds = accumulator_acc_seeds!(cpi_caller, base_account, account_schema);
-            let (pda, bump) = Pubkey::find_program_address(seeds, &crate::ID);
-            require_keys_eq!(ai.key(), pda);
-
-            //TODO: Update this with serialization logic
-            let accumulator_size = 8 + AccumulatorInput::get_initial_size(&account_data);
-            let accumulator_input = AccumulatorInput::new(
-                AccumulatorHeader::new(
-                    1, //from CPI caller?
-                    account_type,
-                    account_schema,
-                ),
-                account_data,
-            );
-            CreateInputs::create_and_initialize_accumulator_input_pda(
-                ai,
-                accumulator_input,
-                accumulator_size,
-                &ctx.accounts.payer,
-                &[accumulator_acc_seeds_with_bump!(
-                    cpi_caller,
-                    base_account,
-                    account_schema,
-                    bump
-                )],
-                &rent,
-                &ctx.accounts.system_program,
-            )?;
-        }
-
-        Ok(())
-    }
-}
-
-
-// Note: purposely not making this zero_copy
-// otherwise whitelist must always be marked mutable
-// and majority of operations are read
-#[account]
-#[derive(InitSpace)]
-pub struct Whitelist {
-    pub bump:             u8,
-    #[max_len(32)]
-    pub allowed_programs: Vec<Pubkey>,
-}
-
-
-#[derive(Accounts)]
-pub struct WhitelistVerifier<'info> {
-    #[account(
-        seeds = [b"accumulator".as_ref(), b"whitelist".as_ref()],
-        bump = whitelist.bump,
-    )]
-    pub whitelist:  Account<'info, Whitelist>,
-    /// CHECK: Instruction introspection sysvar
-    #[account(address = sysvar::instructions::ID)]
-    pub ixs_sysvar: UncheckedAccount<'info>,
-}
-
-impl<'info> WhitelistVerifier<'info> {
-    pub fn get_cpi_caller(&self) -> Result<Pubkey> {
-        let instruction = get_instruction_relative(0, &self.ixs_sysvar.to_account_info())?;
-        Ok(instruction.program_id)
-    }
-    pub fn is_allowed(&self) -> Result<Pubkey> {
-        let cpi_caller = self.get_cpi_caller()?;
-        let whitelist = &self.whitelist;
-        require!(
-            whitelist.allowed_programs.contains(&cpi_caller),
-            AccumulatorUpdaterError::CallerNotAllowed
-        );
-        Ok(cpi_caller)
-    }
-}
-
-#[derive(Accounts)]
-pub struct Initialize<'info> {
-    #[account(mut)]
-    pub payer:          Signer<'info>,
-    #[account(
-        init,
-        payer = payer,
-        seeds = [b"accumulator".as_ref(), b"whitelist".as_ref()],
-        bump,
-        space = 8 + Whitelist::INIT_SPACE
-    )]
-    pub whitelist:      Account<'info, Whitelist>,
-    pub system_program: Program<'info, System>,
-}
-
-#[derive(Accounts)]
-pub struct AddAllowedProgram<'info> {
-    #[account(mut)]
-    pub payer:          Signer<'info>,
-    #[account(
-    mut,
-    seeds = [b"accumulator".as_ref(), b"whitelist".as_ref()],
-    bump = whitelist.bump,
-    )]
-    pub whitelist:      Account<'info, Whitelist>,
-    pub system_program: Program<'info, System>,
-}
-
-
-#[derive(Accounts)]
-#[instruction(base_account: Pubkey, data: Vec<Vec<u8>>, account_type: u32)] // only needed if using optional accounts
-pub struct CreateInputs<'info> {
-    #[account(mut)]
-    pub payer:              Signer<'info>,
-    pub whitelist_verifier: WhitelistVerifier<'info>,
-    pub system_program:     Program<'info, System>,
-    //TODO: decide on using optional accounts vs ctx.remaining_accounts
-    //      - optional accounts can leverage anchor macros for PDA init/verification
-    //      - ctx.remaining_accounts can be used to pass in any number of accounts
-    //
-    // https://github.com/coral-xyz/anchor/pull/2101 - anchor optional accounts PR
-    // #[account(
-    //     init,
-    //     payer = payer,
-    //     seeds = [
-    //          whitelist_verifier.get_cpi_caller()?.as_ref(),
-    //          b"accumulator".as_ref(),
-    //          base_account.as_ref()
-    //          &account_type.to_le_bytes(),
-    //      ],
-    //     bump,
-    //     space = 8 + AccumulatorAccount::get_initial_size(&data[0])
-    // )]
-    // pub acc_input_0:          Option<Account<'info, AccumulatorInput>>,
-}
-
-impl<'info> CreateInputs<'info> {
-    fn create_and_initialize_accumulator_input_pda<'a>(
-        accumulator_input_ai: &AccountInfo<'a>,
-        accumulator_input: AccumulatorInput,
-        accumulator_input_size: usize,
-        payer: &AccountInfo<'a>,
-        seeds: &[&[&[u8]]],
-        rent: &Rent,
-        system_program: &AccountInfo<'a>,
-    ) -> Result<()> {
-        let lamports = rent.minimum_balance(accumulator_input_size);
-
-        system_program::create_account(
-            CpiContext::new_with_signer(
-                system_program.to_account_info(),
-                CreateAccount {
-                    from: payer.to_account_info(),
-                    to:   accumulator_input_ai.to_account_info(),
-                },
-                seeds,
-            ),
-            lamports,
-            accumulator_input_size.try_into().unwrap(),
-            &crate::ID,
-        )?;
-
-        AccountSerialize::try_serialize(
-            &accumulator_input,
-            &mut &mut accumulator_input_ai.data.borrow_mut()[..],
-        )
-        .map_err(|e| {
-            msg!("original error: {:?}", e);
-            AccumulatorUpdaterError::SerializeError
-        })?;
-        // msg!("accumulator_input_ai: {:#?}", accumulator_input_ai);
-
-        Ok(())
-    }
-}
-
-// TODO: should UpdateInput be allowed to resize an AccumulatorInput account?
-#[derive(Accounts)]
-pub struct UpdateInputs<'info> {
-    #[account(mut)]
-    pub payer:              Signer<'info>,
-    pub whitelist_verifier: WhitelistVerifier<'info>,
-}
-
-//TODO: implement custom serialization & set alignment
-#[account]
-pub struct AccumulatorInput {
-    pub header: AccumulatorHeader,
-    //TODO: Vec<u8> for resizing?
-    pub data:   Vec<u8>,
-}
-
-impl AccumulatorInput {
-    pub fn get_initial_size(data: &Vec<u8>) -> usize {
-        AccumulatorHeader::SIZE + 4 + data.len()
-    }
-
-    pub fn new(header: AccumulatorHeader, data: Vec<u8>) -> Self {
-        Self { header, data }
-    }
-}
-
-//TODO:
-// - implement custom serialization & set alignment
-// - what other fields are needed?
-#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug, Default)]
-pub struct AccumulatorHeader {
-    pub version:        u8,
-    // u32 for parity with pyth oracle contract
-    pub account_type:   u32,
-    pub account_schema: u8,
-}
-
-
-impl AccumulatorHeader {
-    pub const SIZE: usize = 1 + 4 + 1;
-
-    pub fn new(version: u8, account_type: u32, account_schema: u8) -> Self {
-        Self {
-            version,
-            account_type,
-            account_schema,
-        }
-    }
-}
-
-#[error_code]
-pub enum AccumulatorUpdaterError {
-    #[msg("CPI Caller not allowed")]
-    CallerNotAllowed,
-    #[msg("Whitelist already contains program")]
-    DuplicateAllowedProgram,
-    #[msg("Conversion Error")]
-    ConversionError,
-    #[msg("Serialization Error")]
-    SerializeError,
-}

+ 0 - 25
accumulator_updater/programs/accumulator_updater/src/macros.rs

@@ -1,25 +0,0 @@
-#[macro_export]
-macro_rules! accumulator_acc_seeds {
-    ($cpi_caller_pid:expr, $base_account:expr, $account_type:expr) => {
-        &[
-            $cpi_caller_pid.as_ref(),
-            b"accumulator".as_ref(),
-            $base_account.as_ref(),
-            &$account_type.to_le_bytes(),
-        ]
-    };
-}
-
-
-#[macro_export]
-macro_rules! accumulator_acc_seeds_with_bump {
-    ($cpi_caller_pid:expr, $base_account:expr, $account_type:expr, $bump:expr) => {
-        &[
-            $cpi_caller_pid.as_ref(),
-            b"accumulator".as_ref(),
-            $base_account.as_ref(),
-            &$account_type.to_le_bytes(),
-            &[$bump],
-        ]
-    };
-}

+ 0 - 20
accumulator_updater/programs/mock-cpi-caller/Cargo.toml

@@ -1,20 +0,0 @@
-[package]
-name = "mock-cpi-caller"
-version = "0.1.0"
-description = "Created with Anchor"
-edition = "2021"
-
-[lib]
-crate-type = ["cdylib", "lib"]
-name = "mock_cpi_caller"
-
-[features]
-no-entrypoint = []
-no-idl = []
-no-log-ix-name = []
-cpi = ["no-entrypoint"]
-default = []
-
-[dependencies]
-anchor-lang = "0.27.0"
-accumulator_updater = { path = "../accumulator_updater", features = ["cpi"] }

+ 0 - 299
accumulator_updater/programs/mock-cpi-caller/src/lib.rs

@@ -1,299 +0,0 @@
-use {
-    accumulator_updater::{
-        cpi::accounts as AccumulatorUpdaterCpiAccts,
-        program::AccumulatorUpdater as AccumulatorUpdaterProgram,
-    },
-    anchor_lang::{
-        prelude::*,
-        solana_program::{
-            hash::hashv,
-            sysvar,
-        },
-    },
-};
-
-declare_id!("Dg5PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
-
-#[program]
-pub mod mock_cpi_caller {
-    use super::*;
-
-    pub fn add_price<'info>(
-        ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
-        params: AddPriceParams,
-    ) -> Result<()> {
-        let pyth_price_acct = &mut ctx.accounts.pyth_price_account;
-        pyth_price_acct.init(params)?;
-
-        let mut price_account_data_vec = vec![];
-        AccountSerialize::try_serialize(
-            &pyth_price_acct.clone().into_inner(),
-            &mut price_account_data_vec,
-        )?;
-
-
-        let price_only_data = PriceOnly::from(&pyth_price_acct.clone().into_inner())
-            .try_to_vec()
-            .unwrap();
-
-
-        let account_data: Vec<Vec<u8>> = vec![price_account_data_vec, price_only_data];
-        let account_schemas = [PythSchemas::Full, PythSchemas::Compact]
-            .into_iter()
-            .map(|s| s.to_u8())
-            .collect::<Vec<u8>>();
-
-        // 44444 compute units
-        // AddPrice::invoke_cpi_anchor(ctx, account_data, PythAccountType::Price, account_schemas)
-        // 44045 compute units
-        AddPrice::invoke_cpi_solana(ctx, account_data, PythAccountType::Price, account_schemas)
-    }
-}
-
-
-impl<'info> AddPrice<'info> {
-    fn create_inputs_ctx(
-        &self,
-        remaining_accounts: &[AccountInfo<'info>],
-    ) -> CpiContext<'_, '_, '_, 'info, AccumulatorUpdaterCpiAccts::CreateInputs<'info>> {
-        let mut cpi_ctx = CpiContext::new(
-            self.accumulator_program.to_account_info(),
-            AccumulatorUpdaterCpiAccts::CreateInputs {
-                payer:              self.payer.to_account_info(),
-                whitelist_verifier: AccumulatorUpdaterCpiAccts::WhitelistVerifier {
-                    whitelist:  self.accumulator_whitelist.to_account_info(),
-                    ixs_sysvar: self.ixs_sysvar.to_account_info(),
-                },
-                system_program:     self.system_program.to_account_info(),
-            },
-        );
-
-
-        cpi_ctx = cpi_ctx.with_remaining_accounts(remaining_accounts.to_vec());
-        cpi_ctx
-    }
-
-    /// invoke cpi call using anchor
-    fn invoke_cpi_anchor(
-        ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
-        account_data: Vec<Vec<u8>>,
-        account_type: PythAccountType,
-        account_schemas: Vec<u8>,
-    ) -> Result<()> {
-        accumulator_updater::cpi::create_inputs(
-            // cpi_ctx,
-            ctx.accounts.create_inputs_ctx(ctx.remaining_accounts),
-            ctx.accounts.pyth_price_account.key(),
-            account_data,
-            account_type.to_u32(),
-            account_schemas,
-        )?;
-        Ok(())
-    }
-
-
-    /// invoke cpi call using solana
-    fn invoke_cpi_solana(
-        ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
-        account_data: Vec<Vec<u8>>,
-        account_type: PythAccountType,
-        account_schemas: Vec<u8>,
-    ) -> Result<()> {
-        let mut accounts = vec![
-            AccountMeta::new(ctx.accounts.payer.key(), true),
-            AccountMeta::new_readonly(ctx.accounts.accumulator_whitelist.key(), false),
-            AccountMeta::new_readonly(ctx.accounts.ixs_sysvar.key(), false),
-            AccountMeta::new_readonly(ctx.accounts.system_program.key(), false),
-        ];
-        accounts.extend_from_slice(
-            &ctx.remaining_accounts
-                .iter()
-                .map(|a| AccountMeta::new(a.key(), false))
-                .collect::<Vec<_>>(),
-        );
-        let add_accumulator_input_ix = anchor_lang::solana_program::instruction::Instruction {
-            program_id: ctx.accounts.accumulator_program.key(),
-            accounts,
-            data: (
-                //anchor ix discriminator/identifier
-                sighash("global", "create_inputs"),
-                ctx.accounts.pyth_price_account.key(),
-                account_data,
-                account_type.to_u32(),
-                account_schemas,
-            )
-                .try_to_vec()
-                .unwrap(),
-        };
-        let account_infos = &mut ctx.accounts.to_account_infos();
-        account_infos.extend_from_slice(ctx.remaining_accounts);
-        anchor_lang::solana_program::program::invoke(&add_accumulator_input_ix, account_infos)?;
-        Ok(())
-    }
-}
-
-
-/// Generate discriminator to be able to call anchor program's ix
-/// * `namespace` - "global" for instructions
-/// * `name` - name of ix to call CASE-SENSITIVE
-pub fn sighash(namespace: &str, name: &str) -> [u8; 8] {
-    let preimage = format!("{namespace}:{name}");
-
-    let mut sighash = [0u8; 8];
-    sighash.copy_from_slice(&hashv(&[preimage.as_bytes()]).to_bytes()[..8]);
-    sighash
-}
-
-#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug, PartialEq, Eq)]
-pub struct AddPriceParams {
-    pub id:         u64,
-    pub price:      u64,
-    pub price_expo: u64,
-    pub ema:        u64,
-    pub ema_expo:   u64,
-}
-
-#[derive(Copy, Clone)]
-#[repr(u32)]
-pub enum PythAccountType {
-    Mapping     = 1,
-    Product     = 2,
-    Price       = 3,
-    Test        = 4,
-    Permissions = 5,
-}
-impl PythAccountType {
-    fn to_u32(&self) -> u32 {
-        *self as u32
-    }
-}
-
-#[derive(Copy, Clone)]
-#[repr(u8)]
-pub enum PythSchemas {
-    Full    = 0,
-    Compact = 1,
-    Minimal = 2,
-}
-
-impl PythSchemas {
-    fn to_u8(&self) -> u8 {
-        *self as u8
-    }
-}
-
-#[derive(Accounts)]
-#[instruction(params: AddPriceParams)]
-pub struct AddPrice<'info> {
-    #[account(
-        init,
-        payer = payer,
-        seeds = [b"pyth".as_ref(), b"price".as_ref(), &params.id.to_le_bytes()],
-        bump,
-        space = 8 + PriceAccount::INIT_SPACE
-    )]
-    pub pyth_price_account:    Account<'info, PriceAccount>,
-    #[account(mut)]
-    pub payer:                 Signer<'info>,
-    /// also needed for accumulator_updater
-    pub system_program:        Program<'info, System>,
-    /// CHECK: whitelist
-    pub accumulator_whitelist: UncheckedAccount<'info>,
-    /// CHECK: instructions introspection sysvar
-    #[account(address = sysvar::instructions::ID)]
-    pub ixs_sysvar:            UncheckedAccount<'info>,
-    pub accumulator_program:   Program<'info, AccumulatorUpdaterProgram>,
-    // Remaining Accounts
-    // should all be new uninitialized accounts
-}
-
-
-//Note: this will use anchor's default borsh serialization schema with the header
-#[account]
-#[derive(InitSpace)]
-pub struct PriceAccount {
-    pub id:         u64,
-    pub price:      u64,
-    pub price_expo: u64,
-    pub ema:        u64,
-    pub ema_expo:   u64,
-}
-
-impl PriceAccount {
-    fn init(&mut self, params: AddPriceParams) -> Result<()> {
-        self.id = params.id;
-        self.price = params.price;
-        self.price_expo = params.price_expo;
-        self.ema = params.ema;
-        self.ema_expo = params.ema_expo;
-        Ok(())
-    }
-}
-
-// #[derive(Default, Debug, borsh::BorshSerialize)]
-#[derive(AnchorSerialize, AnchorDeserialize, Default, Debug, Clone)]
-pub struct PriceOnly {
-    pub price_expo: u64,
-    pub price:      u64,
-    pub id:         u64,
-}
-
-impl PriceOnly {
-    fn serialize(&self) -> Vec<u8> {
-        self.try_to_vec().unwrap()
-    }
-
-    fn serialize_from_price_account(other: PriceAccount) -> Vec<u8> {
-        PriceOnly::from(&other).try_to_vec().unwrap()
-    }
-}
-
-
-impl From<&PriceAccount> for PriceOnly {
-    fn from(other: &PriceAccount) -> Self {
-        Self {
-            id:         other.id,
-            price:      other.price,
-            price_expo: other.price_expo,
-        }
-    }
-}
-
-
-impl From<PriceAccount> for PriceOnly {
-    fn from(other: PriceAccount) -> Self {
-        Self {
-            id:         other.id,
-            price:      other.price,
-            price_expo: other.price_expo,
-        }
-    }
-}
-
-#[cfg(test)]
-mod test {
-    use {
-        super::*,
-        anchor_lang::InstructionData,
-    };
-
-    #[test]
-    fn ix_discriminator() {
-        let a = &(accumulator_updater::instruction::CreateInputs {
-            base_account:    anchor_lang::prelude::Pubkey::default(),
-            data:            vec![],
-            account_type:    0,
-            account_schemas: vec![],
-        }
-        .data()[..8]);
-
-        let sighash = sighash("global", "create_inputs");
-        println!(
-            r"
-            a: {a:?}
-            sighash: {sighash:?}
-            ",
-        );
-        assert_eq!(a, &sighash);
-    }
-}

+ 0 - 195
accumulator_updater/tests/accumulator_updater.ts

@@ -1,195 +0,0 @@
-import * as anchor from "@coral-xyz/anchor";
-import { IdlTypes, Program, IdlAccounts } from "@coral-xyz/anchor";
-import { AccumulatorUpdater } from "../target/types/accumulator_updater";
-import { MockCpiCaller } from "../target/types/mock_cpi_caller";
-import lumina from "@lumina-dev/test";
-import { assert } from "chai";
-import { ComputeBudgetProgram } from "@solana/web3.js";
-
-// Enables tool that runs in localbrowser for easier debugging of txns
-// in this test -  https://lumina.fyi/debug
-lumina();
-
-const accumulatorUpdaterProgram = anchor.workspace
-  .AccumulatorUpdater as Program<AccumulatorUpdater>;
-const mockCpiProg = anchor.workspace.MockCpiCaller as Program<MockCpiCaller>;
-
-describe("accumulator_updater", () => {
-  // Configure the client to use the local cluster.
-  let provider = anchor.AnchorProvider.env();
-  anchor.setProvider(provider);
-
-  const [whitelistPda, whitelistBump] =
-    anchor.web3.PublicKey.findProgramAddressSync(
-      [Buffer.from("accumulator"), Buffer.from("whitelist")],
-      accumulatorUpdaterProgram.programId
-    );
-
-  it("Is initialized!", async () => {
-    // Add your test here.
-    const tx = await accumulatorUpdaterProgram.methods
-      .initialize()
-      .accounts({})
-      .rpc();
-    console.log("Your transaction signature", tx);
-
-    const whitelist = await accumulatorUpdaterProgram.account.whitelist.fetch(
-      whitelistPda
-    );
-    assert.strictEqual(whitelist.bump, whitelistBump);
-    console.info(`whitelist: ${JSON.stringify(whitelist)}`);
-  });
-
-  it("Adds a program to the whitelist", async () => {
-    const addToWhitelistTx = await accumulatorUpdaterProgram.methods
-      .addAllowedProgram(mockCpiProg.programId)
-      .accounts({})
-      .rpc();
-    const whitelist = await accumulatorUpdaterProgram.account.whitelist.fetch(
-      whitelistPda
-    );
-    console.info(`whitelist after add: ${JSON.stringify(whitelist)}`);
-
-    assert.isTrue(
-      whitelist.allowedPrograms
-        .map((pk) => pk.toString())
-        .includes(mockCpiProg.programId.toString())
-    );
-  });
-
-  it("Mock CPI program - AddPrice", async () => {
-    const addPriceParams = {
-      id: new anchor.BN(1),
-      price: new anchor.BN(2),
-      priceExpo: new anchor.BN(3),
-      ema: new anchor.BN(4),
-      emaExpo: new anchor.BN(5),
-    };
-
-    const mockCpiCallerAddPriceTxPubkeys = await mockCpiProg.methods
-      .addPrice(addPriceParams)
-      .accounts({
-        systemProgram: anchor.web3.SystemProgram.programId,
-        ixsSysvar: anchor.web3.SYSVAR_INSTRUCTIONS_PUBKEY,
-        accumulatorWhitelist: whitelistPda,
-        accumulatorProgram: accumulatorUpdaterProgram.programId,
-      })
-      .pubkeys();
-
-    const accumulatorPdas = [0, 1].map((pythSchema) => {
-      const [pda] = anchor.web3.PublicKey.findProgramAddressSync(
-        [
-          mockCpiProg.programId.toBuffer(),
-          Buffer.from("accumulator"),
-          mockCpiCallerAddPriceTxPubkeys.pythPriceAccount.toBuffer(),
-          new anchor.BN(pythSchema).toArrayLike(Buffer, "le", 1),
-        ],
-        accumulatorUpdaterProgram.programId
-      );
-      console.log(`pda for pyth schema ${pythSchema}: ${pda.toString()}`);
-      return {
-        pubkey: pda,
-        isSigner: false,
-        isWritable: true,
-      };
-      // return pda;
-    });
-
-    const mockCpiCallerAddPriceTxPrep = await mockCpiProg.methods
-      .addPrice(addPriceParams)
-      .accounts({
-        ...mockCpiCallerAddPriceTxPubkeys,
-      })
-      .remainingAccounts(accumulatorPdas)
-      .prepare();
-
-    console.log(
-      `ix: ${JSON.stringify(
-        mockCpiCallerAddPriceTxPrep.instruction,
-        (k, v) => {
-          if (k === "data") {
-            return v.toString();
-          } else {
-            return v;
-          }
-        },
-        2
-      )}`
-    );
-    for (const prop in mockCpiCallerAddPriceTxPrep.pubkeys) {
-      console.log(
-        `${prop}: ${mockCpiCallerAddPriceTxPrep.pubkeys[prop].toString()}`
-      );
-    }
-
-    const addPriceTx = await mockCpiProg.methods
-      .addPrice(addPriceParams)
-      .accounts({
-        ...mockCpiCallerAddPriceTxPubkeys,
-      })
-      .remainingAccounts(accumulatorPdas)
-      .preInstructions([
-        ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
-      ])
-      .rpc({
-        skipPreflight: true,
-      });
-
-    console.log(`addPriceTx: ${addPriceTx}`);
-    const accumulatorInputkeys = accumulatorPdas.map((a) => a.pubkey);
-
-    const accumulatorInputs =
-      await accumulatorUpdaterProgram.account.accumulatorInput.fetchMultiple(
-        accumulatorInputkeys
-      );
-
-    const accumulatorPriceAccounts = accumulatorInputs.map((ai) => {
-      const { header, data } = ai;
-
-      return parseAccumulatorInput(ai);
-    });
-    console.log(
-      `accumulatorPriceAccounts: ${JSON.stringify(
-        accumulatorPriceAccounts,
-        null,
-        2
-      )}`
-    );
-    accumulatorPriceAccounts.forEach((pa) => {
-      assert.isTrue(pa.id.eq(addPriceParams.id));
-      assert.isTrue(pa.price.eq(addPriceParams.price));
-      assert.isTrue(pa.priceExpo.eq(addPriceParams.priceExpo));
-    });
-  });
-});
-
-type AccumulatorInputHeader = IdlTypes<AccumulatorUpdater>["AccumulatorHeader"];
-type AccumulatorInputPriceAccountTypes =
-  | IdlAccounts<MockCpiCaller>["priceAccount"] // case-sensitive
-  | IdlTypes<MockCpiCaller>["PriceOnly"];
-
-// Parses AccumulatorInput.data into a PriceAccount or PriceOnly object based on the
-// accountType and accountSchema.
-//
-// AccumulatorInput.data for AccumulatorInput<PriceAccount> will
-// have mockCpiCaller::PriceAccount.discriminator()
-// AccumulatorInput<PriceOnly> will not since its not an account
-function parseAccumulatorInput({
-  header,
-  data,
-}: {
-  header: AccumulatorInputHeader;
-  data: Buffer;
-}): AccumulatorInputPriceAccountTypes {
-  console.log(`header: ${JSON.stringify(header)}`);
-  assert.strictEqual(header.accountType, 3);
-  if (header.accountSchema === 0) {
-    console.log(`[full]data: ${data.toString("hex")}`);
-    // case-sensitive. Note that "P" is capitalized here and not in
-    // the AccumulatorInputPriceAccountTypes type alias.
-    return mockCpiProg.coder.accounts.decode("PriceAccount", data);
-  } else {
-    console.log(`[compact]data: ${data.toString("hex")}`);
-    return mockCpiProg.coder.types.decode("PriceOnly", data);
-  }
-}

+ 1 - 129
governance/multisig_wh_message_builder/src/index.ts

@@ -17,11 +17,7 @@ import { program } from "commander";
 import * as fs from "fs";
 import { LedgerNodeWallet } from "./wallet";
 import lodash from "lodash";
-import {
-  getActiveProposals,
-  getManyProposalsInstructions,
-  getProposalInstructions,
-} from "./multisig";
+import { getProposalInstructions } from "./multisig";
 import {
   WormholeNetwork,
   loadWormholeTools,
@@ -151,130 +147,6 @@ program
     );
   });
 
-program
-  .command("create")
-  .description("Create a new multisig transaction")
-  .option("-c, --cluster <network>", "solana cluster to use", "devnet")
-  .option("-l, --ledger", "use ledger")
-  .option(
-    "-lda, --ledger-derivation-account <number>",
-    "ledger derivation account to use"
-  )
-  .option(
-    "-ldc, --ledger-derivation-change <number>",
-    "ledger derivation change to use"
-  )
-  .option(
-    "-w, --wallet <filepath>",
-    "multisig wallet secret key filepath",
-    "keys/key.json"
-  )
-  .option("-f, --file <filepath>", "Path to a json file with instructions")
-  .option("-p, --payload <hex-string>", "Wormhole VAA payload")
-  .option("-s, --skip-duplicate-check", "Skip checking duplicates")
-  .action(async (options) => {
-    const cluster: Cluster = options.cluster;
-    const squad = await getSquadsClient(
-      cluster,
-      options.ledger,
-      options.ledgerDerivationAccount,
-      options.ledgerDerivationChange,
-      options.wallet
-    );
-
-    if (options.payload && options.file) {
-      console.log("Only one of --payload or --file must be provided");
-      return;
-    }
-
-    if (options.payload) {
-      const wormholeTools = await loadWormholeTools(cluster, squad.connection);
-
-      if (!options.skipDuplicateCheck) {
-        const activeProposals = await getActiveProposals(
-          squad,
-          CONFIG[cluster].vault
-        );
-        const activeInstructions = await getManyProposalsInstructions(
-          squad,
-          activeProposals
-        );
-
-        const msAccount = await squad.getMultisig(CONFIG[cluster].vault);
-        const emitter = squad.getAuthorityPDA(
-          msAccount.publicKey,
-          msAccount.authorityIndex
-        );
-
-        for (let i = 0; i < activeProposals.length; i++) {
-          if (
-            hasWormholePayload(
-              squad,
-              emitter,
-              activeProposals[i].publicKey,
-              options.payload,
-              activeInstructions[i],
-              wormholeTools
-            )
-          ) {
-            console.log(
-              `❌ Skipping, payload ${options.payload} matches instructions at ${activeProposals[i].publicKey}`
-            );
-            return;
-          }
-        }
-      }
-
-      await createWormholeMsgMultisigTx(
-        options.cluster,
-        squad,
-        CONFIG[cluster].vault,
-        options.payload,
-        wormholeTools
-      );
-    }
-
-    if (options.file) {
-      const instructions: SquadInstruction[] = loadInstructionsFromJson(
-        options.file
-      );
-
-      if (!options.skipDuplicateCheck) {
-        const activeProposals = await getActiveProposals(
-          squad,
-          CONFIG[cluster].vault
-        );
-        const activeInstructions = await getManyProposalsInstructions(
-          squad,
-          activeProposals
-        );
-
-        for (let i = 0; i < activeProposals.length; i++) {
-          if (
-            areEqualOnChainInstructions(
-              instructions.map((ix) => ix.instruction),
-              activeInstructions[i]
-            )
-          ) {
-            console.log(
-              `❌ Skipping, instructions from ${options.file} match instructions at ${activeProposals[i].publicKey}`
-            );
-            return;
-          }
-        }
-      }
-
-      const txKey = await createTx(squad, CONFIG[cluster].vault);
-      await addInstructionsToTx(
-        cluster,
-        squad,
-        CONFIG[cluster].vault,
-        txKey,
-        instructions
-      );
-    }
-  });
-
 program
   .command("verify")
   .description("Verify given proposal matches a payload")

+ 1 - 1
governance/xc_admin/packages/xc_admin_cli/README.md

@@ -28,6 +28,6 @@ To activate a transaction:
 ```
 npm install
 npx lerna run build --scope "xc_admin_common"
-npx ts-node src/index.ts activate -t <TRANSACTION_HASH> -c <CLUSTER: [mainnet|devnet|testnet|pythnet|pythtest]> -v <VAULT_ADDRESS> -w <WALLET_SECRET_KEY_FILEPATH: [filepath|"ledger"]> -lda <LEDGER_DERIVATION_ACCOUNT> -ldc <LEDGER_DERIVATION_CHANGE>
+npx ts-node src/index.ts activate -t <TRANSACTION_HASH> -c <CLUSTER: [mainnet|devnet|testnet] -v <VAULT_ADDRESS> -w <WALLET_SECRET_KEY_FILEPATH: [filepath|"ledger"]> -lda <LEDGER_DERIVATION_ACCOUNT> -ldc <LEDGER_DERIVATION_CHANGE>
 
 ```

+ 40 - 3
governance/xc_admin/packages/xc_admin_cli/src/index.ts

@@ -26,6 +26,7 @@ import {
   mapKey,
   MultisigParser,
   PROGRAM_AUTHORITY_ESCROW,
+  proposeArbitraryPayload,
   proposeInstructions,
   WORMHOLE_ADDRESS,
 } from "xc_admin_common";
@@ -64,7 +65,10 @@ const multisigCommand = (name: string, description: string) =>
       "-w, --wallet <filepath>",
       'path to the operations key or "ledger"'
     )
-    .requiredOption("-v, --vault <pubkey>", "multisig address")
+    .requiredOption(
+      "-v, --vault <pubkey>",
+      "multisig address, all the addresses can be found in xc_admin_common/src/multisig.ts"
+    )
     .option(
       "-lda, --ledger-derivation-account <number>",
       "ledger derivation account to use"
@@ -253,7 +257,7 @@ multisigCommand(
       getPythProgramKeyForCluster(cluster),
       provider
     )
-      .methods.initPrice(exponent, 1)
+      .methods.setExponent(exponent, 1)
       .accounts({ fundingAccount: vaultAuthority, priceAccount })
       .instruction();
     await proposeInstructions(squad, vault, [proposalInstruction], false);
@@ -286,7 +290,13 @@ program
         keys: ix.keys as AccountMeta[],
       })
     );
-    console.log(JSON.stringify(parsed, null, 2));
+    console.log(
+      JSON.stringify(
+        parsed,
+        (key, value) => (typeof value === "bigint" ? value.toString() : value), // return everything else unchanged
+        2
+      )
+    );
   });
 
 multisigCommand("approve", "Approve a transaction sitting in the multisig")
@@ -403,6 +413,33 @@ multisigCommand("propose-sol-transfer", "Propose sol transfer")
     );
   });
 
+multisigCommand("propose-arbitrary-payload", "Propose arbitrary payload")
+  .option("-p, --payload <hex-string>", "Wormhole VAA payload")
+  .action(async (options: any) => {
+    const wallet = await loadHotWalletOrLedger(
+      options.wallet,
+      options.ledgerDerivationAccount,
+      options.ledgerDerivationChange
+    );
+
+    const cluster: PythCluster = options.cluster;
+    const vault: PublicKey = new PublicKey(options.vault);
+
+    const squad = SquadsMesh.endpoint(getPythClusterApiUrl(cluster), wallet);
+
+    let payload = options.payload;
+    if (payload.startsWith("0x")) {
+      payload = payload.substring(2);
+    }
+
+    await proposeArbitraryPayload(
+      squad,
+      vault,
+      Buffer.from(payload, "hex"),
+      WORMHOLE_ADDRESS[cluster]!
+    );
+  });
+
 /**
  * Activate proposal, mostly useful for cleaning up draft proposals that happen when the browser wallet fails to send all transactions succesfully
  */

+ 1 - 1
governance/xc_admin/packages/xc_admin_common/package.json

@@ -21,7 +21,7 @@
   },
   "dependencies": {
     "@certusone/wormhole-sdk": "^0.9.8",
-    "@pythnetwork/client": "^2.10.0",
+    "@pythnetwork/client": "^2.17.0",
     "@solana/buffer-layout": "^4.0.1",
     "@solana/web3.js": "^1.73.0",
     "@sqds/mesh": "^1.0.6",

+ 44 - 0
governance/xc_admin/packages/xc_admin_common/src/__tests__/UpgradeContract.test.ts

@@ -0,0 +1,44 @@
+import {
+  decodeGovernancePayload,
+  PythGovernanceHeader,
+} from "../governance_payload";
+import { CosmosUpgradeContract } from "../governance_payload/UpgradeContract";
+
+test("Upgrade contract ser/de", (done) => {
+  jest.setTimeout(60000);
+
+  const expectedUpgradeContract = new CosmosUpgradeContract(
+    "injective",
+    BigInt("18446744073709551614")
+  );
+  const buffer = expectedUpgradeContract.encode();
+
+  console.log(buffer.toJSON());
+  expect(
+    buffer.equals(
+      Buffer.from([
+        80, 84, 71, 77, 1, 0, 0, 19, 255, 255, 255, 255, 255, 255, 255, 254,
+      ])
+    )
+  ).toBeTruthy();
+
+  const actualHeader = PythGovernanceHeader.decode(buffer);
+
+  if (actualHeader) {
+    expect(actualHeader.targetChainId).toBe("injective");
+    expect(actualHeader.action).toBe("UpgradeContract");
+  } else {
+    done("Not an instance of CosmosUpgradeContract");
+  }
+
+  const actualUpgradeContract = decodeGovernancePayload(buffer);
+
+  if (actualUpgradeContract instanceof CosmosUpgradeContract) {
+    expect(actualUpgradeContract.targetChainId).toBe("injective");
+    expect(actualUpgradeContract.codeId).toBe(BigInt("18446744073709551614"));
+  } else {
+    done("Not an instance of CosmosUpgradeContract");
+  }
+
+  done();
+});

+ 8 - 18
governance/xc_admin/packages/xc_admin_common/src/cluster.ts

@@ -5,7 +5,11 @@ import { Cluster } from "@solana/web3.js";
  * Return whether the cluster is governed remotely or not. For example Pythnet is governed remotely by a mainnet multisig.
  */
 export function isRemoteCluster(cluster: PythCluster) {
-  return cluster == "pythnet" || cluster == "pythtest";
+  return (
+    cluster == "pythnet" ||
+    cluster == "pythtest-conformance" ||
+    cluster == "pythtest-crosschain"
+  );
 }
 
 /**
@@ -15,24 +19,10 @@ export function getMultisigCluster(cluster: PythCluster): Cluster | "localnet" {
   switch (cluster) {
     case "pythnet":
       return "mainnet-beta";
-    case "pythtest":
+    case "pythtest-conformance":
+      return "devnet";
+    case "pythtest-crosschain":
       return "devnet";
-    default:
-      return cluster;
-  }
-}
-
-/**
- * For cluster that are governed remotely (ex : Pythnet from Mainnet) return the network of the remote cluster
- */
-export function getRemoteCluster(
-  cluster: PythCluster
-): PythCluster | "localnet" {
-  switch (cluster) {
-    case "devnet":
-      return "pythtest";
-    case "mainnet-beta":
-      return "pythnet";
     default:
       return cluster;
   }

+ 38 - 0
governance/xc_admin/packages/xc_admin_common/src/governance_payload/UpgradeContract.ts

@@ -0,0 +1,38 @@
+import { ChainName } from "@certusone/wormhole-sdk";
+import { PythGovernanceAction, PythGovernanceHeader } from ".";
+
+export class CosmosUpgradeContract implements PythGovernanceAction {
+  readonly targetChainId: ChainName;
+  readonly codeId: bigint;
+
+  constructor(targetChainId: ChainName, codeId: bigint) {
+    this.targetChainId = targetChainId;
+    this.codeId = codeId;
+  }
+
+  static span: number = 8;
+  static decode(data: Buffer): CosmosUpgradeContract | undefined {
+    const header = PythGovernanceHeader.decode(data);
+    if (!header) return undefined;
+
+    const codeId = data.subarray(PythGovernanceHeader.span).readBigUInt64BE();
+    if (!codeId) return undefined;
+
+    return new CosmosUpgradeContract(header.targetChainId, codeId);
+  }
+
+  /** Encode CosmosUpgradeContract */
+  encode(): Buffer {
+    const headerBuffer = new PythGovernanceHeader(
+      this.targetChainId,
+      "UpgradeContract"
+    ).encode();
+
+    const buffer = Buffer.alloc(
+      PythGovernanceHeader.span + CosmosUpgradeContract.span
+    );
+
+    const span = buffer.writeBigUInt64BE(this.codeId);
+    return Buffer.concat([headerBuffer, buffer.subarray(0, span)]);
+  }
+}

+ 4 - 0
governance/xc_admin/packages/xc_admin_common/src/governance_payload/index.ts

@@ -7,6 +7,7 @@ import {
 import * as BufferLayout from "@solana/buffer-layout";
 import { PACKET_DATA_SIZE } from "@solana/web3.js";
 import { ExecutePostedVaa } from "./ExecutePostedVaa";
+import { CosmosUpgradeContract } from "./UpgradeContract";
 
 export interface PythGovernanceAction {
   readonly targetChainId: ChainName;
@@ -148,6 +149,9 @@ export function decodeGovernancePayload(
   switch (header.action) {
     case "ExecutePostedVaa":
       return ExecutePostedVaa.decode(data);
+    case "UpgradeContract":
+      //TO DO : Support non-cosmos upgrades
+      return CosmosUpgradeContract.decode(data);
     default:
       return undefined;
   }

+ 94 - 4
governance/xc_admin/packages/xc_admin_common/src/propose.ts

@@ -30,6 +30,70 @@ type SquadInstruction = {
   authorityType?: string;
 };
 
+export async function proposeArbitraryPayload(
+  squad: Squads,
+  vault: PublicKey,
+  payload: Buffer,
+  wormholeAddress: PublicKey
+): Promise<PublicKey> {
+  const msAccount = await squad.getMultisig(vault);
+
+  let ixToSend: TransactionInstruction[] = [];
+  const proposalIndex = msAccount.transactionIndex + 1;
+  ixToSend.push(
+    await squad.buildCreateTransaction(
+      msAccount.publicKey,
+      msAccount.authorityIndex,
+      proposalIndex
+    )
+  );
+
+  const newProposalAddress = getTxPDA(
+    vault,
+    new BN(proposalIndex),
+    squad.multisigProgramId
+  )[0];
+
+  const instructionToPropose = await getPostMessageInstruction(
+    squad,
+    vault,
+    newProposalAddress,
+    1,
+    wormholeAddress,
+    payload
+  );
+  ixToSend.push(
+    await squad.buildAddInstruction(
+      vault,
+      newProposalAddress,
+      instructionToPropose.instruction,
+      1,
+      instructionToPropose.authorityIndex,
+      instructionToPropose.authorityBump,
+      instructionToPropose.authorityType
+    )
+  );
+  ixToSend.push(
+    await squad.buildActivateTransaction(vault, newProposalAddress)
+  );
+  ixToSend.push(await squad.buildApproveTransaction(vault, newProposalAddress));
+
+  const txToSend = batchIntoTransactions(ixToSend);
+
+  for (let i = 0; i < txToSend.length; i += SIZE_OF_SIGNED_BATCH) {
+    await new AnchorProvider(
+      squad.connection,
+      squad.wallet,
+      AnchorProvider.defaultOptions()
+    ).sendAll(
+      txToSend.slice(i, i + SIZE_OF_SIGNED_BATCH).map((tx) => {
+        return { tx, signers: [] };
+      })
+    );
+  }
+  return newProposalAddress;
+}
+
 /**
  * Propose an array of `TransactionInstructions` as a proposal
  * @param squad Squads client
@@ -291,14 +355,41 @@ export async function wrapAsRemoteInstruction(
   instructionIndex: number,
   wormholeAddress: PublicKey
 ): Promise<SquadInstruction> {
-  const emitter = squad.getAuthorityPDA(vault, 1);
+  const buffer: Buffer = new ExecutePostedVaa("pythnet", instructions).encode();
+  return await getPostMessageInstruction(
+    squad,
+    vault,
+    proposalAddress,
+    instructionIndex,
+    wormholeAddress,
+    buffer
+  );
+}
 
+/**
+ * Returns a postMessage instruction that will post the provided payload to wormhole when the multisig approves the proposal
+ * @param squad Squads client
+ * @param vault vault public key (the id of the multisig where these instructions should be proposed)
+ * @param proposalAddress address of the proposal
+ * @param instructionIndex index of the instruction within the proposal
+ * @param wormholeAddress address of the Wormhole bridge
+ * @param payload the payload to be posted
+ */
+async function getPostMessageInstruction(
+  squad: Squads,
+  vault: PublicKey,
+  proposalAddress: PublicKey,
+  instructionIndex: number,
+  wormholeAddress: PublicKey,
+  payload: Buffer
+): Promise<SquadInstruction> {
   const [messagePDA, messagePdaBump] = getIxAuthorityPDA(
     proposalAddress,
     new BN(instructionIndex),
     squad.multisigProgramId
   );
 
+  const emitter = squad.getAuthorityPDA(vault, 1);
   const provider = new AnchorProvider(
     squad.connection,
     squad.wallet,
@@ -309,8 +400,6 @@ export async function wrapAsRemoteInstruction(
     provider
   );
 
-  const buffer: Buffer = new ExecutePostedVaa("pythnet", instructions).encode();
-
   const accounts = getPostMessageAccounts(
     wormholeAddress,
     emitter,
@@ -320,7 +409,7 @@ export async function wrapAsRemoteInstruction(
 
   return {
     instruction: await wormholeProgram.methods
-      .postMessage(0, buffer, 0)
+      .postMessage(0, payload, 0)
       .accounts(accounts)
       .instruction(),
     authorityIndex: instructionIndex,
@@ -328,6 +417,7 @@ export async function wrapAsRemoteInstruction(
     authorityType: "custom",
   };
 }
+
 function getPostMessageAccounts(
   wormholeAddress: PublicKey,
   emitter: PublicKey,

+ 8 - 2
governance/xc_admin/packages/xc_admin_common/src/wormhole.ts

@@ -3,7 +3,12 @@ import { PublicKey } from "@solana/web3.js";
 
 export const WORMHOLE_ADDRESS: Record<PythCluster, PublicKey | undefined> = {
   "mainnet-beta": new PublicKey("worm2ZoG2kUd4vFXhvjh93UUH596ayRfgQ2MgjNMTth"),
-  pythtest: new PublicKey("EUrRARh92Cdc54xrDn6qzaqjA77NRrCcfbr8kPwoTL4z"),
+  "pythtest-conformance": new PublicKey(
+    "EUrRARh92Cdc54xrDn6qzaqjA77NRrCcfbr8kPwoTL4z"
+  ),
+  "pythtest-crosschain": new PublicKey(
+    "EUrRARh92Cdc54xrDn6qzaqjA77NRrCcfbr8kPwoTL4z"
+  ),
   devnet: new PublicKey("3u8hJUVTA4jH1wYAyUur7FFZVQ8H635K3tSHHF4ssjQ5"),
   pythnet: new PublicKey("H3fxXJ86ADW2PNuDDmZJg6mzTtPxkYCpNuQUTgmJ7AjU"),
   localnet: new PublicKey("Bridge1p5gheXUvJ6jGWGeCsgPKgnE3YgdGKRVCMY9o"),
@@ -13,7 +18,8 @@ export const WORMHOLE_ADDRESS: Record<PythCluster, PublicKey | undefined> = {
 // Source : https://book.wormhole.com/reference/rpcnodes.html
 export const WORMHOLE_API_ENDPOINT: Record<PythCluster, string | undefined> = {
   "mainnet-beta": "https://wormhole-v2-mainnet-api.certus.one",
-  pythtest: "https://wormhole-v2-testnet-api.certus.one",
+  "pythtest-conformance": "https://wormhole-v2-testnet-api.certus.one",
+  "pythtest-crosschain": "https://wormhole-v2-testnet-api.certus.one",
   devnet: "https://wormhole-v2-testnet-api.certus.one",
   pythnet: "https://wormhole-v2-mainnet-api.certus.one",
   localnet: undefined,

+ 6 - 3
governance/xc_admin/packages/xc_admin_frontend/components/ClusterSwitch.tsx

@@ -49,10 +49,13 @@ const ClusterSwitch = ({ light }: { light?: boolean | null }) => {
       value: 'devnet',
       name: 'devnet',
     },
-    // hide pythtest as its broken
     {
-      value: 'pythtest',
-      name: 'pythtest',
+      value: 'pythtest-conformance',
+      name: 'pythtest-conformance',
+    },
+    {
+      value: 'pythtest-crosschain',
+      name: 'pythtest-crosschain',
     },
   ]
 

+ 25 - 6
governance/xc_admin/packages/xc_admin_frontend/components/tabs/General.tsx

@@ -391,6 +391,23 @@ const General = () => {
                 .instruction()
             )
           }
+
+          if (
+            JSON.stringify(prev.priceAccounts[0].expo) !==
+            JSON.stringify(newChanges.priceAccounts[0].expo)
+          ) {
+            // create update exponent instruction
+            instructions.push(
+              await pythProgramClient.methods
+                .setExponent(newChanges.priceAccounts[0].expo, 1)
+                .accounts({
+                  fundingAccount,
+                  priceAccount: new PublicKey(prev.priceAccounts[0].address),
+                })
+                .instruction()
+            )
+          }
+
           // check if minPub has changed
           if (
             prev.priceAccounts[0].minPub !== newChanges.priceAccounts[0].minPub
@@ -609,14 +626,16 @@ const General = () => {
     )
   }
 
-  const OldPriceFeedsRows = ({ priceFeedData }: { priceFeedData: any }) => {
+  const OldPriceFeedsRows = ({
+    priceFeedSymbol,
+  }: {
+    priceFeedSymbol: string
+  }) => {
     return (
       <>
-        <tr key={priceFeedData.metadata.symbol}>
+        <tr key={priceFeedSymbol}>
           <td className="base16 py-4 pl-6 pr-2 lg:pl-6">Symbol</td>
-          <td className="base16 py-4 pl-1 pr-2 lg:pl-6">
-            {priceFeedData.metadata.symbol}
-          </td>
+          <td className="base16 py-4 pl-1 pr-2 lg:pl-6">{priceFeedSymbol}</td>
         </tr>
       </>
     )
@@ -659,7 +678,7 @@ const General = () => {
                   {addPriceFeed ? (
                     <NewPriceFeedsRows key={key} priceFeedData={newChanges} />
                   ) : deletePriceFeed ? (
-                    <OldPriceFeedsRows key={key} priceFeedData={prev} />
+                    <OldPriceFeedsRows key={key} priceFeedSymbol={key} />
                   ) : (
                     diff.map((k) =>
                       k === 'metadata' ? (

+ 5 - 13
governance/xc_admin/packages/xc_admin_frontend/components/tabs/Proposals.tsx

@@ -16,7 +16,6 @@ import {
   ExecutePostedVaa,
   getMultisigCluster,
   getProposals,
-  getRemoteCluster,
   MultisigInstruction,
   MultisigParser,
   PRICE_FEED_MULTISIG,
@@ -581,12 +580,7 @@ const Proposal = ({
                   className="flex justify-between"
                 >
                   <div>Target Chain</div>
-                  <div>
-                    {instruction.governanceAction.targetChainId === 'pythnet' &&
-                    getRemoteCluster(cluster) === 'pythtest'
-                      ? 'pythtest'
-                      : 'pythnet'}
-                  </div>
+                  <div>{cluster}</div>
                 </div>
               </>
             ) : null}
@@ -775,9 +769,8 @@ const Proposal = ({
                 {instruction.governanceAction instanceof ExecutePostedVaa
                   ? instruction.governanceAction.instructions.map(
                       (innerInstruction, index) => {
-                        const multisigParser = MultisigParser.fromCluster(
-                          getRemoteCluster(cluster)
-                        )
+                        const multisigParser =
+                          MultisigParser.fromCluster(cluster)
                         const parsedInstruction =
                           multisigParser.parseInstruction({
                             programId: innerInstruction.programId,
@@ -1123,9 +1116,8 @@ const Proposals = ({
                 ix.name === 'postMessage' &&
                 ix.governanceAction instanceof ExecutePostedVaa &&
                 ix.governanceAction.instructions.every((remoteIx) => {
-                  const innerMultisigParser = MultisigParser.fromCluster(
-                    getRemoteCluster(cluster)
-                  )
+                  const innerMultisigParser =
+                    MultisigParser.fromCluster(cluster)
                   const parsedRemoteInstruction =
                     innerMultisigParser.parseInstruction({
                       programId: remoteIx.programId,

+ 10 - 3
governance/xc_admin/packages/xc_admin_frontend/hooks/useMultisig.ts

@@ -1,5 +1,6 @@
 import { Wallet } from '@coral-xyz/anchor'
 import NodeWallet from '@coral-xyz/anchor/dist/cjs/nodewallet'
+import { getPythProgramKeyForCluster } from '@pythnetwork/client'
 import { useAnchorWallet } from '@solana/wallet-adapter-react'
 import {
   AccountMeta,
@@ -12,6 +13,7 @@ import SquadsMesh from '@sqds/mesh'
 import { MultisigAccount, TransactionAccount } from '@sqds/mesh/lib/types'
 import { useContext, useEffect, useState } from 'react'
 import {
+  ExecutePostedVaa,
   getManyProposalsInstructions,
   getMultisigCluster,
   getProposals,
@@ -129,7 +131,6 @@ export const useMultisig = (wallet: Wallet): MultisigHookData => {
         )
         try {
           if (cancelled) return
-          // DELETE THIS TRY CATCH ONCE THIS MULTISIG EXISTS EVERYWHERE
           setpriceFeedMultisigAccount(
             await readOnlySquads.getMultisig(
               PRICE_FEED_MULTISIG[getMultisigCluster(cluster)]
@@ -149,7 +150,6 @@ export const useMultisig = (wallet: Wallet): MultisigHookData => {
         )
         try {
           if (cancelled) return
-          // DELETE THIS TRY CATCH ONCE THIS MULTISIG EXISTS EVERYWHERE
           const sortedPriceFeedMultisigProposals = await getSortedProposals(
             readOnlySquads,
             PRICE_FEED_MULTISIG[getMultisigCluster(cluster)]
@@ -178,7 +178,14 @@ export const useMultisig = (wallet: Wallet): MultisigHookData => {
             if (
               isRemoteCluster(cluster) &&
               ixs.length > 0 &&
-              ixs.some((ix) => ix instanceof WormholeMultisigInstruction)
+              ixs.some(
+                (ix) =>
+                  ix instanceof WormholeMultisigInstruction &&
+                  ix.governanceAction instanceof ExecutePostedVaa &&
+                  ix.governanceAction.instructions.some((ix) =>
+                    ix.programId.equals(getPythProgramKeyForCluster(cluster))
+                  )
+              )
             ) {
               proposalsRes.push(sortedPriceFeedMultisigProposals[idx])
               instructionsRes.push(ixs)

+ 11 - 1
governance/xc_admin/packages/xc_admin_frontend/utils/pythClusterApiUrl.ts

@@ -47,7 +47,17 @@ const CLUSTER_URLS: Record<PythCluster, any> = {
       wsUrl: 'wss://api.testnet.solana.com/',
     },
   ],
-  pythtest: [
+  'pythtest-conformance': [
+    {
+      rpcUrl: 'http://pythtest.xyz.pyth.network',
+      wsUrl: 'ws://pythtest.xyz.pyth.network',
+    },
+    {
+      rpcUrl: 'https://api.pythtest.pyth.network/',
+      wsUrl: 'wss://api.pythtest.pyth.network/',
+    },
+  ],
+  'pythtest-crosschain': [
     {
       rpcUrl: 'http://pythtest.xyz.pyth.network',
       wsUrl: 'ws://pythtest.xyz.pyth.network',

+ 5 - 0
governance/xc_governance_sdk_js/src/chains.ts

@@ -12,6 +12,11 @@ export const RECEIVER_CHAINS = {
   polygon_zkevm: 60008,
   canto: 60009,
   meter: 60010,
+  mantle: 60011,
+  conflux_espace: 60012,
+  sei: 60013,
+  osmosis: 60014,
+  neutron: 60015,
 };
 
 // If there is any overlapping value the receiver chain will replace the wormhole

+ 1 - 0
governance/xc_governance_sdk_js/src/index.ts

@@ -9,6 +9,7 @@ export {
   SetValidPeriodInstruction,
   RequestGovernanceDataSourceTransferInstruction,
   AuthorizeGovernanceDataSourceTransferInstruction,
+  CosmwasmUpgradeContractInstruction,
   Instruction,
 } from "./instructions";
 

+ 5 - 4
hermes/Cargo.lock

@@ -335,6 +335,7 @@ checksum = "6137c6234afb339e75e764c866e3594900f0211e1315d33779f269bbe2ec6967"
 dependencies = [
  "async-trait",
  "axum-core",
+ "axum-macros",
  "base64 0.21.0",
  "bitflags",
  "bytes",
@@ -3466,9 +3467,9 @@ dependencies = [
 
 [[package]]
 name = "regex"
-version = "1.7.1"
+version = "1.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "48aaa5748ba571fb95cd2c85c09f629215d3a6ece942baa100950af03a34f733"
+checksum = "8b1f693b24f6ac912f4893ef08244d70b6067480d2f1a46e950c9691e6749d1d"
 dependencies = [
  "aho-corasick",
  "memchr",
@@ -3483,9 +3484,9 @@ checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
 
 [[package]]
 name = "regex-syntax"
-version = "0.6.28"
+version = "0.6.29"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "456c603be3e8d448b072f410900c09faf164fbce2d480456f50eea6e25f9c848"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
 
 [[package]]
 name = "remove_dir_all"

+ 4 - 4
hermes/Cargo.toml

@@ -4,14 +4,16 @@ version                        = "0.1.0"
 edition                        = "2021"
 
 [dependencies]
-axum                           = { version = "0.6.9", features = ["json", "ws"] }
+axum                           = { version = "0.6.9", features = ["json", "ws", "macros"] }
 axum-extra                     = { version = "0.7.2", features = ["query"] }
 axum-macros                    = { version = "0.3.4" }
 anyhow                         = { version = "1.0.69" }
+base64                         = { version = "0.21.0" }
 borsh                          = { version = "0.9.0" }
 bs58                           = { version = "0.4.0" }
 dashmap                        = { version = "5.4.0" }
 der                            = { version = "0.7.0" }
+derive_more                    = { version = "0.99.17" }
 env_logger                     = { version = "0.10.0" }
 futures                        = { version = "0.3.26" }
 hex                            = { version = "0.4.3" }
@@ -26,7 +28,7 @@ secp256k1                      = { version = "0.26.0", features = ["rand", "reco
 serde                          = { version = "1.0.152", features = ["derive"] }
 serde_arrays                   = { version = "0.1.0" }
 serde_cbor                     = { version = "0.11.2" }
-serde_json                      = { version = "1.0.93" }
+serde_json                     = { version = "1.0.93" }
 sha256                         = { version = "1.1.2" }
 structopt                      = { version = "0.3.26" }
 tokio                          = { version = "1.26.0", features = ["full"] }
@@ -58,5 +60,3 @@ libp2p                         = { version = "0.51.1", features = [
     "websocket",
     "yamux",
 ]}
-base64 = "0.21.0"
-derive_more = "0.99.17"

+ 15 - 6
hermes/src/config.rs

@@ -24,13 +24,22 @@ pub enum Options {
         #[structopt(long)]
         id_secp256k1: Option<PathBuf>,
 
-        /// Multiaddress for a Wormhole bootstrap peer.
-        #[structopt(long)]
-        wormhole_peer: Option<String>,
+        /// Network ID for Wormhole
+        #[structopt(long, env = "WORMHOLE_NETWORK_ID")]
+        wh_network_id: String,
 
-        /// Multiaddress to bind Wormhole P2P to.
-        #[structopt(long)]
-        wormhole_addr: Option<Multiaddr>,
+        /// Multiaddresses for Wormhole bootstrap peers (separated by comma).
+        #[structopt(long, use_delimiter = true, env = "WORMHOLE_BOOTSTRAP_ADDRS")]
+        wh_bootstrap_addrs: Vec<Multiaddr>,
+
+        /// Multiaddresses to bind Wormhole P2P to (separated by comma)
+        #[structopt(
+            long,
+            use_delimiter = true,
+            default_value = "/ip4/0.0.0.0/udp/30910/quic,/ip6/::/udp/30910/quic",
+            env = "WORMHOLE_LISTEN_ADDRS"
+        )]
+        wh_listen_addrs: Vec<Multiaddr>,
 
         /// The address to bind the RPC server to.
         #[structopt(long, default_value = "127.0.0.1:33999")]

+ 41 - 0
hermes/src/macros.rs

@@ -0,0 +1,41 @@
+#[macro_export]
+/// A macro that generates Deserialize from string for a struct S that wraps [u8; N] where N is a
+/// compile-time constant. This macro deserializes a string with or without leading 0x and supports
+/// both lower case and upper case hex characters.
+macro_rules! impl_deserialize_for_hex_string_wrapper {
+    ($struct_name:ident, $array_size:expr) => {
+        impl<'de> serde::Deserialize<'de> for $struct_name {
+            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+            where
+                D: serde::Deserializer<'de>,
+            {
+                struct HexVisitor;
+
+                impl<'de> serde::de::Visitor<'de> for HexVisitor {
+                    type Value = [u8; $array_size];
+
+                    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
+                        write!(formatter, "a hex string of length {}", $array_size * 2)
+                    }
+
+                    fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+                    where
+                        E: serde::de::Error,
+                    {
+                        let s = s.trim_start_matches("0x");
+                        let bytes = hex::decode(s)
+                            .map_err(|_| E::invalid_value(serde::de::Unexpected::Str(s), &self))?;
+                        if bytes.len() != $array_size {
+                            return Err(E::invalid_length(bytes.len(), &self));
+                        }
+                        let mut array = [0_u8; $array_size];
+                        array.copy_from_slice(&bytes);
+                        Ok(array)
+                    }
+                }
+
+                deserializer.deserialize_str(HexVisitor).map($struct_name)
+            }
+        }
+    };
+}

+ 11 - 3
hermes/src/main.rs

@@ -16,6 +16,7 @@ use {
 };
 
 mod config;
+mod macros;
 mod network;
 mod store;
 
@@ -51,8 +52,9 @@ async fn init(_update_channel: Receiver<AccountUpdate>) -> Result<()> {
         config::Options::Run {
             id: _,
             id_secp256k1: _,
-            wormhole_addr: _,
-            wormhole_peer: _,
+            wh_network_id,
+            wh_bootstrap_addrs,
+            wh_listen_addrs,
             rpc_addr,
             p2p_addr,
             p2p_peer: _,
@@ -61,7 +63,13 @@ async fn init(_update_channel: Receiver<AccountUpdate>) -> Result<()> {
 
             // Spawn the P2P layer.
             log::info!("Starting P2P server on {}", p2p_addr);
-            network::p2p::spawn(handle_message).await?;
+            network::p2p::spawn(
+                handle_message,
+                wh_network_id.to_string(),
+                wh_bootstrap_addrs,
+                wh_listen_addrs,
+            )
+            .await?;
 
             // Spawn the RPC server.
             log::info!("Starting RPC server on {}", rpc_addr);

+ 116 - 117
hermes/src/network/p2p.go

@@ -8,6 +8,7 @@
 package main
 
 // #include <stdlib.h>
+// #include <string.h>
 //
 // // A structure containing Wormhole VAA observations. This must match on both
 // // the Go and Rust side.
@@ -27,6 +28,7 @@ import "C"
 import (
 	"context"
 	"fmt"
+	"strings"
 
 	"github.com/libp2p/go-libp2p"
 	"github.com/libp2p/go-libp2p/core/crypto"
@@ -45,123 +47,120 @@ import (
 )
 
 //export RegisterObservationCallback
-func RegisterObservationCallback(f C.callback_t) {
-    go func() {
-        ctx := context.Background()
-
-        // Setup base network configuration.
-        networkID      := "/wormhole/mainnet/2"
-        priv, _, err   := crypto.GenerateKeyPair(crypto.Ed25519, -1)
-        bootstrapPeers := []string{
-            "/dns4/wormhole-mainnet-v2-bootstrap.certus.one/udp/8999/quic/p2p/12D3KooWQp644DK27fd3d4Km3jr7gHiuJJ5ZGmy8hH4py7fP4FP7",
-        }
-
-        // Setup libp2p Connection Manager.
-        mgr, err := connmgr.NewConnManager(
-            100,
-            400,
-            connmgr.WithGracePeriod(0),
-        )
-
-        if err != nil {
-            err := fmt.Errorf("Failed to create connection manager: %w", err)
-            fmt.Println(err)
-            return
-        }
-
-        // Setup libp2p Reactor.
-        h, err := libp2p.New(
-            libp2p.Identity(priv),
-            libp2p.ListenAddrStrings(
-                "/ip4/0.0.0.0/udp/30910/quic",
-                "/ip6/::/udp/30910/quic",
-            ),
-            libp2p.Security(libp2ptls.ID, libp2ptls.New),
-            libp2p.Transport(libp2pquic.NewTransport),
-            libp2p.ConnectionManager(mgr),
-            libp2p.Routing(func(h host.Host) (routing.PeerRouting, error) {
-                bootstrappers := make([]peer.AddrInfo, 0)
-                for _, addr := range bootstrapPeers {
-                    ma, err := multiaddr.NewMultiaddr(addr)
-                    if err != nil {
-                        continue
-                    }
-
-                    pi, err := peer.AddrInfoFromP2pAddr(ma)
-                    if err != nil || pi.ID == h.ID() {
-                        continue
-                    }
-
-                    bootstrappers = append(bootstrappers, *pi)
-                }
-                idht, err := dht.New(ctx, h, dht.Mode(dht.ModeServer),
-                    dht.ProtocolPrefix(protocol.ID("/"+networkID)),
-                    dht.BootstrapPeers(bootstrappers...),
-                )
-                return idht, err
-            }),
-        )
-
-        if err != nil {
-            err := fmt.Errorf("Failed to create libp2p host: %w", err)
-            fmt.Println(err)
-            return
-        }
-
-        topic := fmt.Sprintf("%s/%s", networkID, "broadcast")
-        ps, err := pubsub.NewGossipSub(ctx, h)
-        if err != nil {
-            err := fmt.Errorf("Failed to create Pubsub: %w", err)
-            fmt.Println(err)
-            return
-        }
-
-        th, err := ps.Join(topic)
-        if err != nil {
-             err := fmt.Errorf("Failed to join topic: %w", err)
-             fmt.Println(err)
-             return
-        }
-
-        sub, err := th.Subscribe()
-        if err != nil {
-            err := fmt.Errorf("Failed to subscribe topic: %w", err)
-            fmt.Println(err)
-            return
-        }
-
-        for {
-            for {
-                select {
-                case <-ctx.Done():
-                    return
-                default:
-                    envelope, err := sub.Next(ctx)
-                    if err != nil {
-                        err := fmt.Errorf("Failed to receive Pubsub message: %w", err)
-                        fmt.Println(err)
-                        return
-                    }
-
-                    // Definition for GossipMessage is generated by Protobuf, see `p2p.proto`.
-                    var msg GossipMessage
-                    err = proto.Unmarshal(envelope.Data, &msg)
-
-                    switch msg.Message.(type) {
-                    case *GossipMessage_SignedObservation:
-                    case *GossipMessage_SignedVaaWithQuorum:
-                        vaaBytes := msg.GetSignedVaaWithQuorum().GetVaa()
-                        cBytes := C.CBytes(vaaBytes)
-                        defer C.free(cBytes)
-                        C.invoke(f, C.observation_t{
-                            vaa: (*C.char)(cBytes),
-                            vaa_len: C.size_t(len(vaaBytes)),
-                        })
-                    }
-                }
-            }
-        }
-    }()
+func RegisterObservationCallback(f C.callback_t, network_id, bootstrap_addrs, listen_addrs *C.char) {
+	networkID := C.GoString(network_id)
+	bootstrapAddrs := strings.Split(C.GoString(bootstrap_addrs), ",")
+	listenAddrs := strings.Split(C.GoString(listen_addrs), ",")
+
+	go func() {
+		ctx := context.Background()
+
+		// Setup base network configuration.
+		priv, _, err := crypto.GenerateKeyPair(crypto.Ed25519, -1)
+
+		// Setup libp2p Connection Manager.
+		mgr, err := connmgr.NewConnManager(
+			100,
+			400,
+			connmgr.WithGracePeriod(0),
+		)
+
+		if err != nil {
+			err := fmt.Errorf("Failed to create connection manager: %w", err)
+			fmt.Println(err)
+			return
+		}
+
+		// Setup libp2p Reactor.
+		h, err := libp2p.New(
+			libp2p.Identity(priv),
+			libp2p.ListenAddrStrings(listenAddrs...),
+			libp2p.Security(libp2ptls.ID, libp2ptls.New),
+			libp2p.Transport(libp2pquic.NewTransport),
+			libp2p.ConnectionManager(mgr),
+			libp2p.Routing(func(h host.Host) (routing.PeerRouting, error) {
+				bootstrappers := make([]peer.AddrInfo, 0)
+				for _, addr := range bootstrapAddrs {
+					ma, err := multiaddr.NewMultiaddr(addr)
+					if err != nil {
+						continue
+					}
+
+					pi, err := peer.AddrInfoFromP2pAddr(ma)
+					if err != nil || pi.ID == h.ID() {
+						continue
+					}
+
+					bootstrappers = append(bootstrappers, *pi)
+				}
+				idht, err := dht.New(ctx, h, dht.Mode(dht.ModeServer),
+					dht.ProtocolPrefix(protocol.ID("/"+networkID)),
+					dht.BootstrapPeers(bootstrappers...),
+				)
+				return idht, err
+			}),
+		)
+
+		if err != nil {
+			err := fmt.Errorf("Failed to create libp2p host: %w", err)
+			fmt.Println(err)
+			return
+		}
+
+		topic := fmt.Sprintf("%s/%s", networkID, "broadcast")
+		ps, err := pubsub.NewGossipSub(ctx, h)
+		if err != nil {
+			err := fmt.Errorf("Failed to create Pubsub: %w", err)
+			fmt.Println(err)
+			return
+		}
+
+		th, err := ps.Join(topic)
+		if err != nil {
+			err := fmt.Errorf("Failed to join topic: %w", err)
+			fmt.Println(err)
+			return
+		}
+
+		sub, err := th.Subscribe()
+		if err != nil {
+			err := fmt.Errorf("Failed to subscribe topic: %w", err)
+			fmt.Println(err)
+			return
+		}
+
+		for {
+			for {
+				select {
+				case <-ctx.Done():
+					return
+				default:
+					envelope, err := sub.Next(ctx)
+					if err != nil {
+						err := fmt.Errorf("Failed to receive Pubsub message: %w", err)
+						fmt.Println(err)
+						return
+					}
+
+					// Definition for GossipMessage is generated by Protobuf, see `p2p.proto`.
+					var msg GossipMessage
+					err = proto.Unmarshal(envelope.Data, &msg)
+
+					switch msg.Message.(type) {
+					case *GossipMessage_SignedObservation:
+					case *GossipMessage_SignedVaaWithQuorum:
+						vaaBytes := msg.GetSignedVaaWithQuorum().GetVaa()
+						cBytes := C.CBytes(vaaBytes)
+						defer C.free(cBytes)
+						C.invoke(f, C.observation_t{
+							vaa:     (*C.char)(cBytes),
+							vaa_len: C.size_t(len(vaaBytes)),
+						})
+					}
+				}
+			}
+		}
+	}()
 }
 
 func main() {

+ 58 - 10
hermes/src/network/p2p.rs

@@ -11,17 +11,29 @@
 
 use {
     anyhow::Result,
-    std::sync::{
-        mpsc::{
-            Receiver,
-            Sender,
+    libp2p::Multiaddr,
+    std::{
+        ffi::{
+            c_char,
+            CString,
+        },
+        sync::{
+            mpsc::{
+                Receiver,
+                Sender,
+            },
+            Mutex,
         },
-        Mutex,
     },
 };
 
 extern "C" {
-    fn RegisterObservationCallback(cb: extern "C" fn(o: ObservationC));
+    fn RegisterObservationCallback(
+        cb: extern "C" fn(o: ObservationC),
+        network_id: *const c_char,
+        bootstrap_addrs: *const c_char,
+        listen_addrs: *const c_char,
+    );
 }
 
 // An `Observation` C type passed back to us from Go.
@@ -64,22 +76,58 @@ extern "C" fn proxy(o: ObservationC) {
 /// TODO: handle_message should be capable of handling more than just Observations. But we don't
 /// have our own P2P network, we pass it in to keep the code structure and read directly from the
 /// OBSERVATIONS channel in the RPC for now.
-pub fn bootstrap<H>(_handle_message: H) -> Result<()>
+pub fn bootstrap<H>(
+    _handle_message: H,
+    network_id: String,
+    wh_bootstrap_addrs: Vec<Multiaddr>,
+    wh_listen_addrs: Vec<Multiaddr>,
+) -> Result<()>
 where
     H: Fn(Observation) -> Result<()> + 'static,
 {
+    let network_id_cstr = CString::new(network_id)?;
+    let wh_bootstrap_addrs_cstr = CString::new(
+        wh_bootstrap_addrs
+            .iter()
+            .map(|a| a.to_string())
+            .collect::<Vec<_>>()
+            .join(","),
+    )?;
+    let wh_listen_addrs_cstr = CString::new(
+        wh_listen_addrs
+            .iter()
+            .map(|a| a.to_string())
+            .collect::<Vec<_>>()
+            .join(","),
+    )?;
+
     // Launch the Go LibP2P Reactor.
     unsafe {
-        RegisterObservationCallback(proxy as extern "C" fn(o: ObservationC));
+        RegisterObservationCallback(
+            proxy as extern "C" fn(observation: ObservationC),
+            network_id_cstr.as_ptr(),
+            wh_bootstrap_addrs_cstr.as_ptr(),
+            wh_listen_addrs_cstr.as_ptr(),
+        );
     }
     Ok(())
 }
 
 // Spawn's the P2P layer as a separate thread via Go.
-pub async fn spawn<H>(handle_message: H) -> Result<()>
+pub async fn spawn<H>(
+    handle_message: H,
+    network_id: String,
+    wh_bootstrap_addrs: Vec<Multiaddr>,
+    wh_listen_addrs: Vec<Multiaddr>,
+) -> Result<()>
 where
     H: Fn(Observation) -> Result<()> + Send + 'static,
 {
-    bootstrap(handle_message)?;
+    bootstrap(
+        handle_message,
+        network_id,
+        wh_bootstrap_addrs,
+        wh_listen_addrs,
+    )?;
     Ok(())
 }

+ 21 - 6
hermes/src/network/rpc.rs

@@ -1,4 +1,5 @@
 use {
+    self::ws::dispatch_updates,
     crate::{
         network::p2p::OBSERVATIONS,
         store::{
@@ -11,24 +12,31 @@ use {
         routing::get,
         Router,
     },
+    std::sync::Arc,
 };
 
 mod rest;
+mod types;
+mod ws;
 
 #[derive(Clone)]
 pub struct State {
     pub store: Store,
+    pub ws:    Arc<ws::WsState>,
 }
 
 impl State {
     pub fn new(store: Store) -> Self {
-        Self { store }
+        Self {
+            store,
+            ws: Arc::new(ws::WsState::new()),
+        }
     }
 }
 
 /// This method provides a background service that responds to REST requests
 ///
-/// Currently this is based on Axum due to the simplicity and strong ecosystem support for the
+/// Currently this is based on Axum due to the simplicity and strong ecosystem support for the
 /// packages they are based on (tokio & hyper).
 pub async fn spawn(rpc_addr: String, store: Store) -> Result<()> {
     let state = State::new(store);
@@ -39,16 +47,23 @@ pub async fn spawn(rpc_addr: String, store: Store) -> Result<()> {
     let app = app
         .route("/", get(rest::index))
         .route("/live", get(rest::live))
-        .route("/latest_price_feeds", get(rest::latest_price_feeds))
-        .route("/latest_vaas", get(rest::latest_vaas))
+        .route("/ws", get(ws::ws_route_handler))
+        .route("/api/latest_price_feeds", get(rest::latest_price_feeds))
+        .route("/api/latest_vaas", get(rest::latest_vaas))
+        .route("/api/get_vaa", get(rest::get_vaa))
+        .route("/api/get_vaa_ccip", get(rest::get_vaa_ccip))
+        .route("/api/price_feed_ids", get(rest::price_feed_ids))
         .with_state(state.clone());
 
     // Listen in the background for new VAA's from the Wormhole RPC.
     tokio::spawn(async move {
         loop {
             if let Ok(observation) = OBSERVATIONS.1.lock().unwrap().recv() {
-                if let Err(e) = state.store.store_update(Update::Vaa(observation)) {
-                    log::error!("Failed to process VAA: {:?}", e);
+                match state.store.store_update(Update::Vaa(observation)) {
+                    Ok(updated_feed_ids) => {
+                        tokio::spawn(dispatch_updates(updated_feed_ids, state.clone()));
+                    }
+                    Err(e) => log::error!("Failed to process VAA: {:?}", e),
                 }
             }
         }

+ 145 - 48
hermes/src/network/rpc/rest.rs

@@ -1,18 +1,11 @@
+use super::types::PriceIdInput;
 use {
+    super::types::RpcPriceFeed,
     crate::store::RequestTime,
-    base64::{
-        engine::general_purpose::STANDARD as base64_standard_engine,
-        Engine as _,
+    crate::{
+        impl_deserialize_for_hex_string_wrapper,
+        store::UnixTimestamp,
     },
-    pyth_sdk::{
-        PriceFeed,
-        PriceIdentifier,
-    },
-};
-// This file implements a REST service for the Price Service. This is a mostly direct copy of the
-// TypeScript implementation in the `pyth-crosschain` repo. It uses `axum` as the web framework and
-// `tokio` as the async runtime.
-use {
     anyhow::Result,
     axum::{
         extract::State,
@@ -24,89 +17,186 @@ use {
         Json,
     },
     axum_extra::extract::Query, // Axum extra Query allows us to parse multi-value query parameters.
+    base64::{
+        engine::general_purpose::STANDARD as base64_standard_engine,
+        Engine as _,
+    },
+    derive_more::{
+        Deref,
+        DerefMut,
+    },
+    pyth_sdk::PriceIdentifier,
 };
 
 pub enum RestError {
-    InvalidPriceId,
     UpdateDataNotFound,
+    CcipUpdateDataNotFound,
 }
 
 impl IntoResponse for RestError {
     fn into_response(self) -> Response {
         match self {
-            RestError::InvalidPriceId => {
-                (StatusCode::BAD_REQUEST, "Invalid Price Id").into_response()
-            }
             RestError::UpdateDataNotFound => {
                 (StatusCode::NOT_FOUND, "Update data not found").into_response()
             }
+            RestError::CcipUpdateDataNotFound => {
+                // Returning Bad Gateway error because CCIP expects a 5xx error if it needs to
+                // retry or try other endpoints. Bad Gateway seems the best choice here as this
+                // is not an internal error and could happen on two scenarios:
+                // 1. DB Api is not responding well (Bad Gateway is appropriate here)
+                // 2. Publish time is a few seconds before current time and a VAA
+//    will be available in a few seconds. So we want the client to retry.
+
+                (StatusCode::BAD_GATEWAY, "CCIP update data not found").into_response()
+            }
         }
     }
 }
 
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-pub struct LatestVaaQueryParams {
-    ids: Vec<String>,
+pub async fn price_feed_ids(
+    State(state): State<super::State>,
+) -> Result<Json<Vec<PriceIdentifier>>, RestError> {
+    let price_feeds = state.store.get_price_feed_ids();
+    Ok(Json(price_feeds))
+}
+
+#[derive(Debug, serde::Deserialize)]
+pub struct LatestVaasQueryParams {
+    ids: Vec<PriceIdInput>,
 }
 
-/// REST endpoint /latest_vaas?ids[]=...&ids[]=...&ids[]=...
-///
-/// TODO: This endpoint returns update data as an array of base64 encoded strings. We want
-/// to support other formats such as hex in the future.
+
 pub async fn latest_vaas(
     State(state): State<super::State>,
-    Query(params): Query<LatestVaaQueryParams>,
+    Query(params): Query<LatestVaasQueryParams>,
 ) -> Result<Json<Vec<String>>, RestError> {
-    // TODO: Find better ways to validate query parameters.
-    // FIXME: Handle ids with leading 0x
-    let price_ids: Vec<PriceIdentifier> = params
-        .ids
-        .iter()
-        .map(PriceIdentifier::from_hex)
-        .collect::<Result<Vec<PriceIdentifier>, _>>()
-        .map_err(|_| RestError::InvalidPriceId)?;
+    let price_ids: Vec<PriceIdentifier> = params.ids.into_iter().map(|id| id.into()).collect();
     let price_feeds_with_update_data = state
         .store
         .get_price_feeds_with_update_data(price_ids, RequestTime::Latest)
         .map_err(|_| RestError::UpdateDataNotFound)?;
     Ok(Json(
         price_feeds_with_update_data
-            .update_data
             .batch_vaa
+            .update_data
             .iter()
-            .map(|vaa_bytes| base64_standard_engine.encode(vaa_bytes))
+            .map(|vaa_bytes| base64_standard_engine.encode(vaa_bytes)) // TODO: Support multiple
+            // encoding formats
             .collect(),
     ))
 }
 
-#[derive(Debug, serde::Serialize, serde::Deserialize)]
-pub struct LatestPriceFeedParams {
-    ids: Vec<String>,
+#[derive(Debug, serde::Deserialize)]
+pub struct LatestPriceFeedsQueryParams {
+    ids:     Vec<PriceIdInput>,
+    #[serde(default)]
+    verbose: bool,
+    #[serde(default)]
+    binary:  bool,
 }
 
-/// REST endpoint /latest_vaas?ids[]=...&ids[]=...&ids[]=...
 pub async fn latest_price_feeds(
     State(state): State<super::State>,
-    Query(params): Query<LatestPriceFeedParams>,
-) -> Result<Json<Vec<PriceFeed>>, RestError> {
-    let price_ids: Vec<PriceIdentifier> = params
-        .ids
-        .iter()
-        .map(PriceIdentifier::from_hex)
-        .collect::<Result<Vec<PriceIdentifier>, _>>()
-        .map_err(|_| RestError::InvalidPriceId)?;
+    Query(params): Query<LatestPriceFeedsQueryParams>,
+) -> Result<Json<Vec<RpcPriceFeed>>, RestError> {
+    let price_ids: Vec<PriceIdentifier> = params.ids.into_iter().map(|id| id.into()).collect();
     let price_feeds_with_update_data = state
         .store
         .get_price_feeds_with_update_data(price_ids, RequestTime::Latest)
         .map_err(|_| RestError::UpdateDataNotFound)?;
     Ok(Json(
         price_feeds_with_update_data
-            .price_feeds
+            .batch_vaa
+            .price_infos
             .into_values()
+            .map(|price_info| {
+                RpcPriceFeed::from_price_info(price_info, params.verbose, params.binary)
+            })
             .collect(),
     ))
 }
 
+#[derive(Debug, serde::Deserialize)]
+pub struct GetVaaQueryParams {
+    id:           PriceIdInput,
+    publish_time: UnixTimestamp,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct GetVaaResponse {
+    pub vaa:          String,
+    #[serde(rename = "publishTime")]
+    pub publish_time: UnixTimestamp,
+}
+
+pub async fn get_vaa(
+    State(state): State<super::State>,
+    Query(params): Query<GetVaaQueryParams>,
+) -> Result<Json<GetVaaResponse>, RestError> {
+    let price_id: PriceIdentifier = params.id.into();
+
+    let price_feeds_with_update_data = state
+        .store
+        .get_price_feeds_with_update_data(
+            vec![price_id],
+            RequestTime::FirstAfter(params.publish_time),
+        )
+        .map_err(|_| RestError::UpdateDataNotFound)?;
+
+    let vaa = price_feeds_with_update_data
+        .batch_vaa
+        .update_data
+        .get(0)
+        .map(|vaa_bytes| base64_standard_engine.encode(vaa_bytes))
+        .ok_or(RestError::UpdateDataNotFound)?;
+
+    let publish_time = price_feeds_with_update_data
+        .batch_vaa
+        .price_infos
+        .get(&price_id)
+        .map(|price_info| price_info.publish_time)
+        .ok_or(RestError::UpdateDataNotFound)?;
+
+    Ok(Json(GetVaaResponse { vaa, publish_time }))
+}
+
+#[derive(Debug, Clone, Deref, DerefMut)]
+pub struct GetVaaCcipInput([u8; 40]);
+impl_deserialize_for_hex_string_wrapper!(GetVaaCcipInput, 40);
+
+#[derive(Debug, serde::Deserialize)]
+pub struct GetVaaCcipQueryParams {
+    data: GetVaaCcipInput,
+}
+
+#[derive(Debug, serde::Serialize)]
+pub struct GetVaaCcipResponse {
+    data: String, // TODO: Use a typed wrapper for the hex output with leading 0x.
+}
+
+pub async fn get_vaa_ccip(
+    State(state): State<super::State>,
+    Query(params): Query<GetVaaCcipQueryParams>,
+) -> Result<Json<GetVaaCcipResponse>, RestError> {
+    let price_id: PriceIdentifier = PriceIdentifier::new(params.data[0..32].try_into().unwrap());
+    let publish_time = UnixTimestamp::from_be_bytes(params.data[32..40].try_into().unwrap());
+
+    let price_feeds_with_update_data = state
+        .store
+        .get_price_feeds_with_update_data(vec![price_id], RequestTime::FirstAfter(publish_time))
+        .map_err(|_| RestError::CcipUpdateDataNotFound)?;
+
+    let vaa = price_feeds_with_update_data
+        .batch_vaa
+        .update_data
+        .get(0) // One price feed has only a single VAA as proof.
+        .ok_or(RestError::UpdateDataNotFound)?;
+
+    Ok(Json(GetVaaCcipResponse {
+        data: format!("0x{}", hex::encode(vaa)),
+    }))
+}
+
 // This function implements the `/live` endpoint. It returns a `200` status code. This endpoint is
 // used by the Kubernetes liveness probe.
 pub async fn live() -> Result<impl IntoResponse, std::convert::Infallible> {
@@ -116,5 +206,12 @@ pub async fn live() -> Result<impl IntoResponse, std::convert::Infallible> {
 // This is the index page for the REST service. It will list all the available endpoints.
 // TODO: Dynamically generate this list if possible.
 pub async fn index() -> impl IntoResponse {
-    Json(["/live", "/latest_price_feeds", "/latest_vaas"])
+    Json([
+        "/live",
+        "/api/price_feed_ids",
+        "/api/latest_price_feeds?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&..(&verbose=true)(&binary=true)",
+        "/api/latest_vaas?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&...",
+        "/api/get_vaa?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>",
+        "/api/get_vaa_ccip?data=<0x<price_feed_id_32_bytes>+<publish_time_unix_timestamp_be_8_bytes>>",
+    ])
 }

+ 76 - 0
hermes/src/network/rpc/types.rs

@@ -0,0 +1,76 @@
+use {
+    crate::{
+        impl_deserialize_for_hex_string_wrapper,
+        store::{
+            proof::batch_vaa::PriceInfo,
+            UnixTimestamp,
+        },
+    },
+    base64::{
+        engine::general_purpose::STANDARD as base64_standard_engine,
+        Engine as _,
+    },
+    derive_more::{
+        Deref,
+        DerefMut,
+    },
+    pyth_sdk::{
+        Price,
+        PriceIdentifier,
+    },
+};
+
+
+/// PriceIdInput is a wrapper around a 32-byte hex string.
+/// that supports a flexible deserialization from a hex string.
+/// It supports both 0x-prefixed and non-prefixed hex strings,
+/// and also supports both lower and upper case characters.
+#[derive(Debug, Clone, Deref, DerefMut)]
+pub struct PriceIdInput([u8; 32]);
+// TODO: Use const generics instead of macro.
+impl_deserialize_for_hex_string_wrapper!(PriceIdInput, 32);
+
+impl From<PriceIdInput> for PriceIdentifier {
+    fn from(id: PriceIdInput) -> Self {
+        Self::new(*id)
+    }
+}
+
+type Base64String = String;
+
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct RpcPriceFeedMetadata {
+    pub emitter_chain:              u16,
+    pub attestation_time:           UnixTimestamp,
+    pub sequence_number:            u64,
+    pub price_service_receive_time: UnixTimestamp,
+}
+
+#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+pub struct RpcPriceFeed {
+    pub id:        PriceIdentifier,
+    pub price:     Price,
+    pub ema_price: Price,
+    pub metadata:  Option<RpcPriceFeedMetadata>,
+    /// Vaa binary represented in base64.
+    pub vaa:       Option<Base64String>,
+}
+
+impl RpcPriceFeed {
+    // TODO: Use a Encoding type to have None, Base64, and Hex variants instead of binary flag.
+    // TODO: Use a Verbosity type to define None, or Full instead of verbose flag.
+    pub fn from_price_info(price_info: PriceInfo, verbose: bool, binary: bool) -> Self {
+        Self {
+            id:        price_info.price_feed.id,
+            price:     price_info.price_feed.get_price_unchecked(),
+            ema_price: price_info.price_feed.get_ema_price_unchecked(),
+            metadata:  verbose.then_some(RpcPriceFeedMetadata {
+                emitter_chain:              price_info.emitter_chain,
+                attestation_time:           price_info.attestation_time,
+                sequence_number:            price_info.sequence_number,
+                price_service_receive_time: price_info.receive_time,
+            }),
+            vaa:       binary.then_some(base64_standard_engine.encode(price_info.vaa_bytes)),
+        }
+    }
+}

+ 308 - 0
hermes/src/network/rpc/ws.rs

@@ -0,0 +1,308 @@
+use {
+    super::types::{
+        PriceIdInput,
+        RpcPriceFeed,
+    },
+    crate::store::Store,
+    anyhow::Result,
+    axum::{
+        extract::{
+            ws::{
+                Message,
+                WebSocket,
+                WebSocketUpgrade,
+            },
+            State,
+        },
+        response::IntoResponse,
+    },
+    dashmap::DashMap,
+    futures::{
+        future::join_all,
+        stream::{
+            SplitSink,
+            SplitStream,
+        },
+        SinkExt,
+        StreamExt,
+    },
+    pyth_sdk::PriceIdentifier,
+    serde::{
+        Deserialize,
+        Serialize,
+    },
+    std::{
+        collections::HashMap,
+        sync::atomic::{
+            AtomicUsize,
+            Ordering,
+        },
+    },
+    tokio::sync::mpsc,
+};
+
+
+pub async fn ws_route_handler(
+    ws: WebSocketUpgrade,
+    State(state): State<super::State>,
+) -> impl IntoResponse {
+    ws.on_upgrade(|socket| websocket_handler(socket, state))
+}
+
+async fn websocket_handler(stream: WebSocket, state: super::State) {
+    let ws_state = state.ws.clone();
+    let id = ws_state.subscriber_counter.fetch_add(1, Ordering::SeqCst);
+
+    let (sender, receiver) = stream.split();
+
+    // TODO: Use a configured value for the buffer size or make it const static
+    // TODO: Use redis stream to source the updates instead of a channel
+    let (tx, rx) = mpsc::channel::<Vec<PriceIdentifier>>(1000);
+
+    ws_state.subscribers.insert(id, tx);
+
+    log::debug!("New websocket connection, assigning id: {}", id);
+
+    let mut subscriber = Subscriber::new(id, state.store.clone(), rx, receiver, sender);
+
+    subscriber.run().await;
+}
+
+pub type SubscriberId = usize;
+
+/// Subscriber is an actor that handles a single websocket connection.
+/// It listens to the store for updates and sends them to the client.
+pub struct Subscriber {
+    id:                      SubscriberId,
+    closed:                  bool,
+    store:                   Store,
+    update_rx:               mpsc::Receiver<Vec<PriceIdentifier>>,
+    receiver:                SplitStream<WebSocket>,
+    sender:                  SplitSink<WebSocket, Message>,
+    price_feeds_with_config: HashMap<PriceIdentifier, PriceFeedClientConfig>,
+}
+
+impl Subscriber {
+    pub fn new(
+        id: SubscriberId,
+        store: Store,
+        update_rx: mpsc::Receiver<Vec<PriceIdentifier>>,
+        receiver: SplitStream<WebSocket>,
+        sender: SplitSink<WebSocket, Message>,
+    ) -> Self {
+        Self {
+            id,
+            closed: false,
+            store,
+            update_rx,
+            receiver,
+            sender,
+            price_feeds_with_config: HashMap::new(),
+        }
+    }
+
+    pub async fn run(&mut self) {
+        while !self.closed {
+            if let Err(e) = self.handle_next().await {
+                log::error!("Subscriber {}: Error handling next message: {}", self.id, e);
+                break;
+            }
+        }
+    }
+
+    async fn handle_next(&mut self) -> Result<()> {
+        tokio::select! {
+            maybe_update_feed_ids = self.update_rx.recv() => {
+                let update_feed_ids = maybe_update_feed_ids.ok_or_else(|| {
+                    anyhow::anyhow!("Update channel closed.")
+                })?;
+                self.handle_price_feeds_update(update_feed_ids).await?;
+            },
+            maybe_message_or_err = self.receiver.next() => {
+                match maybe_message_or_err {
+                    None => {
+                        log::debug!("Subscriber {} closed connection unexpectedly.", self.id);
+                        self.closed = true;
+                        return Ok(());
+                    },
+                    Some(message_or_err) => self.handle_client_message(message_or_err?).await?
+                }
+            },
+        }
+
+        Ok(())
+    }
+
+    async fn handle_price_feeds_update(
+        &mut self,
+        price_feed_ids: Vec<PriceIdentifier>,
+    ) -> Result<()> {
+        for price_feed_id in price_feed_ids {
+            if let Some(config) = self.price_feeds_with_config.get(&price_feed_id) {
+                let price_feeds_with_update_data = self.store.get_price_feeds_with_update_data(
+                    vec![price_feed_id],
+                    crate::store::RequestTime::Latest,
+                )?;
+                let price_info = price_feeds_with_update_data
+                    .batch_vaa
+                    .price_infos
+                    .get(&price_feed_id)
+                    .ok_or_else(|| {
+                        anyhow::anyhow!("Price feed {} not found.", price_feed_id.to_string())
+                    })?
+                    .clone();
+                let price_feed =
+                    RpcPriceFeed::from_price_info(price_info, config.verbose, config.binary);
+                // Feed does not flush the message and will allow us
+                // to send multiple messages in a single flush.
+                self.sender
+                    .feed(Message::Text(serde_json::to_string(
+                        &ServerMessage::PriceUpdate { price_feed },
+                    )?))
+                    .await?;
+            }
+        }
+        self.sender.flush().await?;
+        Ok(())
+    }
+
+    async fn handle_client_message(&mut self, message: Message) -> Result<()> {
+        if let Message::Close(_) = message {
+            log::debug!("Subscriber {} closed connection", self.id);
+            self.closed = true;
+            return Ok(());
+        }
+
+        let maybe_client_message = match message {
+            Message::Text(text) => serde_json::from_str::<ClientMessage>(&text),
+            Message::Binary(data) => serde_json::from_slice::<ClientMessage>(&data),
+            _ => {
+                return Ok(());
+            }
+        };
+
+        match maybe_client_message {
+            Err(e) => {
+                self.sender
+                    .feed(
+                        serde_json::to_string(&ServerMessage::Response(
+                            ServerResponseMessage::Err {
+                                error: e.to_string(),
+                            },
+                        ))?
+                        .into(),
+                    )
+                    .await?;
+                return Ok(());
+            }
+            Ok(ClientMessage::Subscribe {
+                ids,
+                verbose,
+                binary,
+            }) => {
+                for id in ids {
+                    let price_id: PriceIdentifier = id.into();
+                    self.price_feeds_with_config
+                        .insert(price_id, PriceFeedClientConfig { verbose, binary });
+                }
+            }
+            Ok(ClientMessage::Unsubscribe { ids }) => {
+                for id in ids {
+                    let price_id: PriceIdentifier = id.into();
+                    self.price_feeds_with_config.remove(&price_id);
+                }
+            }
+        }
+
+        self.sender
+            .send(
+                serde_json::to_string(&ServerMessage::Response(ServerResponseMessage::Ok))?.into(),
+            )
+            .await?;
+
+        Ok(())
+    }
+}
+
+pub async fn dispatch_updates(update_feed_ids: Vec<PriceIdentifier>, state: super::State) {
+    let ws_state = state.ws.clone();
+    let update_feed_ids_ref = &update_feed_ids;
+
+    let closed_subscribers: Vec<Option<SubscriberId>> = join_all(
+        ws_state
+            .subscribers
+            .iter_mut()
+            .map(|subscriber| async move {
+                match subscriber.send(update_feed_ids_ref.clone()).await {
+                    Ok(_) => None,
+                    Err(e) => {
+                        log::debug!("Error sending update to subscriber: {}", e);
+                        Some(subscriber.key().clone())
+                    }
+                }
+            }),
+    )
+    .await;
+
+    // Remove closed_subscribers from ws_state
+    closed_subscribers.into_iter().for_each(|id| {
+        if let Some(id) = id {
+            ws_state.subscribers.remove(&id);
+        }
+    });
+}
+
+#[derive(Clone)]
+pub struct PriceFeedClientConfig {
+    verbose: bool,
+    binary:  bool,
+}
+
+pub struct WsState {
+    pub subscriber_counter: AtomicUsize,
+    pub subscribers:        DashMap<SubscriberId, mpsc::Sender<Vec<PriceIdentifier>>>,
+}
+
+impl WsState {
+    pub fn new() -> Self {
+        Self {
+            subscriber_counter: AtomicUsize::new(0),
+            subscribers:        DashMap::new(),
+        }
+    }
+}
+
+
+#[derive(Deserialize, Debug, Clone)]
+#[serde(tag = "type")]
+enum ClientMessage {
+    #[serde(rename = "subscribe")]
+    Subscribe {
+        ids:     Vec<PriceIdInput>,
+        #[serde(default)]
+        verbose: bool,
+        #[serde(default)]
+        binary:  bool,
+    },
+    #[serde(rename = "unsubscribe")]
+    Unsubscribe { ids: Vec<PriceIdInput> },
+}
+
+
+#[derive(Serialize, Debug, Clone)]
+#[serde(tag = "type")]
+enum ServerMessage {
+    #[serde(rename = "response")]
+    Response(ServerResponseMessage),
+    #[serde(rename = "price_update")]
+    PriceUpdate { price_feed: RpcPriceFeed },
+}
+
+#[derive(Serialize, Debug, Clone)]
+#[serde(tag = "status")]
+enum ServerResponseMessage {
+    #[serde(rename = "ok")]
+    Ok,
+    #[serde(rename = "error")]
+    Err { error: String },
+}

+ 23 - 34
hermes/src/store.rs

@@ -1,22 +1,15 @@
 use {
-    self::storage::Storage,
-    anyhow::Result,
-    pyth_sdk::{
-        PriceFeed,
-        PriceIdentifier,
-    },
-    serde::{
-        Deserialize,
-        Serialize,
-    },
-    std::{
-        collections::HashMap,
-        sync::Arc,
+    self::{
+        proof::batch_vaa::PriceInfosWithUpdateData,
+        storage::Storage,
     },
+    anyhow::Result,
+    pyth_sdk::PriceIdentifier,
+    std::sync::Arc,
 };
 
-mod proof;
-mod storage;
+pub mod proof;
+pub mod storage;
 
 pub type UnixTimestamp = u64;
 
@@ -30,19 +23,8 @@ pub enum Update {
     Vaa(Vec<u8>),
 }
 
-#[derive(Clone, Default, Serialize, Deserialize)]
-pub struct UpdateData {
-    pub batch_vaa: Vec<Vec<u8>>,
-}
-
-// TODO: A price feed might not have update data in all different
-// formats. For example, Batch VAA and Merkle updates will result
-// in different price feeds. We need to figure out how to handle
-// it properly.
-#[derive(Clone, Default)]
 pub struct PriceFeedsWithUpdateData {
-    pub price_feeds: HashMap<PriceIdentifier, PriceFeed>,
-    pub update_data: UpdateData,
+    pub batch_vaa: PriceInfosWithUpdateData,
 }
 
 pub type State = Arc<Box<dyn Storage>>;
@@ -61,8 +43,9 @@ impl Store {
         }
     }
 
-    // TODO: This should return the updated feeds so the subscribers can be notified.
-    pub fn store_update(&self, update: Update) -> Result<()> {
+    /// Stores the update data in the store and returns the price identifiers for which
+    /// price feeds were updated.
+    pub fn store_update(&self, update: Update) -> Result<Vec<PriceIdentifier>> {
         match update {
             Update::Vaa(vaa_bytes) => {
                 proof::batch_vaa::store_vaa_update(self.state.clone(), vaa_bytes)
@@ -75,10 +58,16 @@ impl Store {
         price_ids: Vec<PriceIdentifier>,
         request_time: RequestTime,
     ) -> Result<PriceFeedsWithUpdateData> {
-        proof::batch_vaa::get_price_feeds_with_update_data(
-            self.state.clone(),
-            price_ids,
-            request_time,
-        )
+        Ok(PriceFeedsWithUpdateData {
+            batch_vaa: proof::batch_vaa::get_price_infos_with_update_data(
+                self.state.clone(),
+                price_ids,
+                request_time,
+            )?,
+        })
+    }
+
+    pub fn get_price_feed_ids(&self) -> Vec<PriceIdentifier> {
+        proof::batch_vaa::get_price_feed_ids(self.state.clone())
     }
 }

+ 60 - 24
hermes/src/store/proof/batch_vaa.rs

@@ -4,11 +4,9 @@ use {
             Key,
             StorageData,
         },
-        PriceFeedsWithUpdateData,
         RequestTime,
         State,
         UnixTimestamp,
-        UpdateData,
     },
     anyhow::{
         anyhow,
@@ -24,9 +22,15 @@ use {
         PriceAttestation,
         PriceStatus,
     },
-    std::collections::{
-        HashMap,
-        HashSet,
+    std::{
+        collections::{
+            HashMap,
+            HashSet,
+        },
+        time::{
+            SystemTime,
+            UNIX_EPOCH,
+        },
     },
     wormhole::VAA,
 };
@@ -34,21 +38,34 @@ use {
 // TODO: We need to add more metadata to this struct.
 #[derive(Clone, Default, PartialEq, Debug)]
 pub struct PriceInfo {
-    pub price_feed:   PriceFeed,
-    pub vaa_bytes:    Vec<u8>,
-    pub publish_time: UnixTimestamp,
+    pub price_feed:       PriceFeed,
+    pub vaa_bytes:        Vec<u8>,
+    pub publish_time:     UnixTimestamp,
+    pub emitter_chain:    u16,
+    pub attestation_time: UnixTimestamp,
+    pub receive_time:     UnixTimestamp,
+    pub sequence_number:  u64,
 }
 
+#[derive(Clone, Default)]
+pub struct PriceInfosWithUpdateData {
+    pub price_infos: HashMap<PriceIdentifier, PriceInfo>,
+    pub update_data: Vec<Vec<u8>>,
+}
 
-pub fn store_vaa_update(state: State, vaa_bytes: Vec<u8>) -> Result<()> {
+pub fn store_vaa_update(state: State, vaa_bytes: Vec<u8>) -> Result<Vec<PriceIdentifier>> {
+    // FIXME: Vaa bytes might not be a valid Pyth BatchUpdate message nor originate from our emitter.
     // We should check that.
+    // FIXME: We receive multiple vaas for the same update (due to different signedVAAs). We need
+    // to drop them.
     let vaa = VAA::from_bytes(&vaa_bytes)?;
     let batch_price_attestation = BatchPriceAttestation::deserialize(vaa.payload.as_slice())
         .map_err(|_| anyhow!("Failed to deserialize VAA"))?;
 
+    let mut updated_price_feed_ids = Vec::new();
+
     for price_attestation in batch_price_attestation.price_attestations {
-        let price_feed = price_attestation_to_price_feed(price_attestation);
+        let price_feed = price_attestation_to_price_feed(price_attestation.clone());
 
         let publish_time = price_feed.get_price_unchecked().publish_time.try_into()?;
 
@@ -56,47 +73,66 @@ pub fn store_vaa_update(state: State, vaa_bytes: Vec<u8>) -> Result<()> {
             price_feed,
             vaa_bytes: vaa_bytes.clone(),
             publish_time,
+            emitter_chain: vaa.emitter_chain.into(),
+            attestation_time: price_attestation.attestation_time.try_into()?,
+            receive_time: SystemTime::now().duration_since(UNIX_EPOCH)?.as_secs(),
+            sequence_number: vaa.sequence,
         };
 
-        let key = Key::new(price_feed.id.to_bytes().to_vec());
+        let key = Key::BatchVaa(price_feed.id);
         state.insert(key, publish_time, StorageData::BatchVaa(price_info))?;
+
+        // FIXME: Only add price feed if it's newer
+        // or include whether it's newer or not in the vector
+        updated_price_feed_ids.push(price_feed.id);
     }
-    Ok(())
+
+    Ok(updated_price_feed_ids)
 }
 
 
-pub fn get_price_feeds_with_update_data(
+pub fn get_price_infos_with_update_data(
     state: State,
     price_ids: Vec<PriceIdentifier>,
     request_time: RequestTime,
-) -> Result<PriceFeedsWithUpdateData> {
-    let mut price_feeds = HashMap::new();
+) -> Result<PriceInfosWithUpdateData> {
+    let mut price_infos = HashMap::new();
     let mut vaas: HashSet<Vec<u8>> = HashSet::new();
     for price_id in price_ids {
-        let key = Key::new(price_id.to_bytes().to_vec());
+        let key = Key::BatchVaa(price_id);
         let maybe_data = state.get(key, request_time.clone())?;
 
         match maybe_data {
             Some(StorageData::BatchVaa(price_info)) => {
-                price_feeds.insert(price_info.price_feed.id, price_info.price_feed);
-                vaas.insert(price_info.vaa_bytes);
+                vaas.insert(price_info.vaa_bytes.clone());
+                price_infos.insert(price_id, price_info);
             }
             None => {
-                log::info!("No price feed found for price id: {:?}", price_id);
                 return Err(anyhow!("No price feed found for price id: {:?}", price_id));
             }
         }
     }
-    let update_data = UpdateData {
-        batch_vaa: vaas.into_iter().collect(),
-    };
-    Ok(PriceFeedsWithUpdateData {
-        price_feeds,
+    let update_data: Vec<Vec<u8>> = vaas.into_iter().collect();
+    Ok(PriceInfosWithUpdateData {
+        price_infos,
         update_data,
     })
 }
 
 
+pub fn get_price_feed_ids(state: State) -> Vec<PriceIdentifier> {
+    // Currently we have only one key type, so the filter_map is not strictly necessary,
+    // but we may add more key types in the future.
+    #[allow(clippy::unnecessary_filter_map)]
+    state
+        .keys()
+        .into_iter()
+        .filter_map(|key| match key {
+            Key::BatchVaa(price_id) => Some(price_id),
+        })
+        .collect()
+}
+
 /// Convert a PriceAttestation to a PriceFeed.
 ///
/// We cannot implement this function as a From/Into trait because none of these types are defined in this crate.

+ 5 - 11
hermes/src/store/storage.rs

@@ -5,10 +5,7 @@ use {
         UnixTimestamp,
     },
     anyhow::Result,
-    derive_more::{
-        Deref,
-        DerefMut,
-    },
+    pyth_sdk::PriceIdentifier,
 };
 
 pub mod local_cache;
@@ -18,13 +15,9 @@ pub enum StorageData {
     BatchVaa(PriceInfo),
 }
 
-#[derive(Clone, PartialEq, Eq, Debug, Hash, Deref, DerefMut)]
-pub struct Key(Vec<u8>);
-
-impl Key {
-    pub fn new(key: Vec<u8>) -> Self {
-        Self(key)
-    }
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum Key {
+    BatchVaa(PriceIdentifier),
 }
 
 /// This trait defines the interface for update data storage
@@ -37,4 +30,5 @@ impl Key {
 pub trait Storage: Sync + Send {
     fn insert(&self, key: Key, time: UnixTimestamp, value: StorageData) -> Result<()>;
     fn get(&self, key: Key, request_time: RequestTime) -> Result<Option<StorageData>>;
+    fn keys(&self) -> Vec<Key>;
 }

+ 4 - 0
hermes/src/store/storage/local_cache.rs

@@ -99,4 +99,8 @@ impl Storage for LocalCache {
             None => Ok(None),
         }
     }
+
+    fn keys(&self) -> Vec<Key> {
+        self.cache.iter().map(|entry| entry.key().clone()).collect()
+    }
 }

+ 2 - 0
message_buffer/.dockerignore

@@ -0,0 +1,2 @@
+target
+node_modules

+ 0 - 0
accumulator_updater/.gitignore → message_buffer/.gitignore


+ 0 - 0
accumulator_updater/.prettierignore → message_buffer/.prettierignore


+ 1 - 1
accumulator_updater/Anchor.toml → message_buffer/Anchor.toml

@@ -2,7 +2,7 @@
 seeds = true
 skip-lint = false
 [programs.localnet]
-accumulator_updater = "Fg6PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"
+message_buffer = "Vbmv1jt4vyuqBZcpYPpnVhrqVe5e6ZPb6JxDcffRHUM"
 mock_cpi_caller = "Dg5PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS"
 
 [registry]

+ 11 - 9
accumulator_updater/Cargo.lock → message_buffer/Cargo.lock

@@ -2,14 +2,6 @@
 # It is not intended for manual editing.
 version = 3
 
-[[package]]
-name = "accumulator_updater"
-version = "0.1.0"
-dependencies = [
- "anchor-lang",
- "bytemuck",
-]
-
 [[package]]
 name = "ahash"
 version = "0.7.6"
@@ -893,12 +885,22 @@ dependencies = [
  "autocfg",
 ]
 
+[[package]]
+name = "message_buffer"
+version = "0.1.0"
+dependencies = [
+ "anchor-lang",
+ "bytemuck",
+ "byteorder",
+]
+
 [[package]]
 name = "mock-cpi-caller"
 version = "0.1.0"
 dependencies = [
- "accumulator_updater",
  "anchor-lang",
+ "bytemuck",
+ "message_buffer",
 ]
 
 [[package]]

+ 0 - 0
accumulator_updater/Cargo.toml → message_buffer/Cargo.toml


+ 36 - 0
message_buffer/Dockerfile

@@ -0,0 +1,36 @@
+ARG RUST_VERSION=1.69
+FROM rust:${RUST_VERSION}
+
+RUN apt-get update && apt-get install -qq nodejs npm curl
+
+RUN npm install --global yarn
+
+RUN rustup default ${RUST_VERSION}
+
+ARG SOLANA_VERSION=v1.14.11
+RUN curl -sSfL https://release.solana.com/${SOLANA_VERSION}/install > install_solana.sh
+RUN sh install_solana.sh
+ENV PATH="/root/.local/share/solana/install/active_release/bin:$PATH"
+
+RUN --mount=type=cache,target=target cargo install --git https://github.com/coral-xyz/anchor avm --locked --force --target-dir target
+
+ARG ANCHOR_VERSION=0.27.0
+RUN --mount=type=cache,target=target CARGO_TARGET_DIR=target avm install ${ANCHOR_VERSION} && avm use ${ANCHOR_VERSION}
+
+# Trigger Anchor's BPF tools download for caching
+RUN anchor init decoy-crate
+RUN cd decoy-crate && anchor build
+RUN rm -rf decoy-crate
+
+WORKDIR message_buffer
+
+# layer-cache cargo deps
+ADD Cargo.toml Cargo.lock ./
+ADD programs/message_buffer/Cargo.toml programs/message_buffer/
+RUN mkdir -p programs/message_buffer/src && touch programs/message_buffer/src/lib.rs
+RUN cargo fetch --locked
+
+ADD . .
+
+RUN --mount=type=cache,target=target anchor build -p message_buffer && cp -r target target_tmp
+RUN rm -rf target && mv target_tmp target

+ 20 - 3
accumulator_updater/NOTES.md → message_buffer/NOTES.md

@@ -1,12 +1,29 @@
+## Testing
+
+- run `anchor test` if no special customization for the test-validator is needed
+- `anchor test` will run `solana-test-validator` will all features activated.
+  One of the features activated on the test-validator which is not currently activated on pythnet is
+
+```
+"GDH5TVdbTPUpRnXaRyQqiKUa7uZAbZ28Q2N9bhbKoMLm  loosen cpi size restrictions #26641"
+```
+
+In order to run `solana-test-validator` with this feature deactivated, do the following:
+
+1. open a terminal and run `solana-test-validator --reset --deactivate-feature GDH5TVdbTPUpRnXaRyQqiKUa7uZAbZ28Q2N9bhbKoMLm`
+2. open a separate terminal and run `anchor build` in the `accumulator_updater` dir
+3. get the pubkeys of the program keypairs `solana address -k accumulator_updater/target/deploy/<program_keypair>.json`
+4. change the pubkeys in the `declare_id!` macro to these keypairs
+5. update `Anchor.toml` `[programs.localnet]` programIds as well
+6. run `anchor test --skip-local-validator`
+
 ## Questions
 
 1.  Do we need to support multiple Whitelists?
 2.  Support multiple accumulators
     1.  should each accumulator store a different type of data?
         => implications for length of merkle proof
-    2.
-3.  authority?
-4.  how to know what went into the `AccumulatorAccount` (for deserializing/proofs)
+3.  how to know what went into the `AccumulatorAccount` (for deserializing/proofs)
     1.  Header?
 
 ## To Do

+ 0 - 0
accumulator_updater/migrations/deploy.ts → message_buffer/migrations/deploy.ts


+ 0 - 0
accumulator_updater/package.json → message_buffer/package.json


+ 24 - 0
message_buffer/programs/message_buffer/Cargo.toml

@@ -0,0 +1,24 @@
+[package]
+name = "message_buffer"
+version = "0.1.0"
+description = "Message Buffer Pythnet Program"
+edition = "2021"
+
+[lib]
+crate-type = ["cdylib", "lib"]
+name = "message_buffer"
+
+[features]
+no-entrypoint = []
+no-idl = []
+no-log-ix-name = []
+cpi = ["no-entrypoint"]
+default = []
+
+[dependencies]
+anchor-lang = { version = "0.27.0" }
+# needed for the new #[account(zero_copy)] in anchor 0.27.0
+bytemuck = { version = "1.4.0", features = ["derive", "min_const_generics"]}
+
+[dev-dependencies]
+byteorder = "1.4.3"

+ 0 - 0
accumulator_updater/programs/accumulator_updater/Xargo.toml → message_buffer/programs/message_buffer/Xargo.toml


+ 155 - 0
message_buffer/programs/message_buffer/src/instructions/create_buffer.rs

@@ -0,0 +1,155 @@
+use {
+    crate::{
+        instructions::is_uninitialized_account,
+        state::*,
+        MessageBufferError,
+        MESSAGE,
+    },
+    anchor_lang::{
+        prelude::*,
+        solana_program::entrypoint::MAX_PERMITTED_DATA_INCREASE,
+        system_program::{
+            self,
+            Allocate,
+            Assign,
+            Transfer,
+        },
+    },
+};
+
+pub fn create_buffer<'info>(
+    ctx: Context<'_, '_, '_, 'info, CreateBuffer<'info>>,
+    allowed_program_auth: Pubkey,
+    base_account_key: Pubkey,
+    target_size: u32,
+) -> Result<()> {
+    let buffer_account = ctx
+        .remaining_accounts
+        .first()
+        .ok_or(MessageBufferError::MessageBufferNotProvided)?;
+
+    ctx.accounts
+        .whitelist
+        .is_allowed_program_auth(&allowed_program_auth)?;
+
+    require_gte!(
+        target_size,
+        MessageBuffer::HEADER_LEN as u32,
+        MessageBufferError::MessageBufferTooSmall
+    );
+
+    require_gte!(
+        MAX_PERMITTED_DATA_INCREASE,
+        target_size as usize,
+        MessageBufferError::TargetSizeDeltaExceeded
+    );
+    if is_uninitialized_account(buffer_account) {
+        let (pda, bump) = Pubkey::find_program_address(
+            &[
+                allowed_program_auth.as_ref(),
+                MESSAGE.as_bytes(),
+                base_account_key.as_ref(),
+            ],
+            &crate::ID,
+        );
+        require_keys_eq!(buffer_account.key(), pda);
+        let signer_seeds = [
+            allowed_program_auth.as_ref(),
+            MESSAGE.as_bytes(),
+            base_account_key.as_ref(),
+            &[bump],
+        ];
+
+        CreateBuffer::create_account(
+            buffer_account,
+            target_size as usize,
+            &ctx.accounts.admin,
+            &[signer_seeds.as_slice()],
+            &ctx.accounts.system_program,
+        )?;
+
+        let loader =
+            AccountLoader::<MessageBuffer>::try_from_unchecked(&crate::ID, buffer_account)?;
+        {
+            let mut message_buffer = loader.load_init()?;
+            *message_buffer = MessageBuffer::new(bump);
+        }
+        loader.exit(&crate::ID)?;
+    } else {
+        // FIXME: change this to be emit!(Event)
+        msg!("Buffer account already initialized");
+    }
+
+    Ok(())
+}
+
+
+#[derive(Accounts)]
+pub struct CreateBuffer<'info> {
+    #[account(
+    seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+    bump = whitelist.bump,
+    has_one = admin,
+    )]
+    pub whitelist: Account<'info, Whitelist>,
+
+    // Also pays for account creation
+    #[account(mut)]
+    pub admin: Signer<'info>,
+
+    pub system_program: Program<'info, System>,
+    // remaining_accounts:  - [AccumulatorInput PDA]
+}
+
+
+impl<'info> CreateBuffer<'info> {
+    /// Manually invoke transfer, allocate & assign ixs to create an account
+    /// to handle the situation where an account already has lamports,
+    /// since system_program::create_account will fail in that case
+    fn create_account<'a>(
+        new_account_info: &AccountInfo<'a>,
+        space: usize,
+        payer: &Signer<'a>,
+        seeds: &[&[&[u8]]],
+        system_program: &AccountInfo<'a>,
+    ) -> Result<()> {
+        let target_rent = Rent::get()?.minimum_balance(space);
+        if new_account_info.lamports() < target_rent {
+            system_program::transfer(
+                CpiContext::new_with_signer(
+                    system_program.to_account_info(),
+                    Transfer {
+                        from: payer.to_account_info(),
+                        to:   new_account_info.to_account_info(),
+                    },
+                    seeds,
+                ),
+                target_rent - new_account_info.lamports(),
+            )?;
+        };
+
+        system_program::allocate(
+            CpiContext::new_with_signer(
+                system_program.to_account_info(),
+                Allocate {
+                    account_to_allocate: new_account_info.to_account_info(),
+                },
+                seeds,
+            ),
+            space.try_into().unwrap(),
+        )?;
+
+        system_program::assign(
+            CpiContext::new_with_signer(
+                system_program.to_account_info(),
+                Assign {
+                    account_to_assign: new_account_info.to_account_info(),
+                },
+                seeds,
+            ),
+            &crate::ID,
+        )?;
+
+        Ok(())
+    }
+}

+ 64 - 0
message_buffer/programs/message_buffer/src/instructions/delete_buffer.rs

@@ -0,0 +1,64 @@
+use {
+    crate::{
+        state::*,
+        MessageBufferError,
+        MESSAGE,
+    },
+    anchor_lang::prelude::*,
+};
+
+pub fn delete_buffer<'info>(
+    ctx: Context<'_, '_, '_, 'info, DeleteBuffer<'info>>,
+    allowed_program_auth: Pubkey,
+    base_account_key: Pubkey,
+    bump: u8,
+) -> Result<()> {
+    let message_buffer_account_info = ctx
+        .remaining_accounts
+        .first()
+        .ok_or(MessageBufferError::MessageBufferNotProvided)?;
+
+    ctx.accounts
+        .whitelist
+        .is_allowed_program_auth(&allowed_program_auth)?;
+
+    MessageBuffer::check_discriminator(message_buffer_account_info)?;
+
+    let expected_key = Pubkey::create_program_address(
+        &[
+            allowed_program_auth.as_ref(),
+            MESSAGE.as_bytes(),
+            base_account_key.as_ref(),
+            &[bump],
+        ],
+        &crate::ID,
+    )
+    .map_err(|_| MessageBufferError::InvalidPDA)?;
+
+    require_keys_eq!(
+        message_buffer_account_info.key(),
+        expected_key,
+        MessageBufferError::InvalidPDA
+    );
+    let loader = AccountLoader::<MessageBuffer>::try_from_unchecked(
+        &crate::ID,
+        message_buffer_account_info,
+    )?;
+    loader.close(ctx.accounts.admin.to_account_info())?;
+    Ok(())
+}
+
+#[derive(Accounts)]
+pub struct DeleteBuffer<'info> {
+    #[account(
+        seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+        bump = whitelist.bump,
+        has_one = admin,
+    )]
+    pub whitelist: Account<'info, Whitelist>,
+
+    // Also the recipient of the lamports from closing the buffer account
+    #[account(mut)]
+    pub admin: Signer<'info>,
+    // remaining_accounts:  - [AccumulatorInput PDA]
+}

+ 27 - 0
message_buffer/programs/message_buffer/src/instructions/mod.rs

@@ -0,0 +1,27 @@
+use anchor_lang::{
+    prelude::*,
+    system_program,
+};
+pub use {
+    create_buffer::*,
+    delete_buffer::*,
+    put_all::*,
+    resize_buffer::*,
+};
+
+
+mod create_buffer;
+mod delete_buffer;
+mod put_all;
+mod resize_buffer;
+
+// String constants for deriving PDAs.
+//
+// An authorized program's message buffer will have PDA seeds
+// [allowed_program_auth, MESSAGE, base_account_key], where `allowed_program_auth`
+// is the whitelisted pubkey who authorized this call.
+pub const MESSAGE: &str = "message";
+
+pub fn is_uninitialized_account(ai: &AccountInfo) -> bool {
+    ai.data_is_empty() && ai.owner == &system_program::ID
+}

+ 54 - 0
message_buffer/programs/message_buffer/src/instructions/put_all.rs

@@ -0,0 +1,54 @@
+use {
+    crate::{
+        state::*,
+        MessageBufferError,
+    },
+    anchor_lang::prelude::*,
+    std::mem,
+};
+
+
+pub fn put_all<'info>(
+    ctx: Context<'_, '_, '_, 'info, PutAll<'info>>,
+    base_account_key: Pubkey,
+    messages: Vec<Vec<u8>>,
+) -> Result<()> {
+    let cpi_caller_auth = ctx.accounts.whitelist_verifier.is_allowed()?;
+    let message_buffer_account_info = ctx
+        .remaining_accounts
+        .first()
+        .ok_or(MessageBufferError::MessageBufferNotProvided)?;
+
+    MessageBuffer::check_discriminator(message_buffer_account_info)?;
+
+    let account_data = &mut message_buffer_account_info.try_borrow_mut_data()?;
+    let header_end_index = mem::size_of::<MessageBuffer>() + 8;
+
+    let (header_bytes, body_bytes) = account_data.split_at_mut(header_end_index);
+
+    let message_buffer: &mut MessageBuffer = bytemuck::from_bytes_mut(&mut header_bytes[8..]);
+
+    message_buffer.validate(
+        message_buffer_account_info.key(),
+        cpi_caller_auth,
+        base_account_key,
+    )?;
+
+    message_buffer.refresh_header();
+
+    let (num_msgs, num_bytes) = message_buffer.put_all_in_buffer(body_bytes, &messages);
+
+    if num_msgs != messages.len() {
+        // FIXME: make this into an emit! event
+        msg!("unable to fit all messages in accumulator input account. Wrote {}/{} messages and {} bytes", num_msgs, messages.len(), num_bytes);
+    }
+
+    Ok(())
+}
+
+#[derive(Accounts)]
+#[instruction( base_account_key: Pubkey)]
+pub struct PutAll<'info> {
+    pub whitelist_verifier: WhitelistVerifier<'info>,
+    // remaining_accounts:  - [AccumulatorInput PDA]
+}

+ 111 - 0
message_buffer/programs/message_buffer/src/instructions/resize_buffer.rs

@@ -0,0 +1,111 @@
+use {
+    crate::{
+        state::*,
+        MessageBufferError,
+        MESSAGE,
+    },
+    anchor_lang::{
+        prelude::*,
+        solana_program::entrypoint::MAX_PERMITTED_DATA_INCREASE,
+        system_program::{
+            self,
+            Transfer,
+        },
+    },
+};
+
+pub fn resize_buffer<'info>(
+    ctx: Context<'_, '_, '_, 'info, ResizeBuffer<'info>>,
+    allowed_program_auth: Pubkey,
+    base_account_key: Pubkey,
+    buffer_bump: u8,
+    target_size: u32,
+) -> Result<()> {
+    let message_buffer_account_info = ctx
+        .remaining_accounts
+        .first()
+        .ok_or(MessageBufferError::MessageBufferNotProvided)?;
+
+    ctx.accounts
+        .whitelist
+        .is_allowed_program_auth(&allowed_program_auth)?;
+    MessageBuffer::check_discriminator(message_buffer_account_info)?;
+
+    require_gte!(
+        target_size,
+        MessageBuffer::HEADER_LEN as u32,
+        MessageBufferError::MessageBufferTooSmall
+    );
+    let target_size = target_size as usize;
+    let target_size_delta = target_size.saturating_sub(message_buffer_account_info.data_len());
+    require_gte!(
+        MAX_PERMITTED_DATA_INCREASE,
+        target_size_delta,
+        MessageBufferError::TargetSizeDeltaExceeded
+    );
+
+    let expected_key = Pubkey::create_program_address(
+        &[
+            allowed_program_auth.as_ref(),
+            MESSAGE.as_bytes(),
+            base_account_key.as_ref(),
+            &[buffer_bump],
+        ],
+        &crate::ID,
+    )
+    .map_err(|_| MessageBufferError::InvalidPDA)?;
+
+    require_keys_eq!(
+        message_buffer_account_info.key(),
+        expected_key,
+        MessageBufferError::InvalidPDA
+    );
+
+    // allow for delta == 0 in case Rent requirements have changed
+    // and additional lamports need to be transferred.
+    // the realloc step will be a no-op in this case.
+    if target_size_delta >= 0 {
+        let target_rent = Rent::get()?.minimum_balance(target_size);
+        if message_buffer_account_info.lamports() < target_rent {
+            system_program::transfer(
+                CpiContext::new(
+                    ctx.accounts.system_program.to_account_info(),
+                    Transfer {
+                        from: ctx.accounts.admin.to_account_info(),
+                        to:   message_buffer_account_info.to_account_info(),
+                    },
+                ),
+                target_rent - message_buffer_account_info.lamports(),
+            )?;
+        }
+        message_buffer_account_info
+            .realloc(target_size, false)
+            .map_err(|_| MessageBufferError::ReallocFailed)?;
+    } else {
+        // Not transferring excess lamports back to admin.
+        // Account will retain more lamports than necessary.
+        message_buffer_account_info.realloc(target_size, false)?;
+    }
+    Ok(())
+}
+
+#[derive(Accounts)]
+#[instruction(
+    allowed_program_auth: Pubkey, base_account_key: Pubkey,
+    buffer_bump: u8, target_size: u32
+)]
+pub struct ResizeBuffer<'info> {
+    #[account(
+        seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+        bump = whitelist.bump,
+        has_one = admin,
+    )]
+    pub whitelist: Account<'info, Whitelist>,
+
+    // Also pays for account creation
+    #[account(mut)]
+    pub admin: Signer<'info>,
+
+    pub system_program: Program<'info, System>,
+    // remaining_accounts:  - [AccumulatorInput PDA]
+}

+ 216 - 0
message_buffer/programs/message_buffer/src/lib.rs

@@ -0,0 +1,216 @@
+mod instructions;
+mod macros;
+mod state;
+
+
+use {
+    anchor_lang::prelude::*,
+    instructions::*,
+    state::*,
+};
+
+declare_id!("Vbmv1jt4vyuqBZcpYPpnVhrqVe5e6ZPb6JxDcffRHUM");
+
+#[program]
+pub mod message_buffer {
+    use super::*;
+
+
+    /// Initializes the whitelist and sets it's admin to the provided pubkey
+    /// Once initialized, the authority must sign all further changes to the whitelist.
+    pub fn initialize(ctx: Context<Initialize>, admin: Pubkey) -> Result<()> {
+        require_keys_neq!(admin, Pubkey::default());
+        let whitelist = &mut ctx.accounts.whitelist;
+        whitelist.bump = *ctx.bumps.get("whitelist").unwrap();
+        whitelist.admin = admin;
+        Ok(())
+    }
+
+    /// Sets the programs that are allowed to invoke this program through CPI
+    ///
+    /// * `allowed_programs` - Entire list of programs that are allowed to
+    ///                       invoke this program through CPI
+    pub fn set_allowed_programs(
+        ctx: Context<UpdateWhitelist>,
+        allowed_programs: Vec<Pubkey>,
+    ) -> Result<()> {
+        let whitelist = &mut ctx.accounts.whitelist;
+        whitelist.validate_programs(&allowed_programs)?;
+        whitelist.allowed_programs = allowed_programs;
+        Ok(())
+    }
+
+    /// Sets the new admin for the whitelist
+    pub fn update_whitelist_admin(ctx: Context<UpdateWhitelist>, new_admin: Pubkey) -> Result<()> {
+        let whitelist = &mut ctx.accounts.whitelist;
+        whitelist.validate_new_admin(new_admin)?;
+        whitelist.admin = new_admin;
+        Ok(())
+    }
+
+
+    /// Put messages into the Accumulator. All messages put for the same
+    /// `base_account_key` go into the same buffer PDA. The PDA's address is
+    /// `[allowed_program_auth, MESSAGE, base_account_key]`, where `allowed_program_auth`
+    /// is the whitelisted pubkey who authorized this call.
+    ///
+    /// * `base_account_key`    - Pubkey of the original account the
+    ///                           `MessageBuffer` is derived from
+    ///                           (e.g. pyth price account)
+    /// * `messages`            - Vec of vec of bytes, each representing a message
+    ///                           to be hashed and accumulated
+    ///
+    /// This ix will write as many of the messages up to the length
+    /// of the `accumulator_input.data`.
+    /// If `accumulator_input.data.len() < messages.map(|x| x.len()).sum()`
+    /// then the remaining messages will be ignored.
+    ///
+    /// The current implementation assumes that each invocation of this
+    /// ix is independent of any previous invocations. It will overwrite
+    /// any existing contents.
+    ///
+    /// TODO:
+    ///     - handle updates ("paging/batches of messages")
+    ///
+    pub fn put_all<'info>(
+        ctx: Context<'_, '_, '_, 'info, PutAll<'info>>,
+        base_account_key: Pubkey,
+        messages: Vec<Vec<u8>>,
+    ) -> Result<()> {
+        instructions::put_all(ctx, base_account_key, messages)
+    }
+
+
+    /// Initializes the buffer account with the `target_size`
+    ///
+    /// *`allowed_program_auth` - The whitelisted pubkey representing an
+    ///                            allowed program. Used as one of the seeds
+    ///                            for deriving the `MessageBuffer` PDA.
+    /// * `base_account_key`    - Pubkey of the original account the
+    ///                           `MessageBuffer` is derived from
+    ///                           (e.g. pyth price account)
+    /// *`target_size`          - Initial size to allocate for the
+    ///                           `MessageBuffer` PDA. `target_size`
+    ///                           must be >= HEADER_LEN && <= 10240
+    pub fn create_buffer<'info>(
+        ctx: Context<'_, '_, '_, 'info, CreateBuffer<'info>>,
+        allowed_program_auth: Pubkey,
+        base_account_key: Pubkey,
+        target_size: u32,
+    ) -> Result<()> {
+        instructions::create_buffer(ctx, allowed_program_auth, base_account_key, target_size)
+    }
+
+    /// Resizes the buffer account to the `target_size`
+    ///
+    /// *`allowed_program_auth` - The whitelisted pubkey representing an
+    ///                            allowed program. Used as one of the seeds
+    ///                            for deriving the `MessageBuffer` PDA.
+    /// * `base_account_key`    - Pubkey of the original account the
+    ///                           `MessageBuffer` is derived from
+    ///                           (e.g. pyth price account)
+    /// *`target_size`          -  Size to re-allocate for the
+    ///                           `MessageBuffer` PDA. If increasing the size,
+    ///                           max delta of current_size & target_size is 10240
+    /// *`buffer_bump`          -  Bump seed for the `MessageBuffer` PDA
+    pub fn resize_buffer<'info>(
+        ctx: Context<'_, '_, '_, 'info, ResizeBuffer<'info>>,
+        allowed_program_auth: Pubkey,
+        base_account_key: Pubkey,
+        buffer_bump: u8,
+        target_size: u32,
+    ) -> Result<()> {
+        instructions::resize_buffer(
+            ctx,
+            allowed_program_auth,
+            base_account_key,
+            buffer_bump,
+            target_size,
+        )
+    }
+
+    /// Closes the buffer account and transfers the remaining lamports to the
+    /// `admin` account
+    ///
+    /// *`allowed_program_auth` - The whitelisted pubkey representing an
+    ///                            allowed program. Used as one of the seeds
+    ///                            for deriving the `MessageBuffer` PDA.
+    /// * `base_account_key`    - Pubkey of the original account the
+    ///                           `MessageBuffer` is derived from
+    ///                           (e.g. pyth price account)
+    /// *`buffer_bump`          -  Bump seed for the `MessageBuffer` PDA
+    pub fn delete_buffer<'info>(
+        ctx: Context<'_, '_, '_, 'info, DeleteBuffer<'info>>,
+        allowed_program_auth: Pubkey,
+        base_account_key: Pubkey,
+        buffer_bump: u8,
+    ) -> Result<()> {
+        instructions::delete_buffer(ctx, allowed_program_auth, base_account_key, buffer_bump)
+    }
+}
+
+#[derive(Accounts)]
+pub struct Initialize<'info> {
+    #[account(mut)]
+    pub payer:          Signer<'info>,
+    #[account(
+        init,
+        payer = payer,
+        seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+        bump,
+        space = 8 + Whitelist::INIT_SPACE
+    )]
+    pub whitelist:      Account<'info, Whitelist>,
+    pub system_program: Program<'info, System>,
+}
+
+
+#[derive(Accounts)]
+pub struct UpdateWhitelist<'info> {
+    #[account(mut)]
+    pub payer: Signer<'info>,
+
+    pub admin:     Signer<'info>,
+    #[account(
+        mut,
+        seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+        bump = whitelist.bump,
+        has_one = admin
+    )]
+    pub whitelist: Account<'info, Whitelist>,
+}
+
+
+#[error_code]
+pub enum MessageBufferError {
+    #[msg("CPI Caller not allowed")]
+    CallerNotAllowed,
+    #[msg("Whitelist already contains program")]
+    DuplicateAllowedProgram,
+    #[msg("Conversion Error")]
+    ConversionError,
+    #[msg("Serialization Error")]
+    SerializeError,
+    #[msg("Whitelist admin required on initialization")]
+    WhitelistAdminRequired,
+    #[msg("Invalid allowed program")]
+    InvalidAllowedProgram,
+    #[msg("Maximum number of allowed programs exceeded")]
+    MaximumAllowedProgramsExceeded,
+    #[msg("Invalid PDA")]
+    InvalidPDA,
+    #[msg("Update data exceeds current length")]
+    CurrentDataLengthExceeded,
+    #[msg("Message Buffer not provided")]
+    MessageBufferNotProvided,
+    #[msg("Message Buffer is not sufficiently large")]
+    MessageBufferTooSmall,
+    #[msg("Fund Bump not found")]
+    FundBumpNotFound,
+    #[msg("Reallocation failed")]
+    ReallocFailed,
+    #[msg("Target size too large for reallocation/initialization. Max delta is 10240")]
+    TargetSizeDeltaExceeded,
+    #[msg("MessageBuffer Uninitialized")]
+    MessageBufferUninitialized,
+}

+ 11 - 0
message_buffer/programs/message_buffer/src/macros.rs

@@ -0,0 +1,11 @@
+#[macro_export]
+macro_rules! accumulator_input_seeds {
+    ($accumulator_input:expr, $cpi_caller_pid:expr, $base_account:expr) => {
+        &[
+            $cpi_caller_pid.as_ref(),
+            b"message".as_ref(),
+            $base_account.as_ref(),
+            &[$accumulator_input.bump],
+        ]
+    };
+}

+ 407 - 0
message_buffer/programs/message_buffer/src/state/message_buffer.rs

@@ -0,0 +1,407 @@
+use {
+    crate::{
+        accumulator_input_seeds,
+        instructions,
+        MessageBufferError,
+    },
+    anchor_lang::{
+        prelude::*,
+        Discriminator,
+    },
+};
+
+/// A MessageBuffer will have the following structure
+/// ```ignore
+/// struct MessageBuffer {
+///     header: BufferHeader,
+///     messages: [u8; accountInfo.data.len - header.header_len]
+/// }
+/// ```
+///
+/// where `MESSAGES_LEN` can be dynamic. There is actual
+/// no messages field in the `MessageBuffer` struct definition due to messages
+/// needing to be a dynamic length while supporting zero_copy
+/// at the same time.
+///
+/// A `MessageBuffer` AccountInfo.data will look like:
+/// [  <discrimintator>, <buffer_header>, <messages> ]
+///         (0..8)       (8..header_len) (header_len...accountInfo.data.len)
+///
+///<br>
+///
+/// NOTE: The defined fields are read as *Little Endian*. The actual messages
+/// are read as *Big Endian*. The MessageBuffer fields are only ever read
+/// by the Pythnet validator & Hermes so don't need to be in Big Endian
+/// for cross-platform compatibility.
+#[account(zero_copy)]
+#[derive(InitSpace, Debug)]
+pub struct MessageBuffer {
+    /* header */
+    pub bump:        u8, // 1
+    pub version:     u8, // 1
+    // byte offset of accounts where data starts
+    // e.g. account_info.data[offset + header_len]
+    pub header_len:  u16, // 2
+    /// endpoints of every message.
+    /// ex: [10, 14]
+    /// => msg1 = account_info.data[(header_len + 0)..(header_len + 10)]
+    /// => msg2 = account_info.data[(header_len + 10)..(header_len + 14)]
+    pub end_offsets: [u16; 255], // 510
+
+                          /* messages */
+                          //  not defined in struct since needs to support variable length
+                          //  and work with zero_copy
+                          // pub messages: [u8; accountInfo.data.len - header_len]
+}
+
+
+impl MessageBuffer {
+    // HEADER_LEN allows for append-only forward-compatibility for the header.
+    // this is the number of bytes from the beginning of the account_info.data
+    // to the start of the `AccumulatorInput` data.
+    pub const HEADER_LEN: u16 = 8 + MessageBuffer::INIT_SPACE as u16;
+
+    pub const CURRENT_VERSION: u8 = 1;
+
+    pub fn new(bump: u8) -> Self {
+        Self {
+            bump,
+            header_len: Self::HEADER_LEN,
+            version: Self::CURRENT_VERSION,
+            end_offsets: [0u16; u8::MAX as usize],
+        }
+    }
+
+    pub fn refresh_header(&mut self) {
+        self.header_len = Self::HEADER_LEN;
+        self.version = Self::CURRENT_VERSION;
+        self.end_offsets = [0u16; u8::MAX as usize];
+    }
+
+    /// `put_all` writes all the messages to the `AccumulatorInput` account
+    /// and updates the `end_offsets` array.
+    ///
+    /// TODO: the first byte of destination is the first non-header byte of the
+    /// message buffer account
+    ///
+    /// Returns tuple of the number of messages written and the end_offset
+    /// of the last message
+    ///
+    // TODO: add a end_offsets index parameter for "continuation"
+    // TODO: test max size of parameters that can be passed into CPI call
+    pub fn put_all_in_buffer(
+        &mut self,
+        destination: &mut [u8],
+        values: &Vec<Vec<u8>>,
+    ) -> (usize, u16) {
+        let mut offset = 0u16;
+
+        for (i, v) in values.iter().enumerate() {
+            let start = offset;
+            let len = u16::try_from(v.len());
+            if len.is_err() {
+                msg!("len err");
+                return (i, start);
+            }
+            let end = offset.checked_add(len.unwrap());
+            if end.is_none() {
+                return (i, start);
+            }
+            let end = end.unwrap();
+            if end > destination.len() as u16 {
+                return (i, start);
+            }
+            self.end_offsets[i] = end;
+            destination[(start as usize)..(end as usize)].copy_from_slice(v);
+            offset = end
+        }
+        (values.len(), offset)
+    }
+
+    fn derive_pda(&self, cpi_caller: Pubkey, base_account: Pubkey) -> Result<Pubkey> {
+        let res = Pubkey::create_program_address(
+            accumulator_input_seeds!(self, cpi_caller, base_account),
+            &crate::ID,
+        )
+        .map_err(|_| MessageBufferError::InvalidPDA)?;
+        Ok(res)
+    }
+
+    pub fn validate(&self, key: Pubkey, cpi_caller: Pubkey, base_account: Pubkey) -> Result<()> {
+        let expected_key = self.derive_pda(cpi_caller, base_account)?;
+        require_keys_eq!(expected_key, key);
+        Ok(())
+    }
+
+    /// Verify message buffer account is initialized and has the correct discriminator.
+    ///
+    /// Note: manually checking because using anchor's `AccountLoader.load()`
+    /// will panic since the `AccountInfo.data_len()` will not match the
+    /// size of the `MessageBuffer` since the `MessageBuffer` struct does not
+    /// include the messages.
+    pub fn check_discriminator(message_buffer_account_info: &AccountInfo) -> Result<()> {
+        if instructions::is_uninitialized_account(message_buffer_account_info) {
+            return err!(MessageBufferError::MessageBufferUninitialized);
+        }
+        let data = message_buffer_account_info.try_borrow_data()?;
+        if data.len() < MessageBuffer::discriminator().len() {
+            return Err(ErrorCode::AccountDiscriminatorNotFound.into());
+        }
+
+        let disc_bytes = &data[0..8];
+        if disc_bytes != &MessageBuffer::discriminator() {
+            return Err(ErrorCode::AccountDiscriminatorMismatch.into());
+        }
+        Ok(())
+    }
+}
+
+#[cfg(test)]
+mod test {
+    use {
+        super::*,
+        anchor_lang::solana_program::keccak::hashv,
+        bytemuck::bytes_of_mut,
+        std::{
+            io::Write,
+            mem::{
+                align_of,
+                size_of,
+            },
+        },
+    };
+
+    fn data_bytes(data: Vec<u8>) -> Vec<u8> {
+        let mut bytes = vec![];
+        for d in data {
+            bytes.extend_from_slice(&d.to_be_bytes());
+        }
+        bytes
+    }
+
+    fn sighash(namespace: &str, name: &str) -> [u8; 8] {
+        let preimage = format!("{namespace}:{name}");
+
+        let mut sighash = [0u8; 8];
+        sighash.copy_from_slice(&hashv(&[preimage.as_bytes()]).to_bytes()[..8]);
+        sighash
+    }
+
+    fn generate_message_buffer_bytes(_data_bytes: &Vec<Vec<u8>>) -> Vec<u8> {
+        let message_buffer = &mut MessageBuffer::new(0);
+        let header_len = message_buffer.header_len as usize;
+
+        let account_info_data = &mut vec![];
+        let discriminator = &mut sighash("accounts", "MessageBuffer");
+        let destination = &mut vec![0u8; 10_240 - header_len];
+
+
+        account_info_data.write_all(discriminator).unwrap();
+        account_info_data
+            .write_all(bytes_of_mut(message_buffer))
+            .unwrap();
+        account_info_data.write_all(destination).unwrap();
+        account_info_data.to_vec()
+    }
+
+
+    #[test]
+    fn test_sizes_and_alignments() {
+        let (message_buffer_size, message_buffer_align) =
+            (size_of::<MessageBuffer>(), align_of::<MessageBuffer>());
+
+        assert_eq!(message_buffer_size, 514);
+        assert_eq!(message_buffer_align, 2);
+    }
+
+    #[test]
+    fn test_put_all() {
+        let data = vec![vec![12, 34], vec![56, 78, 90]];
+        let data_bytes: Vec<Vec<u8>> = data.into_iter().map(data_bytes).collect();
+
+        let account_info_data = &mut generate_message_buffer_bytes(&data_bytes);
+
+        let header_len = MessageBuffer::HEADER_LEN as usize;
+
+
+        let (header_bytes, body_bytes) = account_info_data.split_at_mut(header_len);
+        let message_buffer: &mut MessageBuffer = bytemuck::from_bytes_mut(&mut header_bytes[8..]);
+
+        let (num_msgs, num_bytes) = message_buffer.put_all_in_buffer(body_bytes, &data_bytes);
+
+
+        let message_buffer: &MessageBuffer =
+            bytemuck::from_bytes(&account_info_data.as_slice()[8..header_len]);
+
+        assert_eq!(num_msgs, 2);
+        assert_eq!(num_bytes, 5);
+
+
+        assert_eq!(message_buffer.end_offsets[0], 2);
+        assert_eq!(message_buffer.end_offsets[1], 5);
+
+
+        let iter = message_buffer.end_offsets.iter().take_while(|x| **x != 0);
+        let mut start = header_len;
+        let mut data_iter = data_bytes.iter();
+        let read_data = &mut vec![];
+        for offset in iter {
+            let end_offset = header_len + *offset as usize;
+            let message_buffer_data = &account_info_data[start..end_offset];
+            read_data.push(message_buffer_data);
+            start = end_offset;
+        }
+        println!("read_data: {:?}", read_data);
+        assert_eq!(read_data.len(), num_msgs);
+        for d in read_data.iter() {
+            let expected_data = data_iter.next().unwrap();
+            assert_eq!(d, &expected_data.as_slice());
+        }
+    }
+
+    #[test]
+    fn test_put_all_exceed_max() {
+        let data = vec![vec![0u8; 9_718 - 2], vec![0u8], vec![0u8; 2]];
+
+        let data_bytes: Vec<Vec<u8>> = data.into_iter().map(data_bytes).collect();
+
+        let account_info_data = &mut generate_message_buffer_bytes(&data_bytes);
+
+        let header_len = MessageBuffer::HEADER_LEN as usize;
+
+        let (header_bytes, body_bytes) = account_info_data.split_at_mut(header_len);
+        let message_buffer: &mut MessageBuffer = bytemuck::from_bytes_mut(&mut header_bytes[8..]);
+
+        let (num_msgs, num_bytes) = message_buffer.put_all_in_buffer(body_bytes, &data_bytes);
+
+
+        let message_buffer: &MessageBuffer =
+            bytemuck::from_bytes(&account_info_data.as_slice()[8..header_len]);
+
+        assert_eq!(num_msgs, 2);
+        assert_eq!(
+            num_bytes,
+            data_bytes[0..2].iter().map(|x| x.len()).sum::<usize>() as u16
+        );
+
+
+        let iter = message_buffer.end_offsets.iter().take_while(|x| **x != 0);
+        let mut start = header_len;
+        let mut data_iter = data_bytes.iter();
+        for offset in iter {
+            let end_offset = header_len + *offset as usize;
+            let message_buffer_data = &account_info_data[start..end_offset];
+            let expected_data = data_iter.next().unwrap();
+            assert_eq!(message_buffer_data, expected_data.as_slice());
+            start = end_offset;
+        }
+
+        assert_eq!(message_buffer.end_offsets[2], 0);
+    }
+
+
+    #[test]
+    fn test_put_all_long_vec() {
+        let data = vec![
+            vec![0u8; 9_718 - 3],
+            vec![0u8],
+            vec![0u8],
+            vec![0u8; u16::MAX as usize + 2],
+            vec![0u8],
+        ];
+
+        let data_bytes: Vec<Vec<u8>> = data.into_iter().map(data_bytes).collect();
+
+        let account_info_data = &mut generate_message_buffer_bytes(&data_bytes);
+
+        let header_len = MessageBuffer::HEADER_LEN as usize;
+
+
+        let (header_bytes, body_bytes) = account_info_data.split_at_mut(header_len);
+        let message_buffer: &mut MessageBuffer = bytemuck::from_bytes_mut(&mut header_bytes[8..]);
+
+        let (num_msgs, num_bytes) = message_buffer.put_all_in_buffer(body_bytes, &data_bytes);
+
+
+        let message_buffer: &MessageBuffer =
+            bytemuck::from_bytes(&account_info_data.as_slice()[8..header_len]);
+
+
+        assert_eq!(num_msgs, 3);
+        assert_eq!(
+            num_bytes,
+            data_bytes[0..3].iter().map(|x| x.len()).sum::<usize>() as u16
+        );
+
+
+        let iter = message_buffer.end_offsets.iter().take_while(|x| **x != 0);
+        let mut start = header_len;
+        let mut data_iter = data_bytes.iter();
+        for offset in iter {
+            let end_offset = header_len + *offset as usize;
+            let message_buffer_data = &account_info_data[start..end_offset];
+            let expected_data = data_iter.next().unwrap();
+            assert_eq!(message_buffer_data, expected_data.as_slice());
+            start = end_offset;
+        }
+
+        assert_eq!(message_buffer.end_offsets[0], 9_715);
+        assert_eq!(message_buffer.end_offsets[1], 9_716);
+        assert_eq!(message_buffer.end_offsets[2], 9_717);
+        assert_eq!(message_buffer.end_offsets[3], 0);
+        assert_eq!(message_buffer.end_offsets[4], 0);
+    }
+
+    #[test]
+    pub fn test_cursor_read() {
+        use byteorder::{
+            LittleEndian,
+            ReadBytesExt,
+        };
+
+        let data = vec![vec![12, 34], vec![56, 78, 90]];
+        let data_bytes: Vec<Vec<u8>> = data.into_iter().map(data_bytes).collect();
+        let account_info_data = &mut generate_message_buffer_bytes(&data_bytes);
+
+        let header_len = MessageBuffer::HEADER_LEN as usize;
+
+        let (header_bytes, body_bytes) = account_info_data.split_at_mut(header_len);
+        let message_buffer: &mut MessageBuffer = bytemuck::from_bytes_mut(&mut header_bytes[8..]);
+
+        let (num_msgs, num_bytes) = message_buffer.put_all_in_buffer(body_bytes, &data_bytes);
+        assert_eq!(num_msgs, 2);
+        assert_eq!(num_bytes, 5);
+
+
+        let message_buffer: &MessageBuffer =
+            bytemuck::from_bytes(&account_info_data.as_slice()[8..header_len]);
+
+        assert_eq!(message_buffer.end_offsets[0], 2);
+        assert_eq!(message_buffer.end_offsets[1], 5);
+
+
+        let mut cursor = std::io::Cursor::new(&account_info_data[10..]);
+        let header_len = cursor.read_u16::<LittleEndian>().unwrap();
+        println!("header_len: {}", header_len);
+        let mut current_msg_start = header_len;
+        let mut end_offset = cursor.read_u16::<LittleEndian>().unwrap();
+        let mut data_iter = data_bytes.iter();
+        println!("init header_end: {}", end_offset);
+        let read_data = &mut vec![];
+        while end_offset != 0 {
+            let current_msg_end = header_len + end_offset;
+            let accumulator_input_data =
+                &account_info_data[current_msg_start as usize..current_msg_end as usize];
+            end_offset = cursor.read_u16::<LittleEndian>().unwrap();
+            current_msg_start = current_msg_end;
+            read_data.push(accumulator_input_data);
+        }
+
+        println!("read_data: {:?}", read_data);
+        for d in read_data.iter() {
+            let expected_data = data_iter.next().unwrap();
+            assert_eq!(d, &expected_data.as_slice());
+        }
+        assert_eq!(read_data.len(), 2);
+    }
+}

+ 7 - 0
message_buffer/programs/message_buffer/src/state/mod.rs

@@ -0,0 +1,7 @@
+pub use {
+    self::message_buffer::*,
+    whitelist::*,
+};
+
+mod message_buffer;
+mod whitelist;

+ 65 - 0
message_buffer/programs/message_buffer/src/state/whitelist.rs

@@ -0,0 +1,65 @@
+use {
+    crate::MessageBufferError,
+    anchor_lang::prelude::*,
+};
+
+// Note: purposely not making this zero_copy
+// otherwise whitelist must always be marked mutable
+// and majority of operations are read
+#[account]
+#[derive(InitSpace)]
+pub struct Whitelist {
+    pub bump:             u8,
+    pub admin:            Pubkey,
+    #[max_len(32)]
+    pub allowed_programs: Vec<Pubkey>,
+}
+
+impl Whitelist {
+    pub fn validate_programs(&self, allowed_programs: &[Pubkey]) -> Result<()> {
+        require!(
+            !self.allowed_programs.contains(&Pubkey::default()),
+            MessageBufferError::InvalidAllowedProgram
+        );
+        require_gte!(
+            32,
+            allowed_programs.len(),
+            MessageBufferError::MaximumAllowedProgramsExceeded
+        );
+        Ok(())
+    }
+
+    pub fn validate_new_admin(&self, new_admin: Pubkey) -> Result<()> {
+        require_keys_neq!(new_admin, Pubkey::default());
+        Ok(())
+    }
+
+    pub fn is_allowed_program_auth(&self, auth: &Pubkey) -> Result<()> {
+        require!(
+            self.allowed_programs.contains(auth),
+            MessageBufferError::CallerNotAllowed
+        );
+        Ok(())
+    }
+}
+
+#[derive(Accounts)]
+pub struct WhitelistVerifier<'info> {
+    #[account(
+        seeds = [b"message".as_ref(), b"whitelist".as_ref()],
+        bump = whitelist.bump,
+    )]
+    // Using a Box to move account from stack to heap
+    pub whitelist: Box<Account<'info, Whitelist>>,
+    /// PDA representing authorized cpi caller
+    pub cpi_caller_auth: Signer<'info>,
+}
+
+impl<'info> WhitelistVerifier<'info> {
+    pub fn is_allowed(&self) -> Result<Pubkey> {
+        let auth = self.cpi_caller_auth.key();
+        let whitelist = &self.whitelist;
+        whitelist.is_allowed_program_auth(&auth)?;
+        Ok(auth)
+    }
+}

+ 4 - 3
accumulator_updater/programs/accumulator_updater/Cargo.toml → message_buffer/programs/mock-cpi-caller/Cargo.toml

@@ -1,12 +1,12 @@
 [package]
-name = "accumulator_updater"
+name = "mock-cpi-caller"
 version = "0.1.0"
-description = "Accumulator Updater Pythnet Program"
+description = "Created with Anchor"
 edition = "2021"
 
 [lib]
 crate-type = ["cdylib", "lib"]
-name = "accumulator_updater"
+name = "mock_cpi_caller"
 
 [features]
 no-entrypoint = []
@@ -17,5 +17,6 @@ default = []
 
 [dependencies]
 anchor-lang = "0.27.0"
+message_buffer = { path = "../message_buffer", features = ["cpi"] }
 # needed for the new #[account(zero_copy)] in anchor 0.27.0
 bytemuck = { version = "1.4.0", features = ["derive", "min_const_generics"]}

+ 0 - 0
accumulator_updater/programs/mock-cpi-caller/Xargo.toml → message_buffer/programs/mock-cpi-caller/Xargo.toml


+ 147 - 0
message_buffer/programs/mock-cpi-caller/src/instructions/add_price.rs

@@ -0,0 +1,147 @@
+use {
+    crate::{
+        instructions::{
+            sighash,
+            ACCUMULATOR_UPDATER_IX_NAME,
+            CPI,
+        },
+        message::{
+            get_schemas,
+            price::{
+                CompactPriceMessage,
+                FullPriceMessage,
+            },
+            AccumulatorSerializer,
+        },
+        state::{
+            PriceAccount,
+            PythAccountType,
+        },
+    },
+    anchor_lang::{
+        prelude::*,
+        system_program,
+    },
+    message_buffer::program::MessageBuffer as MessageBufferProgram,
+};
+
+pub fn add_price<'info>(
+    ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
+    params: AddPriceParams,
+) -> Result<()> {
+    let mut inputs: Vec<Vec<u8>> = vec![];
+    let _schemas = get_schemas(PythAccountType::Price);
+
+    {
+        let pyth_price_acct = &mut ctx.accounts.pyth_price_account.load_init()?;
+
+        pyth_price_acct.init(params)?;
+
+        let price_full_data = FullPriceMessage::from(&**pyth_price_acct).accumulator_serialize()?;
+
+        inputs.push(price_full_data);
+
+
+        let price_compact_data =
+            CompactPriceMessage::from(&**pyth_price_acct).accumulator_serialize()?;
+        inputs.push(price_compact_data);
+    }
+
+
+    // Note: normally pyth oracle add_price wouldn't call emit_accumulator_inputs
+    // since add_price doesn't actually add/update any price data we would
+    // want included in the accumulator anyways. This is just for testing
+    AddPrice::emit_accumulator_inputs(ctx, inputs)
+}
+
+
+impl<'info> AddPrice<'info> {
+    /// Invoke accumulator-updater emit-inputs ix cpi call using solana
+    pub fn emit_accumulator_inputs(
+        ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
+        inputs: Vec<Vec<u8>>,
+    ) -> anchor_lang::Result<()> {
+        let mut accounts = vec![
+            AccountMeta::new_readonly(ctx.accounts.accumulator_whitelist.key(), false),
+            AccountMeta::new_readonly(ctx.accounts.auth.key(), true),
+        ];
+        accounts.extend_from_slice(
+            &ctx.remaining_accounts
+                .iter()
+                .map(|a| AccountMeta::new(a.key(), false))
+                .collect::<Vec<_>>(),
+        );
+        let create_inputs_ix = anchor_lang::solana_program::instruction::Instruction {
+            program_id: ctx.accounts.message_buffer_program.key(),
+            accounts,
+            data: (
+                //anchor ix discriminator/identifier
+                sighash("global", ACCUMULATOR_UPDATER_IX_NAME),
+                ctx.accounts.pyth_price_account.key(),
+                inputs,
+            )
+                .try_to_vec()
+                .unwrap(),
+        };
+        let account_infos = &mut ctx.accounts.to_account_infos();
+        account_infos.extend_from_slice(ctx.remaining_accounts);
+        // using find_program_address here instead of ctx.bumps.get since
+        // that won't be available in the oracle program
+        let (_, bump) = Pubkey::find_program_address(
+            &[
+                ctx.accounts.message_buffer_program.key().as_ref(),
+                CPI.as_bytes(),
+            ],
+            &crate::ID,
+        );
+        anchor_lang::solana_program::program::invoke_signed(
+            &create_inputs_ix,
+            account_infos,
+            &[&[
+                ctx.accounts.message_buffer_program.key().as_ref(),
+                CPI.as_bytes(),
+                &[bump],
+            ]],
+        )?;
+        Ok(())
+    }
+}
+
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug, PartialEq, Eq)]
+pub struct AddPriceParams {
+    pub id:         u64,
+    pub price:      u64,
+    pub price_expo: u64,
+    pub ema:        u64,
+    pub ema_expo:   u64,
+}
+
+#[derive(Accounts)]
+#[instruction(params: AddPriceParams)]
+pub struct AddPrice<'info> {
+    #[account(
+        init,
+        payer = payer,
+        seeds = [b"pyth".as_ref(), b"price".as_ref(), &params.id.to_le_bytes()],
+        bump,
+        space = 8 + PriceAccount::INIT_SPACE
+    )]
+    pub pyth_price_account:     AccountLoader<'info, PriceAccount>,
+    #[account(mut)]
+    pub payer:                  Signer<'info>,
+    /// also needed for accumulator_updater
+    pub system_program:         Program<'info, System>,
+    /// CHECK: whitelist
+    pub accumulator_whitelist:  UncheckedAccount<'info>,
+    /// PDA representing this program's authority
+    /// to call the accumulator program
+    #[account(
+        seeds = [message_buffer_program.key().as_ref(), b"cpi".as_ref()],
+        owner = system_program::System::id(),
+        bump,
+    )]
+    pub auth:                   SystemAccount<'info>,
+    pub message_buffer_program: Program<'info, MessageBufferProgram>,
+    // Remaining Accounts
+    // MessageBuffer PDA
+}

+ 37 - 0
message_buffer/programs/mock-cpi-caller/src/instructions/cpi_max_test.rs

@@ -0,0 +1,37 @@
+use {
+    crate::{
+        instructions::{
+            UpdatePrice,
+            UpdatePriceParams,
+        },
+        message::{
+            price::DummyPriceMessage,
+            AccumulatorSerializer,
+        },
+    },
+    anchor_lang::prelude::*,
+};
+
+pub fn cpi_max_test<'info>(
+    ctx: Context<'_, '_, '_, 'info, UpdatePrice<'info>>,
+    params: UpdatePriceParams,
+    msg_sizes: Vec<u16>,
+) -> Result<()> {
+    let mut inputs = vec![];
+
+    {
+        let pyth_price_acct = &mut ctx.accounts.pyth_price_account.load_mut()?;
+        pyth_price_acct.update(params)?;
+
+        for msg_size in msg_sizes {
+            let price_dummy_data = DummyPriceMessage::new(msg_size).accumulator_serialize()?;
+            inputs.push(price_dummy_data);
+        }
+    }
+
+    let input_len = inputs.iter().map(|x| x.len()).sum::<usize>();
+    msg!("input_len: {}", input_len);
+
+
+    UpdatePrice::emit_accumulator_inputs(ctx, inputs)
+}

+ 27 - 0
message_buffer/programs/mock-cpi-caller/src/instructions/mod.rs

@@ -0,0 +1,27 @@
+use anchor_lang::solana_program::hash::hashv;
+pub use {
+    add_price::*,
+    cpi_max_test::*,
+    update_price::*,
+};
+
+mod add_price;
+mod cpi_max_test;
+mod update_price;
+
+/// Generate discriminator to be able to call anchor program's ix
+/// * `namespace` - "global" for instructions
+/// * `name` - name of ix to call CASE-SENSITIVE
+///
+/// Note: this could probably be converted into a constant hash value
+/// since it will always be the same.
+pub fn sighash(namespace: &str, name: &str) -> [u8; 8] {
+    let preimage = format!("{namespace}:{name}");
+
+    let mut sighash = [0u8; 8];
+    sighash.copy_from_slice(&hashv(&[preimage.as_bytes()]).to_bytes()[..8]);
+    sighash
+}
+
+pub const ACCUMULATOR_UPDATER_IX_NAME: &str = "put_all";
+pub const CPI: &str = "cpi";

+ 132 - 0
message_buffer/programs/mock-cpi-caller/src/instructions/update_price.rs

@@ -0,0 +1,132 @@
+use {
+    crate::{
+        instructions::{
+            sighash,
+            ACCUMULATOR_UPDATER_IX_NAME,
+            CPI,
+        },
+        message::{
+            price::{
+                CompactPriceMessage,
+                FullPriceMessage,
+            },
+            AccumulatorSerializer,
+        },
+        state::PriceAccount,
+    },
+    anchor_lang::{
+        prelude::*,
+        system_program,
+    },
+    message_buffer::program::MessageBuffer as MessageBufferProgram,
+};
+
+#[derive(AnchorSerialize, AnchorDeserialize, Clone, Debug, PartialEq, Eq)]
+pub struct UpdatePriceParams {
+    pub price:      u64,
+    pub price_expo: u64,
+    pub ema:        u64,
+    pub ema_expo:   u64,
+}
+
+
+#[derive(Accounts)]
+pub struct UpdatePrice<'info> {
+    #[account(
+    mut,
+    seeds = [
+    b"pyth".as_ref(),
+    b"price".as_ref(),
+    &pyth_price_account.load()?.id.to_le_bytes()
+    ],
+    bump,
+    )]
+    pub pyth_price_account:     AccountLoader<'info, PriceAccount>,
+    /// CHECK: whitelist
+    pub accumulator_whitelist:  UncheckedAccount<'info>,
+    #[account(
+        seeds = [message_buffer_program.key().as_ref(), b"cpi".as_ref()],
+        owner = system_program::System::id(),
+        bump,
+    )]
+    pub auth:                   SystemAccount<'info>,
+    pub message_buffer_program: Program<'info, MessageBufferProgram>,
+}
+
+/// Updates the mock pyth price account and calls accumulator-updater
+/// update_inputs ix
+pub fn update_price<'info>(
+    ctx: Context<'_, '_, '_, 'info, UpdatePrice<'info>>,
+    params: UpdatePriceParams,
+) -> Result<()> {
+    let mut inputs = vec![];
+
+    {
+        let pyth_price_acct = &mut ctx.accounts.pyth_price_account.load_mut()?;
+        pyth_price_acct.update(params)?;
+
+        let price_full_data = FullPriceMessage::from(&**pyth_price_acct).accumulator_serialize()?;
+
+        inputs.push(price_full_data);
+
+
+        let price_compact_data =
+            CompactPriceMessage::from(&**pyth_price_acct).accumulator_serialize()?;
+        inputs.push(price_compact_data);
+    }
+
+
+    UpdatePrice::emit_accumulator_inputs(ctx, inputs)
+}
+
+impl<'info> UpdatePrice<'info> {
+    /// Invoke accumulator-updater emit-inputs ix cpi call
+    pub fn emit_accumulator_inputs(
+        ctx: Context<'_, '_, '_, 'info, UpdatePrice<'info>>,
+        values: Vec<Vec<u8>>,
+    ) -> anchor_lang::Result<()> {
+        let mut accounts = vec![
+            AccountMeta::new_readonly(ctx.accounts.accumulator_whitelist.key(), false),
+            AccountMeta::new_readonly(ctx.accounts.auth.key(), true),
+        ];
+        accounts.extend_from_slice(
+            &ctx.remaining_accounts
+                .iter()
+                .map(|a| AccountMeta::new(a.key(), false))
+                .collect::<Vec<_>>(),
+        );
+        let update_inputs_ix = anchor_lang::solana_program::instruction::Instruction {
+            program_id: ctx.accounts.message_buffer_program.key(),
+            accounts,
+            data: (
+                //anchor ix discriminator/identifier
+                sighash("global", ACCUMULATOR_UPDATER_IX_NAME),
+                ctx.accounts.pyth_price_account.key(),
+                values,
+            )
+                .try_to_vec()
+                .unwrap(),
+        };
+        let account_infos = &mut ctx.accounts.to_account_infos();
+        account_infos.extend_from_slice(ctx.remaining_accounts);
+        // using find_program_address here instead of ctx.bumps.get since
+        // that won't be available in the oracle program
+        let (_, bump) = Pubkey::find_program_address(
+            &[
+                ctx.accounts.message_buffer_program.key().as_ref(),
+                CPI.as_bytes(),
+            ],
+            &crate::ID,
+        );
+        anchor_lang::solana_program::program::invoke_signed(
+            &update_inputs_ix,
+            account_infos,
+            &[&[
+                ctx.accounts.message_buffer_program.key().as_ref(),
+                CPI.as_bytes(),
+                &[bump],
+            ]],
+        )?;
+        Ok(())
+    }
+}

+ 67 - 0
message_buffer/programs/mock-cpi-caller/src/lib.rs

@@ -0,0 +1,67 @@
+use {
+    anchor_lang::prelude::*,
+    instructions::*,
+};
+
+pub mod instructions;
+pub mod message;
+mod state;
+
+declare_id!("Dg5PaFpoGXkYsidMpWTK6W2BeZ7FEfcYkg476zPFsLnS");
+
+#[program]
+pub mod mock_cpi_caller {
+    use super::*;
+
+    /// Creates a `PriceAccount` with the given parameters
+    pub fn add_price<'info>(
+        ctx: Context<'_, '_, '_, 'info, AddPrice<'info>>,
+        params: AddPriceParams,
+    ) -> Result<()> {
+        instructions::add_price(ctx, params)
+    }
+
+    /// Updates a `PriceAccount` with the given parameters
+    pub fn update_price<'info>(
+        ctx: Context<'_, '_, '_, 'info, UpdatePrice<'info>>,
+        params: UpdatePriceParams,
+    ) -> Result<()> {
+        instructions::update_price(ctx, params)
+    }
+
+    /// num_messages is the number of 1kb messages to send to the CPI
+    pub fn cpi_max_test<'info>(
+        ctx: Context<'_, '_, '_, 'info, UpdatePrice<'info>>,
+        params: UpdatePriceParams,
+        msg_sizes: Vec<u16>,
+    ) -> Result<()> {
+        instructions::cpi_max_test(ctx, params, msg_sizes)
+    }
+}
+
+
+#[cfg(test)]
+mod test {
+    use {
+        super::*,
+        anchor_lang::InstructionData,
+    };
+
+    #[test]
+    fn ix_discriminator() {
+        let a = &(message_buffer::instruction::PutAll {
+            base_account_key: anchor_lang::prelude::Pubkey::default(),
+            messages:         vec![],
+        }
+        .data()[..8]);
+
+        let sighash = sighash("global", ACCUMULATOR_UPDATER_IX_NAME);
+        println!(
+            r"
+            a: {a:?}
+            sighash: {sighash:?}
+            ",
+        );
+        assert_eq!(a, &sighash);
+    }
+}

+ 30 - 0
message_buffer/programs/mock-cpi-caller/src/message.rs

@@ -0,0 +1,30 @@
+use crate::state::PythAccountType;
+
+pub mod price;
+
+#[derive(Copy, Clone)]
+#[repr(u8)]
+pub enum MessageSchema {
+    Full    = 0,
+    Compact = 1,
+    Minimal = 2,
+    Dummy   = 3,
+}
+
+impl MessageSchema {
+    pub fn to_u8(&self) -> u8 {
+        *self as u8
+    }
+}
+
+
+pub fn get_schemas(account_type: PythAccountType) -> Vec<MessageSchema> {
+    match account_type {
+        PythAccountType::Price => vec![MessageSchema::Full, MessageSchema::Compact],
+        _ => vec![MessageSchema::Full],
+    }
+}
+
+pub trait AccumulatorSerializer {
+    fn accumulator_serialize(&self) -> anchor_lang::Result<Vec<u8>>;
+}

+ 147 - 0
message_buffer/programs/mock-cpi-caller/src/message/price.rs

@@ -0,0 +1,147 @@
+use {
+    crate::{
+        message::{
+            AccumulatorSerializer,
+            MessageSchema,
+        },
+        state::PriceAccount,
+    },
+    anchor_lang::prelude::*,
+    std::io::Write,
+};
+
+
+#[repr(C)]
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
+pub struct MessageHeader {
+    pub schema:  u8,
+    pub version: u16,
+    pub size:    u32,
+}
+
+impl MessageHeader {
+    pub const CURRENT_VERSION: u8 = 2;
+
+    pub fn new(schema: MessageSchema, size: u32) -> Self {
+        Self {
+            schema: schema.to_u8(),
+            version: Self::CURRENT_VERSION as u16,
+            size,
+        }
+    }
+}
+
+#[repr(C)]
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
+pub struct CompactPriceMessage {
+    pub header:     MessageHeader,
+    pub price_expo: u64,
+    pub price:      u64,
+    pub id:         u64,
+}
+
+
+impl CompactPriceMessage {
+    // size without header
+    pub const SIZE: usize = 24;
+}
+
+impl AccumulatorSerializer for CompactPriceMessage {
+    fn accumulator_serialize(&self) -> Result<Vec<u8>> {
+        let mut bytes = vec![];
+        bytes.write_all(&self.header.schema.to_be_bytes())?;
+        bytes.write_all(&self.header.version.to_be_bytes())?;
+        bytes.write_all(&self.header.size.to_be_bytes())?;
+        bytes.write_all(&self.id.to_be_bytes())?;
+        bytes.write_all(&self.price.to_be_bytes())?;
+        bytes.write_all(&self.price_expo.to_be_bytes())?;
+        Ok(bytes)
+    }
+}
+
+impl From<&PriceAccount> for CompactPriceMessage {
+    fn from(other: &PriceAccount) -> Self {
+        Self {
+            header:     MessageHeader::new(MessageSchema::Compact, Self::SIZE as u32),
+            id:         other.id,
+            price:      other.price,
+            price_expo: other.price_expo,
+        }
+    }
+}
+
+
+#[repr(C)]
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
+pub struct FullPriceMessage {
+    pub header:     MessageHeader,
+    pub id:         u64,
+    pub price:      u64,
+    pub price_expo: u64,
+    pub ema:        u64,
+    pub ema_expo:   u64,
+}
+
+impl FullPriceMessage {
+    pub const SIZE: usize = 40;
+}
+
+impl From<&PriceAccount> for FullPriceMessage {
+    fn from(other: &PriceAccount) -> Self {
+        Self {
+            header:     MessageHeader::new(MessageSchema::Full, Self::SIZE as u32),
+            id:         other.id,
+            price:      other.price,
+            price_expo: other.price_expo,
+            ema:        other.ema,
+            ema_expo:   other.ema_expo,
+        }
+    }
+}
+
+impl AccumulatorSerializer for FullPriceMessage {
+    fn accumulator_serialize(&self) -> Result<Vec<u8>> {
+        let mut bytes = vec![];
+        bytes.write_all(&self.header.schema.to_be_bytes())?;
+        bytes.write_all(&self.header.version.to_be_bytes())?;
+        bytes.write_all(&self.header.size.to_be_bytes())?;
+        bytes.write_all(&self.id.to_be_bytes())?;
+        bytes.write_all(&self.price.to_be_bytes())?;
+        bytes.write_all(&self.price_expo.to_be_bytes())?;
+        bytes.write_all(&self.ema.to_be_bytes())?;
+        bytes.write_all(&self.ema_expo.to_be_bytes())?;
+        Ok(bytes)
+    }
+}
+
+
+#[repr(C)]
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct DummyPriceMessage {
+    pub header: MessageHeader,
+    pub data:   Vec<u8>,
+}
+
+
+impl DummyPriceMessage {
+    pub const SIZE: usize = 1017;
+
+    pub fn new(msg_size: u16) -> Self {
+        Self {
+            header: MessageHeader::new(MessageSchema::Dummy, msg_size as u32),
+            data:   vec![0u8; msg_size as usize],
+        }
+    }
+}
+
+
+impl AccumulatorSerializer for DummyPriceMessage {
+    fn accumulator_serialize(&self) -> Result<Vec<u8>> {
+        let mut bytes = vec![];
+        bytes.write_all(&self.header.schema.to_be_bytes())?;
+        bytes.write_all(&self.header.version.to_be_bytes())?;
+        bytes.write_all(&self.header.size.to_be_bytes())?;
+        bytes.extend_from_slice(&self.data);
+        Ok(bytes)
+    }
+}

+ 25 - 0
message_buffer/programs/mock-cpi-caller/src/state/mod.rs

@@ -0,0 +1,25 @@
+pub use price::*;
+
+mod price;
+
+trait PythAccount {
+    const ACCOUNT_TYPE: PythAccountType;
+    fn account_type() -> PythAccountType {
+        Self::ACCOUNT_TYPE
+    }
+}
+
+#[derive(Copy, Clone)]
+#[repr(u32)]
+pub enum PythAccountType {
+    Mapping     = 1,
+    Product     = 2,
+    Price       = 3,
+    Test        = 4,
+    Permissions = 5,
+}
+impl PythAccountType {
+    pub(crate) fn to_u32(&self) -> u32 {
+        *self as u32
+    }
+}

+ 47 - 0
message_buffer/programs/mock-cpi-caller/src/state/price.rs

@@ -0,0 +1,47 @@
+use {
+    crate::{
+        instructions::{
+            AddPriceParams,
+            UpdatePriceParams,
+        },
+        state::{
+            PythAccount,
+            PythAccountType,
+        },
+    },
+    anchor_lang::prelude::*,
+};
+
+#[account(zero_copy)]
+#[derive(InitSpace)]
+pub struct PriceAccount {
+    pub id:         u64,
+    pub price:      u64,
+    pub price_expo: u64,
+    pub ema:        u64,
+    pub ema_expo:   u64,
+    pub comp_:      [Pubkey; 32],
+}
+
+impl PriceAccount {
+    pub(crate) fn init(&mut self, params: AddPriceParams) -> Result<()> {
+        self.id = params.id;
+        self.price = params.price;
+        self.price_expo = params.price_expo;
+        self.ema = params.ema;
+        self.ema_expo = params.ema_expo;
+        Ok(())
+    }
+
+    pub(crate) fn update(&mut self, params: UpdatePriceParams) -> Result<()> {
+        self.price = params.price;
+        self.price_expo = params.price_expo;
+        self.ema = params.ema;
+        self.ema_expo = params.ema_expo;
+        Ok(())
+    }
+}
+
+impl PythAccount for PriceAccount {
+    const ACCOUNT_TYPE: PythAccountType = PythAccountType::Price;
+}

+ 888 - 0
message_buffer/tests/message_buffer.ts

@@ -0,0 +1,888 @@
+import * as anchor from "@coral-xyz/anchor";
+import {
+  IdlAccounts,
+  IdlTypes,
+  Program,
+  BorshAccountsCoder,
+} from "@coral-xyz/anchor";
+import { MessageBuffer } from "../target/types/message_buffer";
+import { MockCpiCaller } from "../target/types/mock_cpi_caller";
+import lumina from "@lumina-dev/test";
+import { assert } from "chai";
+import { AccountMeta, ComputeBudgetProgram } from "@solana/web3.js";
+import bs58 from "bs58";
+
+// Enables tool that runs in local browser for easier debugging of
+// transactions in this test -  https://lumina.fyi/debug
+// lumina();
+
+const messageBufferProgram = anchor.workspace
+  .MessageBuffer as Program<MessageBuffer>;
+const mockCpiProg = anchor.workspace.MockCpiCaller as Program<MockCpiCaller>;
+let whitelistAdmin = anchor.web3.Keypair.generate();
+
+const [mockCpiCallerAuth] = anchor.web3.PublicKey.findProgramAddressSync(
+  [messageBufferProgram.programId.toBuffer(), Buffer.from("cpi")],
+  mockCpiProg.programId
+);
+
+const pythPriceAccountId = new anchor.BN(1);
+const addPriceParams = {
+  id: pythPriceAccountId,
+  price: new anchor.BN(2),
+  priceExpo: new anchor.BN(3),
+  ema: new anchor.BN(4),
+  emaExpo: new anchor.BN(5),
+};
+const [pythPriceAccountPk] = anchor.web3.PublicKey.findProgramAddressSync(
+  [
+    Buffer.from("pyth"),
+    Buffer.from("price"),
+    pythPriceAccountId.toArrayLike(Buffer, "le", 8),
+  ],
+  mockCpiProg.programId
+);
+const MESSAGE = Buffer.from("message");
+const [accumulatorPdaKey, accumulatorPdaBump] =
+  anchor.web3.PublicKey.findProgramAddressSync(
+    [mockCpiCallerAuth.toBuffer(), MESSAGE, pythPriceAccountPk.toBuffer()],
+    messageBufferProgram.programId
+  );
+
+const pythPriceAccountId2 = new anchor.BN(2);
+const [pythPriceAccountPk2] = anchor.web3.PublicKey.findProgramAddressSync(
+  [
+    Buffer.from("pyth"),
+    Buffer.from("price"),
+    pythPriceAccountId2.toArrayLike(Buffer, "le", 8),
+  ],
+  mockCpiProg.programId
+);
+
+const [accumulatorPdaKey2, accumulatorPdaBump2] =
+  anchor.web3.PublicKey.findProgramAddressSync(
+    [mockCpiCallerAuth.toBuffer(), MESSAGE, pythPriceAccountPk2.toBuffer()],
+    messageBufferProgram.programId
+  );
+
+const accumulatorPdaMeta2 = {
+  pubkey: accumulatorPdaKey2,
+  isSigner: false,
+  isWritable: true,
+};
+
+console.log("3");
+
+let fundBalance = 100 * anchor.web3.LAMPORTS_PER_SOL;
+
+const discriminator = BorshAccountsCoder.accountDiscriminator("MessageBuffer");
+const messageBufferDiscriminator = bs58.encode(discriminator);
+
+describe("accumulator_updater", () => {
+  // Configure the client to use the local cluster.
+  let provider = anchor.AnchorProvider.env();
+  anchor.setProvider(provider);
+
+  const [whitelistPubkey, whitelistBump] =
+    anchor.web3.PublicKey.findProgramAddressSync(
+      [MESSAGE, Buffer.from("whitelist")],
+      messageBufferProgram.programId
+    );
+
+  before("transfer lamports to needed accounts", async () => {
+    const airdropTxnSig = await provider.connection.requestAirdrop(
+      whitelistAdmin.publicKey,
+      fundBalance
+    );
+
+    await provider.connection.confirmTransaction({
+      signature: airdropTxnSig,
+      ...(await provider.connection.getLatestBlockhash()),
+    });
+    const whitelistAuthorityBalance = await provider.connection.getBalance(
+      whitelistAdmin.publicKey
+    );
+    assert.isTrue(whitelistAuthorityBalance === fundBalance);
+  });
+
+  it("Is initialized!", async () => {
+    // Add your test here.
+    const tx = await messageBufferProgram.methods
+      .initialize(whitelistAdmin.publicKey)
+      .accounts({})
+      .rpc();
+    console.log("Your transaction signature", tx);
+
+    const whitelist = await messageBufferProgram.account.whitelist.fetch(
+      whitelistPubkey
+    );
+    assert.strictEqual(whitelist.bump, whitelistBump);
+    assert.isTrue(whitelist.admin.equals(whitelistAdmin.publicKey));
+    console.info(`whitelist: ${JSON.stringify(whitelist)}`);
+  });
+
+  it("Sets allowed programs to the whitelist", async () => {
+    const allowedProgramAuthorities = [mockCpiCallerAuth];
+    await messageBufferProgram.methods
+      .setAllowedPrograms(allowedProgramAuthorities)
+      .accounts({
+        admin: whitelistAdmin.publicKey,
+      })
+      .signers([whitelistAdmin])
+      .rpc();
+    const whitelist = await messageBufferProgram.account.whitelist.fetch(
+      whitelistPubkey
+    );
+    console.info(`whitelist after add: ${JSON.stringify(whitelist)}`);
+    const whitelistAllowedPrograms = whitelist.allowedPrograms.map((pk) =>
+      pk.toString()
+    );
+    assert.deepEqual(
+      whitelistAllowedPrograms,
+      allowedProgramAuthorities.map((p) => p.toString())
+    );
+  });
+
+  it("Creates a buffer", async () => {
+    const accumulatorPdaMetas = [
+      {
+        pubkey: accumulatorPdaKey,
+        isSigner: false,
+        isWritable: true,
+      },
+    ];
+
+    await messageBufferProgram.methods
+      .createBuffer(mockCpiCallerAuth, pythPriceAccountPk, 1024 * 8)
+      .accounts({
+        whitelist: whitelistPubkey,
+        admin: whitelistAdmin.publicKey,
+        systemProgram: anchor.web3.SystemProgram.programId,
+      })
+      .signers([whitelistAdmin])
+      .remainingAccounts(accumulatorPdaMetas)
+      .rpc({ skipPreflight: true });
+
+    const messageBufferAccountData = await getMessageBuffer(
+      provider.connection,
+      accumulatorPdaKey
+    );
+    const messageBufferHeader = deserializeMessageBufferHeader(
+      messageBufferProgram,
+      messageBufferAccountData
+    );
+    assert.equal(messageBufferHeader.version, 1);
+    assert.equal(messageBufferHeader.bump, accumulatorPdaBump);
+  });
+
+  it("Creates a buffer even if the account already has lamports", async () => {
+    const minimumEmptyRent =
+      await provider.connection.getMinimumBalanceForRentExemption(0);
+    await provider.sendAndConfirm(
+      (() => {
+        const tx = new anchor.web3.Transaction();
+        tx.add(
+          anchor.web3.SystemProgram.transfer({
+            fromPubkey: provider.wallet.publicKey,
+            toPubkey: accumulatorPdaKey2,
+            lamports: minimumEmptyRent,
+          })
+        );
+        return tx;
+      })()
+    );
+
+    const accumulatorPdaBalance = await provider.connection.getBalance(
+      accumulatorPdaKey2
+    );
+    console.log(`accumulatorPdaBalance: ${accumulatorPdaBalance}`);
+    assert.isTrue(accumulatorPdaBalance === minimumEmptyRent);
+
+    await messageBufferProgram.methods
+      .createBuffer(mockCpiCallerAuth, pythPriceAccountPk2, 1000)
+      .accounts({
+        whitelist: whitelistPubkey,
+        admin: whitelistAdmin.publicKey,
+        systemProgram: anchor.web3.SystemProgram.programId,
+      })
+      .signers([whitelistAdmin])
+      .remainingAccounts([accumulatorPdaMeta2])
+      .rpc({ skipPreflight: true });
+
+    const messageBufferAccountData = await getMessageBuffer(
+      provider.connection,
+      accumulatorPdaKey2
+    );
+
+    const minimumMessageBufferRent =
+      await provider.connection.getMinimumBalanceForRentExemption(
+        messageBufferAccountData.length
+      );
+    const accumulatorPdaBalanceAfter = await provider.connection.getBalance(
+      accumulatorPdaKey2
+    );
+    assert.isTrue(accumulatorPdaBalanceAfter === minimumMessageBufferRent);
+    const messageBufferHeader = deserializeMessageBufferHeader(
+      messageBufferProgram,
+      messageBufferAccountData
+    );
+
+    console.log(`header: ${JSON.stringify(messageBufferHeader)}`);
+    assert.equal(messageBufferHeader.bump, accumulatorPdaBump2);
+    assert.equal(messageBufferAccountData[8], accumulatorPdaBump2);
+
+    assert.equal(messageBufferHeader.version, 1);
+  });
+
+  it("Updates the whitelist authority", async () => {
+    const newWhitelistAdmin = anchor.web3.Keypair.generate();
+    await messageBufferProgram.methods
+      .updateWhitelistAdmin(newWhitelistAdmin.publicKey)
+      .accounts({
+        admin: whitelistAdmin.publicKey,
+      })
+      .signers([whitelistAdmin])
+      .rpc();
+
+    let whitelist = await messageBufferProgram.account.whitelist.fetch(
+      whitelistPubkey
+    );
+    assert.isTrue(whitelist.admin.equals(newWhitelistAdmin.publicKey));
+
+    // swap back to original authority
+    await messageBufferProgram.methods
+      .updateWhitelistAdmin(whitelistAdmin.publicKey)
+      .accounts({
+        admin: newWhitelistAdmin.publicKey,
+      })
+      .signers([newWhitelistAdmin])
+      .rpc();
+
+    whitelist = await messageBufferProgram.account.whitelist.fetch(
+      whitelistPubkey
+    );
+    assert.isTrue(whitelist.admin.equals(whitelistAdmin.publicKey));
+  });
+
+  it("Mock CPI program - AddPrice", async () => {
+    const mockCpiCallerAddPriceTxPubkeys = await mockCpiProg.methods
+      .addPrice(addPriceParams)
+      .accounts({
+        systemProgram: anchor.web3.SystemProgram.programId,
+        auth: mockCpiCallerAuth,
+        accumulatorWhitelist: whitelistPubkey,
+        messageBufferProgram: messageBufferProgram.programId,
+      })
+      .pubkeys();
+
+    const accumulatorPdaMetas = [
+      {
+        pubkey: accumulatorPdaKey,
+        isSigner: false,
+        isWritable: true,
+      },
+    ];
+
+    const mockCpiCallerAddPriceTxPrep = await mockCpiProg.methods
+      .addPrice(addPriceParams)
+      .accounts({
+        ...mockCpiCallerAddPriceTxPubkeys,
+      })
+      .remainingAccounts(accumulatorPdaMetas)
+      .prepare();
+
+    console.log(
+      `ix: ${JSON.stringify(
+        mockCpiCallerAddPriceTxPrep.instruction,
+        (k, v) => {
+          if (k === "data") {
+            return v.toString();
+          } else {
+            return v;
+          }
+        },
+        2
+      )}`
+    );
+    for (const prop in mockCpiCallerAddPriceTxPrep.pubkeys) {
+      console.log(
+        `${prop}: ${mockCpiCallerAddPriceTxPrep.pubkeys[prop].toString()}`
+      );
+    }
+
+    const addPriceTx = await mockCpiProg.methods
+      .addPrice(addPriceParams)
+      .accounts({
+        ...mockCpiCallerAddPriceTxPubkeys,
+      })
+      .remainingAccounts(accumulatorPdaMetas)
+      .preInstructions([
+        ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
+      ])
+      .rpc({
+        skipPreflight: true,
+      });
+
+    console.log(`addPriceTx: ${addPriceTx}`);
+    const pythPriceAccount = await provider.connection.getAccountInfo(
+      mockCpiCallerAddPriceTxPubkeys.pythPriceAccount
+    );
+
+    const messageBufferAccount = await provider.connection.getAccountInfo(
+      accumulatorPdaKey
+    );
+
+    const accumulatorPriceMessages = parseMessageBuffer(
+      messageBufferProgram,
+      messageBufferAccount.data
+    );
+
+    console.log(
+      `accumulatorPriceMessages: ${JSON.stringify(
+        accumulatorPriceMessages,
+        null,
+        2
+      )}`
+    );
+    accumulatorPriceMessages.forEach((pm) => {
+      assert.isTrue(pm.id.eq(addPriceParams.id));
+      assert.isTrue(pm.price.eq(addPriceParams.price));
+      assert.isTrue(pm.priceExpo.eq(addPriceParams.priceExpo));
+    });
+  });
+
+  it("Fetches MessageBuffer using getProgramAccounts with discriminator", async () => {
+    const messageBufferAccounts = await getProgramAccountsForMessageBuffers(
+      provider.connection
+    );
+    const msgBufferAcctKeys = messageBufferAccounts.map((ai) =>
+      ai.pubkey.toString()
+    );
+    console.log(
+      `messageBufferAccounts: ${JSON.stringify(msgBufferAcctKeys, null, 2)}`
+    );
+
+    assert.isTrue(messageBufferAccounts.length === 2);
+    msgBufferAcctKeys.includes(accumulatorPdaKey.toString());
+  });
+
  it("Mock CPI Program - UpdatePrice", async () => {
    const updatePriceParams = {
      price: new anchor.BN(5),
      priceExpo: new anchor.BN(6),
      ema: new anchor.BN(7),
      emaExpo: new anchor.BN(8),
    };

    // Message buffer PDA for the (mockCpiCallerAuth, price account) pair,
    // passed as a remaining account so the CPI can write into it.
    let accumulatorPdaMeta = getAccumulatorPdaMeta(
      mockCpiCallerAuth,
      pythPriceAccountPk
    );
    await mockCpiProg.methods
      .updatePrice(updatePriceParams)
      .accounts({
        pythPriceAccount: pythPriceAccountPk,
        auth: mockCpiCallerAuth,
        accumulatorWhitelist: whitelistPubkey,
        messageBufferProgram: messageBufferProgram.programId,
      })
      .remainingAccounts([accumulatorPdaMeta])
      .preInstructions([
        // The CPI into the message buffer program is compute heavy; raise the limit.
        ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
      ])
      .rpc({
        skipPreflight: true,
      });

    // The mock program's own price account must reflect the new values.
    const pythPriceAccount = await mockCpiProg.account.priceAccount.fetch(
      pythPriceAccountPk
    );
    assert.isTrue(pythPriceAccount.price.eq(updatePriceParams.price));
    assert.isTrue(pythPriceAccount.priceExpo.eq(updatePriceParams.priceExpo));
    assert.isTrue(pythPriceAccount.ema.eq(updatePriceParams.ema));
    assert.isTrue(pythPriceAccount.emaExpo.eq(updatePriceParams.emaExpo));

    const messageBufferAccountData = await getMessageBuffer(
      provider.connection,
      accumulatorPdaKey
    );

    const updatedAccumulatorPriceMessages = parseMessageBuffer(
      messageBufferProgram,
      messageBufferAccountData
    );

    console.log(
      `updatedAccumulatorPriceMessages: ${JSON.stringify(
        updatedAccumulatorPriceMessages,
        null,
        2
      )}`
    );
    // Every message should carry the updated price but keep the original id
    // (the id set at AddPrice time — UpdatePrice does not change it).
    updatedAccumulatorPriceMessages.forEach((pm) => {
      assert.isTrue(pm.id.eq(addPriceParams.id));
      assert.isTrue(pm.price.eq(updatePriceParams.price));
      assert.isTrue(pm.priceExpo.eq(updatePriceParams.priceExpo));
    });
  });
+
  it("Mock CPI Program - CPI Max Test", async () => {
    // with loosen CPI feature activated, max cpi instruction size len is 10KB
    // Each test case is a list of message payload sizes to write in one call;
    // all of these fit under the 10KB limit and should succeed.
    let testCases = [[1024], [1024, 2048], [1024, 2048, 4096]];
    // for (let i = 1; i < 8; i++) {
    for (let i = 0; i < testCases.length; i++) {
      let testCase = testCases[i];
      console.info(`testCase: ${testCase}`);
      const updatePriceParams = {
        price: new anchor.BN(10 * (i + 5)),
        priceExpo: new anchor.BN(10 * (i + 6)),
        ema: new anchor.BN(10 * i + 7),
        emaExpo: new anchor.BN(10 * i + 8),
      };
      console.log(`updatePriceParams: ${JSON.stringify(updatePriceParams)}`);

      let accumulatorPdaMeta = getAccumulatorPdaMeta(
        mockCpiCallerAuth,
        pythPriceAccountPk
      );
      await mockCpiProg.methods
        .cpiMaxTest(updatePriceParams, testCase)
        .accounts({
          pythPriceAccount: pythPriceAccountPk,
          auth: mockCpiCallerAuth,
          accumulatorWhitelist: whitelistPubkey,
          messageBufferProgram: messageBufferProgram.programId,
        })
        .remainingAccounts([accumulatorPdaMeta])
        .preInstructions([
          ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
        ])
        .rpc({
          skipPreflight: true,
        });

      // Price account itself should still be updated as usual.
      const pythPriceAccount = await mockCpiProg.account.priceAccount.fetch(
        pythPriceAccountPk
      );
      assert.isTrue(pythPriceAccount.price.eq(updatePriceParams.price));
      assert.isTrue(pythPriceAccount.priceExpo.eq(updatePriceParams.priceExpo));
      assert.isTrue(pythPriceAccount.ema.eq(updatePriceParams.ema));
      assert.isTrue(pythPriceAccount.emaExpo.eq(updatePriceParams.emaExpo));

      const messageBufferAccountData = await getMessageBuffer(
        provider.connection,
        accumulatorPdaKey
      );

      const messageBufferHeader = deserializeMessageBufferHeader(
        messageBufferProgram,
        messageBufferAccountData
      );

      console.log(`header: ${JSON.stringify(messageBufferHeader)}`);
      // Each message written by the mock program is prefixed with a 7-byte
      // header (i8 schema + i16 version + u32 size).
      let mockCpiMessageHeaderLen = 7;

      // endOffsets are cumulative: each entry is the previous end plus the
      // payload size plus the per-message header length.
      let currentExpectedOffset = 0;
      for (let j = 0; j < testCase.length; j++) {
        currentExpectedOffset += testCase[j];
        currentExpectedOffset += mockCpiMessageHeaderLen;
        console.log(`
          header.endOffsets[${j}]: ${messageBufferHeader.endOffsets[j]}
          currentExpectedOffset: ${currentExpectedOffset}
        `);
        assert.isTrue(
          messageBufferHeader.endOffsets[j] === currentExpectedOffset
        );
      }
    }
  });
+
  it("Mock CPI Program - Exceed CPI Max Test ", async () => {
    // with loosen CPI feature activated, max cpi instruction size len is 10KB
    // 1024 + 2048 + 4096 + 8192 (plus per-message headers) exceeds that
    // limit, so the transaction is expected to fail.
    let testCases = [[1024, 2048, 4096, 8192]];
    // for (let i = 1; i < 8; i++) {
    for (let i = 0; i < testCases.length; i++) {
      let testCase = testCases[i];
      console.info(`testCase: ${testCase}`);
      const updatePriceParams = {
        price: new anchor.BN(10 * i + 5),
        priceExpo: new anchor.BN(10 * (i + 6)),
        ema: new anchor.BN(10 * i + 7),
        emaExpo: new anchor.BN(10 * i + 8),
      };

      let accumulatorPdaMeta = getAccumulatorPdaMeta(
        mockCpiCallerAuth,
        pythPriceAccountPk
      );
      // NOTE(review): only failure is asserted, not the specific error code —
      // skipPreflight means the error surfaces at confirmation time.
      let errorThrown = false;
      try {
        await mockCpiProg.methods
          .cpiMaxTest(updatePriceParams, testCase)
          .accounts({
            pythPriceAccount: pythPriceAccountPk,
            auth: mockCpiCallerAuth,
            accumulatorWhitelist: whitelistPubkey,
            messageBufferProgram: messageBufferProgram.programId,
          })
          .remainingAccounts([accumulatorPdaMeta])
          .preInstructions([
            ComputeBudgetProgram.setComputeUnitLimit({ units: 1_000_000 }),
          ])
          .rpc({
            skipPreflight: true,
          });
      } catch (_err) {
        errorThrown = true;
      }
      assert.ok(errorThrown);
    }
  });
+
  it("Resizes a buffer to a valid larger size", async () => {
    // Snapshot the buffer's data and header so we can verify the resize
    // preserved the existing contents.
    const messageBufferAccountDataBefore = await getMessageBuffer(
      provider.connection,
      accumulatorPdaKey2
    );
    const messageBufferAccountDataLenBefore =
      messageBufferAccountDataBefore.length;

    // check that header is still the same as before
    const messageBufferHeaderBefore = deserializeMessageBufferHeader(
      messageBufferProgram,
      messageBufferAccountDataBefore
    );

    const whitelistAuthorityBalanceBefore =
      await provider.connection.getBalance(whitelistAdmin.publicKey);
    console.log(
      `whitelistAuthorityBalance: ${whitelistAuthorityBalanceBefore}`
    );
    const targetSize = 10 * 1024;
    await messageBufferProgram.methods
      .resizeBuffer(
        mockCpiCallerAuth,
        pythPriceAccountPk2,
        accumulatorPdaBump2,
        targetSize
      )
      .accounts({
        whitelist: whitelistPubkey,
        admin: whitelistAdmin.publicKey,
        systemProgram: anchor.web3.SystemProgram.programId,
      })
      .signers([whitelistAdmin])
      .remainingAccounts([accumulatorPdaMeta2])
      .rpc({ skipPreflight: true });

    // Growing the account requires topping up rent, paid by the admin —
    // so the admin's balance must have decreased.
    const whitelistAuthorityBalanceAfter = await provider.connection.getBalance(
      whitelistAdmin.publicKey
    );
    assert.isTrue(
      whitelistAuthorityBalanceAfter < whitelistAuthorityBalanceBefore
    );

    const messageBufferAccountData = await getMessageBuffer(
      provider.connection,
      accumulatorPdaKey2
    );
    assert.equal(messageBufferAccountData.length, targetSize);

    // check that header is still the same as before
    const messageBufferHeader = deserializeMessageBufferHeader(
      messageBufferProgram,
      messageBufferAccountData
    );
    assert.deepEqual(
      messageBufferHeader.endOffsets,
      messageBufferHeaderBefore.endOffsets
    );
    // The original bytes must be an exact prefix of the resized account.
    assert.deepEqual(
      messageBufferAccountData.subarray(0, messageBufferAccountDataLenBefore),
      messageBufferAccountDataBefore
    );
  });
+
  it("Resizes a buffer to a smaller size", async () => {
    // Shrinking is allowed in a single transaction; verify the account data
    // length matches the requested target size afterwards.
    const targetSize = 4 * 1024;
    await messageBufferProgram.methods
      .resizeBuffer(
        mockCpiCallerAuth,
        pythPriceAccountPk2,
        accumulatorPdaBump2,
        targetSize
      )
      .accounts({
        whitelist: whitelistPubkey,
        admin: whitelistAdmin.publicKey,
        systemProgram: anchor.web3.SystemProgram.programId,
      })
      .signers([whitelistAdmin])
      .remainingAccounts([accumulatorPdaMeta2])
      .rpc({ skipPreflight: true });

    const messageBufferAccountData = await getMessageBuffer(
      provider.connection,
      accumulatorPdaKey2
    );
    assert.equal(messageBufferAccountData.length, targetSize);
  });
+
  it("Fails to resize buffers to invalid sizes", async () => {
    // resizing more than 10KB in one txn, or below header.header_len, should fail
    const testCases = [20 * 1024, 2];
    for (const testCase of testCases) {
      let errorThrown = false;
      try {
        await messageBufferProgram.methods
          .resizeBuffer(
            mockCpiCallerAuth,
            pythPriceAccountPk2,
            accumulatorPdaBump2,
            testCase
          )
          .accounts({
            whitelist: whitelistPubkey,
            admin: whitelistAdmin.publicKey,
            systemProgram: anchor.web3.SystemProgram.programId,
          })
          .signers([whitelistAdmin])
          .remainingAccounts([accumulatorPdaMeta2])
          .rpc({ skipPreflight: true });
      } catch (_err) {
        errorThrown = true;
      }
      assert.ok(errorThrown);
    }
  });
+
+  it("Deletes a buffer", async () => {
+    await messageBufferProgram.methods
+      .deleteBuffer(mockCpiCallerAuth, pythPriceAccountPk2, accumulatorPdaBump2)
+      .accounts({
+        whitelist: whitelistPubkey,
+        admin: whitelistAdmin.publicKey,
+      })
+      .signers([whitelistAdmin])
+      .remainingAccounts([accumulatorPdaMeta2])
+      .rpc({ skipPreflight: true });
+
+    const messageBufferAccountData = await getMessageBuffer(
+      provider.connection,
+      accumulatorPdaKey2
+    );
+
+    if (messageBufferAccountData != null) {
+      assert.fail("messageBufferAccountData should be null");
+    }
+
+    const messageBufferAccounts = await getProgramAccountsForMessageBuffers(
+      provider.connection
+    );
+    assert.equal(messageBufferAccounts.length, 1);
+
+    assert.isFalse(
+      messageBufferAccounts
+        .map((a) => a.pubkey.toString())
+        .includes(accumulatorPdaKey2.toString())
+    );
+  });
+
  it("Can recreate a buffer after it's been deleted", async () => {
    await messageBufferProgram.methods
      .createBuffer(mockCpiCallerAuth, pythPriceAccountPk2, 1000)
      .accounts({
        whitelist: whitelistPubkey,
        admin: whitelistAdmin.publicKey,
        systemProgram: anchor.web3.SystemProgram.programId,
      })
      .signers([whitelistAdmin])
      .remainingAccounts([accumulatorPdaMeta2])
      .rpc({ skipPreflight: true });

    const messageBufferAccountData = await getMessageBuffer(
      provider.connection,
      accumulatorPdaKey2
    );

    // The freshly created account should be funded with exactly the
    // rent-exempt minimum for its size.
    const minimumMessageBufferRent =
      await provider.connection.getMinimumBalanceForRentExemption(
        messageBufferAccountData.length
      );
    const accumulatorPdaBalanceAfter = await provider.connection.getBalance(
      accumulatorPdaKey2
    );
    assert.isTrue(accumulatorPdaBalanceAfter === minimumMessageBufferRent);
    const messageBufferHeader = deserializeMessageBufferHeader(
      messageBufferProgram,
      messageBufferAccountData
    );

    console.log(`header: ${JSON.stringify(messageBufferHeader)}`);
    // The bump is stored right after the 8-byte anchor discriminator, so it
    // should match both the decoded header field and raw byte 8.
    assert.equal(messageBufferHeader.bump, accumulatorPdaBump2);
    assert.equal(messageBufferAccountData[8], accumulatorPdaBump2);

    assert.equal(messageBufferHeader.version, 1);
  });
+});
+
+export const getAccumulatorPdaMeta = (
+  cpiCallerAuth: anchor.web3.PublicKey,
+  pythAccount: anchor.web3.PublicKey
+): AccountMeta => {
+  const accumulatorPdaKey = anchor.web3.PublicKey.findProgramAddressSync(
+    [cpiCallerAuth.toBuffer(), MESSAGE, pythAccount.toBuffer()],
+    messageBufferProgram.programId
+  )[0];
+  return {
+    pubkey: accumulatorPdaKey,
+    isSigner: false,
+    isWritable: true,
+  };
+};
+
+async function getMessageBuffer(
+  connection: anchor.web3.Connection,
+  accountKey: anchor.web3.PublicKey
+): Promise<Buffer | null> {
+  let accountInfo = await connection.getAccountInfo(accountKey);
+  return accountInfo ? accountInfo.data : null;
+}
+
+// Parses MessageBuffer.data into a PriceAccount or PriceOnly object based on the
+// accountType and accountSchema.
+function parseMessageBuffer(
+  messageBufferProgram: Program<MessageBuffer>,
+  accountData: Buffer
+): AccumulatorPriceMessage[] {
+  const msgBufferHeader = deserializeMessageBufferHeader(
+    messageBufferProgram,
+    accountData
+  );
+
+  const accumulatorMessages = [];
+  // let dataBuffer = Buffer.from(messages);
+
+  let dataBuffer = accountData.subarray(
+    msgBufferHeader.headerLen,
+    accountData.length
+  );
+  let start = 0;
+  for (let i = 0; i < msgBufferHeader.endOffsets.length; i++) {
+    const endOffset = msgBufferHeader.endOffsets[i];
+
+    if (endOffset == 0) {
+      console.log(`endOffset = 0. breaking`);
+      break;
+    }
+
+    const messageBytes = dataBuffer.subarray(start, endOffset);
+    const { header: msgHeader, data: msgData } =
+      parseMessageBytes(messageBytes);
+    console.info(`header: ${JSON.stringify(msgHeader, null, 2)}`);
+    if (msgHeader.schema == 0) {
+      accumulatorMessages.push(parseFullPriceMessage(msgData));
+    } else if (msgHeader.schema == 1) {
+      accumulatorMessages.push(parseCompactPriceMessage(msgData));
+    } else {
+      console.warn("unknown msgHeader.schema: " + i);
+      continue;
+    }
+    start = endOffset;
+  }
+  return accumulatorMessages;
+}
+
// Per-message header prepended to every message in the buffer:
// schema (i8), version (i16 BE), and payload size (u32 BE).
type MessageHeader = {
  schema: number;
  version: number;
  size: number;
};

// A single parsed message: its header plus the raw payload bytes.
type MessageBufferType = {
  header: MessageHeader;
  data: Buffer;
};
+
+function deserializeMessageBufferHeader(
+  messageBufferProgram: Program<MessageBuffer>,
+  accountData: Buffer
+): IdlAccounts<MessageBuffer>["messageBuffer"] {
+  return messageBufferProgram.coder.accounts.decode(
+    "MessageBuffer",
+    accountData
+  );
+}
+
+function parseMessageBytes(data: Buffer): MessageBufferType {
+  let offset = 0;
+
+  const schema = data.readInt8(offset);
+  offset += 1;
+
+  const version = data.readInt16BE(offset);
+  offset += 2;
+
+  const size = data.readUInt32BE(offset);
+  offset += 4;
+
+  const messageHeader = {
+    schema,
+    version,
+    size,
+  };
+  let messageData = data.subarray(offset, offset + size);
+  return {
+    header: messageHeader,
+    data: messageData,
+  };
+}
+
+type AccumulatorPriceMessage = FullPriceMessage | CompactPriceMessage;
+
+type FullPriceMessage = {
+  id: anchor.BN;
+  price: anchor.BN;
+  priceExpo: anchor.BN;
+  ema: anchor.BN;
+  emaExpo: anchor.BN;
+};
+function parseFullPriceMessage(data: Uint8Array): FullPriceMessage {
+  return {
+    id: new anchor.BN(data.subarray(0, 8), "be"),
+    price: new anchor.BN(data.subarray(8, 16), "be"),
+    priceExpo: new anchor.BN(data.subarray(16, 24), "be"),
+    ema: new anchor.BN(data.subarray(24, 32), "be"),
+    emaExpo: new anchor.BN(data.subarray(32, 40), "be"),
+  };
+}
+
+type CompactPriceMessage = {
+  id: anchor.BN;
+  price: anchor.BN;
+  priceExpo: anchor.BN;
+};
+
+function parseCompactPriceMessage(data: Uint8Array): CompactPriceMessage {
+  return {
+    id: new anchor.BN(data.subarray(0, 8), "be"),
+    price: new anchor.BN(data.subarray(8, 16), "be"),
+    priceExpo: new anchor.BN(data.subarray(16, 24), "be"),
+  };
+}
+
+// fetch MessageBuffer accounts using `getProgramAccounts` and memcmp filter
+async function getProgramAccountsForMessageBuffers(
+  connection: anchor.web3.Connection
+) {
+  return await connection.getProgramAccounts(messageBufferProgram.programId, {
+    filters: [
+      {
+        memcmp: {
+          offset: 0,
+          bytes: messageBufferDiscriminator,
+        },
+      },
+    ],
+  });
+}

+ 0 - 0
accumulator_updater/tsconfig.json → message_buffer/tsconfig.json


+ 0 - 0
accumulator_updater/yarn.lock → message_buffer/yarn.lock


文件差异内容过多而无法显示
+ 302 - 434
package-lock.json


+ 1 - 0
package.json

@@ -10,6 +10,7 @@
     "price_service/client/js",
     "target_chains/aptos/sdk/js",
     "target_chains/cosmwasm/sdk/js",
+    "target_chains/cosmwasm/tools",
     "target_chains/ethereum/contracts",
     "target_chains/ethereum/sdk/js",
     "target_chains/ethereum/sdk/solidity",

+ 5 - 2
price_pusher/README.md

@@ -46,9 +46,12 @@ You can get the list of available price feeds from
 To run the price pusher, please run the following commands, replacing the command line arguments as necessary:
 
 ```sh
-# Only run it the first time to build the code
+# Please run the two following commands once from the root of the repo to build the code.
 npm install
-npx lerna run build
+npx lerna run build --scope @pythnetwork/price-pusher --include-dependencies
+
+# Navigate to the price_pusher folder
+cd price_pusher
 
 # For EVM
 npm run start -- evm --endpoint wss://example-rpc.com \

+ 1 - 1
price_pusher/config.evm.testnet.sample.json

@@ -1,6 +1,6 @@
 {
   "endpoint": "https://endpoints.omniatech.io/v1/fantom/testnet/public",
-  "pyth-contract-address": "0xff1a0f4744e8582DF1aE09D5611b887B6a12925CZ",
+  "pyth-contract-address": "0xff1a0f4744e8582DF1aE09D5611b887B6a12925C",
   "price-service-endpoint": "https://xc-testnet.pyth.network",
   "mnemonic-file": "./mnemonic",
   "price-config-file": "./price-config.testnet.sample.yaml"

+ 2 - 1
price_pusher/config.injective.testnet.sample.json

@@ -3,5 +3,6 @@
   "pyth-contract-address": "inj1z60tg0tekdzcasenhuuwq3htjcd5slmgf7gpez",
   "price-service-endpoint": "https://xc-testnet.pyth.network",
   "mnemonic-file": "./mnemonic",
-  "price-config-file": "./price-config.testnet.sample.yaml"
+  "price-config-file": "./price-config.testnet.sample.yaml",
+  "network": "testnet"
 }

+ 3 - 3
price_pusher/docker-compose.mainnet.sample.yaml

@@ -52,9 +52,9 @@ services:
     build:
       context: .
     # Uncomment this line (and comment out the above lines) to use a prebuilt image. Replace <version>
-    # with the latest released image of the EVM price pusher from this repo release page:
-    # https://github.com/pyth-network/pyth-js/releases
-    # image: public.ecr.aws/pyth-network/xc-evm-price-pusher:v<version>
+    # with the latest released image of the price pusher from this repo release page:
+    # https://github.com/pyth-network/pyth-crosschain/releases
+    # image: public.ecr.aws/pyth-network/xc-price-pusher:v<version>
     restart: always
     command:
       - "--"

+ 15 - 13
price_pusher/docker-compose.testnet.sample.yaml

@@ -52,29 +52,31 @@ services:
     build:
       context: .
     # Uncomment this line (and comment out the above lines) to use a prebuilt image. Replace <version>
-    # with the latest released image of the EVM price pusher from this repo release page:
-    # https://github.com/pyth-network/pyth-js/releases
-    # image: public.ecr.aws/pyth-network/xc-evm-price-pusher:v<version>
+    # with the latest released image of the price pusher from this repo release page:
+    # https://github.com/pyth-network/pyth-crosschain/releases
+    # image: public.ecr.aws/pyth-network/xc-price-pusher:v<version>
     restart: always
     command:
       - "--"
-      - "injective"
-      # you can choose to provide all the options here or a path to the config file
-      # we are providing a path to the config file
-      - "--config"
-      - "/command_config"
+      - "evm"
+      - "--endpoint"
+      # Replace this with RPC endpoint URL for the EVM network.
+      - "https://endpoints.omniatech.io/v1/fantom/testnet/public"
+      - "--mnemonic-file"
+      - "/mnemonic"
+      - "--pyth-contract-address"
+      - "0xff1a0f4744e8582DF1aE09D5611b887B6a12925C"
+      - "--price-service-endpoint"
+      - "http://price-service:4200"
+      - "--price-config-file"
+      - "/price_config"
     configs:
-      - command_config
       - mnemonic
       - price_config
     depends_on:
       price-service:
         condition: service_healthy
 configs:
-  command_config:
-    # Replace this with the path to the configuration file. You need to update the paths defined in
-    # the config file
-    file: ./config.injective.testnet.sample.json
   mnemonic:
     file: ./mnemonic # Replace this with the path to the mnemonic file
   price_config:

+ 2 - 2
price_pusher/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@pythnetwork/price-pusher",
-  "version": "4.1.1",
+  "version": "5.0.0",
   "description": "Pyth Price Pusher",
   "homepage": "https://pyth.network",
   "main": "lib/index.js",
@@ -51,7 +51,7 @@
     "typescript": "^4.6.3"
   },
   "dependencies": {
-    "@injectivelabs/sdk-ts": "^1.0.484",
+    "@injectivelabs/sdk-ts": "1.10.72",
     "@pythnetwork/price-service-client": "*",
     "@pythnetwork/pyth-sdk-solidity": "*",
     "@truffle/hdwallet-provider": "^2.1.3",

+ 15 - 1
price_pusher/src/injective/command.ts

@@ -6,6 +6,7 @@ import { InjectivePriceListener, InjectivePricePusher } from "./injective";
 import { PythPriceListener } from "../pyth-price-listener";
 import { Controller } from "../controller";
 import { Options } from "yargs";
+import { getNetworkInfo } from "@injectivelabs/networks";
 
 export default {
   command: "injective",
@@ -19,6 +20,11 @@ export default {
       type: "string",
       required: true,
     } as Options,
+    network: {
+      description: "testnet or mainnet",
+      type: "string",
+      required: true,
+    } as Options,
     ...options.priceConfigFile,
     ...options.priceServiceEndpoint,
     ...options.mnemonicFile,
@@ -36,8 +42,13 @@ export default {
       pythContractAddress,
       pushingFrequency,
       pollingFrequency,
+      network,
     } = argv;
 
+    if (network !== "testnet" && network !== "mainnet") {
+      throw new Error("Please specify network. One of [testnet, mainnet]");
+    }
+
     const priceConfigs = readPriceConfigFile(priceConfigFile);
     const priceServiceConnection = new PriceServiceConnection(
       priceServiceEndpoint,
@@ -73,7 +84,10 @@ export default {
       priceServiceConnection,
       pythContractAddress,
       grpcEndpoint,
-      mnemonic
+      mnemonic,
+      {
+        chainId: getNetworkInfo(network).chainId,
+      }
     );
 
     const controller = new Controller(

+ 4 - 5
price_pusher/src/injective/injective.ts

@@ -20,7 +20,7 @@ import {
   createTransactionFromMsg,
 } from "@injectivelabs/sdk-ts";
 
-import { DEFAULT_GAS_PRICE } from "@injectivelabs/utils";
+const DEFAULT_GAS_PRICE = 500000000;
 
 type PriceQueryResponse = {
   price_feed: {
@@ -63,7 +63,7 @@ export class InjectivePriceListener extends ChainPriceListener {
         Buffer.from(`{"price_feed":{"id":"${priceId}"}}`).toString("base64")
       );
 
-      const json = Buffer.from(data as string, "base64").toString();
+      const json = Buffer.from(data).toString();
       priceQueryResponse = JSON.parse(json);
     } catch (e) {
       console.error(`Polling on-chain price for ${priceId} failed. Error:`);
@@ -163,8 +163,7 @@ export class InjectivePricePusher implements IPricePusher {
     const sig = await this.wallet.sign(Buffer.from(signBytes));
 
     /** Append Signatures */
-    txRaw.setSignaturesList([sig]);
-
+    txRaw.signatures = [sig];
     const txResponse = await txService.broadcast(txRaw);
 
     return txResponse;
@@ -215,7 +214,7 @@ export class InjectivePricePusher implements IPricePusher {
         ).toString("base64")
       );
 
-      const json = Buffer.from(data as string, "base64").toString();
+      const json = Buffer.from(data).toString();
       updateFeeQueryResponse = JSON.parse(json);
     } catch (e) {
       console.error("Error fetching update fee");

+ 3 - 3
price_service/client/js/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@pythnetwork/price-service-client",
-  "version": "1.4.0",
+  "version": "1.4.1",
   "description": "Pyth price service client",
   "author": {
     "name": "Pyth Data Association"
@@ -50,8 +50,8 @@
   "dependencies": {
     "@pythnetwork/price-service-sdk": "*",
     "@types/ws": "^8.5.3",
-    "axios": "^1.2.5",
-    "axios-retry": "^3.4.0",
+    "axios": "=1.1.0",
+    "axios-retry": "~3.3.0",
     "isomorphic-ws": "^4.0.1",
     "ts-log": "^2.2.4",
     "ws": "^8.6.0"

+ 2 - 0
price_service/server/docker-compose.mainnet.yaml

@@ -2,6 +2,7 @@ services:
   spy:
     # Find latest Guardian images in https://github.com/wormhole-foundation/wormhole/pkgs/container/guardiand
     image: ghcr.io/wormhole-foundation/guardiand:v2.14.8.1
+    restart: on-failure
     command:
       - "spy"
       - "--nodeKey"
@@ -17,6 +18,7 @@ services:
   price-service:
     # Find latest price service images https://gallery.ecr.aws/pyth-network/xc-server
     image: public.ecr.aws/pyth-network/xc-server:v3.0.0
+    restart: on-failure
     # Or alternatively use a locally built image
     # image: pyth_price_server
     environment:

+ 2 - 0
price_service/server/docker-compose.testnet.yaml

@@ -2,6 +2,7 @@ services:
   spy:
     # Find latest Guardian images in https://github.com/wormhole-foundation/wormhole/pkgs/container/guardiand
     image: ghcr.io/wormhole-foundation/guardiand:v2.14.8.1
+    restart: on-failure
     command:
       - "spy"
       - "--nodeKey"
@@ -17,6 +18,7 @@ services:
   price-service:
     # Find latest price service images https://gallery.ecr.aws/pyth-network/xc-server
     image: public.ecr.aws/pyth-network/xc-server:v3.0.0
+    restart: on-failure
     # Or alternatively use a locally built image
     # image: pyth_price_server
     environment:

+ 1 - 1
price_service/server/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@pythnetwork/price-service-server",
-  "version": "3.0.0",
+  "version": "3.0.1",
   "description": "Webservice for retrieving prices from the Pyth oracle.",
   "private": "true",
   "main": "index.js",

+ 52 - 0
price_service/server/src/__tests__/rest.test.ts

@@ -152,6 +152,23 @@ describe("Latest Price Feed Endpoint", () => {
     });
   });
 
+  test("When called with a target_chain, returns correct price feed with binary vaa encoded properly", async () => {
+    const ids = [expandTo64Len("abcd"), expandTo64Len("3456")];
+    const resp = await request(app)
+      .get("/api/latest_price_feeds")
+      .query({ ids, target_chain: "evm" });
+    expect(resp.status).toBe(StatusCodes.OK);
+    expect(resp.body.length).toBe(2);
+    expect(resp.body).toContainEqual({
+      ...priceInfoMap.get(ids[0])!.priceFeed.toJson(),
+      vaa: "0x" + priceInfoMap.get(ids[0])!.vaa.toString("hex"),
+    });
+    expect(resp.body).toContainEqual({
+      ...priceInfoMap.get(ids[1])!.priceFeed.toJson(),
+      vaa: "0x" + priceInfoMap.get(ids[1])!.vaa.toString("hex"),
+    });
+  });
+
   test("When called with some non-existent ids within ids, returns error mentioning non-existent ids", async () => {
     const ids = [
       expandTo64Len("ab01"),
@@ -186,6 +203,21 @@ describe("Latest Vaa Bytes Endpoint", () => {
     );
   });
 
+  test("When called with target_chain, returns vaa bytes encoded correctly", async () => {
+    const ids = [
+      expandTo64Len("abcd"),
+      expandTo64Len("ef01"),
+      expandTo64Len("3456"),
+    ];
+    const resp = await request(app)
+      .get("/api/latest_vaas")
+      .query({ ids, target_chain: "evm" });
+    expect(resp.status).toBe(StatusCodes.OK);
+    expect(resp.body.length).toBe(2);
+    expect(resp.body).toContain("0xa1b2c3d4");
+    expect(resp.body).toContain("0xbad01bad");
+  });
+
   test("When called with valid ids with leading 0x, returns vaa bytes as array, merged if necessary", async () => {
     const ids = [
       expandTo64Len("abcd"),
@@ -271,6 +303,26 @@ describe("Get VAA endpoint and Get VAA CCIP", () => {
     });
   });
 
+  test("When called with target_chain, encodes resulting VAA in the right format", async () => {
+    const id = expandTo64Len("abcd");
+    vaasCache.set(id, 10, "abcd10");
+    vaasCache.set(id, 20, "abcd20");
+    vaasCache.set(id, 30, "abcd30");
+
+    const resp = await request(app)
+      .get("/api/get_vaa")
+      .query({
+        id: "0x" + id,
+        publish_time: 16,
+        target_chain: "evm",
+      });
+    expect(resp.status).toBe(StatusCodes.OK);
+    expect(resp.body).toEqual<VaaConfig>({
+      vaa: "0x" + Buffer.from("abcd20", "base64").toString("hex"),
+      publishTime: 20,
+    });
+  });
+
   test("When called with invalid id returns price id found", async () => {
     // dead does not exist in the ids
     const id = expandTo64Len("dead");

+ 47 - 0
price_service/server/src/encoding.ts

@@ -0,0 +1,47 @@
+// Utilities for encoding VAAs for specific target chains
+
+// List of all possible target chains. Note that "default" is an option because we need at least one chain
+// with a base64 encoding (which is the old default behavior of all API methods).
+export type TargetChain = "evm" | "cosmos" | "aptos" | "default";
+export const validTargetChains = ["evm", "cosmos", "aptos", "default"];
+export const defaultTargetChain: TargetChain = "default";
+
+// Possible encodings of the binary VAA data as a string.
+// "0x" is the same as "hex" with a leading "0x" prepended to the hex string.
+export type VaaEncoding = "base64" | "hex" | "0x";
+export const defaultVaaEncoding: VaaEncoding = "base64";
+export const chainToEncoding: Record<TargetChain, VaaEncoding> = {
+  evm: "0x",
+  cosmos: "base64",
+  // TODO: I think aptos actually wants a number[] for this data... need to decide how to
+  // handle that case.
+  aptos: "base64",
+  default: "base64",
+};
+
+// Given a VAA represented as either a string in base64 or a Buffer, encode it as a string
+// appropriate for the given targetChain.
+export function encodeVaaForChain(
+  vaa: string | Buffer,
+  targetChain: TargetChain
+): string {
+  const encoding = chainToEncoding[targetChain];
+
+  let vaaBuffer: Buffer;
+  if (typeof vaa === "string") {
+    if (encoding === defaultVaaEncoding) {
+      return vaa;
+    } else {
+      vaaBuffer = Buffer.from(vaa, defaultVaaEncoding as BufferEncoding);
+    }
+  } else {
+    vaaBuffer = vaa;
+  }
+
+  switch (encoding) {
+    case "0x":
+      return "0x" + vaaBuffer.toString("hex");
+    default:
+      return vaaBuffer.toString(encoding);
+  }
+}

+ 10 - 0
price_service/server/src/helpers.ts

@@ -33,3 +33,13 @@ export function removeLeading0x(s: string): string {
 
   return s;
 }
+
+// Helper for treating T | undefined as an optional value. This lets you pick a
+// default if value is undefined.
+export function getOrElse<T>(value: T | undefined, defaultValue: T): T {
+  if (value === undefined) {
+    return defaultValue;
+  } else {
+    return value;
+  }
+}

+ 24 - 10
price_service/server/src/listen.ts

@@ -14,6 +14,7 @@ import {
   getBatchSummary,
   parseBatchPriceAttestation,
   priceAttestationToPriceFeed,
+  PriceAttestation,
 } from "@pythnetwork/wormhole-attester-sdk";
 import { HexString, PriceFeed } from "@pythnetwork/price-service-sdk";
 import LRUCache from "lru-cache";
@@ -31,6 +32,24 @@ export type PriceInfo = {
   priceServiceReceiveTime: number;
 };
 
+export function createPriceInfo(
+  priceAttestation: PriceAttestation,
+  vaa: Buffer,
+  sequence: bigint,
+  emitterChain: number
+): PriceInfo {
+  const priceFeed = priceAttestationToPriceFeed(priceAttestation);
+  return {
+    seqNum: Number(sequence),
+    vaa,
+    publishTime: priceAttestation.publishTime,
+    attestationTime: priceAttestation.attestationTime,
+    priceFeed,
+    emitterChainId: emitterChain,
+    priceServiceReceiveTime: Math.floor(new Date().getTime() / 1000),
+  };
+}
+
 export interface PriceStore {
   getPriceIds(): Set<HexString>;
   getLatestPriceInfo(priceFeedId: HexString): PriceInfo | undefined;
@@ -324,17 +343,12 @@ export class Listener implements PriceStore {
     for (const priceAttestation of batchAttestation.priceAttestations) {
       const key = priceAttestation.priceId;
 
-      const priceFeed = priceAttestationToPriceFeed(priceAttestation);
-      const priceInfo = {
-        seqNum: Number(parsedVaa.sequence),
+      const priceInfo = createPriceInfo(
+        priceAttestation,
         vaa,
-        publishTime: priceAttestation.publishTime,
-        attestationTime: priceAttestation.attestationTime,
-        priceFeed,
-        emitterChainId: parsedVaa.emitterChain,
-        priceServiceReceiveTime: Math.floor(new Date().getTime() / 1000),
-      };
-
+        parsedVaa.sequence,
+        parsedVaa.emitterChain
+      );
       const cachedPriceInfo = this.priceFeedVaaMap.get(key);
 
       if (this.isNewPriceInfo(cachedPriceInfo, priceInfo)) {

+ 175 - 26
price_service/server/src/rest.ts

@@ -6,16 +6,34 @@ import { Server } from "http";
 import { StatusCodes } from "http-status-codes";
 import morgan from "morgan";
 import fetch from "node-fetch";
+import {
+  parseBatchPriceAttestation,
+  priceAttestationToPriceFeed,
+} from "@pythnetwork/wormhole-attester-sdk";
 import { removeLeading0x, TimestampInSec } from "./helpers";
-import { PriceStore, VaaConfig } from "./listen";
+import { createPriceInfo, PriceInfo, PriceStore, VaaConfig } from "./listen";
 import { logger } from "./logging";
 import { PromClient } from "./promClient";
 import { retry } from "ts-retry-promise";
+import { parseVaa } from "@certusone/wormhole-sdk";
+import { getOrElse } from "./helpers";
+import {
+  TargetChain,
+  validTargetChains,
+  defaultTargetChain,
+  VaaEncoding,
+  encodeVaaForChain,
+} from "./encoding";
 
 const MORGAN_LOG_FORMAT =
   ':remote-addr - :remote-user ":method :url HTTP/:http-version"' +
   ' :status :res[content-length] :response-time ms ":referrer" ":user-agent"';
 
+// GET argument string to represent the options for target_chain
+export const targetChainArgString = `target_chain=<${validTargetChains.join(
+  "|"
+)}>`;
+
 export class RestException extends Error {
   statusCode: number;
   message: string;
@@ -71,7 +89,10 @@ export class RestAPI {
     this.promClient = promClient;
   }
 
-  async getVaaWithDbLookup(priceFeedId: string, publishTime: TimestampInSec) {
+  async getVaaWithDbLookup(
+    priceFeedId: string,
+    publishTime: TimestampInSec
+  ): Promise<VaaConfig | undefined> {
     // Try to fetch the vaa from the local cache
     let vaa = this.priceFeedVaaInfo.getVaa(priceFeedId, publishTime);
 
@@ -104,6 +125,56 @@ export class RestAPI {
     return vaa;
   }
 
+  vaaToPriceInfo(priceFeedId: string, vaa: Buffer): PriceInfo | undefined {
+    const parsedVaa = parseVaa(vaa);
+
+    let batchAttestation;
+
+    try {
+      batchAttestation = parseBatchPriceAttestation(
+        Buffer.from(parsedVaa.payload)
+      );
+    } catch (e: any) {
+      logger.error(e, e.stack);
+      logger.error("Parsing historical VAA failed: %o", parsedVaa);
+      return undefined;
+    }
+
+    for (const priceAttestation of batchAttestation.priceAttestations) {
+      if (priceAttestation.priceId === priceFeedId) {
+        return createPriceInfo(
+          priceAttestation,
+          vaa,
+          parsedVaa.sequence,
+          parsedVaa.emitterChain
+        );
+      }
+    }
+
+    return undefined;
+  }
+
+  priceInfoToJson(
+    priceInfo: PriceInfo,
+    verbose: boolean,
+    targetChain: TargetChain | undefined
+  ): object {
+    return {
+      ...priceInfo.priceFeed.toJson(),
+      ...(verbose && {
+        metadata: {
+          emitter_chain: priceInfo.emitterChainId,
+          attestation_time: priceInfo.attestationTime,
+          sequence_number: priceInfo.seqNum,
+          price_service_receive_time: priceInfo.priceServiceReceiveTime,
+        },
+      }),
+      ...(targetChain !== undefined && {
+        vaa: encodeVaaForChain(priceInfo.vaa, targetChain),
+      }),
+    };
+  }
+
   // Run this function without blocking (`await`) if you want to run it async.
   async createApp() {
     const app = express();
@@ -124,6 +195,9 @@ export class RestAPI {
         ids: Joi.array()
           .items(Joi.string().regex(/^(0x)?[a-f0-9]{64}$/))
           .required(),
+        target_chain: Joi.string()
+          .valid(...validTargetChains)
+          .optional(),
       }).required(),
     };
     app.get(
@@ -131,6 +205,10 @@ export class RestAPI {
       validate(latestVaasInputSchema),
       (req: Request, res: Response) => {
         const priceIds = (req.query.ids as string[]).map(removeLeading0x);
+        const targetChain = getOrElse(
+          req.query.target_chain as TargetChain | undefined,
+          defaultTargetChain
+        );
 
         // Multiple price ids might share same vaa, we use sequence number as
         // key of a vaa and deduplicate using a map of seqnum to vaa bytes.
@@ -154,14 +232,14 @@ export class RestAPI {
         }
 
         const jsonResponse = Array.from(vaaMap.values(), (vaa) =>
-          vaa.toString("base64")
+          encodeVaaForChain(vaa, targetChain)
         );
 
         res.json(jsonResponse);
       }
     );
     endpoints.push(
-      "api/latest_vaas?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&.."
+      `api/latest_vaas?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&..&${targetChainArgString}`
     );
 
     const getVaaInputSchema: schema = {
@@ -170,6 +248,9 @@ export class RestAPI {
           .regex(/^(0x)?[a-f0-9]{64}$/)
           .required(),
         publish_time: Joi.number().required(),
+        target_chain: Joi.string()
+          .valid(...validTargetChains)
+          .optional(),
       }).required(),
     };
 
@@ -179,6 +260,10 @@ export class RestAPI {
       asyncWrapper(async (req: Request, res: Response) => {
         const priceFeedId = removeLeading0x(req.query.id as string);
         const publishTime = Number(req.query.publish_time as string);
+        const targetChain = getOrElse(
+          req.query.target_chain as TargetChain | undefined,
+          defaultTargetChain
+        );
 
         if (
           this.priceFeedVaaInfo.getLatestPriceInfo(priceFeedId) === undefined
@@ -186,18 +271,21 @@ export class RestAPI {
           throw RestException.PriceFeedIdNotFound([priceFeedId]);
         }
 
-        const vaa = await this.getVaaWithDbLookup(priceFeedId, publishTime);
-
-        if (vaa === undefined) {
+        const vaaConfig = await this.getVaaWithDbLookup(
+          priceFeedId,
+          publishTime
+        );
+        if (vaaConfig === undefined) {
           throw RestException.VaaNotFound();
         } else {
-          res.json(vaa);
+          vaaConfig.vaa = encodeVaaForChain(vaaConfig.vaa, targetChain);
+          res.json(vaaConfig);
         }
       })
     );
 
     endpoints.push(
-      "api/get_vaa?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>"
+      `api/get_vaa?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>&${targetChainArgString}`
     );
 
     const getVaaCcipInputSchema: schema = {
@@ -259,6 +347,9 @@ export class RestAPI {
           .required(),
         verbose: Joi.boolean(),
         binary: Joi.boolean(),
+        target_chain: Joi.string()
+          .valid(...validTargetChains)
+          .optional(),
       }).required(),
     };
     app.get(
@@ -268,8 +359,12 @@ export class RestAPI {
         const priceIds = (req.query.ids as string[]).map(removeLeading0x);
         // verbose is optional, default to false
         const verbose = req.query.verbose === "true";
-        // binary is optional, default to false
-        const binary = req.query.binary === "true";
+        // The binary and target_chain are somewhat redundant. Binary still exists for backward compatibility reasons.
+        // No VAA will be returned if both arguments are omitted. binary=true is the same as target_chain=default
+        let targetChain = req.query.target_chain as TargetChain | undefined;
+        if (targetChain === undefined && req.query.binary === "true") {
+          targetChain = defaultTargetChain;
+        }
 
         const responseJson = [];
 
@@ -283,21 +378,9 @@ export class RestAPI {
             continue;
           }
 
-          responseJson.push({
-            ...latestPriceInfo.priceFeed.toJson(),
-            ...(verbose && {
-              metadata: {
-                emitter_chain: latestPriceInfo.emitterChainId,
-                attestation_time: latestPriceInfo.attestationTime,
-                sequence_number: latestPriceInfo.seqNum,
-                price_service_receive_time:
-                  latestPriceInfo.priceServiceReceiveTime,
-              },
-            }),
-            ...(binary && {
-              vaa: latestPriceInfo.vaa.toString("base64"),
-            }),
-          });
+          responseJson.push(
+            this.priceInfoToJson(latestPriceInfo, verbose, targetChain)
+          );
         }
 
         if (notFoundIds.length > 0) {
@@ -316,6 +399,72 @@ export class RestAPI {
     endpoints.push(
       "api/latest_price_feeds?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&..&verbose=true&binary=true"
     );
+    endpoints.push(
+      `api/latest_price_feeds?ids[]=<price_feed_id>&ids[]=<price_feed_id_2>&..&verbose=true&${targetChainArgString}`
+    );
+
+    const getPriceFeedInputSchema: schema = {
+      query: Joi.object({
+        id: Joi.string()
+          .regex(/^(0x)?[a-f0-9]{64}$/)
+          .required(),
+        publish_time: Joi.number().required(),
+        verbose: Joi.boolean(),
+        binary: Joi.boolean(),
+        target_chain: Joi.string()
+          .valid(...validTargetChains)
+          .optional(),
+      }).required(),
+    };
+
+    app.get(
+      "/api/get_price_feed",
+      validate(getPriceFeedInputSchema),
+      asyncWrapper(async (req: Request, res: Response) => {
+        const priceFeedId = removeLeading0x(req.query.id as string);
+        const publishTime = Number(req.query.publish_time as string);
+        // verbose is optional, default to false
+        const verbose = req.query.verbose === "true";
+        // The binary and target_chain are somewhat redundant. Binary still exists for backward compatibility reasons.
+        // No VAA will be returned if both arguments are omitted. binary=true is the same as target_chain=default
+        let targetChain = req.query.target_chain as TargetChain | undefined;
+        if (targetChain === undefined && req.query.binary === "true") {
+          targetChain = defaultTargetChain;
+        }
+
+        if (
+          this.priceFeedVaaInfo.getLatestPriceInfo(priceFeedId) === undefined
+        ) {
+          throw RestException.PriceFeedIdNotFound([priceFeedId]);
+        }
+
+        const vaa = await this.getVaaWithDbLookup(priceFeedId, publishTime);
+        if (vaa === undefined) {
+          throw RestException.VaaNotFound();
+        }
+
+        const priceInfo = this.vaaToPriceInfo(
+          priceFeedId,
+          Buffer.from(vaa.vaa, "base64")
+        );
+
+        if (priceInfo === undefined) {
+          throw RestException.VaaNotFound();
+        } else {
+          res.json(this.priceInfoToJson(priceInfo, verbose, targetChain));
+        }
+      })
+    );
+
+    endpoints.push(
+      "api/get_price_feed?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>"
+    );
+    endpoints.push(
+      "api/get_price_feed?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>&verbose=true"
+    );
+    endpoints.push(
+      "api/get_price_feed?id=<price_feed_id>&publish_time=<publish_time_in_unix_timestamp>&binary=true"
+    );
 
     app.get("/api/price_feed_ids", (req: Request, res: Response) => {
       const availableIds = this.priceFeedVaaInfo.getPriceIds();

+ 1 - 1
target_chains/aptos/sdk/js/package.json

@@ -1,6 +1,6 @@
 {
   "name": "@pythnetwork/pyth-aptos-js",
-  "version": "1.0.1",
+  "version": "1.0.2",
   "description": "Pyth Network Aptos Utilities",
   "homepage": "https://pyth.network",
   "author": {

+ 3 - 0
target_chains/cosmwasm/.gitignore

@@ -1,2 +1,5 @@
 artifacts/
+lib
+
 !bin
+!wormhole-stub/artifacts

文件差异内容过多而无法显示
+ 234 - 273
target_chains/cosmwasm/Cargo.lock


+ 1 - 1
target_chains/cosmwasm/Cargo.toml

@@ -1,5 +1,5 @@
 [workspace]
-members = ["contracts/pyth", "pyth-sdk-cw"]
+members = ["contracts/pyth", "sdk/rust"]
 exclude = ["examples/cw-contract"]
 
 [profile.release]

+ 8 - 0
target_chains/cosmwasm/README.md

@@ -49,6 +49,14 @@ Deployed Code ID:  11
 
 ### Instantiating new contract
 
+Pyth contract needs the Wormhole core contract deployed on the same chain. Some chains won't have it deployed.
+In that case, you have to deploy wormhole contract yourself. You can build the contract using the scripts given in `wormhole-stub`.
+Run and you will get the compiled contract code in `wormhole-stub/artifacts`
+
+```sh
+./build.sh
+```
+
 This command will upload the code and instantiates a new Pyth contract with the given code id:
 
 ```sh

部分文件因为文件数量过多而无法显示