Преглед на файлове

node: Add Transfer Verifier mechanism (#4169)

Adds a new package and command-line tool for Transfer Verification. This is a process of validating message publications from the core contracts. When a message is emitted, Transfer Verification will examine the corresponding receipt or other logs to ensure that funds were sent into the token bridge whenever this kind of message is emitted from the core bridge. 
This is a defense-in-depth mechanism to guard against a scenario where an attacker finds a way to spoof message publications.
John Saigle преди 10 месеца
родител
ревизия
2615d55a5e

+ 4 - 0
.golangci.yml

@@ -28,3 +28,7 @@ issues:
       text: "^func.*supervisor.*(waitSettle|waitSettleError).*$"
       linters:
         - unused
+    # This file contains hard-coded Sui core contract addresses that are marked as hardcoded credentials.
+    - path: pkg/txverifier/sui_test.go
+
+      text: "G101: Potential hardcoded credentials"

+ 34 - 0
Tiltfile

@@ -604,6 +604,11 @@ if evm2:
     )
 
 
+# Note that ci_tests requires other resources in order to build properly:
+# - eth-devnet  -- required by: accountant_tests, ntt_accountant_tests, tx-verifier
+# - eth-devnet2 -- required by: accountant_tests, ntt_accountant_tests
+# - wormchain   -- required by: accountant_tests, ntt_accountant_tests
+# - solana      -- required by: spydk-ci-tests
 if ci_tests:
     docker_build(
         ref = "sdk-test-image",
@@ -635,6 +640,16 @@ if ci_tests:
             sync("./testing", "/app/testing"),
         ],
     )
+    docker_build(
+        ref = "tx-verifier-monitor", 
+        context = "./devnet/tx-verifier-monitor/",
+        dockerfile = "./devnet/tx-verifier-monitor/Dockerfile"
+    )
+    docker_build(
+        ref = "tx-verifier-test", 
+        context = "./devnet/tx-verifier-monitor/",
+        dockerfile = "./devnet/tx-verifier-monitor/Dockerfile.cast"
+    )
 
     k8s_yaml_with_ns(
         encode_yaml_stream(
@@ -644,6 +659,11 @@ if ci_tests:
                     "BOOTSTRAP_PEERS", str(ccqBootstrapPeers)),
                     "MAX_WORKERS", max_workers))
     )
+    
+    # transfer-verifier -- daemon and log monitoring
+    k8s_yaml_with_ns("devnet/tx-verifier.yaml")
+
+    k8s_yaml_with_ns("devnet/tx-verifier-test.yaml")
 
     # separate resources to parallelize docker builds
     k8s_resource(
@@ -676,6 +696,20 @@ if ci_tests:
         trigger_mode = trigger_mode,
         resource_deps = [], # testing/querysdk.sh handles waiting for query-server, not having deps gets the build earlier
     )
+    # launches tx-verifier binary and sets up monitoring script
+    k8s_resource(
+        "tx-verifier-with-monitor",
+        resource_deps = ["eth-devnet"],
+        labels = ["tx-verifier"],
+        trigger_mode = trigger_mode,
+    )
+    # triggers the integration tests that will be detected by the monitor
+    k8s_resource(
+        "tx-verifier-test",
+        resource_deps = ["eth-devnet", "tx-verifier-with-monitor"],
+        labels = ["tx-verifier"],
+        trigger_mode = trigger_mode,
+    )
 
 if terra_classic:
     docker_build(

+ 4 - 1
devnet/eth-devnet.yaml

@@ -33,13 +33,16 @@ spec:
       containers:
         - name: anvil
           image: eth-node
+          # This command generates additional accounts compared to the default of 10. The purpose is to use dedicated
+          # accounts for different aspects of the test suite. When adding new integration tests, consider increasing
+          # the number of accounts below and using a fresh key for the new tests.
           command:
             - anvil
             - --silent
             - --mnemonic=myth like bonus scare over problem client lizard pioneer submit female collect
             - --block-time=1
             - --host=0.0.0.0
-            - --accounts=13
+            - --accounts=14
             - --chain-id=1337
           ports:
             - containerPort: 8545

+ 4 - 1
devnet/eth-devnet2.yaml

@@ -34,13 +34,16 @@ spec:
       containers:
         - name: anvil
           image: eth-node
+          # This command generates additional accounts compared to the default of 10. The purpose is to use dedicated
+          # accounts for different aspects of the test suite. When adding new integration tests, consider increasing
+          # the number of accounts below and using a fresh key for the new tests.
           command:
             - anvil
             - --silent
             - --mnemonic=myth like bonus scare over problem client lizard pioneer submit female collect
             - --block-time=1
             - --host=0.0.0.0
-            - --accounts=13
+            - --accounts=14
             - --chain-id=1397
           ports:
             - containerPort: 8545

+ 10 - 0
devnet/tx-verifier-monitor/Dockerfile

@@ -0,0 +1,10 @@
+# There's nothing special about this version, it is simply the `latest` as of
+# the creation date of this file.
+FROM alpine:3.20.3@sha256:1e42bbe2508154c9126d48c2b8a75420c3544343bf86fd041fb7527e017a4b4a
+
+RUN apk add --no-cache inotify-tools
+
+COPY monitor.sh /monitor.sh
+RUN chmod +x /monitor.sh
+
+CMD ["/monitor.sh"]

+ 13 - 0
devnet/tx-verifier-monitor/Dockerfile.cast

@@ -0,0 +1,13 @@
+# These versions are pinned to match the Dockerfile in the `ethereum/`
+# directory. Otherwise, there is nothing special about them and they can be
+# updated alongside the other Dockerfile.
+FROM --platform=linux/amd64 ghcr.io/foundry-rs/foundry:nightly-55bf41564f605cae3ca4c95ac5d468b1f14447f9@sha256:8c15d322da81a6deaf827222e173f3f81c653136a3518d5eeb41250a0f2e17ea as foundry
+# node is required to install Foundry
+FROM node:19.6.1-slim@sha256:a1ba21bf0c92931d02a8416f0a54daad66cb36a85d2b73af9d73b044f5f57cfc
+
+COPY --from=foundry /usr/local/bin/cast /bin/cast
+
+COPY transfer-verifier-test.sh /transfer-verifier-test.sh
+RUN chmod +x /transfer-verifier-test.sh
+
+CMD ["/transfer-verifier-test.sh"]

+ 64 - 0
devnet/tx-verifier-monitor/README.md

@@ -0,0 +1,64 @@
+# Transfer Verifier -- Integration Tests
+
+## EVM Integration Tests
+
+### Overview
+
+The Transfer Verifier tests involve interacting with the local ethereum devnet defined by the Tilt set-up in this repository.
+
+The basic idea is as follows:
+* Interact with the local Ethereum testnet. This should already have important pieces such as the Token Bridge and Core Bridge deployed.
+* Use `cast` from the foundry tool set to simulate malicious interactions with the Token Bridge.
+* Transfer Verifier detects the malicious messages and emits errors about what went wrong.
+* The error messages are logged to a file
+* A "monitor" script is used to detect the expected error message, waiting until the file is written to
+* If the monitor script sees the expected error message in the error log, it terminates
+
+## Components
+
+### Scripts
+
+#### transfer-verifier-test.sh
+
+Contains the `cast` commands that simulate malicious interactions with the Token Bridge and Core Bridge. It is able to broadcast
+transactions to the `anvil` instance that powers the Ethereum testnet while being able to impersonate arbitrary senders.
+
+This lets us perform actions that otherwise should be impossible, like causing a Publish Message event to be emitted from the Core Bridge
+without a corresponding deposit or transfer into the Token Bridge.
+
+#### monitor.sh
+
+A bash script that monitors the error log file for a specific error pattern. It runs in an infinite loop so it will
+not exit until the error pattern is detected.
+
+The error pattern is defined in `wormhole/devnet/tx-verifier.yaml` and matches an error string in the Transfer Verifier package.
+
+Once the pattern is detected, a success message is logged to a status file. Currently this is unused but this set-up
+could be modified to detect that this script has written the success message to figure out whether the whole test completed successfully.
+
+### Pods
+
+The files detailed below each have a primary role and are responsible for running one of the main pieces of the test functionality:
+
+* The Transfer Verifier binary which monitors the state of the local Ethereum network
+* The integration test script that generates activity that the Transfer Verifier classifies as malicious
+* The monitor script which ensures that the Transfer Verifier successfully
+detected the error we expected, and signals to Tilt that the overall test has
+succeeded
+
+#### devnet/tx-verifier.yaml
+
+Runs the Transfer Verifier binary and redirects its STDERR to the error log file. This allows the output of the binary
+to be monitored by `monitor.sh`.
+
+#### devnet/tx-verifier-test.yaml
+
+Runs the `transfer-verifier-test.sh` script which simulates malicious Token Bridge activity. Defines the RPC URL used
+by that bash script, which corresponds to the `anvil` instance created in the Ethereum devnet.
+
+#### devnet/tx-verifier-monitor.yaml
+
+Defines the expected error string that should be emitted by the Transfer Verifier code assuming that it successfully recognizes
+the malicious Token Bridge activity simulated by the `cast` commands in `transfer-verifier-test.sh`.
+
+It also defines a path to the log file that contains this string.

+ 25 - 0
devnet/tx-verifier-monitor/monitor.sh

@@ -0,0 +1,25 @@
+#!/bin/sh
+
+log_file="${ERROR_LOG_PATH:-/logs/error.log}"
+error_pattern="${ERROR_PATTERN:-ERROR}"
+status_file="/logs/status"
+
+# Wait for log file to exist and be non-empty
+while [ ! -s "${log_file}" ]; do
+    echo "Waiting for ${log_file} to be created and contain data..."
+    sleep 5
+done
+
+# Initialize status
+echo "RUNNING" > "$status_file"
+echo "Monitoring file '${log_file}' for error pattern: '${error_pattern}'"
+
+# Check once before watching: the pattern may already have been written before
+# inotifywait starts, in which case no further modify event would ever arrive
+# and the watch loop below would block forever despite the test succeeding.
+if grep -q "$error_pattern" "$log_file"; then
+    echo "SUCCESS" > "$status_file"
+    echo "Found error pattern. Exiting."
+    exit 0
+fi
+
+# Watch for changes in the log file. If we find the error pattern, that means
+# we have succeeded. (Transfer Verifier should correctly detect errors.)
+inotifywait -m -e modify "${log_file}" | while read -r directory events filename; do
+    if grep -q "$error_pattern" "$log_file"; then
+        echo "SUCCESS" > "$status_file"
+        echo "Found error pattern. Exiting."
+        # NOTE: this exits the subshell reading the pipeline; inotifywait
+        # terminates via SIGPIPE on its next write.
+        exit 0
+    fi
+done

+ 120 - 0
devnet/tx-verifier-monitor/transfer-verifier-test.sh

@@ -0,0 +1,120 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+RPC="${RPC_URL:-ws://eth-devnet:8545}"
+
+# mainnet values
+# export CORE_CONTRACT="0x98f3c9e6E3fAce36bAAd05FE09d375Ef1464288B"
+# export TOKEN_BRIDGE_CONTRACT="0x3ee18B2214AFF97000D974cf647E7C347E8fa585"
+
+# TODO these could be CLI params from the sh/devnet script
+CORE_BRIDGE_CONTRACT=0xC89Ce4735882C9F0f0FE26686c53074E09B0D550
+TOKEN_BRIDGE_CONTRACT=0x0290FB167208Af455bB137780163b7B7a9a10C16
+
+MNEMONIC=0x4f3edf983ac636a65a842ce7c78d9aa706d3b113bce9c46f30d7d21715b23b1d
+
+ERC20_ADDR="0x47bdB2D7d6528C760b6f228b3B8F9F650169a10f" # Test token A
+
+VALUE="1000" # Wei value sent as msg.value
+TRANSFER_AMOUNT="10"
+
+# This account is generated by anvil and can be confirmed by running `anvil --accounts=14`
+# (the account at index 13 only exists when at least 14 accounts are generated).
+# The accounts at other indices are used by other tests in the test suite, so
+# the account at index 13 is used here to help encapsulate the tests.
+ANVIL_USER="0x64E078A8Aa15A41B85890265648e965De686bAE6" 
+ETH_WHALE="${ANVIL_USER}"
+FROM="${ETH_WHALE}"
+# The Anvil user address above, left-padded to Wormhole's 32-byte address format.
+# (The value itself is unchecked but must have this format.)
+RECIPIENT="0x00000000000000000000000064E078A8Aa15A41B85890265648e965De686bAE6" 
+NONCE="234" # arbitrary
+
+# Build the payload for token transfers. Declared on multiple lines to
+# be more legible. Data pulled from an arbitrary LogMessagePublished event
+# on etherscan. Metadata and fees commented out, leaving only the payload
+PAYLOAD="0x"
+declare -a SLOTS=(
+   # "0000000000000000000000000000000000000000000000000000000000055baf"
+   # "0000000000000000000000000000000000000000000000000000000000000000"
+   # "0000000000000000000000000000000000000000000000000000000000000080"
+   # "0000000000000000000000000000000000000000000000000000000000000001"
+   # "00000000000000000000000000000000000000000000000000000000000000ae"
+   "030000000000000000000000000000000000000000000000000000000005f5e1"
+   "000000000000000000000000002260fac5e5542a773aa44fbcfedf7c193bc2c5"
+   "9900020000000000000000000000000000000000000000000000000000000000"
+   "000816001000000000000000000000000044eca3f6295d6d559ca1d99a5ef5a8"
+   "f72b4160f10001010200c91f01004554480044eca3f6295d6d559ca1d99a5ef5"
+   "a8f72b4160f10000000000000000000000000000000000000000000000000000"
+)
+for i in "${SLOTS[@]}"
+do
+   PAYLOAD="$PAYLOAD$i"
+done
+
+echo "DEBUG:"
+echo "- RPC=${RPC}"
+echo "- CORE_BRIDGE_CONTRACT=${CORE_BRIDGE_CONTRACT}"
+echo "- TOKEN_BRIDGE_CONTRACT=${TOKEN_BRIDGE_CONTRACT}"
+echo "- MNEMONIC=${MNEMONIC}"
+echo "- FROM=${FROM}"
+echo "- VALUE=${VALUE}" 
+echo "- RECIPIENT=${RECIPIENT}" 
+echo 
+
+# Fund the token bridge from the user
+echo "Start impersonating Anvil key: ${ANVIL_USER}"
+cast rpc \
+   anvil_impersonateAccount "${ANVIL_USER}" \
+   --rpc-url "${RPC}"
+echo "Funding token bridge using the user's balance"
+cast send --unlocked \
+   --rpc-url "${RPC}" \
+   --from $ANVIL_USER \
+   --value 100000000000000 \
+   ${TOKEN_BRIDGE_CONTRACT}
+echo ""
+echo "End impersonating User0"
+cast rpc \
+   anvil_stopImpersonatingAccount "${ANVIL_USER}" \
+   --rpc-url "${RPC}"
+
+BALANCE_CORE=$(cast balance --rpc-url "${RPC}" $CORE_BRIDGE_CONTRACT)
+BALANCE_TOKEN=$(cast balance --rpc-url "${RPC}" $TOKEN_BRIDGE_CONTRACT)
+BALANCE_USER=$(cast balance --rpc-url "${RPC}" $ANVIL_USER)
+echo "BALANCES:"
+echo "- CORE_BRIDGE_CONTRACT=${BALANCE_CORE}"
+echo "- TOKEN_BRIDGE_CONTRACT=${BALANCE_TOKEN}"
+echo "- ANVIL_USER=${BALANCE_USER}"
+echo 
+
+# === Malicious call to transferTokensWithPayload()
+# This is the exploit scenario: the token bridge has called publishMessage() without a ERC20 Transfer or Deposit
+# being present in the same receipt.
+# This is done by impersonating the token bridge contract and sending a message directly to the core bridge.
+# Ensure that anvil is using `--auto-impersonate` or else that account impersonation is enabled in your local environment.
+# --private-key "$MNEMONIC" \
+# --max-fee 500000 \
+echo "Start impersonate token bridge" 
+cast rpc \
+   --rpc-url "${RPC}" \
+   anvil_impersonateAccount "${TOKEN_BRIDGE_CONTRACT}"
+echo "Calling publishMessage as ${TOKEN_BRIDGE_CONTRACT}" 
+cast send --unlocked \
+   --rpc-url "${RPC}" \
+   --json \
+   --gas-limit 10000000 \
+   --priority-gas-price 1 \
+   --from "${TOKEN_BRIDGE_CONTRACT}" \
+   --value "0" \
+   "${CORE_BRIDGE_CONTRACT}" \
+   "publishMessage(uint32,bytes,uint8)" \
+   0 "${PAYLOAD}" 1
+echo ""
+cast rpc \
+   --rpc-url "${RPC}" \
+   anvil_stopImpersonatingAccount "${TOKEN_BRIDGE_CONTRACT}"
+echo "End impersonate token bridge" 
+
+# TODO add the 'multicall' scenario encoded in the forge script
+
+echo "Done Transfer Verifier integration test."
+echo "Exiting."

+ 32 - 0
devnet/tx-verifier-test.yaml

@@ -0,0 +1,32 @@
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: tx-verifier-test
+spec:
+  # Number of successful pod completions needed
+  completions: 1
+  # Number of pods to run in parallel
+  parallelism: 1
+  # Time limit after which the job is terminated (optional)
+  # activeDeadlineSeconds: 100
+  # Number of retries before marking as failed
+  backoffLimit: 4
+  template:
+    metadata:
+      labels:
+        app: tx-verifier-test
+    spec:
+      restartPolicy: Never
+      containers:
+        - name: tx-verifier-test
+          image: tx-verifier-test
+          command:
+            - /bin/bash
+            - -c
+            - "/transfer-verifier-test.sh"
+          env:
+            - name: RPC_URL
+              value: "ws://eth-devnet:8545"
+      volumes:
+      - name: log-volume
+        emptyDir: {}

+ 51 - 0
devnet/tx-verifier.yaml

@@ -0,0 +1,51 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: tx-verifier-with-monitor
+spec:
+  selector:
+    matchLabels:
+      app: tx-verifier-with-monitor
+  template:
+    metadata:
+      labels:
+        app: tx-verifier-with-monitor
+    spec:
+      securityContext:
+        runAsUser: 1000
+        runAsGroup: 1000
+        fsGroup: 1000
+      containers:
+        - name: tx-verifier
+          image: guardiand-image
+          volumeMounts:
+           - name: log-volume
+             mountPath: /logs
+          command:
+            ["/bin/sh", "-c"]
+          # See `ethereum/.env.test` and related shell scripts for how these values are configured in localnet testing.
+          args:
+            - |
+              exec /guardiand \
+                           transfer-verifier \
+                           evm \
+                           --rpcUrl ws://eth-devnet:8545 \
+                           --coreContract 0xC89Ce4735882C9F0f0FE26686c53074E09B0D550 \
+                           --tokenContract 0x0290FB167208Af455bB137780163b7B7a9a10C16 \
+                           --wrappedNativeContract 0xDDb64fE46a91D46ee29420539FC25FD07c5FEa3E \
+                           --logLevel=info \
+                           2> /logs/error.log \
+        - name: tx-verifier-monitor
+          image: tx-verifier-monitor
+          volumeMounts:
+          - name: log-volume
+            mountPath: /logs
+          env:
+            - name: ERROR_PATTERN
+              # This error string comes from the transfer-verifier binary in node/
+              value: "invalid receipt: no deposits and no transfers"
+            - name: ERROR_LOG_PATH
+              value: "/logs/error.log"
+      volumes:
+      - name: log-volume
+        emptyDir: {}

+ 32 - 5
ethereum/forge-scripts/DeployTestToken.s.sol

@@ -17,7 +17,8 @@ contract DeployTestToken is Script {
             address deployedTokenAddress,
             address deployedNFTaddress,
             address deployedWETHaddress,
-            address deployedAccountantTokenAddress
+            address deployedAccountantTokenAddress,
+            address transferVerificationTokenA
         )
     {
         vm.startBroadcast();
@@ -25,7 +26,8 @@ contract DeployTestToken is Script {
             deployedTokenAddress,
             deployedNFTaddress,
             deployedWETHaddress,
-            deployedAccountantTokenAddress
+            deployedAccountantTokenAddress,
+            transferVerificationTokenA
         ) = _deploy();
         vm.stopBroadcast();
     }
@@ -36,10 +38,11 @@ contract DeployTestToken is Script {
             address deployedTokenAddress,
             address deployedNFTaddress,
             address deployedWETHaddress,
-            address deployedAccountantTokenAddress
+            address deployedAccountantTokenAddress,
+            address transferVerificationTokenA
         )
     {
-        address[] memory accounts = new address[](13);
+        address[] memory accounts = new address[](14);
         accounts[0] = 0x90F8bf6A479f320ead074411a4B0e7944Ea8c9C1;
         accounts[1] = 0xFFcf8FDEE72ac11b5c542428B35EEF5769C409f0;
         accounts[2] = 0x22d491Bde2303f2f43325b2108D26f1eAbA1e32b;
@@ -53,6 +56,8 @@ contract DeployTestToken is Script {
         accounts[10] = 0x610Bb1573d1046FCb8A70Bbbd395754cD57C2b60;
         accounts[11] = 0x855FA758c77D68a04990E992aA4dcdeF899F654A;
         accounts[12] = 0xfA2435Eacf10Ca62ae6787ba2fB044f8733Ee843;
+        accounts[13] = 0x64E078A8Aa15A41B85890265648e965De686bAE6;
+
         
         ERC20PresetMinterPauser token = new ERC20PresetMinterPauser(
             "Ethereum Test Token",
@@ -95,11 +100,33 @@ contract DeployTestToken is Script {
         // mint 1000 units
         accountantToken.mint(accounts[9], 1_000_000_000_000_000_000_000);
 
+        for(uint16 i=0; i<11; i++) {
+            // Give the accounts enough eth to send transactions
+            vm.deal(accounts[i], 1e18);
+        }
+
+        // Deploy a test token for Transfer Verification
+        ERC20PresetMinterPauser deployedA = new ERC20PresetMinterPauser(
+            "TransferVerifier Test Token A",
+            "TVA"
+        );
+        console.log("Test token A deployed at: ", address(deployedA));
+
+        // Mint Eth and test tokens to Anvil's default account at index 13.
+        // This will be used for Transfer Verification integration tests.
+        // The other accounts created by Anvil are used for other tests in the devnet, so
+        // using the account at index 13 to send transfers will hopefully cause things to be
+        // better encapsulated.
+        deployedA.mint(accounts[13], 1_000_000_000_000_000_000_000);
+        token.mint(accounts[13], 1_000_000_000_000_000_000_000);
+        vm.deal(accounts[13], 1e18);
+
         return (
             address(token),
             address(nft),
             address(mockWeth),
-            address(accountantToken)
+            address(accountantToken),
+            address(deployedA)
         );
     }
 }

+ 2 - 0
node/cmd/root.go

@@ -7,6 +7,7 @@ import (
 	"github.com/certusone/wormhole/node/cmd/ccq"
 	"github.com/certusone/wormhole/node/cmd/debug"
 	"github.com/certusone/wormhole/node/cmd/spy"
+	txverifier "github.com/certusone/wormhole/node/cmd/txverifier"
 	"github.com/certusone/wormhole/node/pkg/version"
 
 	"github.com/spf13/cobra"
@@ -49,6 +50,7 @@ func init() {
 	rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/.guardiand.yaml)")
 	rootCmd.AddCommand(guardiand.NodeCmd)
 	rootCmd.AddCommand(spy.SpyCmd)
+	rootCmd.AddCommand(txverifier.TransferVerifierCmd)
 	rootCmd.AddCommand(ccq.QueryServerCmd)
 	rootCmd.AddCommand(guardiand.KeygenCmd)
 	rootCmd.AddCommand(guardiand.AdminCmd)

+ 157 - 0
node/cmd/txverifier/evm.go

@@ -0,0 +1,157 @@
+package txverifier
+
+import (
+	"context"
+	"fmt"
+	"os"
+
+	"github.com/certusone/wormhole/node/pkg/telemetry"
+	txverifier "github.com/certusone/wormhole/node/pkg/txverifier"
+	"github.com/certusone/wormhole/node/pkg/version"
+	"github.com/certusone/wormhole/node/pkg/watchers/evm/connectors"
+	"github.com/ethereum/go-ethereum/common"
+	ipfslog "github.com/ipfs/go-log/v2"
+	"github.com/spf13/cobra"
+	"go.uber.org/zap"
+)
+
+var TransferVerifierCmdEvm = &cobra.Command{
+	Use:   "evm",
+	Short: "Transfer Verifier for EVM-based chains",
+	Run:   runTransferVerifierEvm,
+}
+
+// Configuration variables for EVM interactions.
+var (
+	// RPC endpoint URL for interacting with an EVM node.
+	evmRpc *string
+	// Contract address of the core bridge.
+	evmCoreContract *string
+	// Contract address of the token bridge.
+	evmTokenBridgeContract *string
+	// Contract address of the wrapped native asset, e.g. WETH for Ethereum
+	wrappedNativeContract *string
+	// Height difference between pruning windows (in blocks).
+	pruneHeightDelta *uint64
+)
+
+// Function to initialize the configuration for the TransferVerifierCmdEvm flags.
+// The MarkFlagRequired calls will cause the script to fail on their own. No need to handle the errors manually.
+// Note that MarkFlagRequired must be called on TransferVerifierCmdEvm — the
+// command the flags are registered on. Calling it on the parent
+// TransferVerifierCmd returns a "no such flag" error which the nolint
+// directive below would silently discard, leaving the flags optional.
+//
+//nolint:errcheck
+func init() {
+	evmRpc = TransferVerifierCmdEvm.Flags().String("rpcUrl", "ws://localhost:8546", "RPC url")
+	evmCoreContract = TransferVerifierCmdEvm.Flags().String("coreContract", "", "core bridge address")
+	evmTokenBridgeContract = TransferVerifierCmdEvm.Flags().String("tokenContract", "", "token bridge")
+	wrappedNativeContract = TransferVerifierCmdEvm.Flags().String("wrappedNativeContract", "", "wrapped native address (e.g. WETH on Ethereum)")
+	pruneHeightDelta = TransferVerifierCmdEvm.Flags().Uint64("pruneHeightDelta", 10, "The number of blocks for which to retain transaction receipts. Defaults to 10 blocks.")
+
+	TransferVerifierCmdEvm.MarkFlagRequired("rpcUrl")
+	TransferVerifierCmdEvm.MarkFlagRequired("coreContract")
+	TransferVerifierCmdEvm.MarkFlagRequired("tokenContract")
+	TransferVerifierCmdEvm.MarkFlagRequired("wrappedNativeContract")
+}
+
+// Note: logger.Error should be reserved only for conditions that break the
+// invariants of the Token Bridge
+//
+// runTransferVerifierEvm configures logging (and optionally Loki telemetry),
+// connects to the EVM node over RPC, then loops forever processing
+// LogMessagePublished events from the core bridge through the verifier.
+// It only returns when its context is cancelled.
+func runTransferVerifierEvm(cmd *cobra.Command, args []string) {
+
+	// Setup logging
+	lvl, logErr := ipfslog.LevelFromString(*logLevel)
+	if logErr != nil {
+		fmt.Println("Invalid log level")
+		os.Exit(1)
+	}
+
+	logger := ipfslog.Logger("wormhole-transfer-verifier").Desugar()
+	ipfslog.SetAllLoggers(lvl)
+
+	// Setup logging to Loki if configured
+	if *telemetryLokiUrl != "" && *telemetryNodeName != "" {
+		labels := map[string]string{
+			// Is this required?
+			// "network":   *p2pNetworkID,
+			"node_name": *telemetryNodeName,
+			"version":   version.Version(),
+		}
+
+		tm, err := telemetry.NewLokiCloudLogger(
+			context.Background(),
+			logger,
+			*telemetryLokiUrl,
+			// Note: the product name parameter here is representing a per-chain configuration, so 'eth' is used
+			// rather than 'evm'. This allows us to distinguish this instance from other EVM chains that may be added in
+			// the future.
+			"transfer-verifier-eth",
+			// Private logs are not used in this code
+			false,
+			labels,
+		)
+		if err != nil {
+			logger.Fatal("Failed to initialize telemetry", zap.Error(err))
+		}
+
+		defer tm.Close()
+		logger = tm.WrapLogger(logger) // Wrap logger with telemetry logger
+	}
+
+	logger.Info("Starting EVM transfer verifier")
+
+	logger.Debug("EVM rpc connection", zap.String("url", *evmRpc))
+	logger.Debug("EVM core contract", zap.String("address", *evmCoreContract))
+	logger.Debug("EVM token bridge contract", zap.String("address", *evmTokenBridgeContract))
+	logger.Debug("EVM wrapped native asset contract", zap.String("address", *wrappedNativeContract))
+	logger.Debug("EVM prune config",
+		zap.Uint64("height delta", *pruneHeightDelta))
+
+	// Create the RPC connection, context, and channels
+	ctx, ctxCancel := context.WithCancel(context.Background())
+	defer ctxCancel()
+
+	var evmConnector connectors.Connector
+	evmConnector, connectErr := connectors.NewEthereumBaseConnector(ctx, "eth", *evmRpc, common.HexToAddress(*evmCoreContract), logger)
+	if connectErr != nil {
+		logger.Fatal("could not create new evm base connector",
+			zap.Error(connectErr))
+	}
+
+	// Create main configuration for Transfer Verification
+	transferVerifier, err := txverifier.NewTransferVerifier(
+		evmConnector,
+		&txverifier.TVAddresses{
+			CoreBridgeAddr:    common.HexToAddress(*evmCoreContract),
+			TokenBridgeAddr:   common.HexToAddress(*evmTokenBridgeContract),
+			WrappedNativeAddr: common.HexToAddress(*wrappedNativeContract),
+		},
+		*pruneHeightDelta,
+		logger,
+	)
+
+	if err != nil {
+		logger.Fatal("could not create new transfer verifier", zap.Error(err))
+	}
+
+	// Set-up for main processing loop
+
+	// Subscription for LogMessagePublished events
+	// NOTE(review): any error return from Subscribe is not checked here —
+	// confirm it cannot fail silently before the loop below starts.
+	sub := txverifier.NewSubscription(evmConnector.Client(), evmConnector)
+	sub.Subscribe(ctx)
+	defer sub.Close()
+
+	// MAIN LOOP:
+	// - watch for LogMessagePublished events coming from the connector attached to the core bridge.
+	// - process the events through the transfer verifier.
+	for {
+		select {
+		case <-ctx.Done():
+			logger.Info("context cancelled, exiting")
+			return
+		// Subscription errors are logged but do not stop processing.
+		case subErr := <-sub.Errors():
+			logger.Warn("error on subscription", zap.Error(subErr))
+
+		// Process observed LogMessagePublished events
+		case vLog := <-sub.Events():
+			// Third argument is nil here; presumably the verifier fetches the
+			// receipt itself when none is supplied — TODO confirm.
+			transferVerifier.ProcessEvent(ctx, vLog, nil)
+		}
+	}
+}

+ 209 - 0
node/cmd/txverifier/sui.go

@@ -0,0 +1,209 @@
+package txverifier
+
+import (
+	"context"
+	"fmt"
+	"os"
+	"strconv"
+	"time"
+
+	"github.com/certusone/wormhole/node/pkg/telemetry"
+	txverifier "github.com/certusone/wormhole/node/pkg/txverifier"
+	"github.com/certusone/wormhole/node/pkg/version"
+
+	ipfslog "github.com/ipfs/go-log/v2"
+	"github.com/spf13/cobra"
+	"go.uber.org/zap"
+)
+
+const (
+	INITIAL_EVENT_FETCH_LIMIT = 25
+)
+
+// CLI args
+var (
+	suiRPC                  *string
+	suiCoreContract         *string
+	suiTokenBridgeEmitter   *string
+	suiTokenBridgeContract  *string
+	suiProcessInitialEvents *bool
+)
+
+var TransferVerifierCmdSui = &cobra.Command{
+	Use:   "sui",
+	Short: "Transfer Verifier for Sui",
+	Run:   runTransferVerifierSui,
+}
+
+// CLI parameters
+// The MarkFlagRequired calls will cause the script to fail on their own. No need to handle the errors manually.
+// Note that MarkFlagRequired must be called on TransferVerifierCmdSui — the
+// command the flags are registered on. Calling it on the parent
+// TransferVerifierCmd returns a "no such flag" error which the nolint
+// directive below would silently discard, leaving the flags optional.
+//
+//nolint:errcheck
+func init() {
+	suiRPC = TransferVerifierCmdSui.Flags().String("suiRPC", "", "Sui RPC url")
+	suiCoreContract = TransferVerifierCmdSui.Flags().String("suiCoreContract", "", "Sui core contract address")
+	suiTokenBridgeEmitter = TransferVerifierCmdSui.Flags().String("suiTokenBridgeEmitter", "", "Token bridge emitter on Sui")
+	suiTokenBridgeContract = TransferVerifierCmdSui.Flags().String("suiTokenBridgeContract", "", "Token bridge contract on Sui")
+	suiProcessInitialEvents = TransferVerifierCmdSui.Flags().Bool("suiProcessInitialEvents", false, "Indicate whether the Sui transfer verifier should process the initial events it fetches")
+
+	TransferVerifierCmdSui.MarkFlagRequired("suiRPC")
+	TransferVerifierCmdSui.MarkFlagRequired("suiCoreContract")
+	TransferVerifierCmdSui.MarkFlagRequired("suiTokenBridgeEmitter")
+	TransferVerifierCmdSui.MarkFlagRequired("suiTokenBridgeContract")
+}
+
+// runTransferVerifierSui is the entrypoint for the Sui Transfer Verifier. It
+// fetches an initial batch of WormholeMessage events (optionally processing
+// them), then polls every 5 seconds for new events and runs each new
+// transaction digest through the verifier. It returns when ctx is done.
+func runTransferVerifierSui(cmd *cobra.Command, args []string) {
+	ctx := context.Background()
+
+	// Setup logging. Honor the --logLevel persistent flag, matching the EVM
+	// subcommand. (Previously this was hardcoded to "info", which silently
+	// ignored the flag.)
+	lvl, err := ipfslog.LevelFromString(*logLevel)
+	if err != nil {
+		fmt.Println("Invalid log level")
+		os.Exit(1)
+	}
+
+	logger := ipfslog.Logger("wormhole-transfer-verifier-sui").Desugar()
+
+	ipfslog.SetAllLoggers(lvl)
+
+	// Setup logging to Loki if configured
+	if *telemetryLokiUrl != "" && *telemetryNodeName != "" {
+		labels := map[string]string{
+			// Is this required?
+			// "network":   *p2pNetworkID,
+			"node_name": *telemetryNodeName,
+			"version":   version.Version(),
+		}
+
+		tm, err := telemetry.NewLokiCloudLogger(
+			context.Background(),
+			logger,
+			*telemetryLokiUrl,
+			"transfer-verifier-sui",
+			// Private logs are not used in this code
+			false,
+			labels,
+		)
+		if err != nil {
+			logger.Fatal("Failed to initialize telemetry", zap.Error(err))
+		}
+
+		defer tm.Close()
+		logger = tm.WrapLogger(logger) // Wrap logger with telemetry logger
+	}
+
+	logger.Info("Starting Sui transfer verifier")
+	logger.Debug("Sui rpc connection", zap.String("url", *suiRPC))
+	logger.Debug("Sui core contract", zap.String("address", *suiCoreContract))
+	logger.Debug("Sui token bridge contract", zap.String("address", *suiTokenBridgeContract))
+	logger.Debug("token bridge event emitter", zap.String("object id", *suiTokenBridgeEmitter))
+	logger.Debug("process initial events", zap.Bool("processInitialEvents", *suiProcessInitialEvents))
+
+	// Verify CLI parameters. This is a belt-and-braces check on top of the
+	// MarkFlagRequired calls in init.
+	if *suiRPC == "" || *suiCoreContract == "" || *suiTokenBridgeEmitter == "" || *suiTokenBridgeContract == "" {
+		logger.Fatal("One or more CLI parameters are empty",
+			zap.String("suiRPC", *suiRPC),
+			zap.String("suiCoreContract", *suiCoreContract),
+			zap.String("suiTokenBridgeEmitter", *suiTokenBridgeEmitter),
+			zap.String("suiTokenBridgeContract", *suiTokenBridgeContract))
+	}
+
+	// Create a new SuiTransferVerifier
+	suiTransferVerifier := txverifier.NewSuiTransferVerifier(*suiCoreContract, *suiTokenBridgeEmitter, *suiTokenBridgeContract)
+
+	// Get the event filter
+	eventFilter := suiTransferVerifier.GetEventFilter()
+
+	suiApiConnection := txverifier.NewSuiApiConnection(*suiRPC)
+
+	// Initial event fetching
+	resp, err := suiApiConnection.QueryEvents(eventFilter, "null", INITIAL_EVENT_FETCH_LIMIT, true)
+	if err != nil {
+		logger.Fatal("Error in querying initial events", zap.Error(err))
+	}
+
+	initialEvents := resp.Result.Data
+
+	// Use the latest timestamp to determine the starting point for live processing
+	var latestTimestamp int
+	for _, event := range initialEvents {
+		if event.Timestamp != nil {
+			timestampInt, err := strconv.Atoi(*event.Timestamp)
+			if err != nil {
+				logger.Error("Error converting timestamp to int", zap.Error(err))
+				continue
+			}
+			if timestampInt > latestTimestamp {
+				latestTimestamp = timestampInt
+			}
+		}
+	}
+	logger.Info("Initial events fetched", zap.Int("number of initial events", len(initialEvents)), zap.Int("latestTimestamp", latestTimestamp))
+
+	// If specified, process the initial events. This is useful for running a number of digests
+	// through the verifier before starting live processing.
+	if *suiProcessInitialEvents {
+		logger.Info("Processing initial events")
+		for _, event := range initialEvents {
+			if event.ID.TxDigest != nil {
+				_, err = suiTransferVerifier.ProcessDigest(*event.ID.TxDigest, suiApiConnection, logger)
+				if err != nil {
+					logger.Error(err.Error())
+				}
+			}
+		}
+	}
+
+	// Ticker for live processing
+	ticker := time.NewTicker(5 * time.Second)
+	defer ticker.Stop()
+
+	for {
+		select {
+		case <-ctx.Done():
+			// Return so the loop actually terminates on cancellation.
+			// (Previously this case only logged and kept looping.)
+			logger.Info("Context cancelled")
+			return
+		case <-ticker.C:
+			// Fetch new events
+			resp, err := suiApiConnection.QueryEvents(eventFilter, "null", 25, true)
+			if err != nil {
+				logger.Error("Error in querying new events", zap.Error(err))
+				continue
+			}
+
+			newEvents := resp.Result.Data
+
+			// List of transaction digests for transactions in which the WormholeMessage
+			// event was emitted.
+			var txDigests []string
+
+			// Iterate over all events and get the transaction digests for events younger
+			// than latestTimestamp. Also update latestTimestamp.
+			for _, event := range newEvents {
+				if event.Timestamp != nil {
+					timestampInt, err := strconv.Atoi(*event.Timestamp)
+					if err != nil {
+						logger.Error("Error converting timestamp to int", zap.Error(err))
+						continue
+					}
+					if timestampInt > latestTimestamp {
+						latestTimestamp = timestampInt
+						if event.ID.TxDigest != nil {
+							txDigests = append(txDigests, *event.ID.TxDigest)
+						}
+					}
+				}
+			}
+
+			for _, txDigest := range txDigests {
+				_, err := suiTransferVerifier.ProcessDigest(txDigest, suiApiConnection, logger)
+				if err != nil {
+					logger.Error(err.Error())
+				}
+			}
+
+			logger.Info("New events processed", zap.Int("latestTimestamp", latestTimestamp), zap.Int("txDigestCount", len(txDigests)))
+
+		}
+	}
+}

+ 36 - 0
node/cmd/txverifier/txverifier.go

@@ -0,0 +1,36 @@
+package txverifier
+
+import (
+	"github.com/spf13/cobra"
+)
+
// TransferVerifierCmd is the top-level "transfer-verifier" command. It has no
// behavior of its own; chain-specific logic lives in the EVM and Sui
// subcommands registered in init().
var TransferVerifierCmd = &cobra.Command{
	Use:   "transfer-verifier",
	Short: "Transfer Verifier",
}

var (
	// logLevel is a global flag that is used to set the logging level for the TransferVerifierCmd
	logLevel *string
	// telemetryLokiUrl is a global flag that is used to set the Loki cloud logging URL for the TransferVerifierCmd.
	telemetryLokiUrl *string
	// telemetryNodeName is a global flag that is used to set the node name used in telemetry for the TransferVerifierCmd.
	telemetryNodeName *string
)

// init initializes the global flags and subcommands for the TransferVerifierCmd.
// It sets up a persistent flag for logging level with a default value of "info"
// and adds subcommands for EVM and Sui transfer verification.
func init() {
	// Global flags, inherited by all subcommands.
	logLevel = TransferVerifierCmd.PersistentFlags().String("logLevel", "info", "Logging level (debug, info, warn, error, dpanic, panic, fatal)")
	telemetryLokiUrl = TransferVerifierCmd.PersistentFlags().String("telemetryLokiUrl", "", "Loki cloud logging URL")
	telemetryNodeName = TransferVerifierCmd.PersistentFlags().String("telemetryNodeName", "", "Node name used in telemetry")

	// Either both loki flags should be present or neither of them.
	// NOTE(review): these flags are registered on PersistentFlags(); cobra's
	// flag groups have known caveats when validation runs on a subcommand
	// rather than this parent command — verify the pairing is enforced when
	// invoking the evm/sui subcommands.
	TransferVerifierCmd.MarkFlagsRequiredTogether("telemetryLokiUrl", "telemetryNodeName")

	// Subcommands corresponding to chains supported by the Transfer Verifier.
	TransferVerifierCmd.AddCommand(TransferVerifierCmdEvm)
	TransferVerifierCmd.AddCommand(TransferVerifierCmdSui)
}

+ 43 - 0
node/pkg/txverifier/README.md

@@ -0,0 +1,43 @@
+# Transfer Verifier - Package Documentation
+
+## Package Structure
+
+```
+├── README.md
+├── evm_structs.go
+├── evm_structs_test.go
+├── evm.go
+├── evm_test.go
+├── sui_structs.go
+├── sui.go
+├── sui_test.go
+├── utils.go
+└── utils_test.go
+```
+
+The package is organized by runtime environment. Currently there are implementations for the Ethereum and Sui blockchains.
+Because the Ethereum implementation is (hopefully) generalizable to other EVM-chains, the relevant files are named
+`evm` rather than `ethereum`.
+
+For each implementation, the code is divided into separate files. The core logic is contained in the main file
+and the supporting structs and utility methods are defined in a separate file. The hope here is that this makes the
+overall algorithm easier to reason about: a developer new to the program can focus on the main file and high-level
+concepts and avoid low-level details.
+
+## Main file -- Core Algorithm
+
+The main file contains the algorithm for Transfer Verification, handling tasks such as tracking deposits and transfers
+into the Token Bridge, cross-referencing these with messages emitted from the core bridge, and emitting errors when
+suspicious activity is detected.
+
+## Structs file -- Parsing and Encapsulation
+
+The structs file defines the major conceptual building blocks used by the algorithm in the main file. It is also responsible
+for lower-level operations such as establishing a subscription or polling mechanisms to a supported chain. This file
+also handles parsing and conversions, transforming things like JSON blobs or byte slices into concepts like a
+Message Receipt or Deposit. 
+
+## Utilities file
+
+There is also a utilities file that contains functions used by more than one runtime implementation, such as
+performing de/normalization of decimals.

+ 652 - 0
node/pkg/txverifier/evm.go

@@ -0,0 +1,652 @@
+package txverifier
+
+// TODOs
+//	add comments at the top of this file
+//	fix up contexts where it makes sense
+//	fix issue where cross-chain transfers show an invariant violation because of they cannot be found in the wrapped asset map
+
+import (
+	"context"
+	"errors"
+	"fmt"
+	"math/big"
+	"time"
+
+	"github.com/certusone/wormhole/node/pkg/watchers/evm/connectors/ethabi"
+	"github.com/ethereum/go-ethereum/common"
+	geth "github.com/ethereum/go-ethereum/core/types"
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+
+	"go.uber.org/zap"
+)
+
+// // Global variables for caching RPC responses.
+// var (
+
+// )
+
+const (
+	// Seconds to wait before trying to reconnect to the core contract event subscription.
+	RECONNECT_DELAY = 5 * time.Second
+)
+
// ProcessEvent processes a LogMessagePublished event, and is either called
// from a watcher or from the transfer verifier standalone process. It fetches
// the full transaction receipt associated with the log, and parses all
// events emitted in the transaction, tracking LogMessagePublished events as outbound
// transfers and token deposits into the token bridge as inbound transfers. It then
// verifies that the sum of the inbound transfers is at least as much as the sum of
// the outbound transfers.
// If the return value is true, it implies that the event was processed successfully.
// If the return value is false, it implies that something serious has gone wrong.
func (tv *TransferVerifier[ethClient, Connector]) ProcessEvent(
	ctx context.Context,
	vLog *ethabi.AbiLogMessagePublished,
	// If nil, this code will fetch the receipt using the TransferVerifier's connector.
	receipt *geth.Receipt,
) bool {

	// Use this opportunity to prune old transaction information from the cache.
	tv.pruneCache()

	tv.logger.Debug("detected LogMessagePublished event",
		zap.String("txHash", vLog.Raw.TxHash.String()))

	// Caching: skip transactions that were already inspected. A single
	// transaction can emit several LogMessagePublished events, but its
	// receipt only needs to be verified once.
	if _, exists := tv.processedTransactions[vLog.Raw.TxHash]; exists {
		tv.logger.Debug("skip: transaction hash already processed",
			zap.String("txHash", vLog.Raw.TxHash.String()))
		return true
	}

	// This check also occurs when processing a receipt but skipping here avoids unnecessary
	// processing.
	if cmp(vLog.Sender, tv.Addresses.TokenBridgeAddr) != 0 {
		tv.logger.Debug("skip: sender is not token bridge",
			zap.String("txHash", vLog.Raw.TxHash.String()),
			zap.String("sender", vLog.Sender.String()),
			zap.String("tokenBridge", tv.Addresses.TokenBridgeAddr.String()))
		return true
	}

	if receipt == nil {
		tv.logger.Debug("receipt was not passed as an argument. fetching it using the connector")
		// Get the full transaction receipt for this log if it was not provided as an argument.
		var txReceiptErr error
		receipt, txReceiptErr = tv.evmConnector.TransactionReceipt(ctx, vLog.Raw.TxHash)
		if txReceiptErr != nil {
			// An RPC failure is not an invariant violation, so return true.
			tv.logger.Warn("could not find core bridge receipt", zap.Error(txReceiptErr))
			return true
		}
	}

	// Caching: record a new lastBlockNumber (used for pruning) and remember the receipt.
	tv.lastBlockNumber = receipt.BlockNumber.Uint64()
	tv.processedTransactions[vLog.Raw.TxHash] = receipt

	// Parse raw transaction receipt into high-level struct containing transfer details.
	transferReceipt, parseErr := tv.ParseReceipt(receipt)
	if parseErr != nil || transferReceipt == nil {
		tv.logger.Warn("error when parsing receipt. skipping validation",
			zap.String("receipt hash", receipt.TxHash.String()),
			zap.Error(parseErr))
		return true
	}

	// Add wormhole-specific data to the receipt by making
	// RPC calls for data that is not included in the logs,
	// such as a token's native address and its decimals.
	updateErr := tv.UpdateReceiptDetails(transferReceipt)
	if updateErr != nil {
		tv.logger.Warn("error when fetching receipt details from the token bridge. can't continue processing",
			zap.String("receipt hash", receipt.TxHash.String()),
			zap.Error(updateErr))
		return true
	}

	// Ensure that the amount coming in is at least as much as the amount requested out.
	summary, processErr := tv.ProcessReceipt(transferReceipt)
	tv.logger.Debug("finished processing receipt", zap.String("summary", summary.String()))

	if processErr != nil {
		// This represents a serious error. Normal, valid transactions should not
		// return an error here. If this error is returned, it means that the core
		// invariant that transfer verifier is monitoring is broken.
		tv.logger.Error("error when processing receipt. can't continue processing",
			zap.Error(processErr),
			zap.String("txHash", vLog.Raw.TxHash.String()))
		return false
	}

	// Sanity check: a verified receipt should have produced at least one processed log.
	if summary.logsProcessed == 0 {
		tv.logger.Warn("receipt logs empty for tx", zap.String("txHash", vLog.Raw.TxHash.Hex()))
		return true
	}

	return true
}
+
+func (tv *TransferVerifier[ethClient, Connector]) pruneCache() {
+	// Prune the cache of processed receipts
+	numPrunedReceipts := int(0)
+	// Iterate over recorded transaction hashes, and clear receipts older than `pruneDelta` blocks
+	for hash, receipt := range tv.processedTransactions {
+		if receipt.BlockNumber.Uint64() < tv.lastBlockNumber-tv.pruneHeightDelta {
+			numPrunedReceipts++
+			delete(tv.processedTransactions, hash)
+		}
+	}
+
+	tv.logger.Debug("pruned cached transaction receipts",
+		zap.Int("numPrunedReceipts", numPrunedReceipts))
+}
+
+// Do additional processing on the raw data that has been parsed. This
+// consists of checking whether assets are wrapped for both ERC20
+// Transfer logs and LogMessagePublished events. If so, unwrap the
+// assets and fetch information about the native chain, native address,
+// and token decimals. All of this information is required to determine
+// whether the amounts deposited into the token bridge match the amount
+// that was requested out. This is done separately from parsing step so
+// that RPC calls are done independently of parsing code, which
+// facilitates testing.
+// Updates the receipt parameter directly.
+func (tv *TransferVerifier[ethClient, Connector]) UpdateReceiptDetails(
+	receipt *TransferReceipt,
+) (updateErr error) {
+
+	tv.logger.Debug(
+		"updating details for receipt",
+		zap.String("receiptRaw", receipt.String()),
+	)
+
+	// Populate details for all transfers in this receipt.
+	tv.logger.Debug("populating native data for ERC20 Transfers")
+	for _, transfer := range *receipt.Transfers {
+		// The native address is returned here, but it is ignored. The goal here is only to correct
+		// the native chain ID so that it can be compared against the destination asset in the
+		// LogMessagePublished payload.
+		nativeChainID, _, fetchErr := tv.fetchNativeInfo(transfer.TokenAddress, transfer.TokenChain)
+		if fetchErr != nil {
+			// It's somewhat common for transfers to be made across the bridge for assets
+			// that are not properly registered. In this case, calls to isWrappedAsset() on
+			// the Token Bridge will return true but the calls to wrappedAsset() will return
+			// the zero address. In this case it's impossible to determine the decimals and
+			// therefore there is no way to compare the amount transferred or burned with
+			// the LogMessagePublished payload. In this case, we can't process this receipt.
+
+			return errors.Join(errors.New("error when fetching native info for ERC20 Transfer. Can't continue to process this receipt"), fetchErr)
+		}
+
+		// Update ChainID if this is a wrapped asset
+		if nativeChainID != 0 {
+			tv.logger.Debug("updating chain ID for Token with its native chain ID",
+				zap.String("tokenAddr", transfer.TokenChain.String()),
+				zap.Uint16("new chainID", uint16(nativeChainID)),
+				zap.String("chain name", nativeChainID.String()))
+			transfer.TokenChain = nativeChainID
+			continue
+		}
+
+		tv.logger.Debug("token is native. no info updated")
+	}
+
+	// Populate the native asset information and token decimals for assets
+	// recorded in LogMessagePublished events for this receipt.
+	tv.logger.Debug("populating native data for LogMessagePublished assets")
+	for _, message := range *receipt.MessagePublicatons {
+		newDetails, logFetchErr := tv.fetchLogMessageDetails(message.TransferDetails)
+		if logFetchErr != nil {
+			// The unwrapped address and the denormalized amount are necessary for checking
+			// that the amount matches.
+			return errors.Join(errors.New("error when populating wormhole details. cannot verify receipt"), logFetchErr)
+		}
+		message.TransferDetails = newDetails
+	}
+
+	tv.logger.Debug(
+		"new details for receipt",
+		zap.String("receipt", receipt.String()),
+	)
+
+	tv.logger.Debug("finished updating receipt details")
+	return nil
+}
+
+// fetchNativeInfo queries the token bridge about whether the token address is wrapped, and if so, returns the native chain
+// and address where the token was minted.
+func (tv *TransferVerifier[ethClient, Connector]) fetchNativeInfo(
+	tokenAddr common.Address,
+	tokenChain vaa.ChainID,
+) (nativeChain vaa.ChainID, nativeAddr common.Address, err error) {
+	tv.logger.Debug("checking if ERC20 asset is wrapped")
+
+	wrapped, isWrappedErr := tv.isWrappedAsset(tokenAddr)
+	if isWrappedErr != nil {
+		return 0, ZERO_ADDRESS, errors.Join(errors.New("could not check if asset was wrapped"), isWrappedErr)
+	}
+
+	if !wrapped {
+		tv.logger.Debug("asset is native (not wrapped)", zap.String("tokenAddr", tokenAddr.String()))
+		return 0, ZERO_ADDRESS, nil
+	}
+
+	// Unwrap the asset
+	unwrapped, unwrapErr := tv.unwrapIfWrapped(tokenAddr.Bytes(), tokenChain)
+	if unwrapErr != nil {
+		return 0, ZERO_ADDRESS, errors.Join(errors.New("error when unwrapping asset"), unwrapErr)
+	}
+
+	// Asset is wrapped but not in wrappedAsset map for the Token Bridge.
+	if cmp(unwrapped, ZERO_ADDRESS) == 0 {
+		return 0, ZERO_ADDRESS, errors.New("asset is wrapped but unwrapping gave the zero address. this is an unusual asset or there is a bug in the program")
+	}
+
+	// Get the native chain ID
+	nativeChain, chainIdErr := tv.chainId(unwrapped)
+	if chainIdErr != nil {
+		return 0, ZERO_ADDRESS, errors.Join(errors.New("error when fetching chain ID"), chainIdErr)
+	}
+
+	return nativeChain, nativeAddr, nil
+}
+
+// ParseReceipt converts a go-ethereum receipt struct into a TransferReceipt.
+// It makes use of the ethConnector to parse information from the logs within
+// the receipt. This function is mainly helpful to isolate the parsing code
+// from the verification logic, which makes the latter easier to test without
+// needing an active RPC connection.
+
+// This function parses only events with topics needed for Transfer
+// Verification. Any other events will be discarded.
+// This function is not responsible for checking that the values for the
+// various fields are relevant, only that they are well-formed.
+func (tv *TransferVerifier[evmClient, connector]) ParseReceipt(
+	receipt *geth.Receipt,
+) (*TransferReceipt, error) {
+	// Sanity checks. Shouldn't be necessary but no harm
+	if receipt == nil {
+		return &TransferReceipt{}, errors.New("receipt parameter is nil")
+	}
+	if receipt.Status != 1 {
+		return &TransferReceipt{}, errors.New("non-success transaction status")
+	}
+	if len(receipt.Logs) == 0 {
+		return &TransferReceipt{}, errors.New("no logs in receipt")
+	}
+
+	var deposits []*NativeDeposit
+	var transfers []*ERC20Transfer
+	var messagePublications []*LogMessagePublished
+
+	// Aggregate all errors without returning early
+	var receiptErr error
+
+	for _, log := range receipt.Logs {
+		switch log.Topics[0] {
+		case common.HexToHash(EVENTHASH_WETH_DEPOSIT):
+			deposit, depositErr := DepositFromLog(log, tv.chainIds.wormholeChainId)
+
+			if depositErr != nil {
+				tv.logger.Error("error when parsing Deposit from log",
+					zap.Error(depositErr),
+					zap.String("txHash", log.TxHash.String()),
+				)
+				receiptErr = errors.Join(receiptErr, depositErr)
+				continue
+			}
+
+			tv.logger.Debug("adding deposit", zap.String("deposit", deposit.String()))
+			deposits = append(deposits, deposit)
+		case common.HexToHash(EVENTHASH_ERC20_TRANSFER):
+			transfer, transferErr := ERC20TransferFromLog(log, tv.chainIds.wormholeChainId)
+
+			if transferErr != nil {
+				tv.logger.Error("error when parsing ERC20 Transfer from log",
+					zap.Error(transferErr),
+					zap.String("txHash", log.TxHash.String()),
+				)
+				receiptErr = errors.Join(receiptErr, transferErr)
+				continue
+			}
+
+			tv.logger.Debug("adding transfer", zap.String("transfer", transfer.String()))
+			transfers = append(transfers, transfer)
+		case common.HexToHash(EVENTHASH_WORMHOLE_LOG_MESSAGE_PUBLISHED):
+			if len(log.Data) == 0 {
+				// tv.logger.Error("receipt data has length 0")
+				receiptErr = errors.Join(receiptErr, errors.New("receipt data has length 0"))
+				continue
+			}
+
+			logMessagePublished, parseLogErr := tv.evmConnector.ParseLogMessagePublished(*log)
+			if parseLogErr != nil {
+				tv.logger.Error("failed to parse LogMessagePublished event")
+				receiptErr = errors.Join(receiptErr, parseLogErr)
+				continue
+			}
+
+			// If there is no payload, then there's no point in further processing.
+			// This should never happen.
+			if len(logMessagePublished.Payload) == 0 {
+				emptyErr := errors.New("a LogMessagePayload event from the token bridge was received with a zero-sized payload")
+				tv.logger.Error(
+					"issue parsing receipt",
+					zap.Error(emptyErr),
+					zap.String("txhash", log.TxHash.String()))
+				receiptErr = errors.Join(receiptErr, emptyErr)
+				continue
+			}
+
+			// This check is required. Payload parsing will fail if performed on a message emitted from another contract or sent
+			// by a contract other than the token bridge
+			if log.Address != tv.Addresses.CoreBridgeAddr {
+				tv.logger.Debug("skip: LogMessagePublished not emitted from the core bridge",
+					zap.String("emitter", log.Address.String()))
+				continue
+			}
+
+			if log.Topics[1] != tv.Addresses.TokenBridgeAddr.Hash() {
+				tv.logger.Debug("skip: LogMessagePublished with sender not equal to the token bridge",
+					zap.String("sender", log.Topics[1].String()),
+					zap.String("tokenBridgeAddr", tv.Addresses.TokenBridgeAddr.Hex()),
+				)
+				continue
+			}
+
+			// Validation is complete. Now, parse the raw bytes of the payload into a TransferDetails instance.
+			transferDetails, parsePayloadErr := parseLogMessagePublishedPayload(logMessagePublished.Payload)
+			if parsePayloadErr != nil {
+				receiptErr = errors.Join(receiptErr, parsePayloadErr)
+				continue
+			}
+
+			// If everything went well, append the message publication
+			messagePublications = append(messagePublications, &LogMessagePublished{
+				EventEmitter:    log.Address,
+				MsgSender:       logMessagePublished.Sender,
+				TransferDetails: transferDetails,
+			})
+
+		}
+	}
+
+	if len(messagePublications) == 0 {
+		receiptErr = errors.Join(receiptErr, errors.New("parsed receipts but received no LogMessagePublished events"))
+	}
+
+	if receiptErr != nil {
+		return &TransferReceipt{}, receiptErr
+	}
+
+	return &TransferReceipt{
+			Deposits:           &deposits,
+			Transfers:          &transfers,
+			MessagePublicatons: &messagePublications},
+		nil
+}
+
// InvariantError is a custom error type used to signal that a core invariant
// of the token bridge has been violated (e.g. more value requested out of the
// bridge than was deposited in).
type InvariantError struct {
	Msg string
}

// Error implements the error interface, prefixing the message so invariant
// violations are easy to identify in logs.
func (i InvariantError) Error() string {
	return fmt.Sprint("invariant violated: ", i.Msg)
}
+
+// ProcessReceipt verifies that a receipt for a LogMessagedPublished event does
+// not verify a fundamental invariant of Wormhole token transfers: when the
+// core bridge reports a transfer has occurred, there must be a corresponding
+// transfer in the token bridge. This is determined by iterating through the
+// logs of the receipt and ensuring that the sum transferred into the token
+// bridge does not exceed the sum emitted by the core bridge.
+// If this function returns an error, that means there is some serious trouble.
+// An error should be returned if a deposit or transfer in the receipt is missing
+// crucial information, or else if the sum of the funds in are less than
+// the funds out.
+// When modifying this code, be cautious not to return errors unless something
+// is really wrong.
+func (tv *TransferVerifier[evmClient, connector]) ProcessReceipt(
+	receipt *TransferReceipt,
+) (summary *ReceiptSummary, err error) {
+
+	tv.logger.Debug("beginning to process receipt",
+		zap.String("receipt", receipt.String()),
+	)
+
+	summary = NewReceiptSummary()
+
+	// Sanity checks.
+	if receipt == nil {
+		return summary, errors.New("got nil transfer receipt")
+	}
+	if len(*receipt.MessagePublicatons) == 0 {
+		return summary, errors.New("no message publications in receipt")
+	}
+
+	if len(*receipt.Deposits) == 0 && len(*receipt.Transfers) == 0 {
+		return summary, errors.New("invalid receipt: no deposits and no transfers")
+	}
+
+	// Process NativeDeposits
+	for _, deposit := range *receipt.Deposits {
+
+		validateErr := validate[*NativeDeposit](deposit)
+		if validateErr != nil {
+			return summary, validateErr
+		}
+
+		key, relevant := relevant[*NativeDeposit](deposit, tv.Addresses)
+		if !relevant {
+			tv.logger.Debug("skip: irrelevant deposit",
+				zap.String("emitter", deposit.Emitter().String()),
+				zap.String("deposit", deposit.String()),
+			)
+			continue
+		}
+		if key == "" {
+			return summary, errors.New("couldn't get key")
+		}
+
+		upsert(&summary.in, key, deposit.TransferAmount())
+
+		tv.logger.Debug("a deposit into the token bridge was recorded",
+			zap.String("tokenAddress", deposit.TokenAddress.String()),
+			zap.String("amount", deposit.Amount.String()))
+	}
+
+	// Process ERC20Transfers
+	for _, transfer := range *receipt.Transfers {
+		validateErr := validate[*ERC20Transfer](transfer)
+		if validateErr != nil {
+			return summary, validateErr
+		}
+
+		key, relevant := relevant[*ERC20Transfer](transfer, tv.Addresses)
+		if !relevant {
+			tv.logger.Debug("skipping irrelevant transfer",
+				zap.String("emitter", transfer.Emitter().String()),
+				zap.String("erc20Transfer", transfer.String()))
+			continue
+		}
+		if key == "" {
+			return summary, errors.New("couldn't get key")
+		}
+
+		upsert(&summary.in, key, transfer.TransferAmount())
+
+		tv.logger.Debug("a transfer into the token bridge was recorded",
+			zap.String("tokenAddress", transfer.TokenAddress.String()),
+			zap.String("amount", transfer.Amount.String()))
+	}
+
+	// Process LogMessagePublished events.
+	for _, message := range *receipt.MessagePublicatons {
+		td := message.TransferDetails
+
+		validateErr := validate[*LogMessagePublished](message)
+		if validateErr != nil {
+			return summary, validateErr
+		}
+
+		key, relevant := relevant[*LogMessagePublished](message, tv.Addresses)
+		if !relevant {
+			tv.logger.Debug("skip: irrelevant LogMessagePublished event")
+			continue
+		}
+
+		upsert(&summary.out, key, message.TransferAmount())
+
+		tv.logger.Debug("successfully parsed a LogMessagePublished event payload",
+			zap.String("tokenAddress", td.OriginAddress.String()),
+			zap.String("tokenChain", td.TokenChain.String()),
+			zap.String("amount", td.Amount.String()))
+
+		summary.logsProcessed++
+	}
+
+	err = nil
+	for key, amountOut := range summary.out {
+		var localErr error
+		if amountIn, exists := summary.in[key]; !exists {
+			tv.logger.Error("transfer-out request for tokens that were never deposited",
+				zap.String("key", key))
+			localErr = &InvariantError{Msg: "transfer-out request for tokens that were never deposited"}
+		} else {
+			if amountOut.Cmp(amountIn) == 1 {
+				tv.logger.Error("requested amount out is larger than amount in")
+				localErr = &InvariantError{Msg: "requested amount out is larger than amount in"}
+			}
+
+			// Normally the amounts should be equal. This case indicates
+			// an unusual transfer or else a bug in the program.
+			if amountOut.Cmp(amountIn) == -1 {
+				tv.logger.Info("requested amount in is larger than amount out.",
+					zap.String("key", key),
+					zap.String("amountIn", amountIn.String()),
+					zap.String("amountOut", amountOut.String()),
+				)
+			}
+
+			tv.logger.Debug("bridge request processed",
+				zap.String("key", key),
+				zap.String("amountOut", amountOut.String()),
+				zap.String("amountIn", amountIn.String()))
+		}
+
+		if err != nil {
+			err = errors.Join(err, localErr)
+		} else {
+			err = localErr
+		}
+	}
+
+	return
+}
+
+// parseLogMessagePublishedPayload parses the details of a transfer from a
+// LogMessagePublished event's Payload field.
+func parseLogMessagePublishedPayload(
+	// Corresponds to LogMessagePublished.Payload as returned by the ABI parsing operation in the ethConnector.
+	data []byte,
+) (*TransferDetails, error) {
+	// This method is already called by DecodeTransferPayloadHdr but the
+	// error message is unclear. Doing a manual check here lets us return a
+	// more helpful error message.
+	if !vaa.IsTransfer(data) {
+		return nil, errors.New("payload is not a transfer type. no need to process")
+	}
+
+	// Note: vaa.DecodeTransferPayloadHdr performs validation on data, e.g. length checks.
+	hdr, err := vaa.DecodeTransferPayloadHdr(data)
+	if err != nil {
+		return nil, err
+	}
+	return &TransferDetails{
+		PayloadType:      VAAPayloadType(hdr.Type),
+		AmountRaw:        hdr.Amount,
+		OriginAddressRaw: hdr.OriginAddress.Bytes(),
+		TokenChain:       vaa.ChainID(hdr.OriginChain),
+		TargetAddress:    hdr.TargetAddress,
+		// these fields are populated by RPC calls later
+		Amount:        nil,
+		OriginAddress: common.Address{},
+	}, nil
+}
+
+// fetchLogMessageDetails makes requests to the token bridge and token contract to get detailed, wormhole-specific information about
+// the transfer details parsed from a LogMessagePublished event.
+func (tv *TransferVerifier[ethClient, connector]) fetchLogMessageDetails(details *TransferDetails) (newDetails *TransferDetails, decimalErr error) {
+	// This function adds information to a TransferDetails struct, filling out its uninitialized fields.
+	// It populates the following fields:
+	// - Amount: populate the Amount field by denormalizing details.AmountRaw.
+	// - OriginAddress: use the wormhole ChainID and OriginAddressRaw to determine whether the token is wrapped.
+
+	// If the token was minted on the chain monitored by this program, set its OriginAddress equal to OriginAddressRaw.
+	var originAddress common.Address
+	if details.TokenChain == tv.chainIds.wormholeChainId {
+		// The token was minted on this chain.
+		originAddress = common.BytesToAddress(details.OriginAddressRaw)
+		tv.logger.Debug("token is native. no need to unwrap",
+			zap.String("originAddressRaw", fmt.Sprintf("%x", details.OriginAddressRaw)),
+		)
+	} else {
+		// The token was minted on a foreign chain. Unwrap it.
+		tv.logger.Debug("unwrapping",
+			zap.String("originAddressRaw", fmt.Sprintf("%x", details.OriginAddressRaw)),
+		)
+		// If the token was minted on another chain, try to unwrap it.
+		unwrappedAddress, unwrapErr := tv.unwrapIfWrapped(details.OriginAddressRaw, details.TokenChain)
+		if unwrapErr != nil {
+			return newDetails, unwrapErr
+		}
+
+		if cmp(unwrappedAddress, ZERO_ADDRESS) == 0 {
+			// If the unwrap function returns the zero address, that means
+			// it has no knowledge of this token. In this case set the
+			// OriginAddress to OriginAddressRaw rather than to the zero
+			// address. The program will still be able to know that this is
+			// a non-native address by examining the chain ID.
+			//
+			// This case can occur if a token is transferred when the wrapped asset hasn't been set-up yet.
+			// https://github.com/wormhole-foundation/wormhole/blob/main/whitepapers/0003_token_bridge.md#setup-of-wrapped-assets
+			tv.logger.Warn("unwrap call for foreign asset returned the zero address. Either token has not been registered or there is a bug in the program. .",
+				zap.String("originAddressRaw", details.OriginAddress.String()),
+				zap.String("tokenChain", details.TokenChain.String()),
+			)
+			return newDetails, errors.New("unwrap call for foreign asset returned the zero address. Either token has not been registered or there is a bug in the program")
+		} else {
+			originAddress = unwrappedAddress
+		}
+	}
+
+	// Fetch the token's decimals and update TransferDetails with the denormalized amount.
+	// This must be done on the unwrapped address.
+	decimals, decimalErr := tv.getDecimals(originAddress)
+	if decimalErr != nil {
+		return newDetails, decimalErr
+	}
+
+	denormalized := denormalize(details.AmountRaw, decimals)
+
+	newDetails = details
+	newDetails.OriginAddress = originAddress
+	newDetails.Amount = denormalized
+	return newDetails, nil
+}
+
+// upsert inserts a new key and value into a map or update the value if the key already exists.
+func upsert(
+	dict *map[string]*big.Int,
+	key string,
+	amount *big.Int,
+) {
+	d := *dict
+	if _, exists := d[key]; !exists {
+		d[key] = new(big.Int).Set(amount)
+	} else {
+		d[key] = new(big.Int).Add(d[key], amount)
+	}
+}

+ 736 - 0
node/pkg/txverifier/evm_test.go

@@ -0,0 +1,736 @@
+package txverifier
+
+// TODO:
+// - more robust mocking of RPC return values so that we can test multiple cases
+// - add tests checking amount values from ProcessReceipt
+
+import (
+	"context"
+	"math/big"
+	"testing"
+
+	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/core/types"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+
+	ethereum "github.com/ethereum/go-ethereum"
+
+	"github.com/certusone/wormhole/node/pkg/watchers/evm/connectors/ethabi"
+	ipfslog "github.com/ipfs/go-log/v2"
+)
+
+// Important addresses for testing. Arbitrary, but Ethereum mainnet values used here
+var (
+	coreBridgeAddr  = common.HexToAddress("0x98f3c9e6E3fAce36bAAd05FE09d375Ef1464288B")
+	tokenBridgeAddr = common.HexToAddress("0x3ee18B2214AFF97000D974cf647E7C347E8fa585")
+	nativeAddr      = common.HexToAddress("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") // weth
+	usdcAddr        = common.HexToAddress("0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48")
+	eoaAddrGeth     = common.HexToAddress("0xbeefcafe")
+	eoaAddrVAA, _   = vaa.BytesToAddress([]byte{0xbe, 0xef, 0xca, 0xfe})
+)
+
// mockConnections bundles the TransferVerifier under test (built on mocked
// client/connector types) with the test context and its cancel function, so
// each test can tear everything down via `defer mocks.ctxCancel()`.
type mockConnections struct {
	transferVerifier *TransferVerifier[*mockClient, *mockConnector]
	ctx              *context.Context
	ctxCancel        context.CancelFunc
}
+
// Stub struct, only exist to implement the interfaces
type mockClient struct{}

// CallContract is a stub implementation of the evmClient interface. It ignores
// the call message entirely and always returns 8 as a left-padded 32-byte word,
// i.e. every token queried through this mock reports 8 decimals.
// TODO add a helper method to actually populate the results of the mocked method
// TODO this should maybe be mocked differently. CallContract is used for both 'get decimals' and 'unwrap'.
// Depending on how much mocking we want to do, this might need edits. On the other hand, we don't really need to
// test geth's functions and this functionality is better handled by integration testing anyway
func (m *mockClient) CallContract(ctx context.Context, msg ethereum.CallMsg, blockNumber *big.Int) ([]byte, error) {
	// this is used by the calling code only to get decimal values
	// always return 8
	return common.LeftPadBytes([]byte{0x08}, 32), nil
}
+
// mockConnector is a stub implementation of the connector interface.
type mockConnector struct{}

// ParseLogMessagePublished ignores the log's contents and returns a fixed
// LogMessagePublished event: sender = token bridge, sequence/nonce = 0, and a
// transferTokens payload with amount 1.
// TODO add a helper method to actually populate the results of the mocked method
// TODO add different results here so we can test different values
func (c *mockConnector) ParseLogMessagePublished(log types.Log) (*ethabi.AbiLogMessagePublished, error) {
	// add mock data
	return &ethabi.AbiLogMessagePublished{
		Sender:   tokenBridgeAddr,
		Sequence: 0,
		Nonce:    0,
		Payload:  transferTokensPayload(big.NewInt(1)),
		Raw:      log,
	}, nil
}

// TransactionReceipt is a stub that returns no receipt and no error; tests
// exercising receipt parsing construct their receipts directly instead.
func (c *mockConnector) TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error) {
	return nil, nil
}
+
// Create the connections and loggers expected by the functions we are testing
func setup() *mockConnections {
	logger := ipfslog.Logger("wormhole-transfer-verifier-tests").Desugar()
	ipfslog.SetAllLoggers(ipfslog.LevelDebug)
	// The verifier is configured with the Ethereum mainnet fixture addresses
	// declared above and Ethereum chain IDs (EVM chain 1 / Wormhole chain 2).
	transferVerifier := &TransferVerifier[*mockClient, *mockConnector]{
		Addresses: &TVAddresses{
			CoreBridgeAddr:    coreBridgeAddr,
			TokenBridgeAddr:   tokenBridgeAddr,
			WrappedNativeAddr: nativeAddr,
		},
		chainIds:     &chainIds{evmChainId: 1, wormholeChainId: vaa.ChainIDEthereum},
		evmConnector: &mockConnector{},
		client:       &mockClient{},
		logger:       *logger,
	}
	ctx, ctxCancel := context.WithCancel(context.Background())

	return &mockConnections{
		transferVerifier,
		&ctx,
		ctxCancel,
	}
}
+
+// Define some transfer logs to make it easier to write tests for parsing receipts.
+// Typical receipt logs that can be included in various receipt test cases
+var (
+	// A valid transfer log for an ERC20 transfer event.
+	transferLog = &types.Log{
+		Address: usdcAddr,
+		Topics: []common.Hash{
+			// Transfer(address,address,uint256)
+			common.HexToHash(EVENTHASH_ERC20_TRANSFER),
+			// from
+			eoaAddrGeth.Hash(),
+			// to
+			tokenBridgeAddr.Hash(),
+		},
+		// amount
+		Data: common.LeftPadBytes([]byte{0x01}, 32),
+	}
+
+	// A valid transfer log for a log message published event.
+	validLogMessagedPublishedLog = &types.Log{
+		Address: coreBridgeAddr,
+		Topics: []common.Hash{
+			// LogMessagePublished(address indexed sender, uint64 sequence, uint32 nonce, bytes payload, uint8 consistencyLevel);
+			common.HexToHash(EVENTHASH_WORMHOLE_LOG_MESSAGE_PUBLISHED),
+			// sender
+			tokenBridgeAddr.Hash(),
+		},
+		Data: receiptData(big.NewInt(255)),
+	}
+)
+
+var (
+	validTransferReceipt = &types.Receipt{
+		Status: types.ReceiptStatusSuccessful,
+		Logs: []*types.Log{
+			transferLog,
+			validLogMessagedPublishedLog,
+		},
+	}
+	// Invalid: no erc20 transfer, so amount out > amount in
+	// invalidTransferReceipt = &types.Receipt{
+	// 	Status: types.ReceiptStatusSuccessful,
+	// 	Logs: []*types.Log{
+	// 		logMessagedPublishedLog,
+	// 	},
+	// }
+	// TODO: Invalid: erc20 transfer amount is less than payload amount, so amount out > amount in
+	// invalidTransferReceipt = &types.Receipt{
+	// 	Status:            types.ReceiptStatusSuccessful,
+	// 	Logs: []*types.Log{logMessagedPublishedLog},
+	// }
+)
+
// TestParseReceiptHappyPath checks that ParseReceipt converts a well-formed
// geth receipt (one ERC20 Transfer log plus one LogMessagePublished log) into
// a TransferReceipt containing the expected transfers and message
// publications. Amount and OriginAddress are deliberately left unset by
// ParseReceipt and are asserted to be zero-valued here.
func TestParseReceiptHappyPath(t *testing.T) {
	mocks := setup()
	defer mocks.ctxCancel()

	// t.Parallel() // marks TLog as capable of running in parallel with other tests
	tests := map[string]struct {
		receipt  *types.Receipt
		expected *TransferReceipt
	}{
		"valid transfer receipt, single LogMessagePublished": {
			validTransferReceipt,
			&TransferReceipt{
				Deposits: &[]*NativeDeposit{},
				Transfers: &[]*ERC20Transfer{
					{
						From:         eoaAddrGeth,
						To:           tokenBridgeAddr,
						TokenAddress: usdcAddr,
						TokenChain:   vaa.ChainIDEthereum,
						Amount:       big.NewInt(1),
					},
				},
				MessagePublicatons: &[]*LogMessagePublished{
					{
						EventEmitter: coreBridgeAddr,
						MsgSender:    tokenBridgeAddr,
						TransferDetails: &TransferDetails{
							PayloadType:      TransferTokens,
							OriginAddressRaw: common.LeftPadBytes(usdcAddr.Bytes(), EVM_WORD_LENGTH),
							TokenChain:       2, // Wormhole ethereum chain ID
							AmountRaw:        big.NewInt(1),
							TargetAddress:    eoaAddrVAA,
							// Amount and OriginAddress are not populated by ParseReceipt
							// Amount: big.NewInt(1),
							// OriginAddress: erc20Addr,
						},
					},
				},
			},
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {

			transferReceipt, err := mocks.transferVerifier.ParseReceipt(test.receipt)
			require.NoError(t, err)

			// Note: the data for this test uses only a single transfer. However, if multiple transfers
			// are used, iteration over these slices will be non-deterministic which might result in a flaky
			// test.
			expectedTransfers := *test.expected.Transfers
			assert.Equal(t, len(expectedTransfers), len(*transferReceipt.Transfers))
			for _, ret := range *transferReceipt.Transfers {
				assert.Equal(t, expectedTransfers[0].To, ret.To)
				assert.Equal(t, expectedTransfers[0].From, ret.From)
				assert.Equal(t, expectedTransfers[0].TokenAddress, ret.TokenAddress)
				// Compare big.Int values via Cmp rather than pointer equality.
				assert.Zero(t, ret.Amount.Cmp(expectedTransfers[0].Amount))
			}

			expectedMessages := *test.expected.MessagePublicatons
			assert.Equal(t, len(expectedMessages), len(*transferReceipt.MessagePublicatons))
			for _, ret := range *transferReceipt.MessagePublicatons {
				// TODO: switch argument order to (expected, actual)
				assert.Equal(t, ret.MsgSender, expectedMessages[0].MsgSender)
				assert.Equal(t, ret.EventEmitter, expectedMessages[0].EventEmitter)
				assert.Equal(t, ret.TransferDetails, expectedMessages[0].TransferDetails)

				t.Logf("Expected AmountRaw: %s", expectedMessages[0].TransferDetails.AmountRaw.String())
				t.Logf("Actual AmountRaw: %s", ret.TransferDetails.AmountRaw.String())
				assert.Zero(t, expectedMessages[0].TransferDetails.AmountRaw.Cmp(ret.TransferDetails.AmountRaw))

				// Amount and OriginAddress are not populated by ParseReceipt
				assert.Equal(t, common.BytesToAddress([]byte{0x00}), ret.TransferDetails.OriginAddress)
				assert.Nil(t, ret.TransferDetails.Amount)
			}

		})
	}
}
+
// TestParseReceiptErrors feeds ParseReceipt malformed receipts — failed
// status, no logs, deposit/transfer logs with missing topics, and a
// LogMessagePublished event with an empty payload — and asserts that each
// case returns an error together with an empty TransferReceipt.
func TestParseReceiptErrors(t *testing.T) {
	mocks := setup()
	defer mocks.ctxCancel()

	// Create a log containing an invalid deposit log
	badDepositLog := *transferLog
	badDepositLog.Topics = []common.Hash{
		common.HexToHash(EVENTHASH_WETH_DEPOSIT),
		// Omit essential topics
	}

	// Create a log containing an invalid transfer log
	badTransferLog := *transferLog
	badTransferLog.Topics = []common.Hash{
		common.HexToHash(EVENTHASH_ERC20_TRANSFER),
		// Omit essential topics
	}

	// Create a log containing a LogMessagePublished event without any payload
	emptyPayloadLogMessagePublishedLog := *validLogMessagedPublishedLog
	emptyPayloadLogMessagePublishedLog.Data = []byte{}

	// TODO: Create a receipt with the wrong payload type (not a token transfer).
	// wrongPayloadTypeLogMessagePublishedLog := types.Log{
	// 	Address: coreBridgeAddr,
	// 	Topics: []common.Hash{
	// 		// LogMessagePublished(address indexed sender, uint64 sequence, uint32 nonce, bytes payload, uint8 consistencyLevel);
	// 		common.HexToHash(EVENTHASH_WORMHOLE_LOG_MESSAGE_PUBLISHED),
	// 		// sender
	// 		tokenBridgeAddr.Hash(),
	// 	},
	// 	Data: receiptData(big.NewInt(1).SetBytes([]byte{0xaa})),
	// }
	// // The LogMessagePublished payload type occurs in the 6th EVM word slot, and is left-padded with zeroes.
	// // Note that the value is 0-indexed
	// payloadTypeOffset := EVM_WORD_LENGTH * 5
	// wrongPayloadTypeLogMessagePublishedLog.Data[payloadTypeOffset] = 0x02

	tests := map[string]struct {
		receipt *types.Receipt
	}{
		"wrong receipt status": {
			receipt: &types.Receipt{
				Status: types.ReceiptStatusFailed,
				Logs: []*types.Log{
					validLogMessagedPublishedLog,
				},
			},
		},
		"no logs": {
			receipt: &types.Receipt{
				Status: types.ReceiptStatusSuccessful,
				Logs:   []*types.Log{},
			},
		},
		"invalid deposit log in receipt": {
			receipt: &types.Receipt{
				Status: types.ReceiptStatusSuccessful,
				Logs: []*types.Log{
					&badDepositLog,
				},
			},
		},
		"invalid transfer log in receipt": {
			receipt: &types.Receipt{
				Status: types.ReceiptStatusSuccessful,
				Logs: []*types.Log{
					&badTransferLog,
				},
			},
		},
		"LogMessagePublished with empty payload": {
			receipt: &types.Receipt{
				Status: types.ReceiptStatusSuccessful,
				Logs: []*types.Log{
					&emptyPayloadLogMessagePublishedLog,
				},
			},
		},
		// TODO: Need to create a different mock for ParseLogMessagePublished in order to test this
		// "LogMessagePublished with wrong payload type": {
		// 	receipt: &types.Receipt{
		// 		Status: types.ReceiptStatusSuccessful,
		// 		Logs: []*types.Log{
		// 			&wrongPayloadTypeLogMessagePublishedLog,
		// 		},
		// 	},
		// },
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {

			receipt, err := mocks.transferVerifier.ParseReceipt(test.receipt)
			require.Error(t, err)
			// NOTE(review): this dereference assumes ParseReceipt returns a
			// non-nil (empty) receipt alongside its error — confirm against
			// the implementation.
			assert.Equal(t, TransferReceipt{}, *receipt)
		})
	}
}
+
+func TestParseERC20TransferEvent(t *testing.T) {
+	type parsedValues struct {
+		from   common.Address
+		to     common.Address
+		amount *big.Int
+	}
+	erc20TransferHash := common.HexToHash(EVENTHASH_ERC20_TRANSFER)
+	t.Parallel() // marks TLog as capable of running in parallel with other tests
+	tests := map[string]struct {
+		topics   []common.Hash
+		data     []byte
+		expected *parsedValues
+	}{
+		"well-formed": {
+			topics: []common.Hash{
+				erc20TransferHash,
+				eoaAddrGeth.Hash(),
+				tokenBridgeAddr.Hash(),
+			},
+			data: common.LeftPadBytes([]byte{0x01}, 32),
+			expected: &parsedValues{
+				from:   eoaAddrGeth,
+				to:     tokenBridgeAddr,
+				amount: new(big.Int).SetBytes([]byte{0x01}),
+			},
+		},
+		"data too short": {
+			topics: []common.Hash{
+				erc20TransferHash,
+				eoaAddrGeth.Hash(),
+				tokenBridgeAddr.Hash(),
+			},
+			// should be 32 bytes exactly
+			data:     []byte{0x01},
+			expected: &parsedValues{}, // everything nil for its type
+		},
+		"wrong number of topics": {
+			// only 1 topic: should be 3
+			topics: []common.Hash{
+				erc20TransferHash,
+			},
+			data:     common.LeftPadBytes([]byte{0x01}, 32),
+			expected: &parsedValues{}, // everything nil for its type
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			from, to, amount := parseERC20TransferEvent(test.topics, test.data)
+			assert.Equal(t, test.expected.from, from)
+			assert.Equal(t, test.expected.to, to)
+			assert.Zero(t, amount.Cmp(test.expected.amount))
+		})
+	}
+}
+
+func TestParseWNativeDepositEvent(t *testing.T) {
+	{
+		type parsedValues struct {
+			destination common.Address
+			amount      *big.Int
+		}
+		t.Parallel() // marks TLog as capable of running in parallel with other tests
+
+		wethDepositHash := common.HexToHash(EVENTHASH_WETH_DEPOSIT)
+		tests := map[string]struct {
+			topics   []common.Hash
+			data     []byte
+			expected *parsedValues
+		}{
+			"well-formed": {
+				topics: []common.Hash{
+					wethDepositHash,
+					tokenBridgeAddr.Hash(),
+				},
+				data: common.LeftPadBytes([]byte{0x01}, 32),
+				expected: &parsedValues{
+					destination: tokenBridgeAddr,
+					amount:      new(big.Int).SetBytes([]byte{0x01}),
+				},
+			},
+			"data too short": {
+				topics: []common.Hash{
+					wethDepositHash,
+					tokenBridgeAddr.Hash(),
+				},
+				// should be 32 bytes exactly
+				data:     []byte{0x01},
+				expected: &parsedValues{}, // everything nil for its type
+			},
+			"wrong number of topics": {
+				// only 1 topic: should be 2
+				topics: []common.Hash{
+					wethDepositHash,
+				},
+				data:     common.LeftPadBytes([]byte{0x01}, 32),
+				expected: &parsedValues{}, // everything nil for its type
+			},
+		}
+
+		for name, test := range tests {
+			t.Run(name, func(t *testing.T) {
+				t.Parallel() // marks each test case as capable of running in parallel with each other
+
+				destination, amount := parseWNativeDepositEvent(test.topics, test.data)
+				assert.Equal(t, test.expected.destination, destination)
+				assert.Zero(t, amount.Cmp(test.expected.amount))
+			})
+		}
+	}
+
+}
+
+func TestProcessReceipt(t *testing.T) {
+	mocks := setup()
+
+	tests := map[string]struct {
+		transferReceipt *TransferReceipt
+		// number of receipts successfully processed
+		expected    int
+		shouldError bool
+	}{
+		// TODO test cases:
+		// - multiple transfers adding up to the right amount
+		// - multiple depoists adding up to the right amount
+		// - multiple LogMessagePublished events
+		"valid transfer: amounts match, deposit": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{
+					{
+						TokenAddress: nativeAddr,
+						TokenChain:   vaa.ChainIDEthereum,
+						Receiver:     tokenBridgeAddr,
+						Amount:       big.NewInt(123),
+					},
+				},
+				Transfers: &[]*ERC20Transfer{},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: nativeAddr.Bytes(),
+							OriginAddress:    nativeAddr,
+							TargetAddress:    eoaAddrVAA,
+							TokenChain:       2,
+							AmountRaw:        big.NewInt(123),
+							Amount:           big.NewInt(123),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: false,
+		},
+		"valid transfer: amounts match, transfer": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{},
+				Transfers: &[]*ERC20Transfer{
+					{
+						TokenAddress: usdcAddr,
+						TokenChain:   vaa.ChainIDEthereum,
+						From:         eoaAddrGeth,
+						To:           tokenBridgeAddr,
+						Amount:       big.NewInt(456),
+					},
+				},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: usdcAddr.Bytes(),
+							OriginAddress:    usdcAddr,
+							TokenChain:       2,
+							TargetAddress:    eoaAddrVAA,
+							AmountRaw:        big.NewInt(456),
+							Amount:           big.NewInt(456),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: false,
+		},
+		"valid transfer: amount in is greater than amount out, deposit": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{
+					{
+						TokenAddress: nativeAddr,
+						TokenChain:   vaa.ChainIDEthereum,
+						Receiver:     tokenBridgeAddr,
+						Amount:       big.NewInt(999),
+					},
+				},
+				Transfers: &[]*ERC20Transfer{},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: nativeAddr.Bytes(),
+							TokenChain:       2,
+							OriginAddress:    nativeAddr,
+							TargetAddress:    eoaAddrVAA,
+							AmountRaw:        big.NewInt(321),
+							Amount:           big.NewInt(321),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: false,
+		},
+		"valid transfer: amount in is greater than amount out, transfer": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{},
+				Transfers: &[]*ERC20Transfer{
+					{
+						TokenAddress: usdcAddr,
+						TokenChain:   vaa.ChainIDEthereum,
+						From:         eoaAddrGeth,
+						To:           tokenBridgeAddr,
+						Amount:       big.NewInt(999),
+					},
+				},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: usdcAddr.Bytes(),
+							OriginAddress:    usdcAddr,
+							TargetAddress:    eoaAddrVAA,
+							TokenChain:       2,
+							AmountRaw:        big.NewInt(321),
+							Amount:           big.NewInt(321),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: false,
+		},
+		"invalid transfer: amount in too low, deposit": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{
+					{
+						TokenAddress: nativeAddr,
+						TokenChain:   NATIVE_CHAIN_ID,
+						Receiver:     tokenBridgeAddr,
+						Amount:       big.NewInt(10),
+					},
+				},
+				Transfers: &[]*ERC20Transfer{},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: nativeAddr.Bytes(),
+							OriginAddress:    nativeAddr,
+							TargetAddress:    eoaAddrVAA,
+							TokenChain:       vaa.ChainIDEthereum,
+							AmountRaw:        big.NewInt(11),
+							Amount:           big.NewInt(11),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: true,
+		},
+		"invalid transfer: amount in too low, transfer": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{},
+				Transfers: &[]*ERC20Transfer{
+					{
+						TokenAddress: usdcAddr,
+						TokenChain:   NATIVE_CHAIN_ID,
+						From:         eoaAddrGeth,
+						To:           tokenBridgeAddr,
+						Amount:       big.NewInt(1),
+					},
+				},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: nativeAddr.Bytes(),
+							OriginAddress:    nativeAddr,
+							TargetAddress:    eoaAddrVAA,
+							TokenChain:       2,
+							AmountRaw:        big.NewInt(2),
+							Amount:           big.NewInt(2),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: true,
+		},
+		"invalid transfer: transfer out after transferring a different token": {
+			transferReceipt: &TransferReceipt{
+				Deposits: &[]*NativeDeposit{},
+				Transfers: &[]*ERC20Transfer{
+					{
+						TokenAddress: usdcAddr,
+						TokenChain:   vaa.ChainIDEthereum,
+						From:         eoaAddrGeth,
+						To:           tokenBridgeAddr,
+						Amount:       big.NewInt(2),
+					},
+				},
+				MessagePublicatons: &[]*LogMessagePublished{
+					{
+						EventEmitter: coreBridgeAddr,
+						MsgSender:    tokenBridgeAddr,
+						TransferDetails: &TransferDetails{
+							PayloadType:      TransferTokens,
+							OriginAddressRaw: nativeAddr.Bytes(),
+							OriginAddress:    nativeAddr,
+							TargetAddress:    eoaAddrVAA,
+							TokenChain:       2,
+							AmountRaw:        big.NewInt(2),
+							Amount:           big.NewInt(2),
+						},
+					},
+				},
+			},
+			expected:    1,
+			shouldError: true,
+		},
+	}
+
+	for name, test := range tests {
+		t.Run(name, func(t *testing.T) {
+
+			summary, err := mocks.transferVerifier.ProcessReceipt(test.transferReceipt)
+
+			assert.Equal(t, test.expected, summary.logsProcessed, "number of processed receipts did not match")
+
+			if err != nil {
+				assert.True(t, test.shouldError, "test should have returned an error")
+				_, ok := err.(*InvariantError)
+				assert.True(t, ok, "wrong error type. expected InvariantError, got: `%w`", err)
+			} else {
+				assert.False(t, test.shouldError, "test should not have returned an error but got: `%w`", err)
+			}
+		})
+	}
+}
+
+func receiptData(payloadAmount *big.Int) (data []byte) {
+	// non-payload part of the receipt and ABI metadata fields
+	seq := common.LeftPadBytes([]byte{0x11}, 32)
+	nonce := common.LeftPadBytes([]byte{0x22}, 32)
+	offset := common.LeftPadBytes([]byte{0x80}, 32)
+	consistencyLevel := common.LeftPadBytes([]byte{0x01}, 32)
+	payloadLength := common.LeftPadBytes([]byte{0x85}, 32) // 133 for transferTokens
+
+	data = append(data, seq...)
+	data = append(data, nonce...)
+	data = append(data, offset...)
+	data = append(data, consistencyLevel...)
+	data = append(data, payloadLength...)
+	data = append(data, transferTokensPayload(payloadAmount)...)
+
+	return data
+}
+
// Generate the Payload portion of a LogMessagePublished receipt for use in unit tests.
func transferTokensPayload(payloadAmount *big.Int) (data []byte) {
	// tokenTransfer() payload format:
	//     transfer.payloadID, uint8, size: 1
	//     amount, uint256, size: 32
	//     tokenAddress, bytes32: size 32
	//     tokenChain, uint16, size 2
	//     to, bytes32: size 32
	//     toChain, uint16, size: 2
	//     fee, uint256 size: size 32
	// 1 + 32 + 32 + 2 + 32 + 2 + 32 = 133
	// See also: https://docs.soliditylang.org/en/latest/abi-spec.html

	payloadType := []byte{0x01} // transferTokens, not padded
	amount := common.LeftPadBytes(payloadAmount.Bytes(), 32)
	tokenAddress := common.LeftPadBytes(usdcAddr.Bytes(), 32)
	tokenChain := common.LeftPadBytes([]byte{0x02}, 2) // Eth wormhole chain ID, uint16
	to := common.LeftPadBytes([]byte{0xbe, 0xef, 0xca, 0xfe}, 32)
	toChain := common.LeftPadBytes([]byte{0x01}, 2) // Solana wormhole chain ID, uint16
	fee := common.LeftPadBytes([]byte{0x00}, 32)    // fee, uint256 (zero)
	data = append(data, payloadType...)
	data = append(data, amount...)
	data = append(data, tokenAddress...)
	data = append(data, tokenChain...)
	data = append(data, to...)
	data = append(data, toChain...)
	data = append(data, fee...)
	return data
}

+ 1053 - 0
node/pkg/txverifier/evmtypes.go

@@ -0,0 +1,1053 @@
+package txverifier
+
+// TODO
+// Change constant naming convention to PascalCase (maybe goimports can do this automatically)
+// Can the actual ethCalls be factored into their own function?
+
+import (
+	"bytes"
+	"context"
+	"encoding/binary"
+	"encoding/hex"
+	"errors"
+	"fmt"
+	"strconv"
+
+	"math/big"
+	"time"
+
+	connectors "github.com/certusone/wormhole/node/pkg/watchers/evm/connectors"
+	"github.com/certusone/wormhole/node/pkg/watchers/evm/connectors/ethabi"
+	"github.com/ethereum/go-ethereum"
+	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/core/types"
+	ethClient "github.com/ethereum/go-ethereum/ethclient"
+	"github.com/ethereum/go-ethereum/event"
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+	"go.uber.org/zap"
+)
+
+// Event Signatures
+const (
+	// LogMessagePublished(address indexed sender, uint64 sequence, uint32 nonce, bytes payload, uint8 consistencyLevel);
+	EVENTHASH_WORMHOLE_LOG_MESSAGE_PUBLISHED = "0x6eb224fb001ed210e379b335e35efe88672a8ce935d981a6896b27ffdf52a3b2"
+	// Transfer(address,address,uint256)
+	EVENTHASH_ERC20_TRANSFER = "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
+	// Deposit(address,uint256)
+	EVENTHASH_WETH_DEPOSIT = "0xe1fffcc4923d04b559f4d29a8bfc6cda04eb5b0d3c460751c2402c5c5cc9109c"
+)
+
+// Function signatures
+var (
+	// wrappedAsset(uint16 tokenChainId, bytes32 tokenAddress) => 0x1ff1e286
+	TOKEN_BRIDGE_WRAPPED_ASSET_SIGNATURE = []byte("\x1f\xf1\xe2\x86")
+	// isWrappedAsset(address token) => 0x1a2be4da
+	TOKEN_BRIDGE_IS_WRAPPED_ASSET_SIGNATURE = []byte("\x1a\x2b\xe4\xda")
+	// decimals() => 0x313ce567
+	ERC20_DECIMALS_SIGNATURE = []byte("\x31\x3c\xe5\x67")
+	// chainId() => 0x9a8a0592
+	WRAPPED_ERC20_CHAIN_ID_SIGNATURE = []byte("\x9a\x8a\x05\x92")
+)
+
// Fixed addresses
var (
	// The all-zero EVM address, used as a sentinel for "unknown token" and as
	// a placeholder sender. (The WETH etherscan link previously here belonged
	// to WrappedNativeAddr, not the zero address.)
	ZERO_ADDRESS = common.BytesToAddress([]byte{0x00})
	// The zero address in 32-byte VAA form.
	ZERO_ADDRESS_VAA = VAAAddrFrom(ZERO_ADDRESS)
)
+
+// EVM chain constants
+const (
+	// EVM uses 32 bytes for words. Note that vaa.Address is an alias for a slice of 32 bytes
+	EVM_WORD_LENGTH = 32
+	// The expected total number of indexed topics for an ERC20 Transfer event
+	TOPICS_COUNT_TRANSFER = 3
+	// The expected total number of indexed topics for a WETH Deposit event
+	TOPICS_COUNT_DEPOSIT = 2
+)
+
+const (
+	RPC_TIMEOUT = 10 * time.Second
+)
+
// Important addresses for Transfer Verification.
type TVAddresses struct {
	// Address of the Wormhole core bridge contract for this chain
	CoreBridgeAddr common.Address
	// Address of the Wormhole token bridge contract for this chain
	TokenBridgeAddr common.Address
	// Wrapped version of the native asset, e.g. WETH for Ethereum
	WrappedNativeAddr common.Address
}
+
// Stores the EVM chain ID and corresponding Wormhole chain ID for the current chain being monitored by the connector.
// The two numbering schemes differ: e.g. Ethereum mainnet is EVM chain 1 but Wormhole chain 2.
type chainIds struct {
	evmChainId      uint64
	wormholeChainId vaa.ChainID
}
+
// TransferVerifier contains configuration values for verifying transfers.
type TransferVerifier[E evmClient, C connector] struct {
	Addresses *TVAddresses
	// The chainId being monitored as reported by the client connector.
	chainIds *chainIds
	// Logger instance. Note: stored by value; NewTransferVerifier
	// dereferences the *zap.Logger it is given.
	logger zap.Logger
	// Corresponds to the connector interface for EVM chains
	evmConnector C
	// Corresponds to an ethClient from go-ethereum
	client E
	// Mapping to track the transactions that have been processed. Indexed by a log's txHash.
	processedTransactions map[common.Hash]*types.Receipt
	// The latest transaction block number, used to determine the size of historic receipts to keep in memory.
	lastBlockNumber uint64
	// The block height difference between the latest block and the oldest block to keep in memory.
	pruneHeightDelta uint64

	// Holds previously-recorded decimals (uint8) for token addresses
	// (common.Address) that have been observed.
	decimalsCache map[common.Address]uint8

	// Records whether an asset is wrapped but does not store the native data
	isWrappedCache map[string]bool

	// Maps the 32-byte token addresses received via LogMessagePublished
	// events to their unwrapped 20-byte addresses. This mapping is also
	// used for non-wrapped token addresses.
	wrappedCache map[string]common.Address

	// Native chain cache for wrapped assets.
	nativeChainCache map[string]vaa.ChainID
}
+
+func NewTransferVerifier(connector connectors.Connector, tvAddrs *TVAddresses, pruneHeightDelta uint64, logger *zap.Logger) (*TransferVerifier[*ethClient.Client, connectors.Connector], error) {
+	// Retrieve the chainId from the connector.
+	chainIdFromClient, err := connector.Client().ChainID(context.Background())
+	if err != nil {
+		return nil, fmt.Errorf("failed to get chain ID: %w", err)
+	}
+
+	// Fetch EVM chain ID from the connector and attempt to convert it to a Wormhole chain ID.
+	evmChainId, parseErr := strconv.ParseUint(chainIdFromClient.String(), 10, 16)
+	if parseErr != nil {
+		return nil, fmt.Errorf("Failed to parse chainId from string returned by connector client: %w", parseErr)
+	}
+
+	wormholeChainId, unregisteredErr := TryWormholeChainIdFromNative(evmChainId)
+	if unregisteredErr != nil {
+		return nil, fmt.Errorf("Could not get Wormhole chain ID from EVM chain ID: %w", unregisteredErr)
+	}
+
+	return &TransferVerifier[*ethClient.Client, connectors.Connector]{
+		Addresses: tvAddrs,
+		chainIds: &chainIds{
+			evmChainId:      evmChainId,
+			wormholeChainId: wormholeChainId,
+		},
+		logger:                *logger,
+		evmConnector:          connector,
+		client:                connector.Client(),
+		processedTransactions: make(map[common.Hash]*types.Receipt),
+		lastBlockNumber:       0,
+		pruneHeightDelta:      pruneHeightDelta,
+		decimalsCache:         make(map[common.Address]uint8),
+		isWrappedCache:        make(map[string]bool),
+		wrappedCache:          make(map[string]common.Address),
+		nativeChainCache:      make(map[string]vaa.ChainID),
+	}, nil
+}
+
// connector is the subset of the EVM watcher connector functionality used by
// the Transfer Verifier: parsing LogMessagePublished events out of raw logs
// and fetching transaction receipts.
type connector interface {
	ParseLogMessagePublished(log types.Log) (*ethabi.AbiLogMessagePublished, error)
	TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error)
}

// evmClient is the subset of go-ethereum's client interface needed for
// read-only eth_call queries (token decimals, wrapped-asset lookups).
type evmClient interface {
	// getDecimals()
	CallContract(ctx context.Context, msg ethereum.CallMsg, blockNumber *big.Int) ([]byte, error)
}
+
// Subscription wraps a connector subscription to LogMessagePublished events
// together with the channels used to deliver events and errors to consumers.
type Subscription struct {
	// TODO make generic or use an interface
	client    *ethClient.Client
	connector connectors.Connector
	// Delivery channel for parsed LogMessagePublished events.
	logC      chan *ethabi.AbiLogMessagePublished
	// Delivery channel for subscription errors.
	errC      chan error
	// Closed by Close() to signal shutdown to the background goroutine.
	quit      chan struct{}
}

// NewSubscription builds a Subscription around the given client and connector.
// All channels are unbuffered, so callers must drain Events() and Errors().
func NewSubscription(client *ethClient.Client, connector connectors.Connector) *Subscription {
	return &Subscription{
		client:    client,
		connector: connector,
		logC:      make(chan *ethabi.AbiLogMessagePublished),
		errC:      make(chan error),
		quit:      make(chan struct{}),
	}
}
+
// Subscribe creates a subscription to WatchLogMessagePublished events and will
// attempt to reconnect when errors occur, such as Websocket connection
// problems. It runs in a background goroutine until the context is cancelled
// or Close() is called.
func (s *Subscription) Subscribe(ctx context.Context) {
	go func() {
		for {
			select {
			case <-ctx.Done():
				return
			case <-s.quit:
				return
			default:
				subscription, err := s.connector.WatchLogMessagePublished(
					ctx,
					s.errC,
					s.logC,
				)

				if err != nil {
					// NOTE(review): errC is unbuffered, so this send blocks
					// until someone reads from Errors(); if no consumer is
					// draining it, the retry loop stalls here — confirm
					// callers always drain errC.
					s.errC <- fmt.Errorf("failed to subscribe to logs: %w", err)
					time.Sleep(RECONNECT_DELAY) // Wait before retrying
					continue
				}

				// Handle subscription until error occurs
				// TODO: This section of code should have a limit on the number of times it will retry
				// and fail if it can't connect a certain number of times
				err = s.handleSubscription(ctx, subscription)

				if err != nil {
					s.errC <- err
					time.Sleep(RECONNECT_DELAY) // Wait before retrying
				}
			}
		}
	}()
}
+
+// handleSubscription blocks until the context is cancelled, the Subscription
+// is closed, or the underlying subscription reports an error. The underlying
+// subscription is always unsubscribed before returning. A nil return means a
+// clean shutdown; a non-nil return wraps the subscription error.
+func (s *Subscription) handleSubscription(ctx context.Context, subscription event.Subscription) error {
+	defer subscription.Unsubscribe()
+	for {
+		select {
+		case <-ctx.Done():
+			return nil
+		case <-s.quit:
+			return nil
+		case err := <-subscription.Err():
+			return fmt.Errorf("subscription error: %w", err)
+		}
+	}
+}
+
+// Events returns the receive-only channel on which LogMessagePublished events are delivered.
+func (s *Subscription) Events() <-chan *ethabi.AbiLogMessagePublished {
+	return s.logC
+}
+
+// Errors returns the receive-only channel on which subscription errors are delivered.
+func (s *Subscription) Errors() <-chan error {
+	return s.errC
+}
+
+// Close signals the background goroutine started by Subscribe to stop.
+// NOTE(review): calling Close more than once panics (double close of a
+// channel) — confirm callers invoke it exactly once.
+func (s *Subscription) Close() {
+	close(s.quit)
+}
+
+// Abstraction over the fields that are expected to be present for Transfer
+// types encoded in receipt logs: Deposits, Transfers, and LogMessagePublished
+// events.
+type TransferLog interface {
+	// Amount after (de)normalization
+	TransferAmount() *big.Int
+	// The Transferor: EOA or contract that initiated the transfer. Not to be confused with msg.sender.
+	Sender() vaa.Address
+	// The Transferee. Ultimate recipient of funds.
+	Destination() vaa.Address
+	// Event emitter
+	Emitter() common.Address // Emitter will always be an Ethereum address
+	// Chain where the token was minted
+	OriginChain() vaa.ChainID
+	// Address that minted the token
+	OriginAddress() vaa.Address
+}
+
+// Abstraction over a Deposit event for a wrapped native asset, e.g. WETH for Ethereum.
+type NativeDeposit struct {
+	// The address of the token.
+	TokenAddress common.Address
+	// The native chain of the token (where it was minted)
+	TokenChain vaa.ChainID
+	// Recipient of the deposit.
+	Receiver   common.Address
+	// Amount deposited, in the token's own units.
+	Amount     *big.Int
+}
+
+// TransferAmount returns the deposited amount.
+func (d *NativeDeposit) TransferAmount() *big.Int {
+	return d.Amount
+}
+
+// Destination returns the deposit receiver converted to a VAA address.
+func (d *NativeDeposit) Destination() vaa.Address {
+	return VAAAddrFrom(d.Receiver)
+}
+
+// Deposit does not actually have a sender but this is required to implement the interface.
+// Always returns the zero address.
+func (d *NativeDeposit) Sender() vaa.Address {
+	// Sender is not present in the Logs emitted for a Deposit
+	return ZERO_ADDRESS_VAA
+}
+
+// Emitter returns the contract that emitted the Deposit event.
+func (d *NativeDeposit) Emitter() common.Address {
+	// Event emitter of the Deposit should be equal to TokenAddress.
+	return d.TokenAddress
+}
+
+// OriginChain returns the chain where the deposited token was minted.
+func (d *NativeDeposit) OriginChain() vaa.ChainID {
+	return d.TokenChain
+}
+
+// OriginAddress returns the token address converted to a VAA address.
+func (d *NativeDeposit) OriginAddress() vaa.Address {
+	return VAAAddrFrom(d.TokenAddress)
+}
+
+// String renders the deposit in a single-line, human-readable form for logging.
+func (d *NativeDeposit) String() string {
+	return "Deposit: {TokenAddress=" + d.TokenAddress.String() +
+		fmt.Sprintf(" TokenChain=%d", d.TokenChain) +
+		" Receiver=" + d.Receiver.String() +
+		" Amount=" + d.Amount.String() +
+		"}"
+}
+
+// DepositFromLog() creates a NativeDeposit struct given a log and Wormhole chain ID.
+// Returns an error when the log cannot be parsed as a Deposit event.
+func DepositFromLog(
+	log *types.Log,
+	// This chain ID should correspond to the Wormhole chain ID, not the EVM chain ID. In this context it's
+	// important to track the transfer as Wormhole sees it, not as the EVM network itself sees it.
+	chainId vaa.ChainID,
+) (deposit *NativeDeposit, err error) {
+	receiver, wad := parseWNativeDepositEvent(log.Topics, log.Data)
+	if wad == nil {
+		return nil, errors.New("could not parse Deposit from log")
+	}
+	return &NativeDeposit{
+		TokenAddress: log.Address,
+		TokenChain:   chainId,
+		Receiver:     receiver,
+		Amount:       wad,
+	}, nil
+}
+
+// parseWNativeDepositEvent parses an event for a deposit of a wrapped version of the chain's native asset, i.e. WETH for Ethereum.
+// Returns the zero address and a nil amount when the topics or data do not match the expected layout.
+func parseWNativeDepositEvent(logTopics []common.Hash, logData []byte) (destination common.Address, amount *big.Int) {
+	// https://etherscan.io/token/0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2#code#L29
+	// event  Deposit(address indexed dst, uint wad);
+	if len(logTopics) != TOPICS_COUNT_DEPOSIT || len(logData) != EVM_WORD_LENGTH {
+		return common.Address{}, nil
+	}
+
+	// Topic 1 is the indexed destination address; the single data word is the amount.
+	return common.BytesToAddress(logTopics[1][:]), new(big.Int).SetBytes(logData)
+}
+
+// Abstraction over an ERC20 Transfer event.
+type ERC20Transfer struct {
+	// The address of the token. Also equivalent to the Emitter of the event.
+	TokenAddress common.Address
+	// The native chain of the token (where it was minted)
+	TokenChain vaa.ChainID
+	// Source of the transfer. May be zero for Transfer-like events from other contracts.
+	From       common.Address
+	// Recipient of the transfer. May be zero when tokens are burned.
+	To         common.Address
+	// Amount transferred, in the token's own units.
+	Amount     *big.Int
+}
+
+// TransferAmount returns the transferred amount.
+func (t *ERC20Transfer) TransferAmount() *big.Int {
+	return t.Amount
+}
+
+// Sender returns the From address converted to a VAA address.
+func (t *ERC20Transfer) Sender() vaa.Address {
+	// Note that this value may return zero for receipt logs that are in
+	// fact Transfers emitted from e.g. UniswapV2 which have the same event
+	// signature as ERC20 Transfers.
+	return VAAAddrFrom(t.From)
+}
+
+// Destination returns the To address converted to a VAA address.
+func (t *ERC20Transfer) Destination() vaa.Address {
+	// Note that this value may return zero when tokens are being burned.
+	return VAAAddrFrom(t.To)
+}
+
+// Emitter returns the contract that emitted the Transfer event.
+func (t *ERC20Transfer) Emitter() common.Address {
+	// The TokenAddress is equal to the Emitter for ERC20 Transfers
+	return t.TokenAddress
+}
+
+// OriginChain returns the chain where the transferred token was minted.
+func (t *ERC20Transfer) OriginChain() vaa.ChainID {
+	return t.TokenChain
+}
+
+// OriginAddress returns the token address converted to a VAA address.
+func (t *ERC20Transfer) OriginAddress() vaa.Address {
+	return VAAAddrFrom(t.TokenAddress)
+}
+
+// String renders the transfer in a single-line, human-readable form for logging.
+func (t *ERC20Transfer) String() string {
+	return "ERC20Transfer: {TokenAddress=" + t.TokenAddress.String() +
+		fmt.Sprintf(" TokenChain=%d", t.TokenChain) +
+		" From=" + t.From.String() +
+		" To=" + t.To.String() +
+		" Amount=" + t.Amount.String() +
+		"}"
+}
+
+// ERC20TransferFromLog() creates an ERC20Transfer struct given a log and Wormhole chain ID.
+// Returns an error when the log cannot be parsed as an ERC20 Transfer event.
+func ERC20TransferFromLog(
+	log *types.Log,
+	// This chain ID should correspond to the Wormhole chain ID, not the EVM chain ID. In this context it's
+	// important to track the transfer as Wormhole sees it, not as the EVM network itself sees it.
+	chainId vaa.ChainID,
+) (transfer *ERC20Transfer, err error) {
+	src, dst, wad := parseERC20TransferEvent(log.Topics, log.Data)
+
+	// Ensure From address is not empty. The To address is allowed to be empty when funds are being burned.
+	if cmp(src, ZERO_ADDRESS) == 0 {
+		return nil, errors.New("could not parse ERC20 Transfer from log: address From is empty")
+	}
+	if wad == nil {
+		return nil, errors.New("could not parse ERC20 Transfer from log: nil Amount")
+	}
+
+	return &ERC20Transfer{
+		TokenAddress: log.Address,
+		// Initially, set Token's chain to the chain being monitored. This will be updated by making an RPC call later.
+		TokenChain: chainId,
+		From:       src,
+		To:         dst,
+		Amount:     wad,
+	}, nil
+}
+
+// parseERC20TransferEvent parses an ERC20 Transfer event from log topics and data.
+// It verifies the input lengths before extracting the 'from', 'to' and amount
+// fields, returning zero addresses and a nil amount when the layout does not match.
+// - Error handling: lengths are checked before any parsing is attempted.
+// - Input validation: the lengths must match expected values exactly.
+func parseERC20TransferEvent(logTopics []common.Hash, logData []byte) (from common.Address, to common.Address, amount *big.Int) {
+	// https://github.com/OpenZeppelin/openzeppelin-contracts/blob/6e224307b44bc4bd0cb60d408844e028cfa3e485/contracts/token/ERC20/IERC20.sol#L16
+	// event Transfer(address indexed from, address indexed to, uint256 value)
+	if len(logTopics) != TOPICS_COUNT_TRANSFER || len(logData) != EVM_WORD_LENGTH {
+		return common.Address{}, common.Address{}, nil
+	}
+
+	// Topics 1 and 2 are the indexed from/to addresses; the single data word is the value.
+	from = common.BytesToAddress(logTopics[1][:])
+	to = common.BytesToAddress(logTopics[2][:])
+	amount = new(big.Int).SetBytes(logData)
+	return from, to, amount
+}
+
+// Abstraction over a LogMessagePublished event emitted by the core bridge.
+type LogMessagePublished struct {
+	// Which contract emitted the event.
+	EventEmitter common.Address
+	// Which address sent the transaction that triggered the message publication.
+	MsgSender common.Address
+	// Abstraction over fields encoded in the event's Data field which in turn contains the transfer's payload.
+	TransferDetails *TransferDetails
+	// Note: these fields are non-exhaustive. Data not needed for Transfer Verification is not encoded here.
+}
+
+// String renders the event in a single-line, human-readable form for logging.
+func (l *LogMessagePublished) String() string {
+	return fmt.Sprintf(
+		"LogMessagePublished: {emitter=%s sender=%s transferDetails=%s}",
+		l.EventEmitter, l.MsgSender, l.TransferDetails)
+}
+
+// Destination returns the transfer's target address, or the zero value when
+// TransferDetails is nil.
+func (l *LogMessagePublished) Destination() (destination vaa.Address) {
+	if l.TransferDetails != nil {
+		destination = l.TransferDetails.TargetAddress
+	}
+	return
+}
+
+// Emitter returns the contract that emitted the event.
+func (l *LogMessagePublished) Emitter() common.Address {
+	return l.EventEmitter
+}
+
+// Sender returns the transaction's msg.sender converted to a VAA address.
+func (l *LogMessagePublished) Sender() vaa.Address {
+	return VAAAddrFrom(l.MsgSender)
+}
+
+// TransferAmount returns the denormalized amount from the payload, or nil when
+// TransferDetails is nil.
+func (l *LogMessagePublished) TransferAmount() (amount *big.Int) {
+	if l.TransferDetails != nil {
+		return l.TransferDetails.Amount
+	}
+	return
+}
+
+// OriginAddress returns the payload's origin address as a VAA address, or the
+// zero value when TransferDetails is nil.
+func (l *LogMessagePublished) OriginAddress() (origin vaa.Address) {
+	if l.TransferDetails != nil {
+		origin = VAAAddrFrom(l.TransferDetails.OriginAddress)
+	}
+	return
+}
+
+// OriginChain returns the payload's token chain, or zero (ChainIDUnset) when
+// TransferDetails is nil.
+func (l *LogMessagePublished) OriginChain() (chainID vaa.ChainID) {
+	if l.TransferDetails != nil {
+		chainID = l.TransferDetails.TokenChain
+	}
+	return
+}
+
+// TransferReceipt is an abstraction over an EVM transaction receipt for a
+// Token Bridge transfer. It represents Deposit, Transfer, and
+// LogMessagePublished events that can appear in a Receipt logs. Other event
+// types are not represented by this program because they are not relevant for
+// checking the invariants on transfers sent from the token bridge.
+type TransferReceipt struct {
+	Deposits  *[]*NativeDeposit
+	Transfers *[]*ERC20Transfer
+	// There must be at least one LogMessagePublished for a valid receipt.
+	// NOTE(review): field name is misspelled ("Publicatons"); renaming this
+	// exported field would break external users, so it is left as-is here.
+	MessagePublicatons *[]*LogMessagePublished
+}
+
+// String renders all deposits, transfers, and message publications contained
+// in the receipt as a single line for logging. Nil slices and nil entries are
+// skipped.
+func (r *TransferReceipt) String() string {
+	deposits := ""
+	if r.Deposits != nil {
+		for _, deposit := range *r.Deposits {
+			if deposit != nil {
+				deposits += deposit.String()
+			}
+		}
+	}
+
+	transfers := ""
+	if r.Transfers != nil {
+		for _, transfer := range *r.Transfers {
+			if transfer != nil {
+				transfers += transfer.String()
+			}
+		}
+	}
+
+	messages := ""
+	if r.MessagePublicatons != nil {
+		for _, message := range *r.MessagePublicatons {
+			if message != nil {
+				messages += message.String()
+			}
+		}
+	}
+
+	return fmt.Sprintf("receipt: {deposits=%s transfers=%s messages=%s}", deposits, transfers, messages)
+}
+
+// Summary of a processed TransferReceipt. Contains information about relevant
+// transfers requested in and out of the bridge.
+type ReceiptSummary struct {
+	// Number of LogMessagePublished events in the receipt
+	logsProcessed int
+	// The sum of tokens transferred into the Token Bridge contract.
+	// NOTE(review): keys appear to be the "address-chain" strings produced via
+	// KEY_FORMAT (see relevant()) — confirm against the code that populates them.
+	in map[string]*big.Int
+	// The sum of tokens parsed from the core bridge's LogMessagePublished payload.
+	out map[string]*big.Int
+}
+
+// NewReceiptSummary returns an empty summary with both token-amount maps
+// allocated and the processed-log counter at zero.
+func NewReceiptSummary() *ReceiptSummary {
+	summary := &ReceiptSummary{logsProcessed: 0}
+	// The sum of tokens transferred into the Token Bridge contract.
+	summary.in = make(map[string]*big.Int)
+	// The sum of tokens parsed from the core bridge's LogMessagePublished payload.
+	summary.out = make(map[string]*big.Int)
+	return summary
+}
+
+// String renders the summary for logging. Entries in both the requestedIn and
+// requestedOut sections are rendered as "key=amount " with a trailing space
+// separator.
+func (s *ReceiptSummary) String() (outStr string) {
+
+	ins := ""
+
+	for key, amountIn := range s.in {
+		// Include a trailing space so multiple entries are separated. Previously
+		// the `in` entries were concatenated with no delimiter ("k1=v1k2=v2"),
+		// inconsistent with the `out` section below.
+		ins += fmt.Sprintf("%s=%s ", key, amountIn.String())
+	}
+
+	outs := ""
+	for key, amountOut := range s.out {
+		outs += fmt.Sprintf("%s=%s ", key, amountOut.String())
+	}
+
+	outStr = fmt.Sprintf(
+		"receipt summary: logsProcessed=%d requestedIn={%s} requestedOut={%s}",
+		s.logsProcessed,
+		ins,
+		outs,
+	)
+	return outStr
+}
+
+// https://wormhole.com/docs/learn/infrastructure/vaas/#payload-types
+type VAAPayloadType uint8
+
+const (
+	// Payload type 1: a plain token transfer.
+	TransferTokens            VAAPayloadType = 1
+	// Payload type 3: a token transfer carrying an additional arbitrary payload.
+	TransferTokensWithPayload VAAPayloadType = 3
+)
+
+// Abstraction of a Token Bridge transfer payload encoded in the Data field of a LogMessagePublished event.
+// It is meant to correspond to the API for Token Transfer messages as described in the Token Bridge whitepaper:
+// https://github.com/wormhole-foundation/wormhole/blob/main/whitepapers/0003_token_bridge.md#api--database-schema
+type TransferDetails struct {
+	// One of TransferTokens or TransferTokensWithPayload for valid transfers.
+	PayloadType VAAPayloadType
+	// Denormalized amount, accounting for decimal differences between contracts and chains
+	Amount *big.Int
+	// Amount as sent in the raw payload
+	AmountRaw *big.Int
+	// Original wormhole chain ID where the token was minted.
+	TokenChain vaa.ChainID
+	// Original address of the token when minted natively. Corresponds to the "unwrapped" address in the token bridge.
+	OriginAddress common.Address
+	// Raw token address parsed from the payload. May be wrapped.
+	OriginAddressRaw []byte
+	// Not necessarily an EVM address, so vaa.Address is used instead
+	TargetAddress vaa.Address
+}
+
+// String renders the transfer details in a single-line, human-readable form
+// for logging. The raw origin address is hex-encoded.
+func (td *TransferDetails) String() string {
+	// %x hex-encodes the raw byte slice directly, avoiding a nested Sprintf.
+	return fmt.Sprintf(
+		"PayloadType: %d OriginAddressRaw(hex-encoded): %x TokenChain: %d OriginAddress: %s TargetAddress: %s AmountRaw: %s Amount: %s",
+		td.PayloadType,
+		td.OriginAddressRaw,
+		td.TokenChain,
+		td.OriginAddress.String(),
+		td.TargetAddress.String(),
+		td.AmountRaw.String(),
+		td.Amount.String(),
+	)
+}
+
+// unwrapIfWrapped returns the "unwrapped" address for a token a.k.a. the OriginAddress
+// of the token's original minting contract. It queries the Token Bridge's
+// wrappedAsset(uint16,bytes32) mapping via eth_call and caches the result per
+// token address. The contract returns the zero address for assets not in its
+// map; that case is logged but not treated as an error.
+func (tv *TransferVerifier[ethClient, connector]) unwrapIfWrapped(
+	tokenAddress []byte,
+	tokenChain vaa.ChainID,
+) (unwrappedTokenAddress common.Address, err error) {
+	ctx, cancel := context.WithTimeout(context.Background(), RPC_TIMEOUT)
+	defer cancel()
+
+	tokenAddressAsKey := hex.EncodeToString(tokenAddress)
+
+	// If the token address already exists in the wrappedCache mapping the
+	// cached value can be returned.
+	if addr, exists := tv.wrappedCache[tokenAddressAsKey]; exists {
+		tv.logger.Debug("wrapped asset found in cache, returning")
+		return addr, nil
+	}
+
+	// prepare eth_call data, 4-byte signature + 2x 32 byte arguments
+	calldata := make([]byte, 4+EVM_WORD_LENGTH+EVM_WORD_LENGTH)
+
+	copy(calldata, TOKEN_BRIDGE_WRAPPED_ASSET_SIGNATURE)
+	// Add the uint16 tokenChain as the last two bytes in the first argument
+	binary.BigEndian.PutUint16(calldata[4+30:], uint16(tokenChain))
+	copy(calldata[4+EVM_WORD_LENGTH:], common.LeftPadBytes(tokenAddress, EVM_WORD_LENGTH))
+
+	ethCallMsg := ethereum.CallMsg{
+		To:   &tv.Addresses.TokenBridgeAddr,
+		Data: calldata,
+	}
+	tv.logger.Debug("calling wrappedAsset",
+		zap.Uint16("tokenChain", uint16(tokenChain)),
+		zap.String("tokenChainString", tokenChain.String()),
+		zap.String("tokenAddress", fmt.Sprintf("%x", tokenAddress)),
+		zap.String("callData", fmt.Sprintf("%x", calldata)))
+
+	result, err := tv.client.CallContract(ctx, ethCallMsg, nil)
+	if err != nil {
+		// This strictly handles the error case. The contract call will
+		// return the zero address for assets not in its map.
+		// Bug fix: wrap the underlying RPC error with %w instead of discarding
+		// it, so callers can inspect the cause with errors.Is/errors.As.
+		return common.Address{}, fmt.Errorf("failed to get mapping for token %s: %w", tokenAddressAsKey, err)
+	}
+
+	tokenAddressNative := common.BytesToAddress(result)
+	tv.wrappedCache[tokenAddressAsKey] = tokenAddressNative
+
+	tv.logger.Debug("got wrappedAsset result",
+		zap.String("tokenAddressNative", fmt.Sprintf("%x", tokenAddressNative)))
+
+	if cmp(tokenAddressNative, ZERO_ADDRESS) == 0 {
+		tv.logger.Info("got zero address for wrappedAsset result. this asset is probably not registered correctly",
+			zap.String("queried tokenAddress", fmt.Sprintf("%x", tokenAddress)),
+			zap.Uint16("queried tokenChain", uint16(tokenChain)),
+			zap.String("tokenChain name", tokenChain.String()),
+		)
+	}
+
+	return tokenAddressNative, nil
+}
+
+// chainId() calls the chainId() function on the contract at the supplied address. To get the chain ID being monitored
+// by the Transfer Verifier, use the field TransferVerifier.chain.
+// Results are cached per token address in nativeChainCache.
+func (tv *TransferVerifier[ethClient, Connector]) chainId(
+	addr common.Address,
+) (vaa.ChainID, error) {
+
+	if cmp(addr, ZERO_ADDRESS) == 0 {
+		return 0, errors.New("got zero address as parameter for chainId() call")
+	}
+	ctx, cancel := context.WithTimeout(context.Background(), RPC_TIMEOUT)
+	defer cancel()
+
+	tokenAddressAsKey := addr.Hex()
+
+	// If the token address already exists in the nativeChainCache mapping the
+	// cached value can be returned.
+	if chainId, exists := tv.nativeChainCache[tokenAddressAsKey]; exists {
+		tv.logger.Debug("wrapped asset found in native chain cache, returning")
+		return chainId, nil
+	}
+
+	// prepare eth_call data, 4-byte signature
+	calldata := make([]byte, 4)
+
+	copy(calldata, WRAPPED_ERC20_CHAIN_ID_SIGNATURE)
+
+	ethCallMsg := ethereum.CallMsg{
+		To:   &addr,
+		Data: calldata,
+	}
+
+	tv.logger.Debug("calling chainId()", zap.String("tokenAddress", addr.String()))
+
+	result, err := tv.client.CallContract(ctx, ethCallMsg, nil)
+
+	if err != nil {
+		// TODO add more checks here
+		return 0, err
+	}
+	if len(result) < EVM_WORD_LENGTH {
+		tv.logger.Warn("result for chainId has insufficient length",
+			zap.Int("length", len(result)),
+			zap.String("result", fmt.Sprintf("%x", result)))
+		return 0, errors.New("result for chainId has insufficient length")
+	}
+
+	// The uint16 chain ID occupies the first two bytes of the 32-byte return word.
+	chainID := vaa.ChainID(binary.BigEndian.Uint16(result))
+
+	tv.nativeChainCache[tokenAddressAsKey] = chainID
+
+	return chainID, nil
+}
+
+// isWrappedAsset queries the Token Bridge's isWrappedAsset(address) function
+// via eth_call to determine whether addr is a bridge-created wrapped token.
+// Results are cached per token address.
+func (tv *TransferVerifier[ethClient, Connector]) isWrappedAsset(
+	addr common.Address,
+) (bool, error) {
+	ctx, cancel := context.WithTimeout(context.Background(), RPC_TIMEOUT)
+	defer cancel()
+
+	tokenAddressAsKey := addr.Hex()
+
+	// If the token address already exists in the isWrappedCache mapping the
+	// cached value can be returned.
+	if wrapped, exists := tv.isWrappedCache[tokenAddressAsKey]; exists {
+		tv.logger.Debug("asset found in isWrapped cache, returning")
+		return wrapped, nil
+	}
+
+	// Prepare eth_call data: 4-byte signature + 32 byte address
+	calldata := make([]byte, 4+EVM_WORD_LENGTH)
+	copy(calldata, TOKEN_BRIDGE_IS_WRAPPED_ASSET_SIGNATURE)
+	copy(calldata[4:], common.LeftPadBytes(addr.Bytes(), EVM_WORD_LENGTH))
+
+	evmCallMsg := ethereum.CallMsg{
+		To:   &tv.Addresses.TokenBridgeAddr,
+		Data: calldata,
+	}
+
+	tv.logger.Debug("calling isWrappedAsset()", zap.String("tokenAddress", addr.String()))
+
+	result, err := tv.client.CallContract(ctx, evmCallMsg, nil)
+
+	if err != nil {
+		// TODO add more info here
+		tv.logger.Warn("isWrappedAsset() call error", zap.Error(err))
+		return false, err
+	}
+	if len(result) < EVM_WORD_LENGTH {
+		tv.logger.Warn("isWrappedAsset() result length is too small", zap.String("result", fmt.Sprintf("%x", result)))
+		// Bug fix: err is always nil on this path, so the previous
+		// `return false, err` reported (false, nil) — callers would mistake a
+		// malformed response for a successful "not wrapped" answer.
+		return false, errors.New("isWrappedAsset() result length is too small")
+	}
+	tv.logger.Debug("isWrappedAsset result", zap.String("result", fmt.Sprintf("%x", result)))
+
+	// The boolean result will be returned as a byte string with length
+	// equal to EVM_WORD_LENGTH. Grab the last byte.
+	wrapped := result[EVM_WORD_LENGTH-1] == 1
+
+	tv.isWrappedCache[tokenAddressAsKey] = wrapped
+
+	return wrapped, nil
+}
+
+// Determine whether a log is relevant for the addresses passed into TVAddresses. Returns a string of the form "address-chain" for relevant entries.
+// NOTE(review): the type parameter L is unused (the argument is the interface
+// type TransferLog); dropping it would change existing instantiating call sites.
+func relevant[L TransferLog](tLog TransferLog, tv *TVAddresses) (key string, relevant bool) {
+
+	switch log := tLog.(type) {
+	case *NativeDeposit:
+		// Skip native deposit events emitted by contracts other than the configured wrapped native address.
+		if cmp(log.Emitter(), tv.WrappedNativeAddr) != 0 {
+			return
+		}
+
+		// We only care about deposits into the token bridge.
+		if cmp(log.Destination(), tv.TokenBridgeAddr) != 0 {
+			return
+		}
+
+	case *ERC20Transfer:
+		// We only care about transfers sent to the token bridge.
+		if cmp(log.Destination(), tv.TokenBridgeAddr) != 0 {
+			return
+		}
+
+	case *LogMessagePublished:
+		// This check is already done elsewhere but it's important.
+		if cmp(log.Emitter(), tv.CoreBridgeAddr) != 0 {
+			return
+		}
+
+		// Only consider LogMessagePublished events with msg.sender equal to the Token Bridge
+		if cmp(log.Sender(), tv.TokenBridgeAddr) != 0 {
+			return
+		}
+
+		// The following values are not exposed by the interface, so check them directly here.
+		if log.TransferDetails.PayloadType != TransferTokens && log.TransferDetails.PayloadType != TransferTokensWithPayload {
+			return
+		}
+
+	}
+	// NOTE(review): a TransferLog of a type other than the three cases above
+	// falls through to here and is reported as relevant — confirm this is
+	// intended (validate() rejects unknown types separately).
+	return fmt.Sprintf(KEY_FORMAT, tLog.OriginAddress(), tLog.OriginChain()), true
+}
+
+// Custom error type indicating an issue in a type that implements the
+// TransferLog interface. Used to ensure that a TransferLog is well-formed.
+// Typically indicates a bug in the code.
+type InvalidLogError struct {
+	// Human-readable description of the violated invariant.
+	Msg string
+}
+
+// Error implements the error interface, prefixing the message with "invalid log: ".
+func (i InvalidLogError) Error() string {
+	return "invalid log: " + i.Msg
+}
+
+// validate() ensures a TransferLog is well-formed. This means that its fields
+// are not nil and in most cases are not equal to the zero-value for the
+// field's type. Returns an *InvalidLogError describing the first violated
+// invariant, or nil when the log is well-formed.
+// NOTE(review): the type parameter L is unused (the argument is the interface
+// type TransferLog); dropping it would change existing instantiating call sites.
+func validate[L TransferLog](tLog TransferLog) error {
+
+	// Generic validation for all TransferLogs
+	if cmp(tLog.Emitter(), ZERO_ADDRESS) == 0 {
+		return &InvalidLogError{Msg: "emitter is the zero address"}
+	}
+
+	if tLog.OriginChain() == 0 {
+		return &InvalidLogError{Msg: "originChain is zero"}
+	}
+
+	if tLog.TransferAmount() == nil {
+		return &InvalidLogError{Msg: "transfer amount is nil"}
+	}
+
+	if tLog.TransferAmount().Sign() == -1 {
+		return &InvalidLogError{Msg: "transfer amount is negative"}
+	}
+
+	// Type-specific validation.
+	switch log := tLog.(type) {
+	case *NativeDeposit:
+		// Deposit does not actually have a sender, so it should always be equal to the zero address.
+		if cmp(log.Sender(), ZERO_ADDRESS_VAA) != 0 {
+			return &InvalidLogError{Msg: "sender address for Deposit must be 0"}
+		}
+		if cmp(log.Emitter(), log.TokenAddress) != 0 {
+			return &InvalidLogError{Msg: "deposit emitter is not equal to its token address"}
+		}
+		if cmp(log.Destination(), ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "destination is not set"}
+		}
+		if cmp(log.OriginAddress(), ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "originAddress is the zero address"}
+		}
+	case *ERC20Transfer:
+		// Note: The token bridge transfers to the zero address in
+		// order to burn tokens for some kinds of transfers. For this
+		// reason, there is no validation here to check if Destination
+		// is the zero address.
+
+		// Sender must not be checked to be non-zero here. The event
+		// hash for Transfer also shows up in other popular contracts
+		// (e.g. UniswapV2) and may have a valid reason to set this
+		// field to zero.
+
+		// TODO ensure that, if the Token is wrapped, that its tokenchain is not equal to NATIVE_CHAIN_ID.
+		// at this point, this should've been updated
+
+		if cmp(log.Emitter(), log.TokenAddress) != 0 {
+			return &InvalidLogError{Msg: "transfer emitter is not equal to its token address"}
+		}
+		if cmp(log.OriginAddress(), ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "originAddress is the zero address"}
+		}
+	case *LogMessagePublished:
+		// LogMessagePublished cannot have a sender with a 0 address
+		if cmp(log.Sender(), ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "sender cannot be zero"}
+		}
+		if cmp(log.Destination(), ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "destination is not set"}
+		}
+
+		// TODO is this valid for assets that return the zero address from unwrap?
+		// if cmp(log.OriginAddress(), ZERO_ADDRESS_VAA) == 0 {
+		// 	return errors.New("origin cannot be zero")
+		// }
+
+		// The following values are not exposed by the interface, so check them directly here.
+		if log.TransferDetails == nil {
+			return &InvalidLogError{Msg: "TransferDetails cannot be nil"}
+		}
+		if cmp(log.TransferDetails.TargetAddress, ZERO_ADDRESS_VAA) == 0 {
+			return &InvalidLogError{Msg: "target address cannot be zero"}
+		}
+
+		if len(log.TransferDetails.OriginAddressRaw) == 0 {
+			return &InvalidLogError{Msg: "origin address raw cannot be empty"}
+		}
+
+		// if bytes.Compare(log.TransferDetails.OriginAddressRaw, ZERO_ADDRESS_VAA.Bytes()) == 0 {
+		// 	return &InvalidLogError{Msg: "origin address raw cannot be zero"}
+		// }
+
+		if log.TransferDetails.AmountRaw == nil {
+			return &InvalidLogError{Msg: "amountRaw cannot be nil"}
+		}
+		if log.TransferDetails.AmountRaw.Sign() == -1 {
+			return &InvalidLogError{Msg: "amountRaw cannot be negative"}
+		}
+		if log.TransferDetails.PayloadType != TransferTokens && log.TransferDetails.PayloadType != TransferTokensWithPayload {
+			return &InvalidLogError{Msg: "payload type is not a transfer type"}
+		}
+	default:
+		return &InvalidLogError{Msg: "invalid transfer log type: unknown"}
+	}
+
+	return nil
+}
+
+// getDecimals() is equivalent to calling decimals() on a contract that follows
+// the ERC20 standard. Results are cached per token address so the RPC call is
+// made at most once per token until the process restarts.
+func (tv *TransferVerifier[evmClient, connector]) getDecimals(
+	tokenAddress common.Address,
+) (decimals uint8, err error) {
+	ctx, cancel := context.WithTimeout(context.Background(), RPC_TIMEOUT)
+	defer cancel()
+
+	// First check if this token's decimals is stored in cache
+	if _, exists := tv.decimalsCache[tokenAddress]; exists {
+		tv.logger.Debug("asset decimals found in cache, returning")
+		return tv.decimalsCache[tokenAddress], nil
+	}
+
+	// If the decimals aren't cached, perform an eth_call lookup for the decimals
+	// This RPC call should only be made once per token, until the guardian is restarted
+	evmCallMsg := ethereum.CallMsg{
+		To:   &tokenAddress,
+		Data: ERC20_DECIMALS_SIGNATURE,
+	}
+
+	result, err := tv.client.CallContract(ctx, evmCallMsg, nil)
+	if err != nil {
+		tv.logger.Warn("error from getDecimals() for token",
+			zap.String("tokenAddress", tokenAddress.String()),
+			zap.ByteString("result", result),
+			zap.Error(err))
+		return 0, err
+	}
+
+	if len(result) < EVM_WORD_LENGTH {
+		tv.logger.Warn("failed to get decimals for token: result has insufficient length",
+			zap.String("tokenAddress", tokenAddress.String()),
+			zap.ByteString("result", result))
+		// Bug fix: err is always nil on this path, so the previous
+		// `return 0, err` reported (0, nil) — callers would mistake a malformed
+		// response for a token with zero decimals.
+		return 0, errors.New("failed to get decimals for token: result has insufficient length")
+	}
+
+	// An ERC20 token's decimals should fit in a single byte. A call to `decimals()`
+	// returns a uint8 value encoded in string with 32-bytes. To get the decimals,
+	// we grab the last byte, expecting all the preceding bytes to be equal to 0.
+	decimals = result[EVM_WORD_LENGTH-1]
+
+	// Add the decimal value to the cache
+	tv.decimalsCache[tokenAddress] = decimals
+	tv.logger.Debug("adding new token's decimals to cache",
+		zap.String("tokenAddress", tokenAddress.String()),
+		zap.Uint8("tokenDecimals", decimals))
+
+	return decimals, nil
+}
+
+// Yields the registered Wormhole chain ID corresponding to an EVM chain ID,
+// or an error when no mapping is registered.
+// Note: it might be better for this function to be moved into the SDK in case
+// other codebases need similar functionality.
+func TryWormholeChainIdFromNative(evmChainId uint64) (wormholeChainID vaa.ChainID, err error) {
+	// Add additional cases below to support more EVM chains.
+	switch evmChainId {
+	// Special carve out for anvil-based testing. This chain ID  1337 anvil's default.
+	// In this case, report the native chain ID as the mainnet chain ID for the purposes of testing.
+	case 1, 1337:
+		return vaa.ChainIDEthereum, nil
+	case 11155111:
+		return vaa.ChainIDSepolia, nil
+	}
+	return vaa.ChainIDUnset, fmt.Errorf(
+		"Transfer Verifier does not have a registered mapping from EVM chain ID %d to a Wormhole chain ID",
+		evmChainId,
+	)
+}
+
+// Gives the representation of a geth address in vaa.Address.
+// Geth uses 20 bytes to represent an address. A VAA address is equivalent if it has the same
+// final 20 bytes. The leading bytes are expected to be zero for both types.
+func VAAAddrFrom(gethAddr common.Address) (vaaAddr vaa.Address) {
+	padded := common.LeftPadBytes(gethAddr.Bytes(), EVM_WORD_LENGTH)
+	return vaa.Address(padded)
+}
+
+// Interface useful for comparing vaa.Address and common.Address; both expose
+// a Bytes() accessor over their underlying byte representation.
+type Bytes interface {
+	Bytes() []byte
+}
+
+// Utility method for comparing common.Address and vaa.Address at the byte level.
+// Both operands are left-padded with zeroes to EVM_WORD_LENGTH before the
+// comparison, so a 20-byte geth address equals its 32-byte VAA counterpart.
+func cmp[some Bytes, other Bytes](a some, b other) int {
+	aPadded := common.LeftPadBytes(a.Bytes(), EVM_WORD_LENGTH)
+	bPadded := common.LeftPadBytes(b.Bytes(), EVM_WORD_LENGTH)
+	return bytes.Compare(aPadded, bPadded)
+}

+ 860 - 0
node/pkg/txverifier/evmtypes_test.go

@@ -0,0 +1,860 @@
+package txverifier
+
+import (
+	"bytes"
+	"math/big"
+	"testing"
+
+	"github.com/ethereum/go-ethereum/common"
+	"github.com/ethereum/go-ethereum/core/types"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+)
+
+var (
+	// Mainnet values
+	// WETH_ADDRESS is the canonical mainnet Wrapped Ether contract address.
+	WETH_ADDRESS                = common.HexToAddress("c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2")
+	// NATIVE_CHAIN_ID is the Wormhole chain ID used as the "native" chain in
+	// these tests (2 == Ethereum).
+	NATIVE_CHAIN_ID vaa.ChainID = 2
+)
+
+// TestRelevantDeposit exercises the generic relevant() helper for each of the
+// three TransferLog implementations (NativeDeposit, ERC20Transfer and
+// LogMessagePublished), checking both the returned map key and the relevance
+// flag for relevant and irrelevant inputs.
+func TestRelevantDeposit(t *testing.T) {
+	t.Parallel()
+
+	// The expected return values for relevant()
+	type result struct {
+		key      string
+		relevant bool
+	}
+
+	mocks := setup()
+
+	deposits := map[string]struct {
+		input    NativeDeposit
+		expected result
+	}{
+		"relevant, deposit": {
+			input: NativeDeposit{
+				TokenAddress: nativeAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2-2", true},
+		},
+		"irrelevant, deposit from non-native contract": {
+			input: NativeDeposit{
+				TokenAddress: usdcAddr, // not Native
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"", false},
+		},
+		"irrelevant, deposit not sent to token bridge": {
+			input: NativeDeposit{
+				TokenAddress: nativeAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     eoaAddrGeth, // not token bridge
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"", false},
+		},
+		"irrelevant, sanity check for zero-address deposits": {
+			input: NativeDeposit{
+				TokenAddress: ZERO_ADDRESS, // zero address
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"", false},
+		},
+	}
+
+	transfers := map[string]struct {
+		input    ERC20Transfer
+		expected result
+	}{
+		"relevant, transfer": {
+			input: ERC20Transfer{
+				TokenAddress: nativeAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         eoaAddrGeth,
+				To:           tokenBridgeAddr,
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2-2", true},
+		},
+		"irrelevant, transfer destination is not token bridge": {
+			input: ERC20Transfer{
+				TokenAddress: nativeAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         eoaAddrGeth,
+				To:           eoaAddrGeth,
+				Amount:       big.NewInt(500),
+			},
+			expected: result{"", false},
+		},
+	}
+
+	messages := map[string]struct {
+		input    LogMessagePublished
+		expected result
+	}{
+		"relevant, LogMessagePublished": {
+			input: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    nativeAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+			expected: result{"000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc2-2", true},
+		},
+		"irrelevant, LogMessagePublished has a sender not equal to token bridge": {
+			input: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    eoaAddrGeth,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    nativeAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+			expected: result{"", false},
+		},
+		"irrelevant, LogMessagePublished not emitted by core bridge": {
+			input: LogMessagePublished{
+				EventEmitter: tokenBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    nativeAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+			expected: result{"", false},
+		},
+		"irrelevant, LogMessagePublished does not have a PayloadType corresponding to a Transfer": {
+			input: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      2,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    nativeAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+			expected: result{"", false},
+		},
+	}
+
+	for name, test := range deposits {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			key, relevant := relevant[*NativeDeposit](&test.input, mocks.transferVerifier.Addresses)
+			assert.Equal(t, test.expected.key, key)
+			assert.Equal(t, test.expected.relevant, relevant)
+
+			// Invariant: an empty key and an irrelevant result must coincide.
+			if key == "" {
+				assert.False(t, relevant, "key must be empty for irrelevant transfers, but got ", key)
+			} else {
+				assert.True(t, relevant, "relevant must be true for non-empty keys")
+			}
+		})
+	}
+
+	for name, test := range transfers {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			key, relevant := relevant[*ERC20Transfer](&test.input, mocks.transferVerifier.Addresses)
+			assert.Equal(t, test.expected.key, key)
+			assert.Equal(t, test.expected.relevant, relevant)
+
+			// Invariant: an empty key and an irrelevant result must coincide.
+			if key == "" {
+				assert.False(t, relevant, "key must be empty for irrelevant transfers, but got ", key)
+			} else {
+				assert.True(t, relevant, "relevant must be true for non-empty keys")
+			}
+		})
+	}
+
+	for name, test := range messages {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			key, relevant := relevant[*LogMessagePublished](&test.input, mocks.transferVerifier.Addresses)
+			assert.Equal(t, test.expected.key, key)
+			assert.Equal(t, test.expected.relevant, relevant)
+
+			// Invariant: an empty key and an irrelevant result must coincide.
+			if key == "" {
+				assert.False(t, relevant, "key must be empty for irrelevant transfers, but got ", key)
+			} else {
+				assert.True(t, relevant, "relevant must be true for non-empty keys")
+			}
+		})
+	}
+}
+
+// TestValidateDeposit checks that validate() rejects NativeDeposit values with
+// zero-value fields or nil/negative amounts, and accepts well-formed deposits.
+// It also exercises the TransferLog interface methods on a valid deposit.
+func TestValidateDeposit(t *testing.T) {
+	t.Parallel()
+
+	invalidDeposits := map[string]struct {
+		deposit NativeDeposit
+	}{
+		"invalid: zero-value for TokenAddress": {
+			deposit: NativeDeposit{
+				// TokenAddress:
+				TokenChain: NATIVE_CHAIN_ID,
+				Receiver:   tokenBridgeAddr,
+				Amount:     big.NewInt(1),
+			},
+		},
+		"invalid: zero-value for TokenChain": {
+			deposit: NativeDeposit{
+				TokenAddress: usdcAddr,
+				// TokenChain:
+				Receiver: tokenBridgeAddr,
+				Amount:   big.NewInt(1),
+			},
+		},
+		"invalid: zero-value for Receiver": {
+			deposit: NativeDeposit{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				// Receiver:
+				Amount: big.NewInt(1),
+			},
+		},
+		"invalid: nil Amount": {
+			deposit: NativeDeposit{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       nil,
+			},
+		},
+		"invalid: negative Amount": {
+			deposit: NativeDeposit{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       big.NewInt(-1),
+			},
+		},
+	}
+
+	for name, test := range invalidDeposits {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*NativeDeposit](&test.deposit)
+			require.Error(t, err)
+		})
+	}
+
+	validDeposits := map[string]struct {
+		deposit NativeDeposit
+	}{
+		"valid": {
+			deposit: NativeDeposit{
+				TokenAddress: nativeAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				Receiver:     tokenBridgeAddr,
+				Amount:       big.NewInt(500),
+			},
+		},
+	}
+
+	for name, test := range validDeposits {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*NativeDeposit](&test.deposit)
+			require.NoError(t, err)
+
+			// Test the interface
+			// The Sender() field for a Deposit must always be
+			// 'zero'. It only exists to satisfy the TransferLog interface.
+			assert.Equal(t, ZERO_ADDRESS_VAA.Bytes(), test.deposit.Sender().Bytes())
+			assert.Equal(t, test.deposit.TokenAddress, test.deposit.Emitter())
+			assert.NotEqual(t, ZERO_ADDRESS, test.deposit.OriginAddress())
+		})
+	}
+}
+
+// TestValidateERC20Transfer checks that validate() rejects ERC20Transfer
+// values with zero-value token fields or nil/negative amounts, while allowing
+// zero-valued From (mint-like events) and To (burns).
+func TestValidateERC20Transfer(t *testing.T) {
+	t.Parallel()
+
+	invalidTransfers := map[string]struct {
+		input ERC20Transfer
+	}{
+		"invalid: zero-value for TokenAddress": {
+			input: ERC20Transfer{
+				// TokenAddress:
+				TokenChain: NATIVE_CHAIN_ID,
+				To:         tokenBridgeAddr,
+				From:       eoaAddrGeth,
+				Amount:     big.NewInt(1),
+			},
+		},
+		"invalid: zero-value for TokenChain": {
+			input: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				// TokenChain:
+				To:     tokenBridgeAddr,
+				From:   eoaAddrGeth,
+				Amount: big.NewInt(1),
+			},
+		},
+		// Note: transfer's To and From values are allowed to be the zero address.
+		"invalid: nil Amount": {
+			input: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         eoaAddrGeth,
+				To:           tokenBridgeAddr,
+				Amount:       nil,
+			},
+		},
+		"invalid: negative Amount": {
+			input: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         eoaAddrGeth,
+				To:           tokenBridgeAddr,
+				Amount:       big.NewInt(-1),
+			},
+		},
+	}
+
+	for name, test := range invalidTransfers {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*ERC20Transfer](&test.input)
+			require.Error(t, err)
+			assert.ErrorContains(t, err, "invalid log")
+		})
+	}
+
+	validTransfers := map[string]struct {
+		transfer ERC20Transfer
+	}{
+		"valid": {
+			transfer: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				To:           tokenBridgeAddr,
+				From:         eoaAddrGeth,
+				Amount:       big.NewInt(100),
+			},
+		},
+		"valid: zero-value for From (possible Transfer event from non-ERC20 contract)": {
+			transfer: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         ZERO_ADDRESS,
+				To:           tokenBridgeAddr,
+				Amount:       big.NewInt(1),
+			},
+		},
+		"valid: zero-value for To (burning funds)": {
+			transfer: ERC20Transfer{
+				TokenAddress: usdcAddr,
+				TokenChain:   NATIVE_CHAIN_ID,
+				From:         tokenBridgeAddr,
+				To:           ZERO_ADDRESS,
+				Amount:       big.NewInt(1),
+			},
+		},
+	}
+
+	for name, test := range validTransfers {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*ERC20Transfer](&test.transfer)
+			require.NoError(t, err)
+
+			// Test interface
+			assert.Equal(t, test.transfer.TokenAddress, test.transfer.Emitter())
+			assert.NotEqual(t, ZERO_ADDRESS, test.transfer.OriginAddress())
+		})
+	}
+}
+
+// TestValidateLogMessagePublished checks that validate() returns an
+// InvalidLogError for LogMessagePublished values with missing fields (emitter,
+// sender, transfer details and their sub-fields) or nil/negative amounts, and
+// accepts well-formed messages regardless of relevance.
+func TestValidateLogMessagePublished(t *testing.T) {
+	t.Parallel()
+
+	invalidMessages := map[string]struct {
+		logMessagePublished LogMessagePublished
+	}{
+		"invalid: zero-value for EventEmitter": {
+			logMessagePublished: LogMessagePublished{
+				// EventEmitter: coreBridgeAddr,
+				MsgSender: tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+		"invalid: zero-value for MsgSender": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				// MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+		"invalid: zero-value for TransferDetails": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				// TransferDetails: &TransferDetails{
+				// 	PayloadType:     TransferTokens,
+				// 	OriginAddressRaw: usdcAddr,
+				// 	TokenChain:      NATIVE_CHAIN_ID,
+				// 	OriginAddress:   eoaAddrGeth,
+				// 	TargetAddress:   eoaAddrVAA,
+				// 	AmountRaw:       big.NewInt(7),
+				// 	Amount:          big.NewInt(7),
+				// },
+			},
+		},
+		"invalid: zero-value for PayloadType": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					// PayloadType:     TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+		"invalid: zero-value for OriginAddressRaw": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType: TransferTokens,
+					// OriginAddressRaw: erc20Addr,
+					TokenChain:    NATIVE_CHAIN_ID,
+					OriginAddress: usdcAddr,
+					TargetAddress: eoaAddrVAA,
+					AmountRaw:     big.NewInt(7),
+					Amount:        big.NewInt(7),
+				},
+			},
+		},
+		"invalid: zero-value for TokenChain": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					// TokenChain:      NATIVE_CHAIN_ID,
+					OriginAddress: usdcAddr,
+					TargetAddress: eoaAddrVAA,
+					AmountRaw:     big.NewInt(7),
+					Amount:        big.NewInt(7),
+				},
+			},
+		},
+		// OriginAddress may be zero for unwrapped assets without a wrapped entry?
+		// "invalid: zero-value for OriginAddress": {
+		// 	input: LogMessagePublished{
+		// 		EventEmitter: coreBridgeAddr,
+		// 		MsgSender:    tokenBridgeAddr,
+		// 		TransferDetails: &TransferDetails{
+		// 			PayloadType:      TransferTokens,
+		// 			OriginAddressRaw: usdcAddr,
+		// 			TokenChain:       NATIVE_CHAIN_ID,
+		// 			// OriginAddress:   usdcAddr,
+		// 			TargetAddress: eoaAddrVAA,
+		// 			AmountRaw:     big.NewInt(7),
+		// 			Amount:        big.NewInt(7),
+		// 		},
+		// 	},
+		// },
+		"invalid: zero-value for TargetAddress": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					// TargetAddress:   eoaAddrVAA,
+					AmountRaw: big.NewInt(7),
+					Amount:    big.NewInt(7),
+				},
+			},
+		},
+		"invalid: nil AmountRaw": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					// AmountRaw:       big.NewInt(7),
+					Amount: big.NewInt(7),
+				},
+			},
+		},
+		"invalid: negative AmountRaw": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(-1),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+		"invalid: nil Amount": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					// Amount:          big.NewInt(7),
+				},
+			},
+		},
+		"invalid: negative Amount": {
+			logMessagePublished: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    usdcAddr,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(-1),
+				},
+			},
+		},
+	}
+
+	for name, test := range invalidMessages {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*LogMessagePublished](&test.logMessagePublished)
+			require.Error(t, err)
+			// The error must specifically be an InvalidLogError, not just any error.
+			_, ok := err.(*InvalidLogError)
+			assert.True(t, ok, "wrong error type: ", err.Error())
+		})
+	}
+
+	validTransfers := map[string]struct {
+		input LogMessagePublished
+	}{
+		"valid and relevant": {
+			input: LogMessagePublished{
+				EventEmitter: coreBridgeAddr,
+				MsgSender:    tokenBridgeAddr,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokens,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    eoaAddrGeth,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+		"valid and irrelevant": {
+			input: LogMessagePublished{
+				EventEmitter: usdcAddr,
+				MsgSender:    eoaAddrGeth,
+				TransferDetails: &TransferDetails{
+					PayloadType:      TransferTokensWithPayload,
+					OriginAddressRaw: usdcAddr.Bytes(),
+					TokenChain:       NATIVE_CHAIN_ID,
+					OriginAddress:    eoaAddrGeth,
+					TargetAddress:    eoaAddrVAA,
+					AmountRaw:        big.NewInt(7),
+					Amount:           big.NewInt(7),
+				},
+			},
+		},
+	}
+
+	for name, test := range validTransfers {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			err := validate[*LogMessagePublished](&test.input)
+			require.NoError(t, err)
+		})
+	}
+}
+
+// TestCmp checks the generic byte-level comparison helper cmp() for identity,
+// mixed common.Address/vaa.Address operands, and equality after left-padding.
+func TestCmp(t *testing.T) {
+
+	t.Parallel()
+
+	// Table-driven tests were not used here because the function takes generic types which are awkward to declare
+	// in that format.
+
+	// Test identity
+	assert.Zero(t, cmp(ZERO_ADDRESS, ZERO_ADDRESS))
+	assert.Zero(t, cmp(ZERO_ADDRESS_VAA, ZERO_ADDRESS))
+
+	// Test mixed types
+	assert.Zero(t, cmp(ZERO_ADDRESS, ZERO_ADDRESS_VAA))
+	assert.Zero(t, cmp(ZERO_ADDRESS_VAA, ZERO_ADDRESS_VAA))
+
+	// A 20-byte geth address must compare equal to its 32-byte VAA form.
+	vaaAddr, err := vaa.BytesToAddress([]byte{0x01})
+	require.NoError(t, err)
+	assert.Zero(t, cmp(vaaAddr, common.BytesToAddress([]byte{0x01})))
+
+	vaaAddr, err = vaa.BytesToAddress([]byte{0xff, 0x02})
+	require.NoError(t, err)
+	assert.Zero(t, cmp(common.BytesToAddress([]byte{0xff, 0x02}), vaaAddr))
+}
+
+// TestVAAFromAddr checks that VAAAddrFrom converts a geth address into its
+// left-padded 32-byte vaa.Address equivalent.
+func TestVAAFromAddr(t *testing.T) {
+
+	t.Parallel()
+
+	// Test values. Declared here in order to silence error values from the vaa functions.
+	vaa1, _ := vaa.BytesToAddress([]byte{0xff, 0x02})
+	vaa2, _ := vaa.StringToAddress("0000000000000000000000002260fac5e5542a773aa44fbcfedf7c193bc2c599")
+
+	tests := map[string]struct {
+		input    common.Address
+		expected vaa.Address
+	}{
+		"valid, arbitrary": {
+			input:    common.BytesToAddress([]byte{0xff, 0x02}),
+			expected: vaa1,
+		},
+		"valid, zero values": {
+			input:    ZERO_ADDRESS,
+			expected: ZERO_ADDRESS_VAA,
+		},
+		"valid, string-based": {
+			input:    common.HexToAddress("0x2260fac5e5542a773aa44fbcfedf7c193bc2c599"),
+			expected: vaa2,
+		},
+	}
+
+	for name, test := range tests {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			res := VAAAddrFrom(test.input)
+			assert.Equal(t, test.expected, res)
+			// Cross-check against a manual left-pad of the input bytes.
+			assert.Zero(t, bytes.Compare(res[:], common.LeftPadBytes(test.input.Bytes(), EVM_WORD_LENGTH)))
+		})
+	}
+
+}
+
+// TestDepositFrom verifies that DepositFromLog parses a WETH Deposit event
+// log into the expected NativeDeposit struct.
+func TestDepositFrom(t *testing.T) {
+
+	t.Parallel()
+
+	tests := map[string]struct {
+		log      types.Log
+		expected *NativeDeposit
+	}{
+		"valid deposit": {
+			log: types.Log{
+				Address: WETH_ADDRESS,
+				Topics: []common.Hash{
+					common.HexToHash(EVENTHASH_WETH_DEPOSIT),
+					// Receiver
+					common.HexToHash(tokenBridgeAddr.String()),
+				},
+				TxHash: common.BytesToHash([]byte{0x01}),
+				Data:   common.LeftPadBytes(big.NewInt(100).Bytes(), EVM_WORD_LENGTH),
+			},
+			expected: &NativeDeposit{
+				Receiver:     tokenBridgeAddr,
+				TokenAddress: WETH_ADDRESS,
+				// Default token chain for a transfer.
+				TokenChain: NATIVE_CHAIN_ID,
+				Amount:     big.NewInt(100),
+			},
+		},
+	}
+
+	for name, test := range tests {
+		// Capture the range variable: required for correctness with t.Parallel()
+		// on Go < 1.22, and consistent with every other test loop in this file.
+		test := test
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			deposit, err := DepositFromLog(&test.log, NATIVE_CHAIN_ID)
+			// Check the error first so a parse failure is reported before any
+			// misleading value mismatch.
+			require.NoError(t, err)
+			assert.Equal(t, test.expected, deposit)
+		})
+	}
+
+}
+
+// TestParseERC20TransferFrom verifies that ERC20TransferFromLog parses valid
+// Transfer event logs (including burns to the zero address) and rejects
+// transfers whose From field is the zero address.
+func TestParseERC20TransferFrom(t *testing.T) {
+
+	t.Parallel()
+
+	tests := map[string]struct {
+		log      types.Log
+		expected *ERC20Transfer
+	}{
+		"valid transfer": {
+			log: types.Log{
+				Address: usdcAddr,
+				Topics: []common.Hash{
+					common.HexToHash(EVENTHASH_ERC20_TRANSFER),
+					// From
+					common.HexToHash(eoaAddrGeth.String()),
+					// To
+					common.HexToHash(tokenBridgeAddr.String()),
+				},
+				TxHash: common.BytesToHash([]byte{0x01}),
+				Data:   common.LeftPadBytes(big.NewInt(100).Bytes(), EVM_WORD_LENGTH),
+			},
+			expected: &ERC20Transfer{
+				TokenAddress: usdcAddr,
+				// Default token chain for a transfer.
+				TokenChain: NATIVE_CHAIN_ID,
+				From:       eoaAddrGeth,
+				To:         tokenBridgeAddr,
+				Amount:     big.NewInt(100),
+			},
+		},
+		"valid transfer: burn action": {
+			log: types.Log{
+				Address: usdcAddr,
+				Topics: []common.Hash{
+					common.HexToHash(EVENTHASH_ERC20_TRANSFER),
+					// From
+					common.HexToHash(eoaAddrGeth.String()),
+					// To is equal to the zero-address for burn transfers
+					common.HexToHash(ZERO_ADDRESS.String()),
+				},
+				TxHash: common.BytesToHash([]byte{0x01}),
+				Data:   common.LeftPadBytes(big.NewInt(100).Bytes(), EVM_WORD_LENGTH),
+			},
+			expected: &ERC20Transfer{
+				TokenAddress: usdcAddr,
+				// Default token chain for a transfer.
+				TokenChain: NATIVE_CHAIN_ID,
+				From:       eoaAddrGeth,
+				To:         ZERO_ADDRESS,
+				Amount:     big.NewInt(100),
+			},
+		},
+	}
+
+	for name, test := range tests {
+		test := test // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			transfer, err := ERC20TransferFromLog(&test.log, NATIVE_CHAIN_ID)
+			assert.Equal(t, test.expected, transfer)
+			require.NoError(t, err)
+		})
+	}
+
+	invalidTests := map[string]struct {
+		log types.Log
+	}{
+		"invalid transfer: From is zero address": {
+			log: types.Log{
+				Address: usdcAddr,
+				Topics: []common.Hash{
+					common.HexToHash(EVENTHASH_ERC20_TRANSFER),
+					// From
+					common.HexToHash(ZERO_ADDRESS.String()),
+					// To
+					common.HexToHash(tokenBridgeAddr.String()),
+				},
+				TxHash: common.BytesToHash([]byte{0x01}),
+				Data:   common.LeftPadBytes(big.NewInt(100).Bytes(), EVM_WORD_LENGTH),
+			},
+		},
+	}
+
+	for name, invalidTest := range invalidTests {
+		test := invalidTest // NOTE: capture range variable; required for Go < 1.22, see /doc/faq#closures_and_goroutines
+		t.Run(name, func(t *testing.T) {
+			t.Parallel() // marks each test case as capable of running in parallel with each other
+
+			transfer, err := ERC20TransferFromLog(&test.log, NATIVE_CHAIN_ID)
+			require.Error(t, err)
+			assert.Nil(t, transfer)
+		})
+	}
+
+}

+ 301 - 0
node/pkg/txverifier/sui.go

@@ -0,0 +1,301 @@
+package txverifier
+
+// TODOs:
+//	* balances on Sui are stored as u64's. Consider using uint64 instead of big.Int
+
+import (
+	"encoding/json"
+	"errors"
+	"fmt"
+	"io"
+	"math/big"
+	"net/http"
+	"strings"
+
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+	"go.uber.org/zap"
+)
+
+// Global variables
+var (
+	// suiModule is the Move module of the core contract that publishes messages.
+	suiModule    = "publish_message"
+	// suiEventName is the name of the event emitted when a message is published.
+	suiEventName = "WormholeMessage"
+)
+
+// SuiTransferVerifier verifies that token bridge message publications on Sui
+// are backed by corresponding deposits into the token bridge.
+type SuiTransferVerifier struct {
+	// suiCoreContract is the package ID of the Sui core (wormhole) contract.
+	suiCoreContract        string
+	// suiTokenBridgeEmitter is the emitter address of the token bridge.
+	suiTokenBridgeEmitter  string
+	// suiTokenBridgeContract is the package ID of the token bridge contract.
+	suiTokenBridgeContract string
+	// suiEventType is the fully-qualified WormholeMessage event type
+	// (package::module::name), pre-computed by the constructor.
+	suiEventType           string
+}
+
+// NewSuiTransferVerifier constructs a SuiTransferVerifier from the core and
+// token bridge identifiers, pre-computing the fully-qualified event type used
+// to recognize WormholeMessage events.
+func NewSuiTransferVerifier(suiCoreContract, suiTokenBridgeEmitter, suiTokenBridgeContract string) *SuiTransferVerifier {
+	eventType := fmt.Sprintf("%s::%s::%s", suiCoreContract, suiModule, suiEventName)
+	verifier := SuiTransferVerifier{
+		suiCoreContract:        suiCoreContract,
+		suiTokenBridgeEmitter:  suiTokenBridgeEmitter,
+		suiTokenBridgeContract: suiTokenBridgeContract,
+		suiEventType:           eventType,
+	}
+	return &verifier
+}
+
+// func (s *SuiTransferVerifier) GetSuiEventType() string {
+// 	return s.suiEventType
+// }
+
+// GetEventFilter returns a JSON filter to be used for querying events.
+// The `MoveEventType` filter doesn't seem to be available in the documentation. However, there is an example
+// showing the inclusion of `type` in the `MoveModule` filter.
+// Reference: https://docs.sui.io/guides/developer/sui-101/using-events#query-events-with-rpc
+func (s *SuiTransferVerifier) GetEventFilter() string {
+	// NOTE: the filter matches on package, module, and the fully-qualified
+	// event type pre-computed in the constructor.
+	return fmt.Sprintf(`
+	{
+		"MoveModule":{
+			"package":"%s",
+			"module":"%s",
+			"type":"%s"
+		}
+	}`, s.suiCoreContract, suiModule, s.suiEventType)
+}
+
+// processEvents takes a list of events and processes them to determine the amount requested out of the bridge. It returns a mapping
+// that maps the token address and chain ID to the amount requested out of the bridge. It does not return an error, because any faulty
+// events can be skipped, since they would likely fail being processed by the guardian as well. Debug level logging can be used to
+// reveal any potential locations where errors are occurring.
+func (s *SuiTransferVerifier) processEvents(events []SuiEvent, logger *zap.Logger) (requestedOutOfBridge map[string]*big.Int, numEventsProcessed uint) {
+	// Initialize the map to store the amount requested out of the bridge
+	requestedOutOfBridge = make(map[string]*big.Int)
+
+	// Filter events that have the sui token bridge emitter as the sender in the message. The events indicate
+	// how much is going to leave the network.
+	for _, event := range events {
+
+		// If any of these event parameters are nil, skip the event
+		if event.Message == nil || event.Message.Sender == nil || event.Type == nil {
+			continue
+		}
+
+		// Only process the event if it is a WormholeMessage event from the token bridge emitter
+		if *event.Type == s.suiEventType && *event.Message.Sender == s.suiTokenBridgeEmitter {
+
+			// Parse the wormhole message. vaa.IsTransfer can be omitted, since this is done
+			// inside `DecodeTransferPayloadHdr` already.
+			hdr, err := vaa.DecodeTransferPayloadHdr(event.Message.Payload)
+
+			// If there is an error decoding the payload, skip the event
+			if err != nil {
+				logger.Debug("Error decoding payload", zap.Error(err))
+				continue
+			}
+
+			// Add the key if it does not exist yet
+			key := fmt.Sprintf(KEY_FORMAT, hdr.OriginAddress.String(), hdr.OriginChain)
+			if _, exists := requestedOutOfBridge[key]; !exists {
+				requestedOutOfBridge[key] = big.NewInt(0)
+			}
+
+			// Add the amount requested out of the bridge. Amounts are summed
+			// because multiple transfer events for the same token may appear
+			// in one transaction.
+			requestedOutOfBridge[key] = new(big.Int).Add(requestedOutOfBridge[key], hdr.Amount)
+
+			numEventsProcessed++
+		} else {
+			logger.Debug("Event does not match the criteria", zap.String("event type", *event.Type), zap.String("event sender", *event.Message.Sender))
+		}
+	}
+
+	return requestedOutOfBridge, numEventsProcessed
+}
+
+// processObjectUpdates inspects object changes from a transaction block and
+// computes, per token (address-chain key), the normalized balance increase of
+// the token bridge. Faulty changes are logged and skipped rather than
+// returned as errors, mirroring processEvents.
+func (s *SuiTransferVerifier) processObjectUpdates(objectChanges []ObjectChange, suiApiConnection SuiApiInterface, logger *zap.Logger) (transferredIntoBridge map[string]*big.Int, numChangesProcessed uint) {
+	transferredIntoBridge = make(map[string]*big.Int)
+
+	for _, objectChange := range objectChanges {
+		// Check that the type information is correct.
+		if !objectChange.ValidateTypeInformation(s.suiTokenBridgeContract) {
+			continue
+		}
+
+		// Get the past objects (current and previous versions) in order to
+		// compute the balance difference.
+		resp, err := suiApiConnection.TryMultiGetPastObjects(objectChange.ObjectId, objectChange.Version, objectChange.PreviousVersion)
+
+		if err != nil {
+			logger.Error("Error in getting past objects", zap.Error(err))
+			continue
+		}
+
+		decimals, err := resp.GetDecimals()
+		if err != nil {
+			logger.Error("Error in getting decimals", zap.Error(err))
+			continue
+		}
+
+		address, err := resp.GetTokenAddress()
+		if err != nil {
+			logger.Error("Error in getting token address", zap.Error(err))
+			continue
+		}
+
+		chain, err := resp.GetTokenChain()
+		if err != nil {
+			logger.Error("Error in getting token chain", zap.Error(err))
+			continue
+		}
+
+		// Get the balance difference
+		balanceDiff, err := resp.GetBalanceDiff()
+		if err != nil {
+			logger.Error("Error in getting balance difference", zap.Error(err))
+			continue
+		}
+
+		// Normalize to the same decimal basis used by the message payloads.
+		normalized := normalize(balanceDiff, decimals)
+
+		// Add the key if it does not exist yet
+		key := fmt.Sprintf(KEY_FORMAT, address, chain)
+
+		// Add the normalized amount to the transferredIntoBridge map
+		// Intentionally use 'Set' instead of 'Add' because there should only be a single objectChange per token
+		var amount big.Int
+		transferredIntoBridge[key] = amount.Set(normalized)
+
+		// Increment the number of changes processed
+		numChangesProcessed++
+	}
+
+	return transferredIntoBridge, numChangesProcessed
+}
+
+// ProcessDigest fetches the transaction block for `digest` and verifies that
+// every amount requested out of the bridge (via WormholeMessage events) is
+// covered by a matching deposit into the token bridge (via object changes).
+// It returns the number of events processed, or an error when verification
+// fails or the transaction block cannot be fetched.
+func (s *SuiTransferVerifier) ProcessDigest(digest string, suiApiConnection SuiApiInterface, logger *zap.Logger) (uint, error) {
+	// Get the transaction block
+	txBlock, err := suiApiConnection.GetTransactionBlock(digest)
+	if err != nil {
+		// Report the failure to the caller instead of logger.Fatal: this is
+		// library code, and a transient RPC failure must not terminate the
+		// whole process.
+		logger.Error("Error in getting transaction block", zap.Error(err))
+		return 0, fmt.Errorf("error in getting transaction block: %w", err)
+	}
+
+	// process all events, indicating funds that are leaving the chain
+	requestedOutOfBridge, numEventsProcessed := s.processEvents(txBlock.Result.Events, logger)
+
+	// process all object changes, indicating funds that are entering the chain
+	transferredIntoBridge, numChangesProcessed := s.processObjectUpdates(txBlock.Result.ObjectChanges, suiApiConnection, logger)
+
+	// TODO: Revisit error handling here.
+	for key, amountOut := range requestedOutOfBridge {
+
+		// A transfer-out with no matching deposit is exactly the spoofed
+		// message publication this verifier exists to catch.
+		amountIn, exists := transferredIntoBridge[key]
+		if !exists {
+			logger.Warn("transfer-out request for tokens that were never deposited",
+				zap.String("tokenAddress", key))
+			return 0, errors.New("transfer-out request for tokens that were never deposited")
+		}
+
+		if amountOut.Cmp(amountIn) > 0 {
+			logger.Warn("requested amount out is larger than amount in")
+			return 0, errors.New("requested amount out is larger than amount in")
+		}
+
+		keyParts := strings.Split(key, "-")
+		logger.Info("bridge request processed",
+			zap.String("tokenAddress", keyParts[0]),
+			zap.String("chain", keyParts[1]),
+			zap.String("amountOut", amountOut.String()),
+			zap.String("amountIn", amountIn.String()))
+	}
+
+	//nolint:gosec
+	logger.Info("Digest processed", zap.String("txDigest", digest), zap.Uint("numEventsProcessed", numEventsProcessed), zap.Uint("numChangesProcessed", numChangesProcessed))
+
+	return numEventsProcessed, nil
+}
+
+// SuiApiResponse is implemented by every Sui JSON-RPC response type and
+// exposes the RPC-level error field (if the node returned one) so that
+// suiApiRequest can check it generically.
+type SuiApiResponse interface {
+	GetError() error
+}
+
+// suiApiRequest performs a JSON-RPC 2.0 POST against the given rpc endpoint,
+// unmarshals the body into T, and returns an error if the transport, the
+// decoding, or the RPC itself (res.GetError()) failed.
+// `method` and `params` are interpolated verbatim into the request body, so
+// `params` must already be a valid JSON array.
+// NOTE(review): the http.Client has no Timeout and the request carries no
+// context, so a hung RPC endpoint can block this call indefinitely — consider
+// adding a timeout; TODO confirm intended behavior.
+func suiApiRequest[T SuiApiResponse](rpc string, method string, params string) (T, error) {
+	// Zero value of T, returned on every error path.
+	var defaultT T
+
+	// Create the request
+	requestBody := fmt.Sprintf(`{"jsonrpc":"2.0", "id": 1, "method": "%s", "params": %s}`, method, params)
+
+	//nolint:noctx
+	req, err := http.NewRequest("POST", rpc, strings.NewReader(requestBody))
+	if err != nil {
+		return defaultT, fmt.Errorf("cannot create request: %w", err)
+	}
+
+	// Add headers
+	req.Header.Set("Content-Type", "application/json")
+
+	// Send the request
+	client := &http.Client{}
+	resp, err := client.Do(req)
+	if err != nil {
+		return defaultT, fmt.Errorf("cannot send request: %w", err)
+	}
+	defer resp.Body.Close()
+
+	// Read the response
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return defaultT, fmt.Errorf("cannot read response: %w", err)
+	}
+
+	// Parse the response
+	var res T
+	err = json.Unmarshal(body, &res)
+	if err != nil {
+		return defaultT, fmt.Errorf("cannot parse response: %w", err)
+	}
+
+	// Check if an error message exists
+	if res.GetError() != nil {
+		return defaultT, fmt.Errorf("error from Sui RPC: %w", res.GetError())
+	}
+
+	return res, nil
+}
+
+// SuiApiConnection is the production implementation of SuiApiInterface,
+// backed by a single Sui JSON-RPC endpoint.
+type SuiApiConnection struct {
+	rpc string // JSON-RPC endpoint URL
+}
+
+// NewSuiApiConnection returns a SuiApiInterface that talks to the given
+// JSON-RPC endpoint URL.
+func NewSuiApiConnection(rpc string) SuiApiInterface {
+	return &SuiApiConnection{rpc: rpc}
+}
+
+// GetTransactionBlock fetches a transaction block by digest via
+// sui_getTransactionBlock, requesting both object changes and events since
+// the verifier needs inflows and outflows from the same response.
+func (s *SuiApiConnection) GetTransactionBlock(txDigest string) (SuiGetTransactionBlockResponse, error) {
+	method := "sui_getTransactionBlock"
+	params := fmt.Sprintf(`[
+				"%s", 
+				{
+					"showObjectChanges":true,
+					"showEvents": true
+				}
+			]`, txDigest)
+
+	return suiApiRequest[SuiGetTransactionBlockResponse](s.rpc, method, params)
+}
+
+// QueryEvents calls suix_queryEvents with the given filter, cursor, page
+// limit, and ordering.
+// NOTE(review): `filter` and `cursor` are interpolated into the params array
+// verbatim, so callers must pass JSON-encoded values (e.g. a JSON object for
+// filter, a quoted string or null for cursor) — TODO confirm against callers.
+func (s *SuiApiConnection) QueryEvents(filter string, cursor string, limit int, descending bool) (SuiQueryEventsResponse, error) {
+	method := "suix_queryEvents"
+	params := fmt.Sprintf(`[%s, %s, %d, %t]`, filter, cursor, limit, descending)
+
+	return suiApiRequest[SuiQueryEventsResponse](s.rpc, method, params)
+}
+
+// TryMultiGetPastObjects fetches the same object at two versions (current and
+// previous) in one sui_tryMultiGetPastObjects call, with content included, so
+// the caller can compute the balance difference between the two versions.
+func (s *SuiApiConnection) TryMultiGetPastObjects(objectId string, version string, previousVersion string) (SuiTryMultiGetPastObjectsResponse, error) {
+	method := "sui_tryMultiGetPastObjects"
+	params := fmt.Sprintf(`[
+			[
+				{"objectId" : "%s", "version" : "%s"},
+				{"objectId" : "%s", "version" : "%s"}
+			],
+			{"showContent": true}
+		]`, objectId, version, objectId, previousVersion)
+
+	return suiApiRequest[SuiTryMultiGetPastObjectsResponse](s.rpc, method, params)
+}

+ 1155 - 0
node/pkg/txverifier/sui_test.go

@@ -0,0 +1,1155 @@
+package txverifier
+
+import (
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"math/big"
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+	"github.com/wormhole-foundation/wormhole/sdk/vaa"
+	"go.uber.org/zap"
+)
+
+// Token address fixtures used throughout the tests: 32-byte hex wormhole
+// token addresses (no 0x prefix) for USDC on Ethereum and on Sui.
+const (
+	EthereumUsdcAddress = "000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48"
+	SuiUsdcAddress      = "5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf"
+)
+
+// func initGlobals() {
+// 	suiEventType = fmt.Sprintf("%s::%s::%s", *suiCoreContract, suiModule, suiEventName)
+// }
+
+// newTestSuiTransferVerifier builds a SuiTransferVerifier wired with fixed
+// core contract, token bridge contract, and token bridge emitter addresses
+// used as fixtures by all tests in this file.
+func newTestSuiTransferVerifier() *SuiTransferVerifier {
+	suiCoreContract := "0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a"
+	suiTokenBridgeContract := "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d"
+	suiTokenBridgeEmitter := "0xccceeb29348f71bdd22ffef43a2a19c1f5b5e17c5cca5411529120182672ade5"
+
+	return NewSuiTransferVerifier(suiCoreContract, suiTokenBridgeEmitter, suiTokenBridgeContract)
+}
+
+// MockSuiApiConnection is an in-memory SuiApiInterface stub: it serves
+// canned events and canned TryMultiGetPastObjects responses.
+type MockSuiApiConnection struct {
+	// The events to be returned by QueryEvents
+	Events []SuiEvent
+	// Responses served by TryMultiGetPastObjects, matched by object id/version
+	ObjectsResponses []SuiTryMultiGetPastObjectsResponse
+}
+
+// ResultTestCase describes one mocked TryMultiGetPastObjects response: token
+// metadata plus the asset object's balance before and after the transaction.
+type ResultTestCase struct {
+	decimals     uint8  // token decimals reported by the response
+	tokenChain   string // wormhole chain id of the token, as a decimal string
+	tokenAddress string // token address bytes as a comma-separated decimal list
+	wrapped      bool   // true for a wrapped (foreign) asset, false for native custody
+	newBalance   string // balance at the object's current version
+	oldBalance   string // balance at the object's previous version
+	drop         bool   // when true, no response is registered (simulates a lookup miss)
+}
+
+// NewMockSuiApiConnection returns a mock connection pre-loaded with the given
+// events and no object responses.
+func NewMockSuiApiConnection(events []SuiEvent) *MockSuiApiConnection {
+	return &MockSuiApiConnection{
+		Events:           events,
+		ObjectsResponses: nil,
+	}
+}
+
+// SetEvents replaces the canned event list returned by GetTransactionBlock.
+func (mock *MockSuiApiConnection) SetEvents(events []SuiEvent) {
+	mock.Events = events
+}
+
+// SetObjectsResponse registers one canned TryMultiGetPastObjects response.
+func (mock *MockSuiApiConnection) SetObjectsResponse(ObjectResponse SuiTryMultiGetPastObjectsResponse) {
+	mock.ObjectsResponses = append(mock.ObjectsResponses, ObjectResponse)
+}
+
+// QueryEvents satisfies SuiApiInterface; unused by these tests, so it returns
+// an empty response.
+func (mock *MockSuiApiConnection) QueryEvents(filter string, cursor string, limit int, descending bool) (SuiQueryEventsResponse, error) {
+	return SuiQueryEventsResponse{}, nil
+}
+
+// GetTransactionBlock synthesizes a transaction block response from the mock
+// state: the canned Events, plus one ObjectChange derived from each registered
+// objects response (so ProcessDigest sees matching inflow data).
+func (mock *MockSuiApiConnection) GetTransactionBlock(txDigest string) (SuiGetTransactionBlockResponse, error) {
+
+	objectChanges := []ObjectChange{}
+
+	// Build an ObjectChange entry per stored response.
+	// NOTE(review): getter errors are intentionally ignored here; a malformed
+	// fixture would just produce zero-valued fields.
+	for _, objectResponse := range mock.ObjectsResponses {
+		objectType, _ := objectResponse.GetObjectType()
+		objectId, _ := objectResponse.GetObjectId()
+		version, _ := objectResponse.GetVersion()
+		previousVersion, _ := objectResponse.GetPreviousVersion()
+
+		obj := ObjectChange{
+			ObjectType:      objectType,
+			ObjectId:        objectId,
+			Version:         version,
+			PreviousVersion: previousVersion,
+		}
+		objectChanges = append(objectChanges, obj)
+	}
+
+	return SuiGetTransactionBlockResponse{Result: SuiGetTransactionBlockResult{Events: mock.Events, ObjectChanges: objectChanges}}, nil
+}
+// TryMultiGetPastObjects returns the registered response whose
+// (objectId, version, previousVersion) triple matches the request, or an
+// error when no registered response matches or a response is malformed.
+func (mock *MockSuiApiConnection) TryMultiGetPastObjects(objectId string, version string, previousVersion string) (SuiTryMultiGetPastObjectsResponse, error) {
+	// The lookup key is invariant across the loop, so build it once.
+	keyIn := fmt.Sprintf("%s-%s-%s", objectId, version, previousVersion)
+
+	for _, response := range mock.ObjectsResponses {
+		// Use distinct names for the response's fields rather than shadowing
+		// the function parameters.
+		respObjectId, err0 := response.GetObjectId()
+		respVersion, err1 := response.GetVersion()
+		respPreviousVersion, err2 := response.GetPreviousVersion()
+		if err0 != nil || err1 != nil || err2 != nil {
+			// Error strings are lowercase per Go convention (staticcheck ST1005).
+			return SuiTryMultiGetPastObjectsResponse{}, fmt.Errorf("error processing version data")
+		}
+
+		keyCur := fmt.Sprintf("%s-%s-%s", respObjectId, respVersion, respPreviousVersion)
+		if keyIn == keyCur {
+			return response, nil
+		}
+	}
+
+	return SuiTryMultiGetPastObjectsResponse{}, fmt.Errorf("can't find entry")
+}
+
+// TestNewSuiApiConnection checks that the constructor stores the endpoint URL
+// in the concrete SuiApiConnection it returns.
+func TestNewSuiApiConnection(t *testing.T) {
+	sampleUrl := "http://localhost:8080"
+
+	api := NewSuiApiConnection(sampleUrl)
+	// Downcast from the interface to inspect the unexported rpc field.
+	if rpc, ok := api.(*SuiApiConnection); ok {
+		assert.Equal(t, sampleUrl, rpc.rpc)
+	} else {
+		t.Errorf("Unable to get RPC from SuiApiConnection")
+	}
+}
+
+// TestProcessEvents checks that processEvents aggregates transfer amounts per
+// "tokenAddress-chain" key from token bridge publish events, and that events
+// with the wrong type, wrong sender, or unparsable payloads are skipped.
+func TestProcessEvents(t *testing.T) {
+	suiTxVerifier := newTestSuiTransferVerifier()
+
+	// Fixtures for the negative cases: a non-wormhole event type and a sender
+	// that is not the token bridge emitter.
+	arbitraryEventType := "arbitrary::EventType"
+	arbitraryEmitter := "0x3117"
+
+	logger := zap.NewNop()
+
+	// Constants used throughout the tests
+	suiEventType := suiTxVerifier.suiEventType
+	suiTokenBridgeEmitter := suiTxVerifier.suiTokenBridgeEmitter
+
+	// Define test cases
+	tests := []struct {
+		name           string
+		events         []SuiEvent
+		expectedResult map[string]*big.Int
+		expectedCount  uint
+	}{
+		{
+			name:           "TestNoEvents",
+			events:         []SuiEvent{},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			name: "TestSingleEthereumUSDCEvent",
+			events: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, 2),
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{
+				fmt.Sprintf(KEY_FORMAT, EthereumUsdcAddress, vaa.ChainIDEthereum): big.NewInt(100),
+			},
+			expectedCount: 1,
+		},
+		{
+			// Amounts for the same token accumulate across events.
+			name: "TestMultipleEthereumUSDCEvents",
+			events: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{
+				fmt.Sprintf(KEY_FORMAT, EthereumUsdcAddress, vaa.ChainIDEthereum): big.NewInt(200),
+			},
+			expectedCount: 2,
+		},
+		{
+			// Different tokens get separate map entries.
+			name: "TestMixedEthereumAndSuiUSDCEvents",
+			events: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{
+				fmt.Sprintf(KEY_FORMAT, EthereumUsdcAddress, vaa.ChainIDEthereum): big.NewInt(100),
+				fmt.Sprintf(KEY_FORMAT, SuiUsdcAddress, vaa.ChainIDSui):           big.NewInt(100),
+			},
+			expectedCount: 2,
+		},
+		{
+			// Events from a sender other than the token bridge emitter are ignored.
+			name: "TestIncorrectSender",
+			events: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &arbitraryEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			// Events of a non-wormhole type are ignored.
+			name: "TestSkipNonWormholeEvents",
+			events: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{
+					Type: &arbitraryEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{
+				fmt.Sprintf(KEY_FORMAT, EthereumUsdcAddress, vaa.ChainIDEthereum): big.NewInt(100),
+			},
+			expectedCount: 1,
+		},
+		{
+			name: "TestInvalidWormholePayloads",
+			events: []SuiEvent{
+				{ // Invalid payload type
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(0, big.NewInt(100), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{ // Empty payload
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: []byte{},
+					},
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+
+			result, count := suiTxVerifier.processEvents(tt.events, logger)
+
+			assert.Equal(t, tt.expectedResult, result)
+			assert.Equal(t, tt.expectedCount, count)
+		})
+	}
+}
+
+// TestProcessObjectUpdates checks that processObjectUpdates computes the
+// normalized balance difference for native (custody) and wrapped (supply)
+// asset objects, and skips object changes whose type string does not match
+// the token bridge's dynamic-field asset types or whose past-object lookup
+// fails (the `drop` flag).
+func TestProcessObjectUpdates(t *testing.T) {
+	suiTxVerifier := newTestSuiTransferVerifier()
+
+	logger := zap.NewNop() // zap.Must(zap.NewDevelopment())
+
+	// Constants used throughout the tests
+	normalObjectNativeType := "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0x2::sui::SUI>>"
+	normalObjectForeignType := "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::wrapped_asset::WrappedAsset<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>>"
+	normalVersion := "6565"
+	normalPreviousVersion := "4040"
+	normalObjectNativeId := "0x831c45a8d512c9cf46e7a8a947f7cbbb5e0a59829aa72450ff26fb1873fd0e94"
+	normalObjectForeignId := "0xf8f80c0d569fb076adb5fdc3a717dcb9ac14f7fd7512dc17efbf0f80a8b7fa8a"
+
+	// Token addresses are comma-separated decimal byte lists, matching the
+	// shape the mocked responses carry.
+	normalTokenAddressForeign := "0,0,0,0,0,0,0,0,0,0,0,0,160,184,105,145,198,33,139,54,193,209,157,74,46,158,176,206,54,6,235,72"
+	normalTokenAddressNative := "146,88,24,31,92,234,200,219,255,183,3,8,144,36,60,174,214,154,149,153,210,136,109,149,122,156,183,101,106,243,189,179"
+	normalChainIdNative := "21"
+	normalChainIdForeign := "2"
+
+	oneToken := new(big.Int)
+	oneToken.SetString("1000000000000000000", 10)
+
+	// Decimals, token chain, token address, wrapped or not, balance/custody
+	tests := []struct {
+		name           string
+		objectChanges  []ObjectChange
+		resultList     []ResultTestCase
+		expectedResult map[string]*big.Int
+		expectedCount  uint
+	}{
+		{
+			name: "TestProcessObjectNativeBase",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(990)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectForeignBase",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "10",
+					oldBalance:   "1000",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48-2": big.NewInt(990)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectNativeNegative",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "10",
+					oldBalance:   "1000",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(-990)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectForeignNegative", // Unsure if this test case is possible from Sui API
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48-2": big.NewInt(-990)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectNativeMultiple",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+				{
+					ObjectType:      "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0xb779486cfd6c19e9218cc7dc17c453014d2d9ba12d2ee4dbb0ec4e1e02ae1cca::spt::SPT>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0xb779486cfd6c19e9218cc7dc17c453014d2d9ba12d2ee4dbb0ec4e1e02ae1cca::spt::SPT>>",
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        "0x0063d37cdce648a7c6f72f69a75a114fbcc81ef23300e4ace60c7941521163db",
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: "80,117,89,76,1,212,111,59,203,196,167,239,20,98,5,130,115,190,206,119,147,238,189,4,100,150,53,151,201,253,9,53",
+					wrapped:      false,
+					newBalance:   "5000",
+					oldBalance:   "50",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(990), "5075594c01d46f3bcbc4a7ef1462058273bece7793eebd0464963597c9fd0935-21": big.NewInt(4950)},
+			expectedCount:  2,
+		},
+		{
+			name: "TestProcessObjectNativeAndForeign",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "50",
+					oldBalance:   "5000",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(990), "000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48-2": big.NewInt(4950)},
+			expectedCount:  2,
+		},
+		{
+			name: "TestProcessObjectWrongPackageIdType",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      "0x2::dynamic_field::Field<0xa340e3db1332c21f20f5c08bef0fa459e733575f9a7e2f5faca64f72cd5a54f2::token_registry::Key<0x2::sui::SUI>, 0xa340e3db1332c21f20f5c08bef0fa459e733575f9a7e2f5faca64f72cd5a54f2::native_asset::NativeAsset<0x2::sui::SUI>",
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			name: "TestProcessObjectNotDynamicField",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      "0x11111111111111111111::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0x2::sui::SUI>",
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			name: "TestProcessObjectMismatchedCoinTypes",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0x11111111111111111111::sui::SUI>",
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			name: "TestProcessObjectNotAssetType",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::not_native_asset::NativeAsset<0x2::sui::SUI>",
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+		{
+			name: "TestProcessObjectOneGoodOneBad",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+				{
+					ObjectType:      "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::not_native_asset::NativeAsset<0x2::sui::SUI>",
+					Version:         fmt.Sprintf("%s111", normalVersion),
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "10",
+					oldBalance:   "1000",
+					decimals:     8,
+				},
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"000000000000000000000000a0b86991c6218b36c1d19d4a2e9eb0ce3606eb48-2": big.NewInt(990)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectRealNumbers",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000000000000000",
+					oldBalance:   "999999000000000",
+					decimals:     8,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(1000000000)},
+			expectedCount:  1,
+		},
+		{
+			// 18-decimal amounts are normalized down (to 8 decimals) before
+			// being recorded.
+			name: "TestProcessObjectNormalize",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "101000000000000000000",
+					oldBalance:   "100000000000000000000",
+					decimals:     18,
+				},
+			},
+			expectedResult: map[string]*big.Int{"9258181f5ceac8dbffb7030890243caed69a9599d2886d957a9cb7656af3bdb3-21": big.NewInt(100000000)},
+			expectedCount:  1,
+		},
+		{
+			name: "TestProcessObjectMissingVersion",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+					drop:         true,
+				},
+			},
+			expectedResult: map[string]*big.Int{},
+			expectedCount:  0,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			connection := NewMockSuiApiConnection([]SuiEvent{})
+
+			assert.Equal(t, len(tt.objectChanges), len(tt.resultList))
+
+			// Add all changes to the mock Sui API for future lookups
+			for index := 0; index < len(tt.objectChanges); index++ {
+				change := tt.objectChanges[index]
+				queryResult := tt.resultList[index]
+
+				// `drop` simulates a missing past-object entry: the change is
+				// listed in the transaction but no response is registered.
+				if !queryResult.drop {
+					responseObject := generateResponsesObject(change.ObjectId, change.Version, change.ObjectType, change.PreviousVersion, queryResult.newBalance, queryResult.oldBalance, queryResult.tokenAddress, queryResult.tokenChain, queryResult.decimals, queryResult.wrapped)
+					connection.SetObjectsResponse(responseObject)
+				}
+			}
+
+			// Run function and check results
+			transferredIntoBridge, numEventsProcessed := suiTxVerifier.processObjectUpdates(tt.objectChanges, connection, logger)
+			assert.Equal(t, tt.expectedResult, transferredIntoBridge)
+			assert.Equal(t, tt.expectedCount, numEventsProcessed)
+		})
+	}
+}
+
+// TODO
+func TestProcessDigest(t *testing.T) {
+	suiTxVerifier := newTestSuiTransferVerifier()
+
+	// Constants used throughout the tests
+	normalObjectNativeType := "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0x2::sui::SUI>>"
+	normalObjectForeignType := "0x2::dynamic_field::Field<0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>, 0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::wrapped_asset::WrappedAsset<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>>"
+	normalVersion := "6565"
+	normalPreviousVersion := "4040"
+	normalObjectNativeId := "0x831c45a8d512c9cf46e7a8a947f7cbbb5e0a59829aa72450ff26fb1873fd0e94"
+	normalObjectForeignId := "0xf8f80c0d569fb076adb5fdc3a717dcb9ac14f7fd7512dc17efbf0f80a8b7fa8a"
+
+	normalTokenAddressForeign := "0,0,0,0,0,0,0,0,0,0,0,0,160,184,105,145,198,33,139,54,193,209,157,74,46,158,176,206,54,6,235,72"
+	normalTokenAddressNative := "93,75,48,37,6,100,92,55,255,19,59,152,196,181,10,90,225,72,65,101,151,56,214,215,51,213,157,13,33,122,147,191"
+	normalChainIdNative := "21"
+	normalChainIdForeign := "2"
+
+	suiEventType := suiTxVerifier.suiEventType
+	suiTokenBridgeEmitter := suiTxVerifier.suiTokenBridgeEmitter
+
+	logger := zap.Must(zap.NewDevelopment())
+
+	// func processDigest(digest string, suiApiConnection SuiApiInterface, logger *zap.Logger) error {
+	// Needs BOTH events and ObjectChange information to be updated
+	tests := []struct {
+		name          string
+		objectChanges []ObjectChange
+		resultList    []ResultTestCase
+		suiEvents     []SuiEvent
+		expectedError string
+		expectedCount uint
+	}{
+		{
+			name: "TestProcessDigestNativeBase",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "1000",
+					oldBalance:   "10",
+					decimals:     8,
+				},
+			},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(990), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedError: "",
+			expectedCount: 1,
+		},
+		{
+			name: "TestProcessDigestTakingMoreThanPuttingIn",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectNativeType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectNativeId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdNative,
+					tokenAddress: normalTokenAddressNative,
+					wrapped:      false,
+					newBalance:   "100000",
+					oldBalance:   "100000",
+					decimals:     8,
+				},
+			},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100000), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedError: "requested amount out is larger than amount in",
+			expectedCount: 0,
+		},
+		{
+			name:          "TestProcessDigestNoEvents",
+			objectChanges: []ObjectChange{},
+			resultList:    []ResultTestCase{},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100000), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedError: "transfer-out request for tokens that were never deposited",
+			expectedCount: 0,
+		},
+		{
+			name: "TestProcessDigestForeignBase",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "10",
+					oldBalance:   "1000",
+					decimals:     8,
+				},
+			},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(990), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+			},
+			expectedError: "",
+			expectedCount: 1,
+		},
+		{
+			name:          "TestProcessDigestNoEvents",
+			objectChanges: []ObjectChange{},
+			resultList:    []ResultTestCase{},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(100000), SuiUsdcAddress, uint16(vaa.ChainIDSui)),
+					},
+				},
+			},
+			expectedError: "transfer-out request for tokens that were never deposited",
+			expectedCount: 0,
+		},
+		{
+			name: "TestProcessDigestMultipleEvents",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "10",
+					oldBalance:   "2000",
+					decimals:     8,
+				},
+			},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(990), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(1000), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+			},
+			expectedError: "",
+			expectedCount: 2,
+		},
+		{
+			name: "TestProcessDigestMultipleEventsOverWithdraw",
+			objectChanges: []ObjectChange{
+				{
+					ObjectType:      normalObjectForeignType,
+					Version:         normalVersion,
+					PreviousVersion: normalPreviousVersion,
+					ObjectId:        normalObjectForeignId,
+				},
+			},
+			resultList: []ResultTestCase{
+				{
+					tokenChain:   normalChainIdForeign,
+					tokenAddress: normalTokenAddressForeign,
+					wrapped:      true,
+					newBalance:   "10",
+					oldBalance:   "2000",
+					decimals:     8,
+				},
+			},
+			suiEvents: []SuiEvent{
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(990), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+				{
+					Type: &suiEventType,
+					Message: &WormholeMessage{
+						Sender:  &suiTokenBridgeEmitter,
+						Payload: generatePayload(1, big.NewInt(1001), EthereumUsdcAddress, uint16(vaa.ChainIDEthereum)),
+					},
+				},
+			},
+			expectedError: "requested amount out is larger than amount in",
+			expectedCount: 0,
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+
+			assert.Equal(t, len(tt.objectChanges), len(tt.resultList))
+			connection := NewMockSuiApiConnection(tt.suiEvents) // Set events for connection
+
+			// Add Object Response data for Sui connections
+			for index := 0; index < len(tt.objectChanges); index++ {
+				change := tt.objectChanges[index]
+				queryResult := tt.resultList[index]
+
+				responseObject := generateResponsesObject(change.ObjectId, change.Version, change.ObjectType, change.PreviousVersion, queryResult.newBalance, queryResult.oldBalance, queryResult.tokenAddress, queryResult.tokenChain, queryResult.decimals, queryResult.wrapped)
+
+				connection.SetObjectsResponse(responseObject)
+			}
+
+			numProcessed, err := suiTxVerifier.ProcessDigest("HASH", connection, logger)
+
+			assert.Equal(t, true, tt.expectedError == "" && err == nil || err != nil && err.Error() == tt.expectedError)
+			assert.Equal(t, tt.expectedCount, numProcessed)
+		})
+	}
+}
+
+// Generate WormholeMessage payload.
+//
+//	Payload type: payload[0]
+//	Amount: payload[1] for 32
+//	Origin address: payload[33] for 32
+//	Origin chain ID: payload[65] for 2
+func generatePayload(payloadType byte, amount *big.Int, originAddressHex string, originChainID uint16) []byte {
+	originAddress, _ := hex.DecodeString(originAddressHex)
+
+	payload := make([]byte, 0, 101)
+
+	// Append payload type
+	payload = append(payload, payloadType)
+
+	// Append amount (32 bytes)
+	amountBytes := amount.FillBytes(make([]byte, 32))
+	payload = append(payload, amountBytes...)
+
+	// Append origin address (32 bytes)
+	payload = append(payload, originAddress...)
+
+	// Append origin chain ID (2 bytes)
+	originChainIDBytes := []byte{byte(originChainID >> 8), byte(originChainID & 0xff)}
+	payload = append(payload, originChainIDBytes...)
+
+	// Right-pad the payload to 101 bytes
+	padding := make([]byte, 101-len(payload))
+	payload = append(payload, padding...)
+
+	return payload
+}
+
+/*
+JSON data
+
+Decimals, token chain, token address, wrapped or not, balance/custody
+*/
+
+func generateResponsesObject(objectId string, version string, objectType string, previousVersion string, balanceAfter string, balanceBefore string, tokenAddress string, tokenChain string, decimals uint8, isWrapped bool) SuiTryMultiGetPastObjectsResponse {
+
+	var newVersion string
+	var oldVersion string
+
+	if isWrapped == false {
+		newVersion = generateResponseObjectNative(objectId, version, objectType, balanceAfter, tokenAddress, decimals)
+		oldVersion = generateResponseObjectNative(objectId, previousVersion, objectType, balanceBefore, tokenAddress, decimals)
+	} else {
+		newVersion = generateResponseObjectForeign(objectId, version, objectType, balanceAfter, tokenAddress, tokenChain, decimals)
+		oldVersion = generateResponseObjectForeign(objectId, previousVersion, objectType, balanceBefore, tokenAddress, tokenChain, decimals)
+	}
+
+	// Complete the rest of the response data
+	responseString := fmt.Sprintf(`{"result": [{"details" : %s}, {"details" : %s}]}`, newVersion, oldVersion)
+
+	data := SuiTryMultiGetPastObjectsResponse{}
+	err := json.Unmarshal([]byte(responseString), &data)
+	if err != nil {
+		fmt.Println("Error in JSON parsing...")
+	}
+
+	return data
+}
+
// generateResponseObjectNative renders the `details` JSON for one past version
// of a NativeAsset token-registry entry.
//
// The template substitutes, in order: objectId, version, the object type
// (used for both the object and its content), the custody balance, the
// decimals, and the token address (a comma-separated list of byte values
// spliced into the `data` array).
func generateResponseObjectNative(objectId string, version string, objectType string, balance string, tokenAddress string, decimals uint8) string {
	json_string_per_object := fmt.Sprintf(`{
		"objectId": "%s",
		"version": "%s",
		"digest": "4ne8fjG16hAXP8GxuXzoA5hBwuHz6C4D7cyf4TZza4Pa",
		"type": "%s",
		"owner": {
			"ObjectOwner": "0x334881831bd89287554a6121087e498fa023ce52c037001b53a4563a00a281a5"
		},
		"previousTransaction": "FRx1iHA3Wq2ybDe3hhMSkS5yqsKJ4wUDUWY3Xp8K6g18",
		"storageRebate": "3146400",
		"content": {
			"type": "%s",
			"fields": {
			"id": {
				"id": "0x831c45a8d512c9cf46e7a8a947f7cbbb5e0a59829aa72450ff26fb1873fd0e94"
			},
			"name": {
				"type": "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x2::sui::SUI>",
				"fields": {
				"dummy_field": false
				}
			},
			"value": {
				"type": "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::native_asset::NativeAsset<0x2::sui::SUI>",
				"fields": {
				"custody": "%s",
				"decimals": %d,
				"token_address": {
					"type": "0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a::external_address::ExternalAddress",
					"fields": {
					"value": {
						"type": "0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a::bytes32::Bytes32",
						"fields": {
						"data": [
							%s
						]
						}
					}
					}
				}
				}
			}
    }}}`, objectId, version, objectType, objectType, balance, decimals, tokenAddress)

	return json_string_per_object
}
+
// generateResponseObjectForeign renders the `details` JSON for one past
// version of a WrappedAsset token-registry entry.
//
// The template substitutes, in order: objectId, version, the object type
// (object and content), the native decimals, the token address byte list,
// the origin token chain, and the total supply (the wrapped balance).
//
// NOTE(review): the wrapped asset's own `"decimals"` field is hardcoded to 6
// here; only `"native_decimals"` uses the decimals parameter — confirm this
// matches the intended fixture (e.g. USDC).
func generateResponseObjectForeign(objectId string, version string, objectType string, balance string, tokenAddress string, tokenChain string, decimals uint8) string {
	json_string_per_object := fmt.Sprintf(`{
		"objectId": "%s",
		"version": "%s",
		"digest": "CWXv7KJrNawMqREtVYCRT9PVF2H8cogW1WCLMd5iQchr",
		"type": "%s",
		"owner": {
		  "ObjectOwner": "0x334881831bd89287554a6121087e498fa023ce52c037001b53a4563a00a281a5"
		},
		"previousTransaction": "EaqLzHQTeiPq2FjYCRobDH5E91DAVZgKgZzwQUJ5FaNU",
		"storageRebate": "4050800",
		"content": {
		  "dataType": "moveObject",
		  "type": "%s",
		  "hasPublicTransfer": false,
		  "fields": {
			"id": {
			  "id": "0xf8f80c0d569fb076adb5fdc3a717dcb9ac14f7fd7512dc17efbf0f80a8b7fa8a"
			},
			"name": {
			  "type": "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::token_registry::Key<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>",
			  "fields": {
				"dummy_field": false
			  }
			},
			"value": {
			  "type": "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::wrapped_asset::WrappedAsset<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>",
			  "fields": {
				"decimals": 6,
				"info": {
				  "type": "0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d::wrapped_asset::ForeignInfo<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>",
				  "fields": {
					"native_decimals": %d,
					"symbol": "USDC",
					"token_address": {
					  "type": "0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a::external_address::ExternalAddress",
					  "fields": {
						"value": {
						  "type": "0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a::bytes32::Bytes32",
						  "fields": {"data": [%s]
						  }
						}
					  }
					},
					"token_chain": %s
				  }
				},
				"treasury_cap": {
				  "type": "0x2::coin::TreasuryCap<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>",
				  "fields": {
					"id": {
					  "id": "0xa5085139fdeae133cf6ca58f1f1cee138f24ad6fc54d8e24a519dc24f3b2b974"
					},
					"total_supply": {
					  "type": "0x2::balance::Supply<0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf::coin::COIN>",
					  "fields": {
						"value": "%s"
					  }
					}
				  }
				},
				"upgrade_cap": {
				  "type": "0x2::package::UpgradeCap",
				  "fields": {
					"id": {
					  "id": "0x86ebd31cc715928671ac05e29e85b68ae1d96db02565b5413084fcb5afb695b1"
					},
					"package": "0x5d4b302506645c37ff133b98c4b50a5ae14841659738d6d733d59d0d217a93bf",
					"policy": 0,
					"version": "1"
				  }
				}
			  }
			}
		  }
		}
	  }`, objectId, version, objectType, objectType, decimals, tokenAddress, tokenChain, balance)
	return json_string_per_object

}

+ 436 - 0
node/pkg/txverifier/suitypes.go

@@ -0,0 +1,436 @@
+package txverifier
+
+import (
+	"encoding/hex"
+	"encoding/json"
+	"fmt"
+	"math/big"
+	"regexp"
+	"strings"
+)
+
// SUI_CHAIN_ID is the Wormhole chain ID for Sui (see vaa.ChainIDSui), used as
// the origin chain for native assets.
const SUI_CHAIN_ID = 21

// The SuiApi interface defines the functions that are required to interact with the Sui RPC.
type SuiApiInterface interface {
	// QueryEvents queries Sui events matching `filter`, paginated by `cursor`.
	QueryEvents(filter string, cursor string, limit int, descending bool) (SuiQueryEventsResponse, error)
	// GetTransactionBlock fetches a transaction block (object changes + events) by digest.
	GetTransactionBlock(txDigest string) (SuiGetTransactionBlockResponse, error)
	// TryMultiGetPastObjects fetches two past versions of an object in one call.
	TryMultiGetPastObjects(objectId string, version string, previousVersion string) (SuiTryMultiGetPastObjectsResponse, error)
}

// This struct defines the standard properties that get returned from the RPC.
// It includes the ErrorMessage and Error fields as well, with a standard implementation
// of a `GetError()` function. `suiApiRequest` requires `GetError()` for standard
// API error handling.
type SuiApiStandardResponse struct {
	Jsonrpc string `json:"jsonrpc"`
	ID      int    `json:"id"`
	// error_msg is typically populated when a non-api-related error occurs (like ratelimiting)
	ErrorMessage *string `json:"error_msg"`
	// error is typically populated when an api-related error occurs
	Error *struct {
		Code    int    `json:"code"`
		Message string `json:"message"`
	} `json:"error"`
}
+
+func (e SuiApiStandardResponse) GetError() error {
+	if e.ErrorMessage != nil {
+		return fmt.Errorf("error from Sui RPC: %s", *e.ErrorMessage)
+	}
+
+	if e.Error != nil {
+		return fmt.Errorf("error from Sui RPC: %s", e.Error.Message)
+	}
+
+	return nil
+}
+
// The response object for suix_queryEvents
type SuiQueryEventsResponse struct {
	SuiApiStandardResponse
	Result SuiQueryEventsResult `json:"result"`
}

// SuiQueryEventsResult is the payload of a suix_queryEvents response: a page
// of events.
type SuiQueryEventsResult struct {
	Data []SuiEvent `json:"data"`
}

// SuiEvent is a single event emitted by a Sui transaction. For Wormhole
// message publications, `parsedJson` is decoded into a WormholeMessage.
type SuiEvent struct {
	ID struct {
		TxDigest *string `json:"txDigest"`
		EventSeq *string `json:"eventSeq"`
	} `json:"id"`
	PackageID         *string `json:"packageId"`
	TransactionModule *string `json:"transactionModule"`
	Sender            *string `json:"sender"`
	Type              *string `json:"type"`
	// Bcs               *string          `json:"bcs"`
	Timestamp *string          `json:"timestampMs"`
	Message   *WormholeMessage `json:"parsedJson"`
}

// The response object for sui_GetTransactionBlock
type SuiGetTransactionBlockResponse struct {
	SuiApiStandardResponse
	Result SuiGetTransactionBlockResult `json:"result"`
}

// SuiGetTransactionBlockResult carries the object changes and events of a
// single transaction block.
type SuiGetTransactionBlockResult struct {
	ObjectChanges []ObjectChange `json:"objectChanges"`
	Events        []SuiEvent     `json:"events"`
}

// ObjectChange describes one object mutated by a transaction, including the
// version before and after the transaction (used to query past balances).
type ObjectChange struct {
	ObjectType      string `json:"objectType"`
	ObjectId        string `json:"objectId"`
	Version         string `json:"version"`
	PreviousVersion string `json:"previousVersion"`
}
+
+// Validate the type information of the object change. The following checks are performed:
+//   - pass the object through a regex that extracts the package ID, coin type, and asset type
+//   - ensure that the asset type is wrapped or native
+//   - ensure that the package IDs match the expected package ID
+//   - ensure that the coin types match
+func (o ObjectChange) ValidateTypeInformation(expectedPackageId string) (success bool) {
+	// AI generated regex
+	re := regexp.MustCompile(`^0x2::dynamic_field::Field<([^:]+)::token_registry::Key<([^>]+)>, ([^:]+)::([^<]+)<([^>]+)>>$`)
+	matches := re.FindStringSubmatch(o.ObjectType)
+
+	if len(matches) == 6 {
+		scanPackage1 := matches[1]
+		scanCoinType1 := matches[2]
+		scanPackage2 := matches[3]
+		scanAssetType := matches[4]
+		scanCoinType2 := matches[5]
+
+		// Ensure that the asset type is wrapped or native
+		if scanAssetType != "wrapped_asset::WrappedAsset" && scanAssetType != "native_asset::NativeAsset" {
+			return false
+		}
+
+		// Ensure that the package IDs match the expected package ID
+		if scanPackage1 != expectedPackageId || scanPackage2 != expectedPackageId {
+			return false
+		}
+
+		// Ensure that the coin types match
+		if scanCoinType1 != scanCoinType2 {
+			return false
+		}
+
+		return true
+	}
+
+	// No matches were found
+	return false
+}
+
// The response object for suix_tryMultiGetPastObjects
type SuiTryMultiGetPastObjectsResponse struct {
	SuiApiStandardResponse
	// Result holds the requested object versions. The getter methods below
	// index Result[0] as the newer version and Result[1] as the older one —
	// presumably matching the request order; confirm against the caller.
	Result []SuiTryMultiGetPastObjectsResult `json:"result"`
}
+
// Gets the balance difference of the two result objects.
//
// Result[0] is the newer object version and Result[1] the older one. For a
// wrapped asset, a transfer out burns supply (balance decreases), so the sign
// of the difference is flipped to report the outflow as a positive number.
func (r SuiTryMultiGetPastObjectsResponse) GetBalanceDiff() (*big.Int, error) {

	// Exactly two versions (new and old) are required to compute a diff.
	if len(r.Result) != 2 {
		return big.NewInt(0), fmt.Errorf("incorrect number of results received")
	}

	// Determine if the asset is wrapped or native
	isWrapped, err := r.Result[0].IsWrapped()
	if err != nil {
		return big.NewInt(0), fmt.Errorf("error in checking if object is wrapped: %w", err)
	}

	// TODO: Should we check that the other asset is also wrapped?
	newBalance, err := r.Result[0].GetVersionBalance(isWrapped)
	if err != nil {
		return big.NewInt(0), fmt.Errorf("error in getting new balance: %w", err)
	}

	oldBalance, err := r.Result[1].GetVersionBalance(isWrapped)
	if err != nil {
		return big.NewInt(0), fmt.Errorf("error in getting old balance: %w", err)
	}

	// NOTE: Sub mutates newBalance in place; newBalance must not be reused after this.
	difference := newBalance.Sub(newBalance, oldBalance)
	// If the asset is wrapped, it means that the balance was burned, so the difference should be negative.
	if isWrapped {
		difference = difference.Mul(difference, big.NewInt(-1))
	}

	return difference, nil
}
+
+// Gets the decimals
+func (r SuiTryMultiGetPastObjectsResponse) GetDecimals() (uint8, error) {
+	decimals0, err0 := r.Result[0].GetDecimals()
+	decimals1, err1 := r.Result[1].GetDecimals()
+
+	if err0 != nil {
+		return 0, fmt.Errorf("error in getting decimals: %w", err0)
+	} else if err1 != nil {
+		return 0, fmt.Errorf("error in getting decimals: %w", err1)
+	} else if decimals0 != decimals1 {
+		return 0, fmt.Errorf("decimals do not match")
+	}
+
+	return decimals0, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetTokenAddress() (string, error) {
+	tokenAddress0, err0 := r.Result[0].GetTokenAddress()
+	tokenAddress1, err1 := r.Result[1].GetTokenAddress()
+
+	if err0 != nil {
+		return "", fmt.Errorf("error in getting token address: %w", err0)
+	} else if err1 != nil {
+		return "", fmt.Errorf("error in getting token address: %w", err1)
+	} else if tokenAddress0 != tokenAddress1 {
+		return "", fmt.Errorf("token addresses do not match")
+	}
+
+	return tokenAddress0, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetTokenChain() (uint16, error) {
+	chain0, err0 := r.Result[0].GetTokenChain()
+	chain1, err1 := r.Result[1].GetTokenChain()
+
+	if err0 != nil {
+		return 0, fmt.Errorf("error in getting token chain: %w", err0)
+	} else if err1 != nil {
+		return 0, fmt.Errorf("error in getting token chain: %w", err1)
+	} else if chain0 != chain1 {
+		return 0, fmt.Errorf("token chain ids do not match")
+	}
+
+	return chain0, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetObjectId() (string, error) {
+	objectId, err := r.Result[0].GetObjectId()
+	if err != nil {
+		return "", fmt.Errorf("could not get object id")
+	}
+
+	return objectId, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetVersion() (string, error) {
+	version, err := r.Result[0].GetVersion()
+	if err != nil {
+		return "", fmt.Errorf("could not get object id")
+	}
+
+	return version, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetPreviousVersion() (string, error) {
+	previousVersion, err := r.Result[1].GetVersion()
+	if err != nil {
+		return "", fmt.Errorf("could not get object id")
+	}
+
+	return previousVersion, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResponse) GetObjectType() (string, error) {
+	type0, err0 := r.Result[0].GetObjectType()
+	type1, err1 := r.Result[1].GetObjectType()
+
+	if err0 != nil {
+		return "", fmt.Errorf("error in getting token chain: %w", err0)
+	} else if err1 != nil {
+		return "", fmt.Errorf("error in getting token chain: %w", err1)
+	} else if type0 != type1 {
+		return "", fmt.Errorf("token chain ids do not match")
+	}
+
+	return type0, nil
+}
+
// The result object for suix_tryMultiGetPastObjects.
type SuiTryMultiGetPastObjectsResult struct {
	// Status of the past-object lookup as reported by the RPC.
	Status string `json:"status"`
	// Details holds the raw object JSON; individual fields are extracted
	// lazily via extractFromJsonPath by the getter methods below.
	Details *json.RawMessage `json:"details"`
}
+
+// Check if the result object is wrapped.
+func (r SuiTryMultiGetPastObjectsResult) IsWrapped() (bool, error) {
+	path := "content.type"
+	objectType, err := extractFromJsonPath[string](*r.Details, path)
+
+	if err != nil {
+		return false, fmt.Errorf("error in extracting object type: %w", err)
+	}
+
+	return strings.Contains(objectType, "wrapped_asset::WrappedAsset"), nil
+}
+
+// Get the balance of the result object.
+func (r SuiTryMultiGetPastObjectsResult) GetVersionBalance(isWrapped bool) (*big.Int, error) {
+
+	var path string
+	supplyInt := big.NewInt(0)
+
+	// The path to use for a native asset
+	pathNative := "content.fields.value.fields.custody"
+
+	// The path to use for a wrapped asset
+	pathWrapped := "content.fields.value.fields.treasury_cap.fields.total_supply.fields.value"
+
+	if isWrapped {
+		path = pathWrapped
+	} else {
+		path = pathNative
+	}
+
+	supply, err := extractFromJsonPath[string](*r.Details, path)
+
+	if err != nil {
+		return supplyInt, fmt.Errorf("error in extracting wormhole balance: %w", err)
+	}
+
+	supplyInt, success := supplyInt.SetString(supply, 10)
+
+	if !success {
+		return supplyInt, fmt.Errorf("error converting supply to int: %w", err)
+	}
+
+	return supplyInt, nil
+}
+
+// Get the result object's decimals.
+func (r SuiTryMultiGetPastObjectsResult) GetDecimals() (uint8, error) {
+	// token_bridge::wrapped_asset::decimals() and token_bridge::native_asset::decimals()
+	// both store the decimals used for truncation in the NativeAsset or WrappedAsset's `decimals()` field
+	path := "content.fields.value.fields.decimals"
+
+	decimals, err := extractFromJsonPath[float64](*r.Details, path)
+
+	if err != nil {
+		return 0, fmt.Errorf("error in extracting decimals: %w", err)
+	}
+
+	return uint8(decimals), nil
+}
+
+// Get the result object's token address. This will be the address of the token
+// on it's chain of origin.
+func (r SuiTryMultiGetPastObjectsResult) GetTokenAddress() (tokenAddress string, err error) {
+	var path string
+
+	// The path to use for a native asset
+	pathNative := "content.fields.value.fields.token_address.fields.value.fields.data"
+
+	// The path to use for a wrapped asset
+	pathWrapped := "content.fields.value.fields.info.fields.token_address.fields.value.fields.data"
+
+	wrapped, err := r.IsWrapped()
+
+	if err != nil {
+		return "", fmt.Errorf("error in checking if object is wrapped: %w", err)
+	}
+
+	if wrapped {
+		path = pathWrapped
+	} else {
+		path = pathNative
+	}
+
+	data, err := extractFromJsonPath[[]interface{}](*r.Details, path)
+
+	if err != nil {
+		return "", fmt.Errorf("error in extracting token address: %w", err)
+	}
+
+	// data is of type []interface{}, and each element is of type float64.
+	// We need to covnert each element to a byte, and then convert the []byte to a hex string.
+	addrBytes := make([]byte, len(data))
+
+	for i, v := range data {
+		if f, ok := v.(float64); ok {
+			addrBytes[i] = byte(f)
+		} else {
+			return "", fmt.Errorf("error in converting token data to float type")
+		}
+	}
+
+	return hex.EncodeToString(addrBytes), nil
+}
+
+// Get the token's chain ID. This will be the chain ID of the network the token
+// originated from.
+func (r SuiTryMultiGetPastObjectsResult) GetTokenChain() (uint16, error) {
+
+	wrapped, err := r.IsWrapped()
+
+	if err != nil {
+		return 0, fmt.Errorf("error in checking if object is wrapped: %w", err)
+	}
+
+	if !wrapped {
+		return SUI_CHAIN_ID, nil
+	}
+
+	path := "content.fields.value.fields.info.fields.token_chain"
+
+	chain, err := extractFromJsonPath[float64](*r.Details, path)
+
+	if err != nil {
+		return 0, fmt.Errorf("error in extracting chain: %w", err)
+	}
+
+	return uint16(chain), nil
+}
+
+func (r SuiTryMultiGetPastObjectsResult) GetObjectId() (string, error) {
+	path := "objectId"
+
+	objectId, err := extractFromJsonPath[string](*r.Details, path)
+
+	if err != nil {
+		return "", fmt.Errorf("error in extracting objectId: %w", err)
+	}
+
+	return objectId, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResult) GetVersion() (string, error) {
+	path := "version"
+
+	version, err := extractFromJsonPath[string](*r.Details, path)
+
+	if err != nil {
+		return "", fmt.Errorf("error in extracting version: %w", err)
+	}
+
+	return version, nil
+}
+
+func (r SuiTryMultiGetPastObjectsResult) GetObjectType() (string, error) {
+	path := "type"
+
+	version, err := extractFromJsonPath[string](*r.Details, path)
+
+	if err != nil {
+		return "", fmt.Errorf("error in extracting version: %w", err)
+	}
+
+	return version, nil
+}
+
// Definition of the WormholeMessage event emitted by the core contract.
// All fields are pointers so that absent JSON keys decode to nil rather
// than zero values.
type WormholeMessage struct {
	ConsistencyLevel *uint8  `json:"consistency_level"`
	Nonce            *uint64 `json:"nonce"`
	// Payload is the raw transfer payload; see generatePayload in the tests
	// for the expected layout.
	Payload   []byte  `json:"payload"`
	Sender    *string `json:"sender"`
	Sequence  *string `json:"sequence"`
	Timestamp *string `json:"timestamp"`
}

+ 98 - 0
node/pkg/txverifier/utils.go

@@ -0,0 +1,98 @@
+package txverifier
+
+import (
+	"encoding/json"
+	"fmt"
+	"math/big"
+	"strings"
+)
+
// Constants
const (
	// MAX_DECIMALS is the number of decimals Wormhole normalizes token
	// amounts to (see the token bridge whitepaper).
	MAX_DECIMALS = 8
	// KEY_FORMAT is a "<string>-<int>" format string.
	// NOTE(review): presumably used to build "<address>-<chain>" token keys
	// elsewhere in this package — confirm against callers.
	KEY_FORMAT = "%s-%d"
)
+
// Extracts the value at the given path from the JSON object, and casts it to
// type T. If the path does not exist in the object, or an intermediate value
// is not a JSON object, an error is returned.
//
// `path` is a dot-separated list of keys, e.g. "content.fields.value".
func extractFromJsonPath[T any](data json.RawMessage, path string) (T, error) {
	var defaultT T

	var obj map[string]interface{}
	if err := json.Unmarshal(data, &obj); err != nil {
		return defaultT, err
	}

	// Split the path and iterate over the keys, except for the final key. For
	// each key, check if it exists in the object. If it does exist and is a map,
	// descend into it.
	keys := strings.Split(path, ".")
	for _, key := range keys[:len(keys)-1] {
		if obj[key] == nil {
			return defaultT, fmt.Errorf("key %s not found", key)
		}

		next, ok := obj[key].(map[string]interface{})
		if !ok {
			// Fixed garbled message ("can't convert to key to ...") and
			// included the offending key for easier debugging.
			return defaultT, fmt.Errorf("value at key %s is not a JSON object", key)
		}
		obj = next
	}

	// The final key's value must exist and be assertable to T.
	lastKey := keys[len(keys)-1]
	value, exists := obj[lastKey]
	if !exists {
		return defaultT, fmt.Errorf("key %s not found", lastKey)
	}

	v, ok := value.(T)
	if !ok {
		return defaultT, fmt.Errorf("can't convert value at key %s to the requested type", lastKey)
	}
	return v, nil
}
+
+// Normalize the amount to 8 decimals. If the amount has more than 8 decimals,
+// the amount is divided by 10^(decimals-8). If the amount has less than 8
+// decimals, the amount is returned as is.
+// https://wormhole.com/docs/build/start-building/supported-networks/evm/#addresses
+func normalize(amount *big.Int, decimals uint8) (normalizedAmount *big.Int) {
+	if amount == nil {
+		return nil
+	}
+	if decimals > MAX_DECIMALS {
+		exponent := new(big.Int).SetInt64(int64(decimals - 8))
+		multiplier := new(big.Int).Exp(new(big.Int).SetInt64(10), exponent, nil)
+		normalizedAmount = new(big.Int).Div(amount, multiplier)
+	} else {
+		return amount
+	}
+
+	return normalizedAmount
+}
+
+// denormalize() scales an amount to its native decimal representation by multiplying it by some power of 10.
+// See also:
+//   - documentation:
+//     https://github.com/wormhole-foundation/wormhole/blob/main/whitepapers/0003_token_bridge.md#handling-of-token-amounts-and-decimals
+//     https://wormhole.com/docs/build/start-building/supported-networks/evm/#addresses
+//   - solidity implementation:
+//     https://github.com/wormhole-foundation/wormhole/blob/91ec4d1dc01f8b690f0492815407505fb4587520/ethereum/contracts/bridge/Bridge.sol#L295-L300
+func denormalize(
+	amount *big.Int,
+	decimals uint8,
+) (denormalizedAmount *big.Int) {
+	if decimals > 8 {
+		// Scale from 8 decimals to `decimals`
+		exponent := new(big.Int).SetInt64(int64(decimals - 8))
+		multiplier := new(big.Int).Exp(new(big.Int).SetInt64(10), exponent, nil)
+		denormalizedAmount = new(big.Int).Mul(amount, multiplier)
+
+	} else {
+		// No scaling necessary
+		denormalizedAmount = new(big.Int).Set(amount)
+	}
+
+	return denormalizedAmount
+}

+ 221 - 0
node/pkg/txverifier/utils_test.go

@@ -0,0 +1,221 @@
+package txverifier
+
+import (
+	"encoding/json"
+	"math/big"
+	"testing"
+)
+
// TestExtractFromJsonPath covers valid, nested, missing, empty, and malformed
// paths for both string and float64 extraction. The `typ` field selects which
// generic instantiation of extractFromJsonPath is exercised.
func TestExtractFromJsonPath(t *testing.T) {
	testcases := []struct {
		name     string
		data     json.RawMessage
		path     string
		expected interface{}
		wantErr  bool
		typ      string
	}{
		{
			name:     "ValidPathString",
			data:     json.RawMessage(`{"key1": {"key2": "value"}}`),
			path:     "key1.key2",
			expected: "value",
			wantErr:  false,
			typ:      "string",
		},
		{
			name:     "ValidPathFloat",
			data:     json.RawMessage(`{"key1": {"key2": 123.45}}`),
			path:     "key1.key2",
			expected: 123.45,
			wantErr:  false,
			typ:      "float64",
		},
		{
			name:     "InvalidPath",
			data:     json.RawMessage(`{"key1": {"key2": "value"}}`),
			path:     "key1.key3",
			expected: nil,
			wantErr:  true,
			typ:      "string",
		},
		{
			name:     "NestedPath",
			data:     json.RawMessage(`{"key1": {"key2": {"key3": "value"}}}`),
			path:     "key1.key2.key3",
			expected: "value",
			wantErr:  false,
			typ:      "string",
		},
		{
			name:     "EmptyPath",
			data:     json.RawMessage(`{"key1": {"key2": "value"}}`),
			path:     "",
			expected: nil,
			wantErr:  true,
			typ:      "string",
		},
		{
			name:     "NonExistentPath",
			data:     json.RawMessage(`{"key1": {"key2": "value"}}`),
			path:     "key3.key4",
			expected: nil,
			wantErr:  true,
			typ:      "string",
		},
		{
			name:     "MalformedJson",
			data:     json.RawMessage(`{"key1": {"key2": "value"`),
			path:     "key1.key2",
			expected: nil,
			wantErr:  true,
			typ:      "string",
		},
	}

	for _, tt := range testcases {
		t.Run(tt.name, func(t *testing.T) {
			var result interface{}
			var err error
			// Instantiate the generic function with the type named by `typ`.
			switch tt.typ {
			case "string":
				var res string
				res, err = extractFromJsonPath[string](tt.data, tt.path)
				result = res
			case "float64":
				var res float64
				res, err = extractFromJsonPath[float64](tt.data, tt.path)
				result = res
			default:
				t.Fatalf("Unsupported type: %v", tt.typ)
			}

			if (err != nil) != tt.wantErr {
				t.Errorf("Expected error: %v, got: %v", tt.wantErr, err)
			}
			if !tt.wantErr && result != tt.expected {
				t.Errorf("Expected %v, got %v", tt.expected, result)
			}
		})
	}
}
+
+func TestNormalize(t *testing.T) {
+	testcases := []struct {
+		name     string
+		amount   *big.Int
+		decimals uint8
+		expected *big.Int
+	}{
+		{
+			name:     "AmountWithMoreThan8Decimals",
+			amount:   big.NewInt(1000000000000000000),
+			decimals: 18,
+			expected: big.NewInt(100000000),
+		},
+		{
+			name:     "AmountWithExactly8Decimals",
+			amount:   big.NewInt(12345678),
+			decimals: 8,
+			expected: big.NewInt(12345678),
+		},
+		{
+			name:     "AmountWithLessThan8Decimals",
+			amount:   big.NewInt(12345),
+			decimals: 5,
+			expected: big.NewInt(12345),
+		},
+		{
+			name:     "AmountWithZeroDecimals",
+			amount:   big.NewInt(12345678),
+			decimals: 0,
+			expected: big.NewInt(12345678),
+		},
+		{
+			name:     "AmountWith9Decimals",
+			amount:   big.NewInt(123456789),
+			decimals: 9,
+			expected: big.NewInt(12345678),
+		},
+		{
+			name:     "AmountWith10Decimals",
+			amount:   big.NewInt(1234567890),
+			decimals: 10,
+			expected: big.NewInt(12345678),
+		},
+		{
+			name:     "AmountEqualsNil",
+			amount:   nil,
+			decimals: 18,
+			expected: nil,
+		},
+	}
+
+	for _, tt := range testcases {
+		t.Run(tt.name, func(t *testing.T) {
+			result := normalize(tt.amount, tt.decimals)
+			if result.Cmp(tt.expected) != 0 {
+				t.Errorf("Expected %v, got %v", tt.expected, result)
+			}
+		})
+	}
+}
+
// TestDenormalize exercises denormalize across the decimals boundary
// (<8, =8, >8), including the whitepaper's worked examples.
func TestDenormalize(t *testing.T) {
	t.Parallel() // marks TLog as capable of running in parallel with other tests
	tests := map[string]struct {
		amount   *big.Int
		decimals uint8
		expected *big.Int
	}{
		"noop: decimals less than 8": {
			amount:   big.NewInt(123000),
			decimals: 1,
			expected: big.NewInt(123000),
		},
		"noop: decimals equal to 8": {
			amount:   big.NewInt(123000),
			decimals: 8,
			expected: big.NewInt(123000),
		},
		"denormalize: decimals greater than 8": {
			amount:   big.NewInt(123000),
			decimals: 12,
			expected: big.NewInt(1230000000),
		},
		// NOTE: some tokens on NEAR have as many as 24 decimals so this isn't a strict limit for Wormhole
		// overall, but should be true for EVM chains.
		"denormalize: decimals at maximum expected size": {
			amount:   big.NewInt(123_000_000),
			decimals: 18,
			expected: big.NewInt(1_230_000_000_000_000_000),
		},
		// https://github.com/wormhole-foundation/wormhole/blob/main/whitepapers/0003_token_bridge.md#handling-of-token-amounts-and-decimals
		"denormalize: whitepaper example 1": {
			amount:   big.NewInt(100000000),
			decimals: 18,
			expected: big.NewInt(1000000000000000000),
		},
		"denormalize: whitepaper example 2": {
			amount:   big.NewInt(20000),
			decimals: 4,
			expected: big.NewInt(20000),
		},
	}
	for name, test := range tests {
		test := test // NOTE: uncomment for Go < 1.22, see /doc/faq#closures_and_goroutines
		t.Run(name, func(t *testing.T) {
			t.Parallel() // marks each test case as capable of running in parallel with each other

			if got := denormalize(test.amount, test.decimals); got.Cmp(test.expected) != 0 {
				t.Fatalf("denormalize(%s, %d) returned %s; expected %s",
					test.amount.String(),
					test.decimals,
					got,
					test.expected.String(),
				)
			}

		})
	}
}

+ 2 - 2
node/pkg/watchers/evm/by_transaction.go

@@ -16,7 +16,7 @@ var (
 	// SECURITY: Hardcoded ABI identifier for the LogMessagePublished topic. When using the watcher, we don't need this
 	// since the node will only hand us pre-filtered events. In this case, we need to manually verify it
 	// since ParseLogMessagePublished will only verify whether it parses.
-	logMessagePublishedTopic = eth_common.HexToHash("0x6eb224fb001ed210e379b335e35efe88672a8ce935d981a6896b27ffdf52a3b2")
+	LogMessagePublishedTopic = eth_common.HexToHash("0x6eb224fb001ed210e379b335e35efe88672a8ce935d981a6896b27ffdf52a3b2")
 )
 
 // MessageEventsForTransaction returns the lockup events for a given transaction.
@@ -65,7 +65,7 @@ func MessageEventsForTransaction(
 			continue
 		}
 
-		if l.Topics[0] != logMessagePublishedTopic {
+		if l.Topics[0] != LogMessagePublishedTopic {
 			continue
 		}
 

+ 1 - 1
scripts/check-docker-pin.sh

@@ -1,6 +1,6 @@
 #!/usr/bin/env bash
 
-# This script is checks to that all our Docker images are pinned to a specific SHA256 hash
+# This script checks that all our Docker images are pinned to a specific SHA256 hash.
 #
 # References as to why...
 #   - https://nickjanetakis.com/blog/docker-tip-18-please-pin-your-docker-image-versions

+ 20 - 0
scripts/sui-transfer-verifier.sh

@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+set -xeuo pipefail
+
+# mainnet core contract
+CORE_CONTRACT="0x5306f64e312b581766351c07af79c72fcb1cd25147157fdc2f8ad76de9a3fb6a"
+# mainnet token bridge contract
+TOKEN_BRIDGE_CONTRACT="0x26efee2b51c911237888e5dc6702868abca3c7ac12c53f76ef8eba0697695e3d"
+
+TOKEN_BRIDGE_EMITTER="0xccceeb29348f71bdd22ffef43a2a19c1f5b5e17c5cca5411529120182672ade5"
+
+# The Sui RPC endpoint must be supplied by the caller, e.g. SUI_RPC=https://... ./sui-transfer-verifier.sh
+RPC="${SUI_RPC:?SUI_RPC must be set to a Sui RPC URL}"
+
+LOG_LEVEL="info"
+
+# Do `make node` first to compile transfer-verifier into guardiand
+/guardiand transfer-verifier-sui --suiRPC "${RPC}" \
+   --suiCoreContract "${CORE_CONTRACT}" \
+   --suiTokenBridgeContract "${TOKEN_BRIDGE_CONTRACT}" \
+   --suiTokenBridgeEmitter "${TOKEN_BRIDGE_EMITTER}" \
+   --logLevel "${LOG_LEVEL}"

+ 31 - 0
scripts/transfer-verifier-localnet.sh

@@ -0,0 +1,31 @@
+#!/usr/bin/env bash
+# Before running this script, ensure that anvil is running, e.g.:
+#
+# anvil --host 0.0.0.0 --base-fee 0 --fork-url $(worm info rpc mainnet ethereum) --mnemonic "myth like bonus scare over problem client lizard pioneer submit female collect" --fork-block-number 20641947 --fork-chain-id 1 --chain-id 1 --steps-tracing --auto-impersonate
+
+set -xeuo pipefail
+
+# mainnet 
+# CORE_CONTRACT="0x98f3c9e6E3fAce36bAAd05FE09d375Ef1464288B"
+# TOKEN_BRIDGE_CONTRACT="0x3ee18B2214AFF97000D974cf647E7C347E8fa585"
+# WRAPPED_NATIVE_CONTRACT="0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2"
+# devnet 
+CORE_CONTRACT="0xC89Ce4735882C9F0f0FE26686c53074E09B0D550"
+TOKEN_BRIDGE_CONTRACT="0x0290FB167208Af455bB137780163b7B7a9a10C16"
+WRAPPED_NATIVE_CONTRACT="0xDDb64fE46a91D46ee29420539FC25FD07c5FEa3E"
+
+# Needs to be websockets so that the eth connector can get notifications
+ETH_RPC_DEVNET="ws://localhost:8545" # from Tilt, via Anvil
+
+# RPC="${ALCHEMY_RPC}"
+RPC="${ETH_RPC_DEVNET}"
+
+LOG_LEVEL="debug"
+
+# Do `make node` first to compile transfer-verifier into guardiand. Note that the telemetry parameters are omitted here.
+./build/bin/guardiand transfer-verifier evm \
+   --rpcUrl "${RPC}" \
+   --coreContract "${CORE_CONTRACT}" \
+   --tokenContract "${TOKEN_BRIDGE_CONTRACT}" \
+   --wrappedNativeContract "${WRAPPED_NATIVE_CONTRACT}" \
+   --logLevel "${LOG_LEVEL}"