Lucas B 2 years ago
commit
27fc546116
47 files changed, with 3854 additions and 0 deletions
  1. .gitignore (+155, -0)
  2. .gitmodules (+3, -0)
  3. README.md (+53, -0)
  4. examples/searcher-cli.py (+212, -0)
  5. jito_searcher_client/__init__.py (+3, -0)
  6. jito_searcher_client/convert.py (+11, -0)
  7. jito_searcher_client/generated/__init__.py (+4, -0)
  8. jito_searcher_client/generated/auth_pb2.py (+42, -0)
  9. jito_searcher_client/generated/auth_pb2.pyi (+178, -0)
  10. jito_searcher_client/generated/auth_pb2_grpc.py (+138, -0)
  11. jito_searcher_client/generated/auth_pb2_grpc.pyi (+54, -0)
  12. jito_searcher_client/generated/block_engine_pb2.py (+18, -0)
  13. jito_searcher_client/generated/block_engine_pb2.pyi (+225, -0)
  14. jito_searcher_client/generated/block_engine_pb2_grpc.py (+247, -0)
  15. jito_searcher_client/generated/block_engine_pb2_grpc.pyi (+114, -0)
  16. jito_searcher_client/generated/block_pb2.py (+26, -0)
  17. jito_searcher_client/generated/block_pb2.pyi (+56, -0)
  18. jito_searcher_client/generated/block_pb2_grpc.py (+4, -0)
  19. jito_searcher_client/generated/block_pb2_grpc.pyi (+4, -0)
  20. jito_searcher_client/generated/bundle_pb2.py (+43, -0)
  21. jito_searcher_client/generated/bundle_pb2.pyi (+240, -0)
  22. jito_searcher_client/generated/bundle_pb2_grpc.py (+4, -0)
  23. jito_searcher_client/generated/bundle_pb2_grpc.pyi (+4, -0)
  24. jito_searcher_client/generated/packet_pb2.py (+31, -0)
  25. jito_searcher_client/generated/packet_pb2.pyi (+109, -0)
  26. jito_searcher_client/generated/packet_pb2_grpc.py (+4, -0)
  27. jito_searcher_client/generated/packet_pb2_grpc.pyi (+4, -0)
  28. jito_searcher_client/generated/relayer_pb2.py (+35, -0)
  29. jito_searcher_client/generated/relayer_pb2.pyi (+84, -0)
  30. jito_searcher_client/generated/relayer_pb2_grpc.py (+115, -0)
  31. jito_searcher_client/generated/relayer_pb2_grpc.pyi (+59, -0)
  32. jito_searcher_client/generated/searcher_pb2.py (+18, -0)
  33. jito_searcher_client/generated/searcher_pb2.pyi (+229, -0)
  34. jito_searcher_client/generated/searcher_pb2_grpc.py (+239, -0)
  35. jito_searcher_client/generated/searcher_pb2_grpc.pyi (+94, -0)
  36. jito_searcher_client/generated/shared_pb2.py (+30, -0)
  37. jito_searcher_client/generated/shared_pb2.pyi (+66, -0)
  38. jito_searcher_client/generated/shared_pb2_grpc.py (+4, -0)
  39. jito_searcher_client/generated/shared_pb2_grpc.pyi (+4, -0)
  40. jito_searcher_client/generated/shredstream_pb2.py (+30, -0)
  41. jito_searcher_client/generated/shredstream_pb2.pyi (+61, -0)
  42. jito_searcher_client/generated/shredstream_pb2_grpc.py (+67, -0)
  43. jito_searcher_client/generated/shredstream_pb2_grpc.pyi (+26, -0)
  44. jito_searcher_client/searcher.py (+205, -0)
  45. mev-protos (+1, -0)
  46. poetry.lock (+468, -0)
  47. pyproject.toml (+33, -0)

+ 155 - 0
.gitignore

@@ -0,0 +1,155 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+#  Usually these files are written by a python script from a template
+#  before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+#   For a library or package, you might want to ignore these files since the code is
+#   intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+#   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+#   However, in case of collaboration, if having platform-specific dependencies or dependencies
+#   having no cross-platform support, pipenv may install dependencies that don't work, or not
+#   install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+#   Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+#   This is especially recommended for binary packages to ensure reproducibility, and is more
+#   commonly ignored for libraries.
+#   https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+#   Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+#   pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+#   in version control.
+#   https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+.idea/

+ 3 - 0
.gitmodules

@@ -0,0 +1,3 @@
+[submodule "mev-protos"]
+	path = mev-protos
+	url = git@github.com:jito-labs/mev-protos.git

+ 53 - 0
README.md

@@ -0,0 +1,53 @@
+# About
+This library contains tooling to interact with Jito Labs' Block Engine as a searcher.
+
+# Downloading
+```bash
+$ pip install jito_searcher_client
+```
+
+# Keypair Authentication
+Please request access to the block engine by creating a solana keypair and emailing the public key to support@jito.wtf.
+
+# Simple Example
+
+```python
+from jito_searcher_client import get_searcher_client
+from jito_searcher_client.generated.searcher_pb2 import ConnectedLeadersRequest
+
+from solders.keypair import Keypair
+
+KEYPAIR_PATH = "/path/to/authenticated/keypair.json"
+BLOCK_ENGINE_URL = "frankfurt.mainnet.block-engine.jito.wtf"
+
+with open(KEYPAIR_PATH) as kp_path:
+    kp = Keypair.from_json(kp_path.read())
+
+client = get_searcher_client(BLOCK_ENGINE_URL, kp)
+leaders = client.GetConnectedLeaders(ConnectedLeadersRequest())
+print(f"{leaders=}")
+```
+
+# Development
+
+Install pip
+```bash
+$ curl -sSL https://bootstrap.pypa.io/get-pip.py | python3 -
+```
+
+Install poetry
+```bash
+$ curl -sSL https://install.python-poetry.org | python3 -
+```
+
+Set up the environment and build the protobufs
+```bash
+$ poetry install
+$ poetry protoc
+$ poetry shell
+```
+
+Publishing package
+```bash
+$ poetry protoc && poetry build && poetry publish
+```
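
The Keypair Authentication section above expects a keypair file in the JSON array format that `Keypair.from_json` reads. Below is a minimal sketch of generating one with `solders` and printing the pubkey to email for access; the output path is illustrative:

```python
import json

from solders.keypair import Keypair

# Generate a fresh keypair; email the printed pubkey to support@jito.wtf to request access.
kp = Keypair()
print(f"pubkey: {kp.pubkey()}")

# Persist the 64 secret-key bytes as a JSON array, the format Keypair.from_json reads back.
with open("keypair.json", "w") as f:
    json.dump(list(bytes(kp)), f)
```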

+ 212 - 0
examples/searcher-cli.py

@@ -0,0 +1,212 @@
+import time
+from typing import List
+
+import click
+from solana.rpc.api import Client
+from solana.rpc.commitment import Processed
+from solders.keypair import Keypair
+from solders.pubkey import Pubkey
+from solders.system_program import TransferParams, transfer
+from solders.transaction import Transaction, VersionedTransaction
+from spl.memo.instructions import MemoParams, create_memo
+
+from jito_searcher_client.convert import tx_to_protobuf_packet
+from jito_searcher_client.generated.bundle_pb2 import Bundle
+from jito_searcher_client.generated.searcher_pb2 import (
+    ConnectedLeadersRequest,
+    GetTipAccountsRequest,
+    NextScheduledLeaderRequest,
+    NextScheduledLeaderResponse,
+    PendingTxSubscriptionRequest,
+    SendBundleRequest,
+)
+from jito_searcher_client.generated.searcher_pb2_grpc import SearcherServiceStub
+from jito_searcher_client.searcher import get_searcher_client
+
+
+@click.group("cli")
+@click.pass_context
+@click.option(
+    "--keypair-path",
+    help="Path to a keypair that is authenticated with the block engine.",
+    required=True,
+)
+@click.option(
+    "--block-engine-url",
+    help="Block Engine URL",
+    required=True,
+)
+def cli(
+        ctx,
+        keypair_path: str,
+        block_engine_url: str,
+):
+    """
+    This script can be used to interface with the block engine as a searcher.
+    """
+    with open(keypair_path) as kp_path:
+        kp = Keypair.from_json(kp_path.read())
+    ctx.obj = get_searcher_client(block_engine_url, kp)
+
+
+@click.command("mempool-accounts")
+@click.pass_obj
+@click.argument("accounts", required=True, nargs=-1)
+def mempool_accounts(client: SearcherServiceStub, accounts: List[str]):
+    """
+    Stream pending transactions from write-locked accounts.
+    """
+    leader: NextScheduledLeaderResponse = client.GetNextScheduledLeader(
+        NextScheduledLeaderRequest()
+    )
+    print(
+        f"next scheduled leader is {leader.next_leader_identity} in {leader.next_leader_slot - leader.current_slot} slots"
+    )
+
+    for notification in client.SubscribePendingTransactions(
+            PendingTxSubscriptionRequest(accounts=accounts)
+    ):
+        for packet in notification.transactions:
+            print(VersionedTransaction.from_bytes(packet.data))
+
+
+@click.command("next-scheduled-leader")
+@click.pass_obj
+def next_scheduled_leader(client: SearcherServiceStub):
+    """
+    Find information on the next scheduled leader.
+    """
+    next_leader = client.GetNextScheduledLeader(NextScheduledLeaderRequest())
+    print(f"{next_leader=}")
+
+
+@click.command("connected-leaders")
+@click.pass_obj
+def connected_leaders(client: SearcherServiceStub):
+    """
+    Get leaders connected to this block engine.
+    """
+    leaders = client.GetConnectedLeaders(ConnectedLeadersRequest())
+    print(f"{leaders=}")
+
+
+@click.command("tip-accounts")
+@click.pass_obj
+def tip_accounts(client: SearcherServiceStub):
+    """
+    Get the tip accounts from the block engine.
+    """
+    accounts = client.GetTipAccounts(GetTipAccountsRequest())
+    print(f"{accounts=}")
+
+
+@click.command("send-bundle")
+@click.pass_obj
+@click.option(
+    "--rpc-url",
+    help="RPC URL path",
+    type=str,
+    required=True,
+)
+@click.option(
+    "--payer",
+    help="Path to payer keypair",
+    type=str,
+    required=True,
+)
+@click.option(
+    "--message",
+    help="Message in the bundle",
+    type=str,
+    required=True,
+)
+@click.option(
+    "--num_txs",
+    help="Number of transactions in the bundle (max is 5)",
+    type=int,
+    required=True,
+)
+@click.option(
+    "--lamports",
+    help="Number of lamports to tip in each transaction",
+    type=int,
+    required=True,
+)
+@click.option(
+    "--tip_account",
+    help="Tip account to tip",
+    type=str,
+    required=True,
+)
+def send_bundle(
+        client: SearcherServiceStub,
+        rpc_url: str,
+        payer: str,
+        message: str,
+        num_txs: int,
+        lamports: int,
+        tip_account: str,
+):
+    """
+    Send a bundle!
+    """
+    with open(payer) as kp_path:
+        payer_kp = Keypair.from_json(kp_path.read())
+    tip_account = Pubkey.from_string(tip_account)
+
+    rpc_client = Client(rpc_url)
+    balance = rpc_client.get_balance(payer_kp.pubkey()).value
+    print(f"payer public key: {payer_kp.pubkey()} {balance=}")
+
+    is_leader_slot = False
+    print("waiting for jito leader...")
+    while not is_leader_slot:
+        time.sleep(0.5)
+        next_leader: NextScheduledLeaderResponse = client.GetNextScheduledLeader(
+            NextScheduledLeaderRequest()
+        )
+        num_slots_to_leader = next_leader.next_leader_slot - next_leader.current_slot
+        print(f"waiting {num_slots_to_leader} slots to jito leader")
+        is_leader_slot = num_slots_to_leader <= 2
+
+    blockhash = rpc_client.get_latest_blockhash().value.blockhash
+    block_height = rpc_client.get_block_height(Processed).value
+
+    # Build bundle
+    txs: List[Transaction] = []
+    for idx in range(num_txs):
+        ixs = [create_memo(MemoParams(program_id=Pubkey.from_string("MemoSq4gqABAXKb96qnH8TysNcWxMyWCqXgDLGmfcHr"),
+                                      signer=payer_kp.pubkey(),
+                                      message=bytes(f"jito bundle {idx}: {message}", "utf-8")))]
+        if idx == num_txs - 1:
+            # Adds searcher tip on last tx
+            ixs.append(transfer(TransferParams(
+                from_pubkey=payer_kp.pubkey(),
+                to_pubkey=tip_account,
+                lamports=lamports
+            )))
+        tx = Transaction.new_signed_with_payer(instructions=ixs,
+                                               payer=payer_kp.pubkey(),
+                                               signing_keypairs=[payer_kp],
+                                               recent_blockhash=blockhash
+                                               )
+        print(f"{idx=} signature={tx.signatures[0]}")
+        txs.append(tx)
+
+    # Note: setting meta.size here is important so the block engine can deserialize the packet
+    packets = [tx_to_protobuf_packet(tx) for tx in txs]
+
+    uuid_response = client.SendBundle(SendBundleRequest(bundle=Bundle(header=None, packets=packets)))
+    print(f"bundle uuid: {uuid_response.uuid}")
+
+    for tx in txs:
+        print(rpc_client.confirm_transaction(tx.signatures[0], Processed, sleep_seconds=0.5,
+                                             last_valid_block_height=block_height + 10))
+
+
+if __name__ == "__main__":
+    cli.add_command(mempool_accounts)
+    cli.add_command(next_scheduled_leader)
+    cli.add_command(connected_leaders)
+    cli.add_command(tip_accounts)
+    cli.add_command(send_bundle)
+    cli()

+ 3 - 0
jito_searcher_client/__init__.py

@@ -0,0 +1,3 @@
+from .convert import tx_to_protobuf_packet
+from .generated import *
+from .searcher import JwtToken, SearcherInterceptor, get_searcher_client

+ 11 - 0
jito_searcher_client/convert.py

@@ -0,0 +1,11 @@
+from solders.transaction import Transaction
+
+from .generated.packet_pb2 import Meta, Packet
+
+
+def tx_to_protobuf_packet(tx: Transaction) -> Packet:
+    """
+    Converts a transaction to a packet
+    Note: setting packet.meta.size is required, the rest are optional
+    """
+    return Packet(data=bytes(tx), meta=Meta(size=len(bytes(tx)), addr="0.0.0.0", port=0, flags=None, sender_stake=0))
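
For context, a minimal sketch of how `tx_to_protobuf_packet` feeds into a `Bundle`, mirroring `examples/searcher-cli.py` above; the keypair, recipient, and blockhash are illustrative placeholders:

```python
from solders.hash import Hash
from solders.keypair import Keypair
from solders.pubkey import Pubkey
from solders.system_program import TransferParams, transfer
from solders.transaction import Transaction

from jito_searcher_client.convert import tx_to_protobuf_packet
from jito_searcher_client.generated.bundle_pb2 import Bundle

payer = Keypair()  # illustrative; normally loaded from disk with Keypair.from_json
ix = transfer(
    TransferParams(from_pubkey=payer.pubkey(), to_pubkey=Pubkey.default(), lamports=1_000)
)
tx = Transaction.new_signed_with_payer(
    instructions=[ix],
    payer=payer.pubkey(),
    signing_keypairs=[payer],
    recent_blockhash=Hash.default(),  # substitute a real recent blockhash from an RPC node
)

# Each signed transaction becomes a Packet, with meta.size populated by the helper above.
bundle = Bundle(header=None, packets=[tx_to_protobuf_packet(tx)])
```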

+ 4 - 0
jito_searcher_client/generated/__init__.py

@@ -0,0 +1,4 @@
+import os
+import sys
+# ugh https://stackoverflow.com/a/55258233
+sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

+ 42 - 0
jito_searcher_client/generated/auth_pb2.py

@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: auth.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\nauth.proto\x12\x04\x61uth\x1a\x1fgoogle/protobuf/timestamp.proto\"H\n\x1cGenerateAuthChallengeRequest\x12\x18\n\x04role\x18\x01 \x01(\x0e\x32\n.auth.Role\x12\x0e\n\x06pubkey\x18\x02 \x01(\x0c\"2\n\x1dGenerateAuthChallengeResponse\x12\x11\n\tchallenge\x18\x01 \x01(\t\"_\n\x19GenerateAuthTokensRequest\x12\x11\n\tchallenge\x18\x01 \x01(\t\x12\x15\n\rclient_pubkey\x18\x02 \x01(\x0c\x12\x18\n\x10signed_challenge\x18\x03 \x01(\x0c\"J\n\x05Token\x12\r\n\x05value\x18\x01 \x01(\t\x12\x32\n\x0e\x65xpires_at_utc\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"c\n\x1aGenerateAuthTokensResponse\x12!\n\x0c\x61\x63\x63\x65ss_token\x18\x01 \x01(\x0b\x32\x0b.auth.Token\x12\"\n\rrefresh_token\x18\x02 \x01(\x0b\x32\x0b.auth.Token\"2\n\x19RefreshAccessTokenRequest\x12\x15\n\rrefresh_token\x18\x01 \x01(\t\"?\n\x1aRefreshAccessTokenResponse\x12!\n\x0c\x61\x63\x63\x65ss_token\x18\x01 \x01(\x0b\x32\x0b.auth.Token*L\n\x04Role\x12\x0b\n\x07RELAYER\x10\x00\x12\x0c\n\x08SEARCHER\x10\x01\x12\r\n\tVALIDATOR\x10\x02\x12\x1a\n\x16SHREDSTREAM_SUBSCRIBER\x10\x03\x32\xa7\x02\n\x0b\x41uthService\x12\x62\n\x15GenerateAuthChallenge\x12\".auth.GenerateAuthChallengeRequest\x1a#.auth.GenerateAuthChallengeResponse\"\x00\x12Y\n\x12GenerateAuthTokens\x12\x1f.auth.GenerateAuthTokensRequest\x1a .auth.GenerateAuthTokensResponse\"\x00\x12Y\n\x12RefreshAccessToken\x12\x1f.auth.RefreshAccessTokenRequest\x1a .auth.RefreshAccessTokenResponse\"\x00\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'auth_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _ROLE._serialized_start=570
+  _ROLE._serialized_end=646
+  _GENERATEAUTHCHALLENGEREQUEST._serialized_start=53
+  _GENERATEAUTHCHALLENGEREQUEST._serialized_end=125
+  _GENERATEAUTHCHALLENGERESPONSE._serialized_start=127
+  _GENERATEAUTHCHALLENGERESPONSE._serialized_end=177
+  _GENERATEAUTHTOKENSREQUEST._serialized_start=179
+  _GENERATEAUTHTOKENSREQUEST._serialized_end=274
+  _TOKEN._serialized_start=276
+  _TOKEN._serialized_end=350
+  _GENERATEAUTHTOKENSRESPONSE._serialized_start=352
+  _GENERATEAUTHTOKENSRESPONSE._serialized_end=451
+  _REFRESHACCESSTOKENREQUEST._serialized_start=453
+  _REFRESHACCESSTOKENREQUEST._serialized_end=503
+  _REFRESHACCESSTOKENRESPONSE._serialized_start=505
+  _REFRESHACCESSTOKENRESPONSE._serialized_end=568
+  _AUTHSERVICE._serialized_start=649
+  _AUTHSERVICE._serialized_end=944
+# @@protoc_insertion_point(module_scope)

+ 178 - 0
jito_searcher_client/generated/auth_pb2.pyi

@@ -0,0 +1,178 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import google.protobuf.descriptor
+import google.protobuf.internal.enum_type_wrapper
+import google.protobuf.message
+import google.protobuf.timestamp_pb2
+import sys
+import typing
+
+if sys.version_info >= (3, 10):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+class _Role:
+    ValueType = typing.NewType("ValueType", builtins.int)
+    V: typing_extensions.TypeAlias = ValueType
+
+class _RoleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Role.ValueType], builtins.type):
+    DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
+    RELAYER: _Role.ValueType  # 0
+    SEARCHER: _Role.ValueType  # 1
+    VALIDATOR: _Role.ValueType  # 2
+    SHREDSTREAM_SUBSCRIBER: _Role.ValueType  # 3
+
+class Role(_Role, metaclass=_RoleEnumTypeWrapper): ...
+
+RELAYER: Role.ValueType  # 0
+SEARCHER: Role.ValueType  # 1
+VALIDATOR: Role.ValueType  # 2
+SHREDSTREAM_SUBSCRIBER: Role.ValueType  # 3
+global___Role = Role
+
+@typing_extensions.final
+class GenerateAuthChallengeRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ROLE_FIELD_NUMBER: builtins.int
+    PUBKEY_FIELD_NUMBER: builtins.int
+    role: global___Role.ValueType
+    """/ Role the client is attempting to generate tokens for."""
+    pubkey: builtins.bytes
+    """/ Client's 32 byte pubkey."""
+    def __init__(
+        self,
+        *,
+        role: global___Role.ValueType = ...,
+        pubkey: builtins.bytes = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["pubkey", b"pubkey", "role", b"role"]) -> None: ...
+
+global___GenerateAuthChallengeRequest = GenerateAuthChallengeRequest
+
+@typing_extensions.final
+class GenerateAuthChallengeResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    CHALLENGE_FIELD_NUMBER: builtins.int
+    challenge: builtins.str
+    def __init__(
+        self,
+        *,
+        challenge: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["challenge", b"challenge"]) -> None: ...
+
+global___GenerateAuthChallengeResponse = GenerateAuthChallengeResponse
+
+@typing_extensions.final
+class GenerateAuthTokensRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    CHALLENGE_FIELD_NUMBER: builtins.int
+    CLIENT_PUBKEY_FIELD_NUMBER: builtins.int
+    SIGNED_CHALLENGE_FIELD_NUMBER: builtins.int
+    challenge: builtins.str
+    """/ The pre-signed challenge."""
+    client_pubkey: builtins.bytes
+    """/ The signing keypair's corresponding 32 byte pubkey."""
+    signed_challenge: builtins.bytes
+    """/ The 64 byte signature of the challenge signed by the client's private key. The private key must correspond to
+    the pubkey passed in the [GenerateAuthChallenge] method. The client is expected to sign the challenge token
+    prepended with their pubkey. For example sign(pubkey, challenge).
+    """
+    def __init__(
+        self,
+        *,
+        challenge: builtins.str = ...,
+        client_pubkey: builtins.bytes = ...,
+        signed_challenge: builtins.bytes = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["challenge", b"challenge", "client_pubkey", b"client_pubkey", "signed_challenge", b"signed_challenge"]) -> None: ...
+
+global___GenerateAuthTokensRequest = GenerateAuthTokensRequest
+
+@typing_extensions.final
+class Token(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    VALUE_FIELD_NUMBER: builtins.int
+    EXPIRES_AT_UTC_FIELD_NUMBER: builtins.int
+    value: builtins.str
+    """/ The token."""
+    @property
+    def expires_at_utc(self) -> google.protobuf.timestamp_pb2.Timestamp:
+        """/ When the token will expire."""
+    def __init__(
+        self,
+        *,
+        value: builtins.str = ...,
+        expires_at_utc: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["expires_at_utc", b"expires_at_utc"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["expires_at_utc", b"expires_at_utc", "value", b"value"]) -> None: ...
+
+global___Token = Token
+
+@typing_extensions.final
+class GenerateAuthTokensResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCESS_TOKEN_FIELD_NUMBER: builtins.int
+    REFRESH_TOKEN_FIELD_NUMBER: builtins.int
+    @property
+    def access_token(self) -> global___Token:
+        """/ The token granting access to resources."""
+    @property
+    def refresh_token(self) -> global___Token:
+        """/ The token used to refresh the access_token. This has a longer TTL than the access_token."""
+    def __init__(
+        self,
+        *,
+        access_token: global___Token | None = ...,
+        refresh_token: global___Token | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["access_token", b"access_token", "refresh_token", b"refresh_token"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["access_token", b"access_token", "refresh_token", b"refresh_token"]) -> None: ...
+
+global___GenerateAuthTokensResponse = GenerateAuthTokensResponse
+
+@typing_extensions.final
+class RefreshAccessTokenRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    REFRESH_TOKEN_FIELD_NUMBER: builtins.int
+    refresh_token: builtins.str
+    """/ Non-expired refresh token obtained from the [GenerateAuthTokens] method."""
+    def __init__(
+        self,
+        *,
+        refresh_token: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["refresh_token", b"refresh_token"]) -> None: ...
+
+global___RefreshAccessTokenRequest = RefreshAccessTokenRequest
+
+@typing_extensions.final
+class RefreshAccessTokenResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCESS_TOKEN_FIELD_NUMBER: builtins.int
+    @property
+    def access_token(self) -> global___Token:
+        """/ Fresh access_token."""
+    def __init__(
+        self,
+        *,
+        access_token: global___Token | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["access_token", b"access_token"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["access_token", b"access_token"]) -> None: ...
+
+global___RefreshAccessTokenResponse = RefreshAccessTokenResponse

+ 138 - 0
jito_searcher_client/generated/auth_pb2_grpc.py

@@ -0,0 +1,138 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import auth_pb2 as auth__pb2
+
+
+class AuthServiceStub(object):
+    """/ This service is responsible for issuing auth tokens to clients for API access.
+    """
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.GenerateAuthChallenge = channel.unary_unary(
+                '/auth.AuthService/GenerateAuthChallenge',
+                request_serializer=auth__pb2.GenerateAuthChallengeRequest.SerializeToString,
+                response_deserializer=auth__pb2.GenerateAuthChallengeResponse.FromString,
+                )
+        self.GenerateAuthTokens = channel.unary_unary(
+                '/auth.AuthService/GenerateAuthTokens',
+                request_serializer=auth__pb2.GenerateAuthTokensRequest.SerializeToString,
+                response_deserializer=auth__pb2.GenerateAuthTokensResponse.FromString,
+                )
+        self.RefreshAccessToken = channel.unary_unary(
+                '/auth.AuthService/RefreshAccessToken',
+                request_serializer=auth__pb2.RefreshAccessTokenRequest.SerializeToString,
+                response_deserializer=auth__pb2.RefreshAccessTokenResponse.FromString,
+                )
+
+
+class AuthServiceServicer(object):
+    """/ This service is responsible for issuing auth tokens to clients for API access.
+    """
+
+    def GenerateAuthChallenge(self, request, context):
+        """/ Returns a challenge, client is expected to sign this challenge with an appropriate keypair in order to obtain access tokens.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GenerateAuthTokens(self, request, context):
+        """/ Provides the client with the initial pair of auth tokens for API access.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def RefreshAccessToken(self, request, context):
+        """/ Call this method with a non-expired refresh token to obtain a new access token.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_AuthServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'GenerateAuthChallenge': grpc.unary_unary_rpc_method_handler(
+                    servicer.GenerateAuthChallenge,
+                    request_deserializer=auth__pb2.GenerateAuthChallengeRequest.FromString,
+                    response_serializer=auth__pb2.GenerateAuthChallengeResponse.SerializeToString,
+            ),
+            'GenerateAuthTokens': grpc.unary_unary_rpc_method_handler(
+                    servicer.GenerateAuthTokens,
+                    request_deserializer=auth__pb2.GenerateAuthTokensRequest.FromString,
+                    response_serializer=auth__pb2.GenerateAuthTokensResponse.SerializeToString,
+            ),
+            'RefreshAccessToken': grpc.unary_unary_rpc_method_handler(
+                    servicer.RefreshAccessToken,
+                    request_deserializer=auth__pb2.RefreshAccessTokenRequest.FromString,
+                    response_serializer=auth__pb2.RefreshAccessTokenResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'auth.AuthService', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class AuthService(object):
+    """/ This service is responsible for issuing auth tokens to clients for API access.
+    """
+
+    @staticmethod
+    def GenerateAuthChallenge(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/auth.AuthService/GenerateAuthChallenge',
+            auth__pb2.GenerateAuthChallengeRequest.SerializeToString,
+            auth__pb2.GenerateAuthChallengeResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GenerateAuthTokens(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/auth.AuthService/GenerateAuthTokens',
+            auth__pb2.GenerateAuthTokensRequest.SerializeToString,
+            auth__pb2.GenerateAuthTokensResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def RefreshAccessToken(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/auth.AuthService/RefreshAccessToken',
+            auth__pb2.RefreshAccessTokenRequest.SerializeToString,
+            auth__pb2.RefreshAccessTokenResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

+ 54 - 0
jito_searcher_client/generated/auth_pb2_grpc.pyi

@@ -0,0 +1,54 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import abc
+import auth_pb2
+import grpc
+
+class AuthServiceStub:
+    """/ This service is responsible for issuing auth tokens to clients for API access."""
+
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    GenerateAuthChallenge: grpc.UnaryUnaryMultiCallable[
+        auth_pb2.GenerateAuthChallengeRequest,
+        auth_pb2.GenerateAuthChallengeResponse,
+    ]
+    """/ Returns a challenge, client is expected to sign this challenge with an appropriate keypair in order to obtain access tokens."""
+    GenerateAuthTokens: grpc.UnaryUnaryMultiCallable[
+        auth_pb2.GenerateAuthTokensRequest,
+        auth_pb2.GenerateAuthTokensResponse,
+    ]
+    """/ Provides the client with the initial pair of auth tokens for API access."""
+    RefreshAccessToken: grpc.UnaryUnaryMultiCallable[
+        auth_pb2.RefreshAccessTokenRequest,
+        auth_pb2.RefreshAccessTokenResponse,
+    ]
+    """/ Call this method with a non-expired refresh token to obtain a new access token."""
+
+class AuthServiceServicer(metaclass=abc.ABCMeta):
+    """/ This service is responsible for issuing auth tokens to clients for API access."""
+
+    @abc.abstractmethod
+    def GenerateAuthChallenge(
+        self,
+        request: auth_pb2.GenerateAuthChallengeRequest,
+        context: grpc.ServicerContext,
+    ) -> auth_pb2.GenerateAuthChallengeResponse:
+        """/ Returns a challenge, client is expected to sign this challenge with an appropriate keypair in order to obtain access tokens."""
+    @abc.abstractmethod
+    def GenerateAuthTokens(
+        self,
+        request: auth_pb2.GenerateAuthTokensRequest,
+        context: grpc.ServicerContext,
+    ) -> auth_pb2.GenerateAuthTokensResponse:
+        """/ Provides the client with the initial pair of auth tokens for API access."""
+    @abc.abstractmethod
+    def RefreshAccessToken(
+        self,
+        request: auth_pb2.RefreshAccessTokenRequest,
+        context: grpc.ServicerContext,
+    ) -> auth_pb2.RefreshAccessTokenResponse:
+        """/ Call this method with a non-expired refresh token to obtain a new access token."""
+
+def add_AuthServiceServicer_to_server(servicer: AuthServiceServicer, server: grpc.Server) -> None: ...
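
The stubs above describe the token handshake that `jito_searcher_client/searcher.py` wraps in an interceptor. Below is a minimal sketch of driving it by hand, assuming TLS on the default port and that the message to sign is the pubkey and challenge joined by a hyphen; the canonical formatting lives in `searcher.py`, so treat this as illustrative only:

```python
import grpc
from solders.keypair import Keypair

from jito_searcher_client.generated.auth_pb2 import (
    GenerateAuthChallengeRequest,
    GenerateAuthTokensRequest,
    Role,
)
from jito_searcher_client.generated.auth_pb2_grpc import AuthServiceStub


def fetch_access_token(block_engine_url: str, kp: Keypair) -> str:
    """Illustrative only; the packaged SearcherInterceptor performs this flow automatically."""
    channel = grpc.secure_channel(block_engine_url, grpc.ssl_channel_credentials())
    auth = AuthServiceStub(channel)

    # Step 1: ask for a challenge tied to our pubkey and the SEARCHER role.
    challenge = auth.GenerateAuthChallenge(
        GenerateAuthChallengeRequest(role=Role.SEARCHER, pubkey=bytes(kp.pubkey()))
    ).challenge

    # Step 2: sign the challenge prepended with the pubkey (assumed "<pubkey>-<challenge>").
    signed = kp.sign_message(f"{kp.pubkey()}-{challenge}".encode())

    # Step 3: exchange the signed challenge for access and refresh tokens.
    tokens = auth.GenerateAuthTokens(
        GenerateAuthTokensRequest(
            challenge=challenge,
            client_pubkey=bytes(kp.pubkey()),
            signed_challenge=bytes(signed),
        )
    )
    return tokens.access_token.value
```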

The diff for this file has been suppressed because it is too large
+ 18 - 0
jito_searcher_client/generated/block_engine_pb2.py


+ 225 - 0
jito_searcher_client/generated/block_engine_pb2.pyi

@@ -0,0 +1,225 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import bundle_pb2
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import packet_pb2
+import shared_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class SubscribePacketsRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___SubscribePacketsRequest = SubscribePacketsRequest
+
+@typing_extensions.final
+class SubscribePacketsResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEADER_FIELD_NUMBER: builtins.int
+    BATCH_FIELD_NUMBER: builtins.int
+    @property
+    def header(self) -> shared_pb2.Header: ...
+    @property
+    def batch(self) -> packet_pb2.PacketBatch: ...
+    def __init__(
+        self,
+        *,
+        header: shared_pb2.Header | None = ...,
+        batch: packet_pb2.PacketBatch | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["batch", b"batch", "header", b"header"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["batch", b"batch", "header", b"header"]) -> None: ...
+
+global___SubscribePacketsResponse = SubscribePacketsResponse
+
+@typing_extensions.final
+class SubscribeBundlesRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___SubscribeBundlesRequest = SubscribeBundlesRequest
+
+@typing_extensions.final
+class SubscribeBundlesResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    BUNDLES_FIELD_NUMBER: builtins.int
+    @property
+    def bundles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[bundle_pb2.BundleUuid]: ...
+    def __init__(
+        self,
+        *,
+        bundles: collections.abc.Iterable[bundle_pb2.BundleUuid] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["bundles", b"bundles"]) -> None: ...
+
+global___SubscribeBundlesResponse = SubscribeBundlesResponse
+
+@typing_extensions.final
+class BlockBuilderFeeInfoRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___BlockBuilderFeeInfoRequest = BlockBuilderFeeInfoRequest
+
+@typing_extensions.final
+class BlockBuilderFeeInfoResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    PUBKEY_FIELD_NUMBER: builtins.int
+    COMMISSION_FIELD_NUMBER: builtins.int
+    pubkey: builtins.str
+    commission: builtins.int
+    """commission (0-100)"""
+    def __init__(
+        self,
+        *,
+        pubkey: builtins.str = ...,
+        commission: builtins.int = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["commission", b"commission", "pubkey", b"pubkey"]) -> None: ...
+
+global___BlockBuilderFeeInfoResponse = BlockBuilderFeeInfoResponse
+
+@typing_extensions.final
+class AccountsOfInterest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCOUNTS_FIELD_NUMBER: builtins.int
+    @property
+    def accounts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """use * for all accounts"""
+    def __init__(
+        self,
+        *,
+        accounts: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["accounts", b"accounts"]) -> None: ...
+
+global___AccountsOfInterest = AccountsOfInterest
+
+@typing_extensions.final
+class AccountsOfInterestRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___AccountsOfInterestRequest = AccountsOfInterestRequest
+
+@typing_extensions.final
+class AccountsOfInterestUpdate(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCOUNTS_FIELD_NUMBER: builtins.int
+    @property
+    def accounts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
+    def __init__(
+        self,
+        *,
+        accounts: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["accounts", b"accounts"]) -> None: ...
+
+global___AccountsOfInterestUpdate = AccountsOfInterestUpdate
+
+@typing_extensions.final
+class ExpiringPacketBatch(google.protobuf.message.Message):
+    """A series of packets with an expiration attached to them.
+    The header contains a timestamp for when this packet was generated.
+    The expiry is how long the packet batches have before they expire and are forwarded to the validator.
+    This provides a more censorship resistant method to MEV than block engines receiving packets directly.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEADER_FIELD_NUMBER: builtins.int
+    BATCH_FIELD_NUMBER: builtins.int
+    EXPIRY_MS_FIELD_NUMBER: builtins.int
+    @property
+    def header(self) -> shared_pb2.Header: ...
+    @property
+    def batch(self) -> packet_pb2.PacketBatch: ...
+    expiry_ms: builtins.int
+    def __init__(
+        self,
+        *,
+        header: shared_pb2.Header | None = ...,
+        batch: packet_pb2.PacketBatch | None = ...,
+        expiry_ms: builtins.int = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["batch", b"batch", "header", b"header"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["batch", b"batch", "expiry_ms", b"expiry_ms", "header", b"header"]) -> None: ...
+
+global___ExpiringPacketBatch = ExpiringPacketBatch
+
+@typing_extensions.final
+class PacketBatchUpdate(google.protobuf.message.Message):
+    """Packets and heartbeats are sent over the same stream.
+    ExpiringPacketBatches have an expiration attached to them so the block engine can track
+    how long it has until the relayer forwards the packets to the validator.
+    Heartbeats contain a timestamp from the system and is used as a simple and naive time-sync mechanism
+    so the block engine has some idea on how far their clocks are apart.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    BATCHES_FIELD_NUMBER: builtins.int
+    HEARTBEAT_FIELD_NUMBER: builtins.int
+    @property
+    def batches(self) -> global___ExpiringPacketBatch: ...
+    @property
+    def heartbeat(self) -> shared_pb2.Heartbeat: ...
+    def __init__(
+        self,
+        *,
+        batches: global___ExpiringPacketBatch | None = ...,
+        heartbeat: shared_pb2.Heartbeat | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["batches", b"batches", "heartbeat", b"heartbeat", "msg", b"msg"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["batches", b"batches", "heartbeat", b"heartbeat", "msg", b"msg"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["msg", b"msg"]) -> typing_extensions.Literal["batches", "heartbeat"] | None: ...
+
+global___PacketBatchUpdate = PacketBatchUpdate
+
+@typing_extensions.final
+class StartExpiringPacketStreamResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEARTBEAT_FIELD_NUMBER: builtins.int
+    @property
+    def heartbeat(self) -> shared_pb2.Heartbeat: ...
+    def __init__(
+        self,
+        *,
+        heartbeat: shared_pb2.Heartbeat | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["heartbeat", b"heartbeat"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["heartbeat", b"heartbeat"]) -> None: ...
+
+global___StartExpiringPacketStreamResponse = StartExpiringPacketStreamResponse

+ 247 - 0
jito_searcher_client/generated/block_engine_pb2_grpc.py

@@ -0,0 +1,247 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import block_engine_pb2 as block__engine__pb2
+
+
+class BlockEngineValidatorStub(object):
+    """/ Validators can connect to Block Engines to receive packets and bundles.
+    """
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.SubscribePackets = channel.unary_stream(
+                '/block_engine.BlockEngineValidator/SubscribePackets',
+                request_serializer=block__engine__pb2.SubscribePacketsRequest.SerializeToString,
+                response_deserializer=block__engine__pb2.SubscribePacketsResponse.FromString,
+                )
+        self.SubscribeBundles = channel.unary_stream(
+                '/block_engine.BlockEngineValidator/SubscribeBundles',
+                request_serializer=block__engine__pb2.SubscribeBundlesRequest.SerializeToString,
+                response_deserializer=block__engine__pb2.SubscribeBundlesResponse.FromString,
+                )
+        self.GetBlockBuilderFeeInfo = channel.unary_unary(
+                '/block_engine.BlockEngineValidator/GetBlockBuilderFeeInfo',
+                request_serializer=block__engine__pb2.BlockBuilderFeeInfoRequest.SerializeToString,
+                response_deserializer=block__engine__pb2.BlockBuilderFeeInfoResponse.FromString,
+                )
+
+
+class BlockEngineValidatorServicer(object):
+    """/ Validators can connect to Block Engines to receive packets and bundles.
+    """
+
+    def SubscribePackets(self, request, context):
+        """/ Validators can subscribe to the block engine to receive a stream of packets
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SubscribeBundles(self, request, context):
+        """/ Validators can subscribe to the block engine to receive a stream of simulated and profitable bundles
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetBlockBuilderFeeInfo(self, request, context):
+        """Block builders can optionally collect fees. This returns fee information if a block builder wants to
+        collect one.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_BlockEngineValidatorServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'SubscribePackets': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribePackets,
+                    request_deserializer=block__engine__pb2.SubscribePacketsRequest.FromString,
+                    response_serializer=block__engine__pb2.SubscribePacketsResponse.SerializeToString,
+            ),
+            'SubscribeBundles': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribeBundles,
+                    request_deserializer=block__engine__pb2.SubscribeBundlesRequest.FromString,
+                    response_serializer=block__engine__pb2.SubscribeBundlesResponse.SerializeToString,
+            ),
+            'GetBlockBuilderFeeInfo': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetBlockBuilderFeeInfo,
+                    request_deserializer=block__engine__pb2.BlockBuilderFeeInfoRequest.FromString,
+                    response_serializer=block__engine__pb2.BlockBuilderFeeInfoResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'block_engine.BlockEngineValidator', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class BlockEngineValidator(object):
+    """/ Validators can connect to Block Engines to receive packets and bundles.
+    """
+
+    @staticmethod
+    def SubscribePackets(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/block_engine.BlockEngineValidator/SubscribePackets',
+            block__engine__pb2.SubscribePacketsRequest.SerializeToString,
+            block__engine__pb2.SubscribePacketsResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SubscribeBundles(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/block_engine.BlockEngineValidator/SubscribeBundles',
+            block__engine__pb2.SubscribeBundlesRequest.SerializeToString,
+            block__engine__pb2.SubscribeBundlesResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetBlockBuilderFeeInfo(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/block_engine.BlockEngineValidator/GetBlockBuilderFeeInfo',
+            block__engine__pb2.BlockBuilderFeeInfoRequest.SerializeToString,
+            block__engine__pb2.BlockBuilderFeeInfoResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+
+class BlockEngineRelayerStub(object):
+    """/ Relayers can forward packets to Block Engines.
+    / Block Engines provide an AccountsOfInterest field to only send transactions that are of interest.
+    """
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.SubscribeAccountsOfInterest = channel.unary_stream(
+                '/block_engine.BlockEngineRelayer/SubscribeAccountsOfInterest',
+                request_serializer=block__engine__pb2.AccountsOfInterestRequest.SerializeToString,
+                response_deserializer=block__engine__pb2.AccountsOfInterestUpdate.FromString,
+                )
+        self.StartExpiringPacketStream = channel.stream_stream(
+                '/block_engine.BlockEngineRelayer/StartExpiringPacketStream',
+                request_serializer=block__engine__pb2.PacketBatchUpdate.SerializeToString,
+                response_deserializer=block__engine__pb2.StartExpiringPacketStreamResponse.FromString,
+                )
+
+
+class BlockEngineRelayerServicer(object):
+    """/ Relayers can forward packets to Block Engines.
+    / Block Engines provide an AccountsOfInterest field to only send transactions that are of interest.
+    """
+
+    def SubscribeAccountsOfInterest(self, request, context):
+        """/ The block engine feeds accounts of interest (AOI) updates to the relayer periodically.
+        / For all transactions the relayer receives, it forwards transactions to the block engine which write-lock
+        / any of the accounts in the AOI.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def StartExpiringPacketStream(self, request_iterator, context):
+        """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+        of packets and heartbeats.
+        NOTE: This is a bi-directional stream due to a bug with how Envoy handles half closed client-side streams.
+        The issue is being tracked here: https://github.com/envoyproxy/envoy/issues/22748. In the meantime, the
+        server will stream heartbeats to clients at some reasonable cadence.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_BlockEngineRelayerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'SubscribeAccountsOfInterest': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribeAccountsOfInterest,
+                    request_deserializer=block__engine__pb2.AccountsOfInterestRequest.FromString,
+                    response_serializer=block__engine__pb2.AccountsOfInterestUpdate.SerializeToString,
+            ),
+            'StartExpiringPacketStream': grpc.stream_stream_rpc_method_handler(
+                    servicer.StartExpiringPacketStream,
+                    request_deserializer=block__engine__pb2.PacketBatchUpdate.FromString,
+                    response_serializer=block__engine__pb2.StartExpiringPacketStreamResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'block_engine.BlockEngineRelayer', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class BlockEngineRelayer(object):
+    """/ Relayers can forward packets to Block Engines.
+    / Block Engines provide an AccountsOfInterest field to only send transactions that are of interest.
+    """
+
+    @staticmethod
+    def SubscribeAccountsOfInterest(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/block_engine.BlockEngineRelayer/SubscribeAccountsOfInterest',
+            block__engine__pb2.AccountsOfInterestRequest.SerializeToString,
+            block__engine__pb2.AccountsOfInterestUpdate.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def StartExpiringPacketStream(request_iterator,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.stream_stream(request_iterator, target, '/block_engine.BlockEngineRelayer/StartExpiringPacketStream',
+            block__engine__pb2.PacketBatchUpdate.SerializeToString,
+            block__engine__pb2.StartExpiringPacketStreamResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

+ 114 - 0
jito_searcher_client/generated/block_engine_pb2_grpc.pyi

@@ -0,0 +1,114 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import abc
+import block_engine_pb2
+import collections.abc
+import grpc
+
+class BlockEngineValidatorStub:
+    """/ Validators can connect to Block Engines to receive packets and bundles."""
+
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    SubscribePackets: grpc.UnaryStreamMultiCallable[
+        block_engine_pb2.SubscribePacketsRequest,
+        block_engine_pb2.SubscribePacketsResponse,
+    ]
+    """/ Validators can subscribe to the block engine to receive a stream of packets"""
+    SubscribeBundles: grpc.UnaryStreamMultiCallable[
+        block_engine_pb2.SubscribeBundlesRequest,
+        block_engine_pb2.SubscribeBundlesResponse,
+    ]
+    """/ Validators can subscribe to the block engine to receive a stream of simulated and profitable bundles"""
+    GetBlockBuilderFeeInfo: grpc.UnaryUnaryMultiCallable[
+        block_engine_pb2.BlockBuilderFeeInfoRequest,
+        block_engine_pb2.BlockBuilderFeeInfoResponse,
+    ]
+    """Block builders can optionally collect fees. This returns fee information if a block builder wants to
+    collect one.
+    """
+
+class BlockEngineValidatorServicer(metaclass=abc.ABCMeta):
+    """/ Validators can connect to Block Engines to receive packets and bundles."""
+
+    @abc.abstractmethod
+    def SubscribePackets(
+        self,
+        request: block_engine_pb2.SubscribePacketsRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[block_engine_pb2.SubscribePacketsResponse]:
+        """/ Validators can subscribe to the block engine to receive a stream of packets"""
+    @abc.abstractmethod
+    def SubscribeBundles(
+        self,
+        request: block_engine_pb2.SubscribeBundlesRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[block_engine_pb2.SubscribeBundlesResponse]:
+        """/ Validators can subscribe to the block engine to receive a stream of simulated and profitable bundles"""
+    @abc.abstractmethod
+    def GetBlockBuilderFeeInfo(
+        self,
+        request: block_engine_pb2.BlockBuilderFeeInfoRequest,
+        context: grpc.ServicerContext,
+    ) -> block_engine_pb2.BlockBuilderFeeInfoResponse:
+        """Block builders can optionally collect fees. This returns fee information if a block builder wants to
+        collect one.
+        """
+
+def add_BlockEngineValidatorServicer_to_server(servicer: BlockEngineValidatorServicer, server: grpc.Server) -> None: ...
+
+class BlockEngineRelayerStub:
+    """/ Relayers can forward packets to Block Engines.
+    / Block Engines provide an AccountsOfInterest field to only send transactions that are of interest.
+    """
+
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    SubscribeAccountsOfInterest: grpc.UnaryStreamMultiCallable[
+        block_engine_pb2.AccountsOfInterestRequest,
+        block_engine_pb2.AccountsOfInterestUpdate,
+    ]
+    """/ The block engine feeds accounts of interest (AOI) updates to the relayer periodically.
+    / For all transactions the relayer receives, it forwards transactions to the block engine which write-lock
+    / any of the accounts in the AOI.
+    """
+    StartExpiringPacketStream: grpc.StreamStreamMultiCallable[
+        block_engine_pb2.PacketBatchUpdate,
+        block_engine_pb2.StartExpiringPacketStreamResponse,
+    ]
+    """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+    of packets and heartbeats.
+    NOTE: This is a bi-directional stream due to a bug with how Envoy handles half closed client-side streams.
+    The issue is being tracked here: https://github.com/envoyproxy/envoy/issues/22748. In the meantime, the
+    server will stream heartbeats to clients at some reasonable cadence.
+    """
+
+class BlockEngineRelayerServicer(metaclass=abc.ABCMeta):
+    """/ Relayers can forward packets to Block Engines.
+    / Block Engines provide an AccountsOfInterest field to only send transactions that are of interest.
+    """
+
+    @abc.abstractmethod
+    def SubscribeAccountsOfInterest(
+        self,
+        request: block_engine_pb2.AccountsOfInterestRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[block_engine_pb2.AccountsOfInterestUpdate]:
+        """/ The block engine feeds accounts of interest (AOI) updates to the relayer periodically.
+        / For all transactions the relayer receives, it forwards transactions to the block engine which write-lock
+        / any of the accounts in the AOI.
+        """
+    @abc.abstractmethod
+    def StartExpiringPacketStream(
+        self,
+        request_iterator: collections.abc.Iterator[block_engine_pb2.PacketBatchUpdate],
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[block_engine_pb2.StartExpiringPacketStreamResponse]:
+        """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+        of packets and heartbeats.
+        NOTE: This is a bi-directional stream due to a bug with how Envoy handles half closed client-side streams.
+        The issue is being tracked here: https://github.com/envoyproxy/envoy/issues/22748. In the meantime, the
+        server will stream heartbeats to clients at some reasonable cadence.
+        """
+
+def add_BlockEngineRelayerServicer_to_server(servicer: BlockEngineRelayerServicer, server: grpc.Server) -> None: ...
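
These stubs make the streaming shapes of the relayer-facing service explicit. As a rough illustration only (not part of the real block engine), a minimal server skeleton under those types could look like the sketch below; message contents are left empty because their layouts live in block_engine_pb2:

```python
# Hedged sketch: a do-nothing BlockEngineRelayer server matching the .pyi shapes above.
from concurrent import futures

import grpc

import block_engine_pb2
import block_engine_pb2_grpc


class DummyRelayerService(block_engine_pb2_grpc.BlockEngineRelayerServicer):
    def SubscribeAccountsOfInterest(self, request, context):
        # Server-streaming: yield (empty) AOI updates; a real block engine streams these periodically.
        yield block_engine_pb2.AccountsOfInterestUpdate()

    def StartExpiringPacketStream(self, request_iterator, context):
        # Bi-directional stream: acknowledge each incoming PacketBatchUpdate with an empty response.
        for _update in request_iterator:
            yield block_engine_pb2.StartExpiringPacketStreamResponse()


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
block_engine_pb2_grpc.add_BlockEngineRelayerServicer_to_server(DummyRelayerService(), server)
server.add_insecure_port("127.0.0.1:50051")  # local test address, not a real deployment
server.start()
server.wait_for_termination()  # blocks until interrupted
```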

+ 26 - 0
jito_searcher_client/generated/block_pb2.py

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: block.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import shared_pb2 as shared__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0b\x62lock.proto\x12\x05\x62lock\x1a\x0cshared.proto\"\xb6\x01\n\x0e\x43ondensedBlock\x12\x1e\n\x06header\x18\x01 \x01(\x0b\x32\x0e.shared.Header\x12\x1a\n\x12previous_blockhash\x18\x02 \x01(\t\x12\x11\n\tblockhash\x18\x03 \x01(\t\x12\x13\n\x0bparent_slot\x18\x04 \x01(\x04\x12\x1e\n\x16versioned_transactions\x18\x05 \x03(\x0c\x12\x0c\n\x04slot\x18\x06 \x01(\x04\x12\x12\n\ncommitment\x18\x07 \x01(\tb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'block_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _CONDENSEDBLOCK._serialized_start=37
+  _CONDENSEDBLOCK._serialized_end=219
+# @@protoc_insertion_point(module_scope)

+ 56 - 0
jito_searcher_client/generated/block_pb2.pyi

@@ -0,0 +1,56 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import shared_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class CondensedBlock(google.protobuf.message.Message):
+    """Condensed block helpful for getting data around efficiently internal to our system."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEADER_FIELD_NUMBER: builtins.int
+    PREVIOUS_BLOCKHASH_FIELD_NUMBER: builtins.int
+    BLOCKHASH_FIELD_NUMBER: builtins.int
+    PARENT_SLOT_FIELD_NUMBER: builtins.int
+    VERSIONED_TRANSACTIONS_FIELD_NUMBER: builtins.int
+    SLOT_FIELD_NUMBER: builtins.int
+    COMMITMENT_FIELD_NUMBER: builtins.int
+    @property
+    def header(self) -> shared_pb2.Header: ...
+    previous_blockhash: builtins.str
+    blockhash: builtins.str
+    parent_slot: builtins.int
+    @property
+    def versioned_transactions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ...
+    slot: builtins.int
+    commitment: builtins.str
+    def __init__(
+        self,
+        *,
+        header: shared_pb2.Header | None = ...,
+        previous_blockhash: builtins.str = ...,
+        blockhash: builtins.str = ...,
+        parent_slot: builtins.int = ...,
+        versioned_transactions: collections.abc.Iterable[builtins.bytes] | None = ...,
+        slot: builtins.int = ...,
+        commitment: builtins.str = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["header", b"header"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["blockhash", b"blockhash", "commitment", b"commitment", "header", b"header", "parent_slot", b"parent_slot", "previous_blockhash", b"previous_blockhash", "slot", b"slot", "versioned_transactions", b"versioned_transactions"]) -> None: ...
+
+global___CondensedBlock = CondensedBlock
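
Since CondensedBlock is a plain proto3 message, it can be built and round-tripped directly. A small sketch using only the field names from the stub above, with placeholder values:

```python
# Hedged sketch: construct and round-trip a CondensedBlock; all values are placeholders.
import block_pb2

block = block_pb2.CondensedBlock(
    previous_blockhash="1111111111111111111111111111111111111111",  # placeholder hash
    blockhash="2222222222222222222222222222222222222222",           # placeholder hash
    parent_slot=12344,
    versioned_transactions=[b"\x01\x02", b"\x03\x04"],               # raw serialized transactions
    slot=12345,
    commitment="confirmed",
)

raw = block.SerializeToString()
decoded = block_pb2.CondensedBlock.FromString(raw)
assert decoded.slot == 12345 and not decoded.HasField("header")  # header was left unset
```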

+ 4 - 0
jito_searcher_client/generated/block_pb2_grpc.py

@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+

+ 4 - 0
jito_searcher_client/generated/block_pb2_grpc.pyi

@@ -0,0 +1,4 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""

+ 43 - 0
jito_searcher_client/generated/bundle_pb2.py

@@ -0,0 +1,43 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: bundle.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import packet_pb2 as packet__pb2
+import shared_pb2 as shared__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0c\x62undle.proto\x12\x06\x62undle\x1a\x0cpacket.proto\x1a\x0cshared.proto\"I\n\x06\x42undle\x12\x1e\n\x06header\x18\x02 \x01(\x0b\x32\x0e.shared.Header\x12\x1f\n\x07packets\x18\x03 \x03(\x0b\x32\x0e.packet.Packet\":\n\nBundleUuid\x12\x1e\n\x06\x62undle\x18\x01 \x01(\x0b\x32\x0e.bundle.Bundle\x12\x0c\n\x04uuid\x18\x02 \x01(\t\"4\n\x08\x41\x63\x63\x65pted\x12\x0c\n\x04slot\x18\x01 \x01(\x04\x12\x1a\n\x12validator_identity\x18\x02 \x01(\t\"\x8c\x02\n\x08Rejected\x12\x45\n\x1astate_auction_bid_rejected\x18\x01 \x01(\x0b\x32\x1f.bundle.StateAuctionBidRejectedH\x00\x12\x45\n\x1awinning_batch_bid_rejected\x18\x02 \x01(\x0b\x32\x1f.bundle.WinningBatchBidRejectedH\x00\x12\x37\n\x12simulation_failure\x18\x03 \x01(\x0b\x32\x19.bundle.SimulationFailureH\x00\x12/\n\x0einternal_error\x18\x04 \x01(\x0b\x32\x15.bundle.InternalErrorH\x00\x42\x08\n\x06reason\"g\n\x17WinningBatchBidRejected\x12\x12\n\nauction_id\x18\x01 \x01(\t\x12\x1e\n\x16simulated_bid_lamports\x18\x02 \x01(\x04\x12\x10\n\x03msg\x18\x03 \x01(\tH\x00\x88\x01\x01\x42\x06\n\x04_msg\"g\n\x17StateAuctionBidRejected\x12\x12\n\nauction_id\x18\x01 \x01(\t\x12\x1e\n\x16simulated_bid_lamports\x18\x02 \x01(\x04\x12\x10\n\x03msg\x18\x03 \x01(\tH\x00\x88\x01\x01\x42\x06\n\x04_msg\"C\n\x11SimulationFailure\x12\x14\n\x0ctx_signature\x18\x01 \x01(\t\x12\x10\n\x03msg\x18\x02 \x01(\tH\x00\x88\x01\x01\x42\x06\n\x04_msg\"\x1c\n\rInternalError\x12\x0b\n\x03msg\x18\x01 \x01(\t\"w\n\x0c\x42undleResult\x12\x11\n\tbundle_id\x18\x01 \x01(\t\x12$\n\x08\x61\x63\x63\x65pted\x18\x02 \x01(\x0b\x32\x10.bundle.AcceptedH\x00\x12$\n\x08rejected\x18\x03 \x01(\x0b\x32\x10.bundle.RejectedH\x00\x42\x08\n\x06resultb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'bundle_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _BUNDLE._serialized_start=52
+  _BUNDLE._serialized_end=125
+  _BUNDLEUUID._serialized_start=127
+  _BUNDLEUUID._serialized_end=185
+  _ACCEPTED._serialized_start=187
+  _ACCEPTED._serialized_end=239
+  _REJECTED._serialized_start=242
+  _REJECTED._serialized_end=510
+  _WINNINGBATCHBIDREJECTED._serialized_start=512
+  _WINNINGBATCHBIDREJECTED._serialized_end=615
+  _STATEAUCTIONBIDREJECTED._serialized_start=617
+  _STATEAUCTIONBIDREJECTED._serialized_end=720
+  _SIMULATIONFAILURE._serialized_start=722
+  _SIMULATIONFAILURE._serialized_end=789
+  _INTERNALERROR._serialized_start=791
+  _INTERNALERROR._serialized_end=819
+  _BUNDLERESULT._serialized_start=821
+  _BUNDLERESULT._serialized_end=940
+# @@protoc_insertion_point(module_scope)

+ 240 - 0
jito_searcher_client/generated/bundle_pb2.pyi

@@ -0,0 +1,240 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import packet_pb2
+import shared_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class Bundle(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEADER_FIELD_NUMBER: builtins.int
+    PACKETS_FIELD_NUMBER: builtins.int
+    @property
+    def header(self) -> shared_pb2.Header: ...
+    @property
+    def packets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[packet_pb2.Packet]: ...
+    def __init__(
+        self,
+        *,
+        header: shared_pb2.Header | None = ...,
+        packets: collections.abc.Iterable[packet_pb2.Packet] | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["header", b"header"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["header", b"header", "packets", b"packets"]) -> None: ...
+
+global___Bundle = Bundle
+
+@typing_extensions.final
+class BundleUuid(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    BUNDLE_FIELD_NUMBER: builtins.int
+    UUID_FIELD_NUMBER: builtins.int
+    @property
+    def bundle(self) -> global___Bundle: ...
+    uuid: builtins.str
+    def __init__(
+        self,
+        *,
+        bundle: global___Bundle | None = ...,
+        uuid: builtins.str = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["bundle", b"bundle"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["bundle", b"bundle", "uuid", b"uuid"]) -> None: ...
+
+global___BundleUuid = BundleUuid
+
+@typing_extensions.final
+class Accepted(google.protobuf.message.Message):
+    """Bundle Result Types
+
+    Indicates the bundle was accepted and forwarded to a validator.
+    NOTE: A single bundle may have multiple events emitted if forwarded to many validators.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SLOT_FIELD_NUMBER: builtins.int
+    VALIDATOR_IDENTITY_FIELD_NUMBER: builtins.int
+    slot: builtins.int
+    """Slot at which bundle was forwarded."""
+    validator_identity: builtins.str
+    """Validator identity bundle was forwarded to."""
+    def __init__(
+        self,
+        *,
+        slot: builtins.int = ...,
+        validator_identity: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["slot", b"slot", "validator_identity", b"validator_identity"]) -> None: ...
+
+global___Accepted = Accepted
+
+@typing_extensions.final
+class Rejected(google.protobuf.message.Message):
+    """Indicates the bundle was dropped and therefore not forwarded to any validator."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    STATE_AUCTION_BID_REJECTED_FIELD_NUMBER: builtins.int
+    WINNING_BATCH_BID_REJECTED_FIELD_NUMBER: builtins.int
+    SIMULATION_FAILURE_FIELD_NUMBER: builtins.int
+    INTERNAL_ERROR_FIELD_NUMBER: builtins.int
+    @property
+    def state_auction_bid_rejected(self) -> global___StateAuctionBidRejected: ...
+    @property
+    def winning_batch_bid_rejected(self) -> global___WinningBatchBidRejected: ...
+    @property
+    def simulation_failure(self) -> global___SimulationFailure: ...
+    @property
+    def internal_error(self) -> global___InternalError: ...
+    def __init__(
+        self,
+        *,
+        state_auction_bid_rejected: global___StateAuctionBidRejected | None = ...,
+        winning_batch_bid_rejected: global___WinningBatchBidRejected | None = ...,
+        simulation_failure: global___SimulationFailure | None = ...,
+        internal_error: global___InternalError | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["internal_error", b"internal_error", "reason", b"reason", "simulation_failure", b"simulation_failure", "state_auction_bid_rejected", b"state_auction_bid_rejected", "winning_batch_bid_rejected", b"winning_batch_bid_rejected"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["internal_error", b"internal_error", "reason", b"reason", "simulation_failure", b"simulation_failure", "state_auction_bid_rejected", b"state_auction_bid_rejected", "winning_batch_bid_rejected", b"winning_batch_bid_rejected"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["reason", b"reason"]) -> typing_extensions.Literal["state_auction_bid_rejected", "winning_batch_bid_rejected", "simulation_failure", "internal_error"] | None: ...
+
+global___Rejected = Rejected
+
+@typing_extensions.final
+class WinningBatchBidRejected(google.protobuf.message.Message):
+    """Indicates the bundle's bid was high enough to win its state auction.
+    However, not high enough relative to other state auction winners and therefore excluded from being forwarded.
+    """
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    AUCTION_ID_FIELD_NUMBER: builtins.int
+    SIMULATED_BID_LAMPORTS_FIELD_NUMBER: builtins.int
+    MSG_FIELD_NUMBER: builtins.int
+    auction_id: builtins.str
+    """Auction's unique identifier."""
+    simulated_bid_lamports: builtins.int
+    """Bundle's simulated bid."""
+    msg: builtins.str
+    def __init__(
+        self,
+        *,
+        auction_id: builtins.str = ...,
+        simulated_bid_lamports: builtins.int = ...,
+        msg: builtins.str | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "msg", b"msg"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "auction_id", b"auction_id", "msg", b"msg", "simulated_bid_lamports", b"simulated_bid_lamports"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["_msg", b"_msg"]) -> typing_extensions.Literal["msg"] | None: ...
+
+global___WinningBatchBidRejected = WinningBatchBidRejected
+
+@typing_extensions.final
+class StateAuctionBidRejected(google.protobuf.message.Message):
+    """Indicates the bundle's bid was __not__ high enough to be included in its state auction's set of winners."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    AUCTION_ID_FIELD_NUMBER: builtins.int
+    SIMULATED_BID_LAMPORTS_FIELD_NUMBER: builtins.int
+    MSG_FIELD_NUMBER: builtins.int
+    auction_id: builtins.str
+    """Auction's unique identifier."""
+    simulated_bid_lamports: builtins.int
+    """Bundle's simulated bid."""
+    msg: builtins.str
+    def __init__(
+        self,
+        *,
+        auction_id: builtins.str = ...,
+        simulated_bid_lamports: builtins.int = ...,
+        msg: builtins.str | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "msg", b"msg"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "auction_id", b"auction_id", "msg", b"msg", "simulated_bid_lamports", b"simulated_bid_lamports"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["_msg", b"_msg"]) -> typing_extensions.Literal["msg"] | None: ...
+
+global___StateAuctionBidRejected = StateAuctionBidRejected
+
+@typing_extensions.final
+class SimulationFailure(google.protobuf.message.Message):
+    """Bundle dropped due to simulation failure."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    TX_SIGNATURE_FIELD_NUMBER: builtins.int
+    MSG_FIELD_NUMBER: builtins.int
+    tx_signature: builtins.str
+    """Signature of the offending transaction."""
+    msg: builtins.str
+    def __init__(
+        self,
+        *,
+        tx_signature: builtins.str = ...,
+        msg: builtins.str | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "msg", b"msg"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["_msg", b"_msg", "msg", b"msg", "tx_signature", b"tx_signature"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["_msg", b"_msg"]) -> typing_extensions.Literal["msg"] | None: ...
+
+global___SimulationFailure = SimulationFailure
+
+@typing_extensions.final
+class InternalError(google.protobuf.message.Message):
+    """Bundle dropped due to an internal error."""
+
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    MSG_FIELD_NUMBER: builtins.int
+    msg: builtins.str
+    def __init__(
+        self,
+        *,
+        msg: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["msg", b"msg"]) -> None: ...
+
+global___InternalError = InternalError
+
+@typing_extensions.final
+class BundleResult(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    BUNDLE_ID_FIELD_NUMBER: builtins.int
+    ACCEPTED_FIELD_NUMBER: builtins.int
+    REJECTED_FIELD_NUMBER: builtins.int
+    bundle_id: builtins.str
+    """Bundle's Uuid."""
+    @property
+    def accepted(self) -> global___Accepted: ...
+    @property
+    def rejected(self) -> global___Rejected: ...
+    def __init__(
+        self,
+        *,
+        bundle_id: builtins.str = ...,
+        accepted: global___Accepted | None = ...,
+        rejected: global___Rejected | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["accepted", b"accepted", "rejected", b"rejected", "result", b"result"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["accepted", b"accepted", "bundle_id", b"bundle_id", "rejected", b"rejected", "result", b"result"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["result", b"result"]) -> typing_extensions.Literal["accepted", "rejected"] | None: ...
+
+global___BundleResult = BundleResult
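
To make the oneof structure concrete, here is a hedged sketch that assembles a Bundle and inspects a BundleResult using only the fields typed above; the payloads and identifiers are placeholders:

```python
# Hedged sketch: build a Bundle and branch on the BundleResult oneofs.
import bundle_pb2
import packet_pb2

bundle = bundle_pb2.Bundle(
    packets=[packet_pb2.Packet(data=b"serialized-versioned-tx")],  # placeholder transaction bytes
)
print("packets in bundle:", len(bundle.packets))

result = bundle_pb2.BundleResult(
    bundle_id="c0ffee",  # placeholder uuid
    rejected=bundle_pb2.Rejected(
        simulation_failure=bundle_pb2.SimulationFailure(tx_signature="placeholder-sig", msg="oops"),
    ),
)

if result.WhichOneof("result") == "accepted":
    print(f"landed in slot {result.accepted.slot} via {result.accepted.validator_identity}")
elif result.WhichOneof("result") == "rejected":
    reason = result.rejected.WhichOneof("reason")  # e.g. "simulation_failure"
    print(f"bundle {result.bundle_id} rejected: {reason}")
```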

+ 4 - 0
jito_searcher_client/generated/bundle_pb2_grpc.py

@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+

+ 4 - 0
jito_searcher_client/generated/bundle_pb2_grpc.pyi

@@ -0,0 +1,4 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""

+ 31 - 0
jito_searcher_client/generated/packet_pb2.py

@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: packet.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cpacket.proto\x12\x06packet\".\n\x0bPacketBatch\x12\x1f\n\x07packets\x18\x01 \x03(\x0b\x32\x0e.packet.Packet\"2\n\x06Packet\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x0c\x12\x1a\n\x04meta\x18\x02 \x01(\x0b\x32\x0c.packet.Meta\"j\n\x04Meta\x12\x0c\n\x04size\x18\x01 \x01(\x04\x12\x0c\n\x04\x61\x64\x64r\x18\x02 \x01(\t\x12\x0c\n\x04port\x18\x03 \x01(\r\x12\"\n\x05\x66lags\x18\x04 \x01(\x0b\x32\x13.packet.PacketFlags\x12\x14\n\x0csender_stake\x18\x05 \x01(\x04\"p\n\x0bPacketFlags\x12\x0f\n\x07\x64iscard\x18\x01 \x01(\x08\x12\x11\n\tforwarded\x18\x02 \x01(\x08\x12\x0e\n\x06repair\x18\x03 \x01(\x08\x12\x16\n\x0esimple_vote_tx\x18\x04 \x01(\x08\x12\x15\n\rtracer_packet\x18\x05 \x01(\x08\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'packet_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _PACKETBATCH._serialized_start=24
+  _PACKETBATCH._serialized_end=70
+  _PACKET._serialized_start=72
+  _PACKET._serialized_end=122
+  _META._serialized_start=124
+  _META._serialized_end=230
+  _PACKETFLAGS._serialized_start=232
+  _PACKETFLAGS._serialized_end=344
+# @@protoc_insertion_point(module_scope)

+ 109 - 0
jito_searcher_client/generated/packet_pb2.pyi

@@ -0,0 +1,109 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class PacketBatch(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    PACKETS_FIELD_NUMBER: builtins.int
+    @property
+    def packets(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Packet]: ...
+    def __init__(
+        self,
+        *,
+        packets: collections.abc.Iterable[global___Packet] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["packets", b"packets"]) -> None: ...
+
+global___PacketBatch = PacketBatch
+
+@typing_extensions.final
+class Packet(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    DATA_FIELD_NUMBER: builtins.int
+    META_FIELD_NUMBER: builtins.int
+    data: builtins.bytes
+    @property
+    def meta(self) -> global___Meta: ...
+    def __init__(
+        self,
+        *,
+        data: builtins.bytes = ...,
+        meta: global___Meta | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["meta", b"meta"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "meta", b"meta"]) -> None: ...
+
+global___Packet = Packet
+
+@typing_extensions.final
+class Meta(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SIZE_FIELD_NUMBER: builtins.int
+    ADDR_FIELD_NUMBER: builtins.int
+    PORT_FIELD_NUMBER: builtins.int
+    FLAGS_FIELD_NUMBER: builtins.int
+    SENDER_STAKE_FIELD_NUMBER: builtins.int
+    size: builtins.int
+    addr: builtins.str
+    port: builtins.int
+    @property
+    def flags(self) -> global___PacketFlags: ...
+    sender_stake: builtins.int
+    def __init__(
+        self,
+        *,
+        size: builtins.int = ...,
+        addr: builtins.str = ...,
+        port: builtins.int = ...,
+        flags: global___PacketFlags | None = ...,
+        sender_stake: builtins.int = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["flags", b"flags"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["addr", b"addr", "flags", b"flags", "port", b"port", "sender_stake", b"sender_stake", "size", b"size"]) -> None: ...
+
+global___Meta = Meta
+
+@typing_extensions.final
+class PacketFlags(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    DISCARD_FIELD_NUMBER: builtins.int
+    FORWARDED_FIELD_NUMBER: builtins.int
+    REPAIR_FIELD_NUMBER: builtins.int
+    SIMPLE_VOTE_TX_FIELD_NUMBER: builtins.int
+    TRACER_PACKET_FIELD_NUMBER: builtins.int
+    discard: builtins.bool
+    forwarded: builtins.bool
+    repair: builtins.bool
+    simple_vote_tx: builtins.bool
+    tracer_packet: builtins.bool
+    def __init__(
+        self,
+        *,
+        discard: builtins.bool = ...,
+        forwarded: builtins.bool = ...,
+        repair: builtins.bool = ...,
+        simple_vote_tx: builtins.bool = ...,
+        tracer_packet: builtins.bool = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["discard", b"discard", "forwarded", b"forwarded", "repair", b"repair", "simple_vote_tx", b"simple_vote_tx", "tracer_packet", b"tracer_packet"]) -> None: ...
+
+global___PacketFlags = PacketFlags
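
A packet is just raw transaction bytes plus metadata. The following sketch (placeholder bytes and address) shows how the messages above compose into a PacketBatch:

```python
# Hedged sketch: wrap placeholder transaction bytes in Packet/Meta/PacketFlags.
import packet_pb2

tx_bytes = b"\x00" * 10  # stand-in for a serialized transaction

packet = packet_pb2.Packet(
    data=tx_bytes,
    meta=packet_pb2.Meta(
        size=len(tx_bytes),
        addr="127.0.0.1",   # placeholder source address
        port=8001,
        flags=packet_pb2.PacketFlags(simple_vote_tx=False),
        sender_stake=0,
    ),
)

batch = packet_pb2.PacketBatch(packets=[packet])
print(len(batch.packets), batch.packets[0].meta.size)
```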

+ 4 - 0
jito_searcher_client/generated/packet_pb2_grpc.py

@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+

+ 4 - 0
jito_searcher_client/generated/packet_pb2_grpc.pyi

@@ -0,0 +1,4 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""

+ 35 - 0
jito_searcher_client/generated/relayer_pb2.py

@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: relayer.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import packet_pb2 as packet__pb2
+import shared_pb2 as shared__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\rrelayer.proto\x12\x07relayer\x1a\x0cpacket.proto\x1a\x0cshared.proto\"\x16\n\x14GetTpuConfigsRequest\"Y\n\x15GetTpuConfigsResponse\x12\x1b\n\x03tpu\x18\x01 \x01(\x0b\x32\x0e.shared.Socket\x12#\n\x0btpu_forward\x18\x02 \x01(\x0b\x32\x0e.shared.Socket\"\x19\n\x17SubscribePacketsRequest\"\x8f\x01\n\x18SubscribePacketsResponse\x12\x1e\n\x06header\x18\x01 \x01(\x0b\x32\x0e.shared.Header\x12&\n\theartbeat\x18\x02 \x01(\x0b\x32\x11.shared.HeartbeatH\x00\x12$\n\x05\x62\x61tch\x18\x03 \x01(\x0b\x32\x13.packet.PacketBatchH\x00\x42\x05\n\x03msg2\xb8\x01\n\x07Relayer\x12P\n\rGetTpuConfigs\x12\x1d.relayer.GetTpuConfigsRequest\x1a\x1e.relayer.GetTpuConfigsResponse\"\x00\x12[\n\x10SubscribePackets\x12 .relayer.SubscribePacketsRequest\x1a!.relayer.SubscribePacketsResponse\"\x00\x30\x01\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'relayer_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _GETTPUCONFIGSREQUEST._serialized_start=54
+  _GETTPUCONFIGSREQUEST._serialized_end=76
+  _GETTPUCONFIGSRESPONSE._serialized_start=78
+  _GETTPUCONFIGSRESPONSE._serialized_end=167
+  _SUBSCRIBEPACKETSREQUEST._serialized_start=169
+  _SUBSCRIBEPACKETSREQUEST._serialized_end=194
+  _SUBSCRIBEPACKETSRESPONSE._serialized_start=197
+  _SUBSCRIBEPACKETSRESPONSE._serialized_end=340
+  _RELAYER._serialized_start=343
+  _RELAYER._serialized_end=527
+# @@protoc_insertion_point(module_scope)

+ 84 - 0
jito_searcher_client/generated/relayer_pb2.pyi

@@ -0,0 +1,84 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import google.protobuf.descriptor
+import google.protobuf.message
+import packet_pb2
+import shared_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class GetTpuConfigsRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___GetTpuConfigsRequest = GetTpuConfigsRequest
+
+@typing_extensions.final
+class GetTpuConfigsResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    TPU_FIELD_NUMBER: builtins.int
+    TPU_FORWARD_FIELD_NUMBER: builtins.int
+    @property
+    def tpu(self) -> shared_pb2.Socket: ...
+    @property
+    def tpu_forward(self) -> shared_pb2.Socket: ...
+    def __init__(
+        self,
+        *,
+        tpu: shared_pb2.Socket | None = ...,
+        tpu_forward: shared_pb2.Socket | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["tpu", b"tpu", "tpu_forward", b"tpu_forward"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["tpu", b"tpu", "tpu_forward", b"tpu_forward"]) -> None: ...
+
+global___GetTpuConfigsResponse = GetTpuConfigsResponse
+
+@typing_extensions.final
+class SubscribePacketsRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___SubscribePacketsRequest = SubscribePacketsRequest
+
+@typing_extensions.final
+class SubscribePacketsResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    HEADER_FIELD_NUMBER: builtins.int
+    HEARTBEAT_FIELD_NUMBER: builtins.int
+    BATCH_FIELD_NUMBER: builtins.int
+    @property
+    def header(self) -> shared_pb2.Header: ...
+    @property
+    def heartbeat(self) -> shared_pb2.Heartbeat: ...
+    @property
+    def batch(self) -> packet_pb2.PacketBatch: ...
+    def __init__(
+        self,
+        *,
+        header: shared_pb2.Header | None = ...,
+        heartbeat: shared_pb2.Heartbeat | None = ...,
+        batch: packet_pb2.PacketBatch | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["batch", b"batch", "header", b"header", "heartbeat", b"heartbeat", "msg", b"msg"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["batch", b"batch", "header", b"header", "heartbeat", b"heartbeat", "msg", b"msg"]) -> None: ...
+    def WhichOneof(self, oneof_group: typing_extensions.Literal["msg", b"msg"]) -> typing_extensions.Literal["heartbeat", "batch"] | None: ...
+
+global___SubscribePacketsResponse = SubscribePacketsResponse
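
Because SubscribePacketsResponse multiplexes heartbeats and packet batches behind the `msg` oneof, consumers typically branch on WhichOneof. A minimal sketch:

```python
# Hedged sketch: demultiplex responses pulled off a relayer packet subscription.
import relayer_pb2


def handle(resp: relayer_pb2.SubscribePacketsResponse) -> None:
    kind = resp.WhichOneof("msg")
    if kind == "heartbeat":
        print("heartbeat received")
    elif kind == "batch":
        print(f"received {len(resp.batch.packets)} packets")
    else:
        print("empty response")


handle(relayer_pb2.SubscribePacketsResponse())  # prints "empty response"
```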

+ 115 - 0
jito_searcher_client/generated/relayer_pb2_grpc.py

@@ -0,0 +1,115 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import relayer_pb2 as relayer__pb2
+
+
+class RelayerStub(object):
+    """/ Relayers offer a TPU and TPU forward proxy for Solana validators.
+    / Validators can connect and fetch the TPU configuration for the relayer and start to advertise the
+    / relayer's information in gossip.
+    / They can also subscribe to packets which arrived on the TPU ports at the relayer
+    """
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.GetTpuConfigs = channel.unary_unary(
+                '/relayer.Relayer/GetTpuConfigs',
+                request_serializer=relayer__pb2.GetTpuConfigsRequest.SerializeToString,
+                response_deserializer=relayer__pb2.GetTpuConfigsResponse.FromString,
+                )
+        self.SubscribePackets = channel.unary_stream(
+                '/relayer.Relayer/SubscribePackets',
+                request_serializer=relayer__pb2.SubscribePacketsRequest.SerializeToString,
+                response_deserializer=relayer__pb2.SubscribePacketsResponse.FromString,
+                )
+
+
+class RelayerServicer(object):
+    """/ Relayers offer a TPU and TPU forward proxy for Solana validators.
+    / Validators can connect and fetch the TPU configuration for the relayer and start to advertise the
+    / relayer's information in gossip.
+    / They can also subscribe to packets which arrived on the TPU ports at the relayer
+    """
+
+    def GetTpuConfigs(self, request, context):
+        """The relayer has TPU and TPU forward sockets that validators can leverage.
+        A validator can fetch this config and change its TPU and TPU forward port in gossip.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SubscribePackets(self, request, context):
+        """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+        of packets and heartbeats
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_RelayerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'GetTpuConfigs': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetTpuConfigs,
+                    request_deserializer=relayer__pb2.GetTpuConfigsRequest.FromString,
+                    response_serializer=relayer__pb2.GetTpuConfigsResponse.SerializeToString,
+            ),
+            'SubscribePackets': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribePackets,
+                    request_deserializer=relayer__pb2.SubscribePacketsRequest.FromString,
+                    response_serializer=relayer__pb2.SubscribePacketsResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'relayer.Relayer', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Relayer(object):
+    """/ Relayers offer a TPU and TPU forward proxy for Solana validators.
+    / Validators can connect and fetch the TPU configuration for the relayer and start to advertise the
+    / relayer's information in gossip.
+    / They can also subscribe to packets which arrived on the TPU ports at the relayer
+    """
+
+    @staticmethod
+    def GetTpuConfigs(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/relayer.Relayer/GetTpuConfigs',
+            relayer__pb2.GetTpuConfigsRequest.SerializeToString,
+            relayer__pb2.GetTpuConfigsResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SubscribePackets(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/relayer.Relayer/SubscribePackets',
+            relayer__pb2.SubscribePacketsRequest.SerializeToString,
+            relayer__pb2.SubscribePacketsResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

+ 59 - 0
jito_searcher_client/generated/relayer_pb2_grpc.pyi

@@ -0,0 +1,59 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import abc
+import collections.abc
+import grpc
+import relayer_pb2
+
+class RelayerStub:
+    """/ Relayers offer a TPU and TPU forward proxy for Solana validators.
+    / Validators can connect and fetch the TPU configuration for the relayer and start to advertise the
+    / relayer's information in gossip.
+    / They can also subscribe to packets which arrived on the TPU ports at the relayer
+    """
+
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    GetTpuConfigs: grpc.UnaryUnaryMultiCallable[
+        relayer_pb2.GetTpuConfigsRequest,
+        relayer_pb2.GetTpuConfigsResponse,
+    ]
+    """The relayer has TPU and TPU forward sockets that validators can leverage.
+    A validator can fetch this config and change its TPU and TPU forward port in gossip.
+    """
+    SubscribePackets: grpc.UnaryStreamMultiCallable[
+        relayer_pb2.SubscribePacketsRequest,
+        relayer_pb2.SubscribePacketsResponse,
+    ]
+    """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+    of packets and heartbeats
+    """
+
+class RelayerServicer(metaclass=abc.ABCMeta):
+    """/ Relayers offer a TPU and TPU forward proxy for Solana validators.
+    / Validators can connect and fetch the TPU configuration for the relayer and start to advertise the
+    / relayer's information in gossip.
+    / They can also subscribe to packets which arrived on the TPU ports at the relayer
+    """
+
+    @abc.abstractmethod
+    def GetTpuConfigs(
+        self,
+        request: relayer_pb2.GetTpuConfigsRequest,
+        context: grpc.ServicerContext,
+    ) -> relayer_pb2.GetTpuConfigsResponse:
+        """The relayer has TPU and TPU forward sockets that validators can leverage.
+        A validator can fetch this config and change its TPU and TPU forward port in gossip.
+        """
+    @abc.abstractmethod
+    def SubscribePackets(
+        self,
+        request: relayer_pb2.SubscribePacketsRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[relayer_pb2.SubscribePacketsResponse]:
+        """Validators can subscribe to packets from the relayer and receive a multiplexed signal that contains a mixture
+        of packets and heartbeats
+        """
+
+def add_RelayerServicer_to_server(servicer: RelayerServicer, server: grpc.Server) -> None: ...


+ 18 - 0
jito_searcher_client/generated/searcher_pb2.py

The file diff has been suppressed because it is too large.

+ 229 - 0
jito_searcher_client/generated/searcher_pb2.pyi

@@ -0,0 +1,229 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import bundle_pb2
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import google.protobuf.timestamp_pb2
+import packet_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class SlotList(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SLOTS_FIELD_NUMBER: builtins.int
+    @property
+    def slots(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
+    def __init__(
+        self,
+        *,
+        slots: collections.abc.Iterable[builtins.int] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["slots", b"slots"]) -> None: ...
+
+global___SlotList = SlotList
+
+@typing_extensions.final
+class SendBundleRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    BUNDLE_FIELD_NUMBER: builtins.int
+    @property
+    def bundle(self) -> bundle_pb2.Bundle: ...
+    def __init__(
+        self,
+        *,
+        bundle: bundle_pb2.Bundle | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["bundle", b"bundle"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["bundle", b"bundle"]) -> None: ...
+
+global___SendBundleRequest = SendBundleRequest
+
+@typing_extensions.final
+class SendBundleResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    UUID_FIELD_NUMBER: builtins.int
+    uuid: builtins.str
+    """server uuid for the bundle"""
+    def __init__(
+        self,
+        *,
+        uuid: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["uuid", b"uuid"]) -> None: ...
+
+global___SendBundleResponse = SendBundleResponse
+
+@typing_extensions.final
+class PendingTxSubscriptionRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCOUNTS_FIELD_NUMBER: builtins.int
+    @property
+    def accounts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """list of accounts to subscribe to
+        NOTE: the block-engine will only forward transactions that write lock the provided accounts here.
+        """
+    def __init__(
+        self,
+        *,
+        accounts: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["accounts", b"accounts"]) -> None: ...
+
+global___PendingTxSubscriptionRequest = PendingTxSubscriptionRequest
+
+@typing_extensions.final
+class PendingTxNotification(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SERVER_SIDE_TS_FIELD_NUMBER: builtins.int
+    EXPIRATION_TIME_FIELD_NUMBER: builtins.int
+    TRANSACTIONS_FIELD_NUMBER: builtins.int
+    @property
+    def server_side_ts(self) -> google.protobuf.timestamp_pb2.Timestamp:
+        """server-side timestamp the transactions were generated at (for debugging/profiling purposes)"""
+    @property
+    def expiration_time(self) -> google.protobuf.timestamp_pb2.Timestamp:
+        """expiration time of the packet"""
+    @property
+    def transactions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[packet_pb2.Packet]:
+        """list of pending transactions"""
+    def __init__(
+        self,
+        *,
+        server_side_ts: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+        expiration_time: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+        transactions: collections.abc.Iterable[packet_pb2.Packet] | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["expiration_time", b"expiration_time", "server_side_ts", b"server_side_ts"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["expiration_time", b"expiration_time", "server_side_ts", b"server_side_ts", "transactions", b"transactions"]) -> None: ...
+
+global___PendingTxNotification = PendingTxNotification
+
+@typing_extensions.final
+class NextScheduledLeaderRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___NextScheduledLeaderRequest = NextScheduledLeaderRequest
+
+@typing_extensions.final
+class NextScheduledLeaderResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    CURRENT_SLOT_FIELD_NUMBER: builtins.int
+    NEXT_LEADER_SLOT_FIELD_NUMBER: builtins.int
+    NEXT_LEADER_IDENTITY_FIELD_NUMBER: builtins.int
+    current_slot: builtins.int
+    """the current slot the backend is on"""
+    next_leader_slot: builtins.int
+    """the slot and identity of the next leader"""
+    next_leader_identity: builtins.str
+    def __init__(
+        self,
+        *,
+        current_slot: builtins.int = ...,
+        next_leader_slot: builtins.int = ...,
+        next_leader_identity: builtins.str = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["current_slot", b"current_slot", "next_leader_identity", b"next_leader_identity", "next_leader_slot", b"next_leader_slot"]) -> None: ...
+
+global___NextScheduledLeaderResponse = NextScheduledLeaderResponse
+
+@typing_extensions.final
+class ConnectedLeadersRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___ConnectedLeadersRequest = ConnectedLeadersRequest
+
+@typing_extensions.final
+class ConnectedLeadersResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    @typing_extensions.final
+    class ConnectedValidatorsEntry(google.protobuf.message.Message):
+        DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+        KEY_FIELD_NUMBER: builtins.int
+        VALUE_FIELD_NUMBER: builtins.int
+        key: builtins.str
+        @property
+        def value(self) -> global___SlotList: ...
+        def __init__(
+            self,
+            *,
+            key: builtins.str = ...,
+            value: global___SlotList | None = ...,
+        ) -> None: ...
+        def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+        def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
+
+    CONNECTED_VALIDATORS_FIELD_NUMBER: builtins.int
+    @property
+    def connected_validators(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___SlotList]: ...
+    def __init__(
+        self,
+        *,
+        connected_validators: collections.abc.Mapping[builtins.str, global___SlotList] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["connected_validators", b"connected_validators"]) -> None: ...
+
+global___ConnectedLeadersResponse = ConnectedLeadersResponse
+
+@typing_extensions.final
+class GetTipAccountsRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___GetTipAccountsRequest = GetTipAccountsRequest
+
+@typing_extensions.final
+class GetTipAccountsResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    ACCOUNTS_FIELD_NUMBER: builtins.int
+    @property
+    def accounts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
+    def __init__(
+        self,
+        *,
+        accounts: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["accounts", b"accounts"]) -> None: ...
+
+global___GetTipAccountsResponse = GetTipAccountsResponse
+
+@typing_extensions.final
+class SubscribeBundleResultsRequest(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    def __init__(
+        self,
+    ) -> None: ...
+
+global___SubscribeBundleResultsRequest = SubscribeBundleResultsRequest
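
A short sketch that builds the two most common searcher requests typed above, with placeholder account keys and transaction bytes:

```python
# Hedged sketch: construct a pending-transaction subscription and a bundle submission request.
import bundle_pb2
import packet_pb2
import searcher_pb2

sub_request = searcher_pb2.PendingTxSubscriptionRequest(
    accounts=["So11111111111111111111111111111111111111112"],  # placeholder base58 account key
)

send_request = searcher_pb2.SendBundleRequest(
    bundle=bundle_pb2.Bundle(packets=[packet_pb2.Packet(data=b"signed-tx-bytes")]),  # placeholder packet
)

print(len(sub_request.accounts), len(send_request.bundle.packets))
```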

+ 239 - 0
jito_searcher_client/generated/searcher_pb2_grpc.py

@@ -0,0 +1,239 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import bundle_pb2 as bundle__pb2
+import searcher_pb2 as searcher__pb2
+
+
+class SearcherServiceStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.SubscribeBundleResults = channel.unary_stream(
+                '/searcher.SearcherService/SubscribeBundleResults',
+                request_serializer=searcher__pb2.SubscribeBundleResultsRequest.SerializeToString,
+                response_deserializer=bundle__pb2.BundleResult.FromString,
+                )
+        self.SubscribePendingTransactions = channel.unary_stream(
+                '/searcher.SearcherService/SubscribePendingTransactions',
+                request_serializer=searcher__pb2.PendingTxSubscriptionRequest.SerializeToString,
+                response_deserializer=searcher__pb2.PendingTxNotification.FromString,
+                )
+        self.SendBundle = channel.unary_unary(
+                '/searcher.SearcherService/SendBundle',
+                request_serializer=searcher__pb2.SendBundleRequest.SerializeToString,
+                response_deserializer=searcher__pb2.SendBundleResponse.FromString,
+                )
+        self.GetNextScheduledLeader = channel.unary_unary(
+                '/searcher.SearcherService/GetNextScheduledLeader',
+                request_serializer=searcher__pb2.NextScheduledLeaderRequest.SerializeToString,
+                response_deserializer=searcher__pb2.NextScheduledLeaderResponse.FromString,
+                )
+        self.GetConnectedLeaders = channel.unary_unary(
+                '/searcher.SearcherService/GetConnectedLeaders',
+                request_serializer=searcher__pb2.ConnectedLeadersRequest.SerializeToString,
+                response_deserializer=searcher__pb2.ConnectedLeadersResponse.FromString,
+                )
+        self.GetTipAccounts = channel.unary_unary(
+                '/searcher.SearcherService/GetTipAccounts',
+                request_serializer=searcher__pb2.GetTipAccountsRequest.SerializeToString,
+                response_deserializer=searcher__pb2.GetTipAccountsResponse.FromString,
+                )
+
+
+class SearcherServiceServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def SubscribeBundleResults(self, request, context):
+        """Searchers can invoke this endpoint to subscribe to their respective bundle results.
+        A success result would indicate the bundle won its state auction and was submitted to the validator.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SubscribePendingTransactions(self, request, context):
+        """RPC endpoint to subscribe to pending transactions. Clients can provide a list of base58 encoded accounts.
+        Any transactions that write-lock the provided accounts will be streamed to the searcher.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def SendBundle(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetNextScheduledLeader(self, request, context):
+        """Returns the next scheduled leader connected to the block engine.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetConnectedLeaders(self, request, context):
+        """Returns information on connected leader slots
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def GetTipAccounts(self, request, context):
+        """Returns the tip accounts searchers shall transfer funds to for the leader to claim.
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_SearcherServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'SubscribeBundleResults': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribeBundleResults,
+                    request_deserializer=searcher__pb2.SubscribeBundleResultsRequest.FromString,
+                    response_serializer=bundle__pb2.BundleResult.SerializeToString,
+            ),
+            'SubscribePendingTransactions': grpc.unary_stream_rpc_method_handler(
+                    servicer.SubscribePendingTransactions,
+                    request_deserializer=searcher__pb2.PendingTxSubscriptionRequest.FromString,
+                    response_serializer=searcher__pb2.PendingTxNotification.SerializeToString,
+            ),
+            'SendBundle': grpc.unary_unary_rpc_method_handler(
+                    servicer.SendBundle,
+                    request_deserializer=searcher__pb2.SendBundleRequest.FromString,
+                    response_serializer=searcher__pb2.SendBundleResponse.SerializeToString,
+            ),
+            'GetNextScheduledLeader': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetNextScheduledLeader,
+                    request_deserializer=searcher__pb2.NextScheduledLeaderRequest.FromString,
+                    response_serializer=searcher__pb2.NextScheduledLeaderResponse.SerializeToString,
+            ),
+            'GetConnectedLeaders': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetConnectedLeaders,
+                    request_deserializer=searcher__pb2.ConnectedLeadersRequest.FromString,
+                    response_serializer=searcher__pb2.ConnectedLeadersResponse.SerializeToString,
+            ),
+            'GetTipAccounts': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetTipAccounts,
+                    request_deserializer=searcher__pb2.GetTipAccountsRequest.FromString,
+                    response_serializer=searcher__pb2.GetTipAccountsResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'searcher.SearcherService', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class SearcherService(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def SubscribeBundleResults(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/searcher.SearcherService/SubscribeBundleResults',
+            searcher__pb2.SubscribeBundleResultsRequest.SerializeToString,
+            bundle__pb2.BundleResult.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SubscribePendingTransactions(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_stream(request, target, '/searcher.SearcherService/SubscribePendingTransactions',
+            searcher__pb2.PendingTxSubscriptionRequest.SerializeToString,
+            searcher__pb2.PendingTxNotification.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def SendBundle(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/searcher.SearcherService/SendBundle',
+            searcher__pb2.SendBundleRequest.SerializeToString,
+            searcher__pb2.SendBundleResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetNextScheduledLeader(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/searcher.SearcherService/GetNextScheduledLeader',
+            searcher__pb2.NextScheduledLeaderRequest.SerializeToString,
+            searcher__pb2.NextScheduledLeaderResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetConnectedLeaders(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/searcher.SearcherService/GetConnectedLeaders',
+            searcher__pb2.ConnectedLeadersRequest.SerializeToString,
+            searcher__pb2.ConnectedLeadersResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def GetTipAccounts(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/searcher.SearcherService/GetTipAccounts',
+            searcher__pb2.GetTipAccountsRequest.SerializeToString,
+            searcher__pb2.GetTipAccountsResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

+ 94 - 0
jito_searcher_client/generated/searcher_pb2_grpc.pyi

@@ -0,0 +1,94 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import abc
+import bundle_pb2
+import collections.abc
+import grpc
+import searcher_pb2
+
+class SearcherServiceStub:
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    SubscribeBundleResults: grpc.UnaryStreamMultiCallable[
+        searcher_pb2.SubscribeBundleResultsRequest,
+        bundle_pb2.BundleResult,
+    ]
+    """Searchers can invoke this endpoint to subscribe to their respective bundle results.
+    A success result would indicate the bundle won its state auction and was submitted to the validator.
+    """
+    SubscribePendingTransactions: grpc.UnaryStreamMultiCallable[
+        searcher_pb2.PendingTxSubscriptionRequest,
+        searcher_pb2.PendingTxNotification,
+    ]
+    """RPC endpoint to subscribe to pending transactions. Clients can provide a list of base58 encoded accounts.
+    Any transactions that write-lock the provided accounts will be streamed to the searcher.
+    """
+    SendBundle: grpc.UnaryUnaryMultiCallable[
+        searcher_pb2.SendBundleRequest,
+        searcher_pb2.SendBundleResponse,
+    ]
+    GetNextScheduledLeader: grpc.UnaryUnaryMultiCallable[
+        searcher_pb2.NextScheduledLeaderRequest,
+        searcher_pb2.NextScheduledLeaderResponse,
+    ]
+    """Returns the next scheduled leader connected to the block engine."""
+    GetConnectedLeaders: grpc.UnaryUnaryMultiCallable[
+        searcher_pb2.ConnectedLeadersRequest,
+        searcher_pb2.ConnectedLeadersResponse,
+    ]
+    """Returns information on connected leader slots"""
+    GetTipAccounts: grpc.UnaryUnaryMultiCallable[
+        searcher_pb2.GetTipAccountsRequest,
+        searcher_pb2.GetTipAccountsResponse,
+    ]
+    """Returns the tip accounts searchers shall transfer funds to for the leader to claim."""
+
+class SearcherServiceServicer(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def SubscribeBundleResults(
+        self,
+        request: searcher_pb2.SubscribeBundleResultsRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[bundle_pb2.BundleResult]:
+        """Searchers can invoke this endpoint to subscribe to their respective bundle results.
+        A success result would indicate the bundle won its state auction and was submitted to the validator.
+        """
+    @abc.abstractmethod
+    def SubscribePendingTransactions(
+        self,
+        request: searcher_pb2.PendingTxSubscriptionRequest,
+        context: grpc.ServicerContext,
+    ) -> collections.abc.Iterator[searcher_pb2.PendingTxNotification]:
+        """RPC endpoint to subscribe to pending transactions. Clients can provide a list of base58 encoded accounts.
+        Any transactions that write-lock the provided accounts will be streamed to the searcher.
+        """
+    @abc.abstractmethod
+    def SendBundle(
+        self,
+        request: searcher_pb2.SendBundleRequest,
+        context: grpc.ServicerContext,
+    ) -> searcher_pb2.SendBundleResponse: ...
+    @abc.abstractmethod
+    def GetNextScheduledLeader(
+        self,
+        request: searcher_pb2.NextScheduledLeaderRequest,
+        context: grpc.ServicerContext,
+    ) -> searcher_pb2.NextScheduledLeaderResponse:
+        """Returns the next scheduled leader connected to the block engine."""
+    @abc.abstractmethod
+    def GetConnectedLeaders(
+        self,
+        request: searcher_pb2.ConnectedLeadersRequest,
+        context: grpc.ServicerContext,
+    ) -> searcher_pb2.ConnectedLeadersResponse:
+        """Returns information on connected leader slots"""
+    @abc.abstractmethod
+    def GetTipAccounts(
+        self,
+        request: searcher_pb2.GetTipAccountsRequest,
+        context: grpc.ServicerContext,
+    ) -> searcher_pb2.GetTipAccountsResponse:
+        """Returns the tip accounts searchers shall transfer funds to for the leader to claim."""
+
+def add_SearcherServiceServicer_to_server(servicer: SearcherServiceServicer, server: grpc.Server) -> None: ...

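The typed stub above is the client surface for the searcher RPCs described in the docstrings (bundle results, pending transactions, leaders, tip accounts). Purely as an illustrative sketch, a unary call through the authenticated helper defined later in this commit (jito_searcher_client/searcher.py) could look like the following; the block engine URL and keypair path are placeholders, and the import paths assume the generated package is importable as laid out here:

import json

from solders.keypair import Keypair

from jito_searcher_client.generated.searcher_pb2 import (
    GetTipAccountsRequest,
    NextScheduledLeaderRequest,
)
from jito_searcher_client.searcher import get_searcher_client

# Placeholders: substitute a real block engine hostname and an approved searcher keypair file.
BLOCK_ENGINE_URL = "<block-engine-url>"          # without http:// or https://
KEYPAIR_PATH = "/path/to/searcher-keypair.json"  # 64-byte secret key as a JSON array

with open(KEYPAIR_PATH) as f:
    kp = Keypair.from_bytes(bytes(json.load(f)))

client = get_searcher_client(BLOCK_ENGINE_URL, kp)

# Unary calls pass through the interceptor, which attaches the Bearer access token.
print(client.GetTipAccounts(GetTipAccountsRequest()))
print(client.GetNextScheduledLeader(NextScheduledLeaderRequest()))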
+ 30 - 0
jito_searcher_client/generated/shared_pb2.py

@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: shared.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0cshared.proto\x12\x06shared\x1a\x1fgoogle/protobuf/timestamp.proto\"0\n\x06Header\x12&\n\x02ts\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\x1a\n\tHeartbeat\x12\r\n\x05\x63ount\x18\x01 \x01(\x04\"\"\n\x06Socket\x12\n\n\x02ip\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x03\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shared_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _HEADER._serialized_start=57
+  _HEADER._serialized_end=105
+  _HEARTBEAT._serialized_start=107
+  _HEARTBEAT._serialized_end=133
+  _SOCKET._serialized_start=135
+  _SOCKET._serialized_end=169
+# @@protoc_insertion_point(module_scope)

+ 66 - 0
jito_searcher_client/generated/shared_pb2.pyi

@@ -0,0 +1,66 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import google.protobuf.descriptor
+import google.protobuf.message
+import google.protobuf.timestamp_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class Header(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    TS_FIELD_NUMBER: builtins.int
+    @property
+    def ts(self) -> google.protobuf.timestamp_pb2.Timestamp: ...
+    def __init__(
+        self,
+        *,
+        ts: google.protobuf.timestamp_pb2.Timestamp | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["ts", b"ts"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["ts", b"ts"]) -> None: ...
+
+global___Header = Header
+
+@typing_extensions.final
+class Heartbeat(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    COUNT_FIELD_NUMBER: builtins.int
+    count: builtins.int
+    def __init__(
+        self,
+        *,
+        count: builtins.int = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["count", b"count"]) -> None: ...
+
+global___Heartbeat = Heartbeat
+
+@typing_extensions.final
+class Socket(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    IP_FIELD_NUMBER: builtins.int
+    PORT_FIELD_NUMBER: builtins.int
+    ip: builtins.str
+    port: builtins.int
+    def __init__(
+        self,
+        *,
+        ip: builtins.str = ...,
+        port: builtins.int = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["ip", b"ip", "port", b"port"]) -> None: ...
+
+global___Socket = Socket

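These stubs only type the three small shared messages (Header, Heartbeat, Socket) reused by the other protos. A minimal illustration of constructing them (import path assumed to match this package layout):

from google.protobuf.timestamp_pb2 import Timestamp

from jito_searcher_client.generated.shared_pb2 import Header, Heartbeat, Socket

ts = Timestamp()
ts.GetCurrentTime()                        # current wall-clock time
header = Header(ts=ts)                     # Header wraps a protobuf Timestamp
beat = Heartbeat(count=42)                 # simple counter heartbeat
sock = Socket(ip="127.0.0.1", port=20000)  # placeholder ip/port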
+ 4 - 0
jito_searcher_client/generated/shared_pb2_grpc.py

@@ -0,0 +1,4 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+

+ 4 - 0
jito_searcher_client/generated/shared_pb2_grpc.pyi

@@ -0,0 +1,4 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""

+ 30 - 0
jito_searcher_client/generated/shredstream_pb2.py

@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: shredstream.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+import shared_pb2 as shared__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x11shredstream.proto\x12\x0bshredstream\x1a\x0cshared.proto\"<\n\tHeartbeat\x12\x1e\n\x06socket\x18\x01 \x01(\x0b\x32\x0e.shared.Socket\x12\x0f\n\x07regions\x18\x02 \x03(\t\"#\n\x11HeartbeatResponse\x12\x0e\n\x06ttl_ms\x18\x01 \x01(\r2X\n\x0bShredstream\x12I\n\rSendHeartbeat\x12\x16.shredstream.Heartbeat\x1a\x1e.shredstream.HeartbeatResponse\"\x00\x62\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'shredstream_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  _HEARTBEAT._serialized_start=48
+  _HEARTBEAT._serialized_end=108
+  _HEARTBEATRESPONSE._serialized_start=110
+  _HEARTBEATRESPONSE._serialized_end=145
+  _SHREDSTREAM._serialized_start=147
+  _SHREDSTREAM._serialized_end=235
+# @@protoc_insertion_point(module_scope)

+ 61 - 0
jito_searcher_client/generated/shredstream_pb2.pyi

@@ -0,0 +1,61 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import builtins
+import collections.abc
+import google.protobuf.descriptor
+import google.protobuf.internal.containers
+import google.protobuf.message
+import shared_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+    import typing as typing_extensions
+else:
+    import typing_extensions
+
+DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
+
+@typing_extensions.final
+class Heartbeat(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    SOCKET_FIELD_NUMBER: builtins.int
+    REGIONS_FIELD_NUMBER: builtins.int
+    @property
+    def socket(self) -> shared_pb2.Socket:
+        """don't trust IP:PORT from tcp header since it can be tampered over the wire
+        `socket.ip` must match incoming packet's ip. this prevents spamming an unwitting destination
+        """
+    @property
+    def regions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
+        """regions for shredstream proxy to receive shreds from
+        list of valid regions: https://jito-labs.gitbook.io/mev/systems/connecting/mainnet
+        """
+    def __init__(
+        self,
+        *,
+        socket: shared_pb2.Socket | None = ...,
+        regions: collections.abc.Iterable[builtins.str] | None = ...,
+    ) -> None: ...
+    def HasField(self, field_name: typing_extensions.Literal["socket", b"socket"]) -> builtins.bool: ...
+    def ClearField(self, field_name: typing_extensions.Literal["regions", b"regions", "socket", b"socket"]) -> None: ...
+
+global___Heartbeat = Heartbeat
+
+@typing_extensions.final
+class HeartbeatResponse(google.protobuf.message.Message):
+    DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
+    TTL_MS_FIELD_NUMBER: builtins.int
+    ttl_ms: builtins.int
+    """client must respond within `ttl_ms` to keep stream alive"""
+    def __init__(
+        self,
+        *,
+        ttl_ms: builtins.int = ...,
+    ) -> None: ...
+    def ClearField(self, field_name: typing_extensions.Literal["ttl_ms", b"ttl_ms"]) -> None: ...
+
+global___HeartbeatResponse = HeartbeatResponse

+ 67 - 0
jito_searcher_client/generated/shredstream_pb2_grpc.py

@@ -0,0 +1,67 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import shredstream_pb2 as shredstream__pb2
+
+
+class ShredstreamStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.SendHeartbeat = channel.unary_unary(
+                '/shredstream.Shredstream/SendHeartbeat',
+                request_serializer=shredstream__pb2.Heartbeat.SerializeToString,
+                response_deserializer=shredstream__pb2.HeartbeatResponse.FromString,
+                )
+
+
+class ShredstreamServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def SendHeartbeat(self, request, context):
+        """RPC endpoint to send heartbeats to keep shreds flowing
+        """
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_ShredstreamServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'SendHeartbeat': grpc.unary_unary_rpc_method_handler(
+                    servicer.SendHeartbeat,
+                    request_deserializer=shredstream__pb2.Heartbeat.FromString,
+                    response_serializer=shredstream__pb2.HeartbeatResponse.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'shredstream.Shredstream', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class Shredstream(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def SendHeartbeat(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/shredstream.Shredstream/SendHeartbeat',
+            shredstream__pb2.Heartbeat.SerializeToString,
+            shredstream__pb2.HeartbeatResponse.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)

+ 26 - 0
jito_searcher_client/generated/shredstream_pb2_grpc.pyi

@@ -0,0 +1,26 @@
+"""
+@generated by mypy-protobuf.  Do not edit manually!
+isort:skip_file
+"""
+import abc
+import grpc
+import shredstream_pb2
+
+class ShredstreamStub:
+    def __init__(self, channel: grpc.Channel) -> None: ...
+    SendHeartbeat: grpc.UnaryUnaryMultiCallable[
+        shredstream_pb2.Heartbeat,
+        shredstream_pb2.HeartbeatResponse,
+    ]
+    """RPC endpoint to send heartbeats to keep shreds flowing"""
+
+class ShredstreamServicer(metaclass=abc.ABCMeta):
+    @abc.abstractmethod
+    def SendHeartbeat(
+        self,
+        request: shredstream_pb2.Heartbeat,
+        context: grpc.ServicerContext,
+    ) -> shredstream_pb2.HeartbeatResponse:
+        """RPC endpoint to send heartbeats to keep shreds flowing"""
+
+def add_ShredstreamServicer_to_server(servicer: ShredstreamServicer, server: grpc.Server) -> None: ...

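Per the docstrings above, a shredstream proxy keeps shreds flowing by repeatedly sending a Heartbeat (whose Socket ip must match the sender's source ip, plus a list of regions) and must send the next one within ttl_ms. A rough keepalive sketch, not taken from this repo; the endpoint, port, and region are placeholders, and the import paths assume the generated package layout shown here:

import time

import grpc

from jito_searcher_client.generated.shared_pb2 import Socket
from jito_searcher_client.generated.shredstream_pb2 import Heartbeat
from jito_searcher_client.generated.shredstream_pb2_grpc import ShredstreamStub

channel = grpc.insecure_channel("127.0.0.1:9999")    # placeholder endpoint
stub = ShredstreamStub(channel)

heartbeat = Heartbeat(
    socket=Socket(ip="127.0.0.1", port=20000),       # must match the packet's source ip
    regions=["frankfurt"],                           # placeholder; see the region list linked above
)

while True:
    response = stub.SendHeartbeat(heartbeat)
    # Send the next heartbeat well before ttl_ms elapses to keep the stream alive.
    time.sleep(max(response.ttl_ms / 1000.0 / 2.0, 0.1))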
+ 205 - 0
jito_searcher_client/searcher.py

@@ -0,0 +1,205 @@
+import time
+from dataclasses import dataclass
+from typing import List, Optional, Tuple
+
+from grpc import (
+    StreamStreamClientInterceptor,
+    StreamUnaryClientInterceptor,
+    UnaryStreamClientInterceptor,
+    UnaryUnaryClientInterceptor,
+    intercept_channel,
+    secure_channel,
+    ssl_channel_credentials,
+)
+from grpc.aio import ClientCallDetails
+from solders.keypair import Keypair
+
+from .generated.auth_pb2 import (
+    GenerateAuthChallengeRequest,
+    GenerateAuthTokensRequest,
+    GenerateAuthTokensResponse,
+    RefreshAccessTokenRequest,
+    RefreshAccessTokenResponse,
+    Role,
+)
+from .generated.auth_pb2_grpc import AuthServiceStub
+from .generated.searcher_pb2_grpc import SearcherServiceStub
+
+
+@dataclass
+class JwtToken:
+    # jwt token string
+    token: str
+    # time in seconds since epoch when the token expires
+    expiration: int
+
+
+class SearcherInterceptor(
+    UnaryUnaryClientInterceptor,
+    UnaryStreamClientInterceptor,
+    StreamUnaryClientInterceptor,
+    StreamStreamClientInterceptor,
+):
+    """
+    The jito_searcher_client interceptor is responsible for authenticating with the block engine.
+    Authentication happens in a challenge-response handshake.
+    1. Request a challenge and provide your public key.
+    2. Get challenge and sign a message "{pubkey}-{challenge}".
+    3. Get back a refresh token and access token.
+
+    When the access token expires, use the refresh token to get a new one.
+    When the refresh token expires, perform the challenge-response handshake again.
+    """
+
+    def __init__(self, url: str, kp: Keypair):
+        """
+        :param url: url of the block engine, without the http:// or https:// prefix.
+        :param kp: keypair used to authenticate with the block engine.
+        """
+        self._url = url
+        self._kp = kp
+
+        self._access_token: Optional[JwtToken] = None
+        self._refresh_token: Optional[JwtToken] = None
+
+    def intercept_unary_stream(self, continuation, client_call_details, request):
+        self.authenticate_if_needed()
+
+        client_call_details = self._insert_headers(
+            [("authorization", f"Bearer {self._access_token.token}")],
+            client_call_details,
+        )
+
+        return continuation(client_call_details, request)
+
+    def intercept_stream_unary(
+            self, continuation, client_call_details, request_iterator
+    ):
+        self.authenticate_if_needed()
+
+        client_call_details = self._insert_headers(
+            [("authorization", f"Bearer {self._access_token.token}")],
+            client_call_details,
+        )
+
+        return continuation(client_call_details, request_iterator)
+
+    def intercept_stream_stream(
+            self, continuation, client_call_details, request_iterator
+    ):
+        self.authenticate_if_needed()
+
+        client_call_details = self._insert_headers(
+            [("authorization", f"Bearer {self._access_token.token}")],
+            client_call_details,
+        )
+
+        return continuation(client_call_details, request_iterator)
+
+    def intercept_unary_unary(self, continuation, client_call_details, request):
+        self.authenticate_if_needed()
+
+        client_call_details = self._insert_headers(
+            [("authorization", f"Bearer {self._access_token.token}")],
+            client_call_details,
+        )
+
+        return continuation(client_call_details, request)
+
+    @staticmethod
+    def _insert_headers(
+            new_metadata: List[Tuple[str, str]], client_call_details
+    ) -> ClientCallDetails:
+        metadata = []
+        if client_call_details.metadata is not None:
+            metadata = list(client_call_details.metadata)
+        metadata.extend(new_metadata)
+
+        return ClientCallDetails(
+            client_call_details.method,
+            client_call_details.timeout,
+            metadata,
+            client_call_details.credentials,
+            False,
+        )
+
+    def authenticate_if_needed(self):
+        """
+        Performs a full or refresh authentication if needed, based on the state of the access and refresh tokens.
+        """
+        now = int(time.time())
+        if self._access_token is None or self._refresh_token is None or now >= self._refresh_token.expiration:
+            self.full_authentication()
+        elif now >= self._access_token.expiration:
+            self.refresh_authentication()
+
+    def refresh_authentication(self):
+        """
+        Performs an authentication refresh with the block engine, which involves using the refresh token to get a new
+        access token.
+        """
+        credentials = ssl_channel_credentials()
+        channel = secure_channel(self._url, credentials)
+        auth_client = AuthServiceStub(channel)
+
+        new_access_token: RefreshAccessTokenResponse = auth_client.RefreshAccessToken(
+            RefreshAccessTokenRequest(refresh_token=self._refresh_token.token))
+        self._access_token = JwtToken(token=new_access_token.access_token.value,
+                                      expiration=new_access_token.access_token.expires_at_utc.seconds)
+
+    def full_authentication(self):
+        """
+        Performs full authentication with the block engine
+        """
+        credentials = ssl_channel_credentials()
+        channel = secure_channel(self._url, credentials)
+        auth_client = AuthServiceStub(channel)
+
+        challenge = auth_client.GenerateAuthChallenge(
+            GenerateAuthChallengeRequest(
+                role=Role.SEARCHER, pubkey=bytes(self._kp.pubkey())
+            )
+        ).challenge
+
+        challenge_to_sign = f"{str(self._kp.pubkey())}-{challenge}"
+
+        signed = self._kp.sign_message(bytes(challenge_to_sign, "utf8"))
+
+        auth_tokens_response: GenerateAuthTokensResponse = (
+            auth_client.GenerateAuthTokens(
+                GenerateAuthTokensRequest(
+                    challenge=challenge_to_sign,
+                    client_pubkey=bytes(self._kp.pubkey()),
+                    signed_challenge=bytes(signed),
+                )
+            )
+        )
+
+        self._access_token = JwtToken(
+            token=auth_tokens_response.access_token.value,
+            expiration=auth_tokens_response.access_token.expires_at_utc.seconds,
+        )
+
+        self._refresh_token = JwtToken(
+            token=auth_tokens_response.refresh_token.value,
+            expiration=auth_tokens_response.refresh_token.expires_at_utc.seconds,
+        )
+
+
+def get_searcher_client(url: str, kp: Keypair) -> SearcherServiceStub:
+    """
+    Returns a Searcher Service client that intercepts requests and authenticates with the block engine.
+    :param url: url of the block engine without http/https
+    :param kp: keypair used to authenticate with the block engine
+    :return: SearcherServiceStub which handles authentication on requests
+    """
+    # Authenticate immediately
+    searcher_interceptor = SearcherInterceptor(url, kp)
+    searcher_interceptor.authenticate_if_needed()
+
+    credentials = ssl_channel_credentials()
+    channel = secure_channel(url, credentials)
+    intercepted_channel = intercept_channel(channel, searcher_interceptor)
+
+    return SearcherServiceStub(intercepted_channel)

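Because every intercepted call re-runs authenticate_if_needed before it is issued, long-lived server-streaming subscriptions are used the same way as unary calls. A compressed, illustrative sketch with the same placeholder URL and keypair path as the earlier example:

import json

from solders.keypair import Keypair

from jito_searcher_client.generated.searcher_pb2 import SubscribeBundleResultsRequest
from jito_searcher_client.searcher import get_searcher_client

kp = Keypair.from_bytes(bytes(json.load(open("/path/to/searcher-keypair.json"))))
client = get_searcher_client("<block-engine-url>", kp)

# Server-streaming call; the interceptor authenticates before the stream is opened.
for bundle_result in client.SubscribeBundleResults(SubscribeBundleResultsRequest()):
    print(bundle_result)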
+ 1 - 0
mev-protos

@@ -0,0 +1 @@
+Subproject commit 5213a506c09543743f843c2c825e6da65d85d561

+ 468 - 0
poetry.lock

@@ -0,0 +1,468 @@
+# This file is automatically @generated by Poetry and should not be changed by hand.
+
+[[package]]
+name = "anyio"
+version = "3.6.2"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+category = "main"
+optional = false
+python-versions = ">=3.6.2"
+files = [
+    {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"},
+    {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"},
+]
+
+[package.dependencies]
+idna = ">=2.8"
+sniffio = ">=1.1"
+
+[package.extras]
+doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"]
+trio = ["trio (>=0.16,<0.22)"]
+
+[[package]]
+name = "cachetools"
+version = "4.2.4"
+description = "Extensible memoizing collections and decorators"
+category = "main"
+optional = false
+python-versions = "~=3.5"
+files = [
+    {file = "cachetools-4.2.4-py3-none-any.whl", hash = "sha256:92971d3cb7d2a97efff7c7bb1657f21a8f5fb309a37530537c71b1774189f2d1"},
+    {file = "cachetools-4.2.4.tar.gz", hash = "sha256:89ea6f1b638d5a73a4f9226be57ac5e4f399d22770b92355f92dcb0f7f001693"},
+]
+
+[[package]]
+name = "certifi"
+version = "2022.12.7"
+description = "Python package for providing Mozilla's CA Bundle."
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"},
+    {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"},
+]
+
+[[package]]
+name = "click"
+version = "8.1.3"
+description = "Composable command line interface toolkit"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"},
+    {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"},
+]
+
+[package.dependencies]
+colorama = {version = "*", markers = "platform_system == \"Windows\""}
+
+[[package]]
+name = "colorama"
+version = "0.4.6"
+description = "Cross-platform colored terminal text."
+category = "main"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
+files = [
+    {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
+    {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
+]
+
+[[package]]
+name = "construct"
+version = "2.10.68"
+description = "A powerful declarative symmetric parser/builder for binary data"
+category = "main"
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "construct-2.10.68.tar.gz", hash = "sha256:7b2a3fd8e5f597a5aa1d614c3bd516fa065db01704c72a1efaaeec6ef23d8b45"},
+]
+
+[package.extras]
+extras = ["arrow", "cloudpickle", "enum34", "lz4", "numpy", "ruamel.yaml"]
+
+[[package]]
+name = "construct-typing"
+version = "0.5.5"
+description = "Extension for the python package 'construct' that adds typing features"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "construct-typing-0.5.5.tar.gz", hash = "sha256:29d1a07df539ae096bd1388ad7f58714d229a303047425d3830b282d8b154572"},
+    {file = "construct_typing-0.5.5-py3-none-any.whl", hash = "sha256:bfce2fa170373abe782c2ebaa7f52e14e8f7863a437b7ab63bf74287f922a655"},
+]
+
+[package.dependencies]
+construct = "2.10.68"
+
+[[package]]
+name = "grpcio"
+version = "1.51.1"
+description = "HTTP/2-based RPC framework"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "grpcio-1.51.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:cc2bece1737b44d878cc1510ea04469a8073dbbcdd762175168937ae4742dfb3"},
+    {file = "grpcio-1.51.1-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:e223a9793522680beae44671b9ed8f6d25bbe5ddf8887e66aebad5e0686049ef"},
+    {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:24ac1154c4b2ab4a0c5326a76161547e70664cd2c39ba75f00fc8a2170964ea2"},
+    {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4ef09f8997c4be5f3504cefa6b5c6cc3cf648274ce3cede84d4342a35d76db6"},
+    {file = "grpcio-1.51.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8a0b77e992c64880e6efbe0086fe54dfc0bbd56f72a92d9e48264dcd2a3db98"},
+    {file = "grpcio-1.51.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:eacad297ea60c72dd280d3353d93fb1dcca952ec11de6bb3c49d12a572ba31dd"},
+    {file = "grpcio-1.51.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:16c71740640ba3a882f50b01bf58154681d44b51f09a5728180a8fdc66c67bd5"},
+    {file = "grpcio-1.51.1-cp310-cp310-win32.whl", hash = "sha256:29cb97d41a4ead83b7bcad23bdb25bdd170b1e2cba16db6d3acbb090bc2de43c"},
+    {file = "grpcio-1.51.1-cp310-cp310-win_amd64.whl", hash = "sha256:9ff42c5620b4e4530609e11afefa4a62ca91fa0abb045a8957e509ef84e54d30"},
+    {file = "grpcio-1.51.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:bc59f7ba87972ab236f8669d8ca7400f02a0eadf273ca00e02af64d588046f02"},
+    {file = "grpcio-1.51.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3c2b3842dcf870912da31a503454a33a697392f60c5e2697c91d133130c2c85d"},
+    {file = "grpcio-1.51.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b011674090594f1f3245960ced7386f6af35485a38901f8afee8ad01541dbd"},
+    {file = "grpcio-1.51.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d680356a975d9c66a678eb2dde192d5dc427a7994fb977363634e781614f7c"},
+    {file = "grpcio-1.51.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:094e64236253590d9d4075665c77b329d707b6fca864dd62b144255e199b4f87"},
+    {file = "grpcio-1.51.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:257478300735ce3c98d65a930bbda3db172bd4e00968ba743e6a1154ea6edf10"},
+    {file = "grpcio-1.51.1-cp311-cp311-win32.whl", hash = "sha256:5a6ebcdef0ef12005d56d38be30f5156d1cb3373b52e96f147f4a24b0ddb3a9d"},
+    {file = "grpcio-1.51.1-cp311-cp311-win_amd64.whl", hash = "sha256:3f9b0023c2c92bebd1be72cdfca23004ea748be1813a66d684d49d67d836adde"},
+    {file = "grpcio-1.51.1-cp37-cp37m-linux_armv7l.whl", hash = "sha256:cd3baccea2bc5c38aeb14e5b00167bd4e2373a373a5e4d8d850bd193edad150c"},
+    {file = "grpcio-1.51.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:17ec9b13cec4a286b9e606b48191e560ca2f3bbdf3986f91e480a95d1582e1a7"},
+    {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:fbdbe9a849854fe484c00823f45b7baab159bdd4a46075302281998cb8719df5"},
+    {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31bb6bc7ff145e2771c9baf612f4b9ebbc9605ccdc5f3ff3d5553de7fc0e0d79"},
+    {file = "grpcio-1.51.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e473525c28251558337b5c1ad3fa969511e42304524a4e404065e165b084c9e4"},
+    {file = "grpcio-1.51.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:6f0b89967ee11f2b654c23b27086d88ad7bf08c0b3c2a280362f28c3698b2896"},
+    {file = "grpcio-1.51.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7942b32a291421460d6a07883033e392167d30724aa84987e6956cd15f1a21b9"},
+    {file = "grpcio-1.51.1-cp37-cp37m-win32.whl", hash = "sha256:f96ace1540223f26fbe7c4ebbf8a98e3929a6aa0290c8033d12526847b291c0f"},
+    {file = "grpcio-1.51.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f1fec3abaf274cdb85bf3878167cfde5ad4a4d97c68421afda95174de85ba813"},
+    {file = "grpcio-1.51.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:0e1a9e1b4a23808f1132aa35f968cd8e659f60af3ffd6fb00bcf9a65e7db279f"},
+    {file = "grpcio-1.51.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6df3b63538c362312bc5fa95fb965069c65c3ea91d7ce78ad9c47cab57226f54"},
+    {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:172405ca6bdfedd6054c74c62085946e45ad4d9cec9f3c42b4c9a02546c4c7e9"},
+    {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506b9b7a4cede87d7219bfb31014d7b471cfc77157da9e820a737ec1ea4b0663"},
+    {file = "grpcio-1.51.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fb93051331acbb75b49a2a0fd9239c6ba9528f6bdc1dd400ad1cb66cf864292"},
+    {file = "grpcio-1.51.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5dca372268c6ab6372d37d6b9f9343e7e5b4bc09779f819f9470cd88b2ece3c3"},
+    {file = "grpcio-1.51.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:471d39d3370ca923a316d49c8aac66356cea708a11e647e3bdc3d0b5de4f0a40"},
+    {file = "grpcio-1.51.1-cp38-cp38-win32.whl", hash = "sha256:75e29a90dc319f0ad4d87ba6d20083615a00d8276b51512e04ad7452b5c23b04"},
+    {file = "grpcio-1.51.1-cp38-cp38-win_amd64.whl", hash = "sha256:f1158bccbb919da42544a4d3af5d9296a3358539ffa01018307337365a9a0c64"},
+    {file = "grpcio-1.51.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:59dffade859f157bcc55243714d57b286da6ae16469bf1ac0614d281b5f49b67"},
+    {file = "grpcio-1.51.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:dad6533411d033b77f5369eafe87af8583178efd4039c41d7515d3336c53b4f1"},
+    {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:4c4423ea38a7825b8fed8934d6d9aeebdf646c97e3c608c3b0bcf23616f33877"},
+    {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0dc5354e38e5adf2498312f7241b14c7ce3484eefa0082db4297189dcbe272e6"},
+    {file = "grpcio-1.51.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97d67983189e2e45550eac194d6234fc38b8c3b5396c153821f2d906ed46e0ce"},
+    {file = "grpcio-1.51.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:538d981818e49b6ed1e9c8d5e5adf29f71c4e334e7d459bf47e9b7abb3c30e09"},
+    {file = "grpcio-1.51.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9235dcd5144a83f9ca6f431bd0eccc46b90e2c22fe27b7f7d77cabb2fb515595"},
+    {file = "grpcio-1.51.1-cp39-cp39-win32.whl", hash = "sha256:aacb54f7789ede5cbf1d007637f792d3e87f1c9841f57dd51abf89337d1b8472"},
+    {file = "grpcio-1.51.1-cp39-cp39-win_amd64.whl", hash = "sha256:2b170eaf51518275c9b6b22ccb59450537c5a8555326fd96ff7391b5dd75303c"},
+    {file = "grpcio-1.51.1.tar.gz", hash = "sha256:e6dfc2b6567b1c261739b43d9c59d201c1b89e017afd9e684d85aa7a186c9f7a"},
+]
+
+[package.extras]
+protobuf = ["grpcio-tools (>=1.51.1)"]
+
+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "httpcore"
+version = "0.16.3"
+description = "A minimal low-level HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"},
+    {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"},
+]
+
+[package.dependencies]
+anyio = ">=3.0,<5.0"
+certifi = "*"
+h11 = ">=0.13,<0.15"
+sniffio = ">=1.0.0,<2.0.0"
+
+[package.extras]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.23.3"
+description = "The next generation HTTP client."
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"},
+    {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpcore = ">=0.15.0,<0.17.0"
+rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]}
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (>=1.0.0,<2.0.0)"]
+
+[[package]]
+name = "idna"
+version = "3.4"
+description = "Internationalized Domain Names in Applications (IDNA)"
+category = "main"
+optional = false
+python-versions = ">=3.5"
+files = [
+    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
+    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+]
+
+[[package]]
+name = "isort"
+version = "5.11.4"
+description = "A Python utility / library to sort Python imports."
+category = "main"
+optional = false
+python-versions = ">=3.7.0"
+files = [
+    {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"},
+    {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"},
+]
+
+[package.extras]
+colors = ["colorama (>=0.4.3,<0.5.0)"]
+pipfile-deprecated-finder = ["pipreqs", "requirementslib"]
+plugins = ["setuptools"]
+requirements-deprecated-finder = ["pip-api", "pipreqs"]
+
+[[package]]
+name = "jsonalias"
+version = "0.1.1"
+description = "A microlibrary that defines a Json type alias for Python."
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "jsonalias-0.1.1-py3-none-any.whl", hash = "sha256:a56d2888e6397812c606156504e861e8ec00e188005af149f003c787db3d3f18"},
+    {file = "jsonalias-0.1.1.tar.gz", hash = "sha256:64f04d935397d579fc94509e1fcb6212f2d081235d9d6395bd10baedf760a769"},
+]
+
+[[package]]
+name = "protobuf"
+version = "4.21.12"
+description = ""
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "protobuf-4.21.12-cp310-abi3-win32.whl", hash = "sha256:b135410244ebe777db80298297a97fbb4c862c881b4403b71bac9d4107d61fd1"},
+    {file = "protobuf-4.21.12-cp310-abi3-win_amd64.whl", hash = "sha256:89f9149e4a0169cddfc44c74f230d7743002e3aa0b9472d8c28f0388102fc4c2"},
+    {file = "protobuf-4.21.12-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:299ea899484ee6f44604deb71f424234f654606b983cb496ea2a53e3c63ab791"},
+    {file = "protobuf-4.21.12-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:d1736130bce8cf131ac7957fa26880ca19227d4ad68b4888b3be0dea1f95df97"},
+    {file = "protobuf-4.21.12-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:78a28c9fa223998472886c77042e9b9afb6fe4242bd2a2a5aced88e3f4422aa7"},
+    {file = "protobuf-4.21.12-cp37-cp37m-win32.whl", hash = "sha256:3d164928ff0727d97022957c2b849250ca0e64777ee31efd7d6de2e07c494717"},
+    {file = "protobuf-4.21.12-cp37-cp37m-win_amd64.whl", hash = "sha256:f45460f9ee70a0ec1b6694c6e4e348ad2019275680bd68a1d9314b8c7e01e574"},
+    {file = "protobuf-4.21.12-cp38-cp38-win32.whl", hash = "sha256:6ab80df09e3208f742c98443b6166bcb70d65f52cfeb67357d52032ea1ae9bec"},
+    {file = "protobuf-4.21.12-cp38-cp38-win_amd64.whl", hash = "sha256:1f22ac0ca65bb70a876060d96d914dae09ac98d114294f77584b0d2644fa9c30"},
+    {file = "protobuf-4.21.12-cp39-cp39-win32.whl", hash = "sha256:27f4d15021da6d2b706ddc3860fac0a5ddaba34ab679dc182b60a8bb4e1121cc"},
+    {file = "protobuf-4.21.12-cp39-cp39-win_amd64.whl", hash = "sha256:237216c3326d46808a9f7c26fd1bd4b20015fb6867dc5d263a493ef9a539293b"},
+    {file = "protobuf-4.21.12-py2.py3-none-any.whl", hash = "sha256:a53fd3f03e578553623272dc46ac2f189de23862e68565e83dde203d41b76fc5"},
+    {file = "protobuf-4.21.12-py3-none-any.whl", hash = "sha256:b98d0148f84e3a3c569e19f52103ca1feacdac0d2df8d6533cf983d1fda28462"},
+    {file = "protobuf-4.21.12.tar.gz", hash = "sha256:7cd532c4566d0e6feafecc1059d04c7915aec8e182d1cf7adee8b24ef1e2e6ab"},
+]
+
+[[package]]
+name = "rfc3986"
+version = "1.5.0"
+description = "Validating URI References per RFC 3986"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"},
+    {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"},
+]
+
+[package.dependencies]
+idna = {version = "*", optional = true, markers = "extra == \"idna2008\""}
+
+[package.extras]
+idna2008 = ["idna"]
+
+[[package]]
+name = "sniffio"
+version = "1.3.0"
+description = "Sniff out which async library your code is running under"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
+    {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+]
+
+[[package]]
+name = "solana"
+version = "0.29.0"
+description = "Solana Python API"
+category = "main"
+optional = false
+python-versions = ">=3.7,<4.0"
+files = [
+    {file = "solana-0.29.0-py3-none-any.whl", hash = "sha256:9367893ee3a89b20ae20a271a41fcbc6843d0a86d4ee545442c004f5a0c8ecad"},
+    {file = "solana-0.29.0.tar.gz", hash = "sha256:d0c9160093c63c0fc7e1def7b795242666009ee388b248bd86f1dde056205d4e"},
+]
+
+[package.dependencies]
+cachetools = ">=4.2.2,<5.0.0"
+construct-typing = ">=0.5.2,<0.6.0"
+httpx = ">=0.23.0,<0.24.0"
+solders = ">=0.14.0,<0.15.0"
+types-cachetools = ">=4.2.4,<5.0.0"
+typing-extensions = ">=4.2.0"
+websockets = ">=10.3,<11.0"
+
+[[package]]
+name = "solders"
+version = "0.14.2"
+description = "Python binding to the Solana Rust SDK"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "solders-0.14.2-cp37-abi3-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:1fd382f4055d895bb0f7c6e362c70b409e551f8a465494f6b3354d0b05c70c7e"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa47a64915c5c167de68a9e7ac06336dbb3107d952d8cfadaa9cdb19a8e8d24d"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30a50fcbb98619673953107e75ef98e7fe702df7194a338fe6a6748248780585"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a83cd4a08bb66ecd46f34f21b5aca2f4b757ae082fee626b3e8281f00e2221ed"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2e2cf6c1a40eda73f9e25795b5c836934d4dfc9b9135eb6a091f8273bc12e2f3"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33468e93000df418cb7bfcfaca46163cf3fce584c7969ec80604bbb619660173"},
+    {file = "solders-0.14.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:83fa2e7c3cbae8b2d64ba091cc947d2f07108b891796dbc29f4119f4b047fdd8"},
+    {file = "solders-0.14.2-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:7e4fe209752e3d177018d98c178137d70db9fc5ca8041cf5ee5fbdf762b30963"},
+    {file = "solders-0.14.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:2973bb8aaeaf332a1110264fb31ba5669ccc59aa8cc60e8a0fea8a234458e3fd"},
+    {file = "solders-0.14.2-cp37-abi3-win_amd64.whl", hash = "sha256:3405baa01d3a27fa5be2523c512a486b056068913267f72296cf20eaa6f4d52a"},
+    {file = "solders-0.14.2.tar.gz", hash = "sha256:713d67b679d233bd0700327818e3f1f33e6340421aecf5648be65556e45cdea1"},
+]
+
+[package.dependencies]
+jsonalias = "0.1.1"
+typing-extensions = ">=4.2.0"
+
+[[package]]
+name = "types-cachetools"
+version = "4.2.10"
+description = "Typing stubs for cachetools"
+category = "main"
+optional = false
+python-versions = "*"
+files = [
+    {file = "types-cachetools-4.2.10.tar.gz", hash = "sha256:b1cb18aaff25d2ad47a060413c660c39fadddb01f72012dd1134584b1fdaada5"},
+    {file = "types_cachetools-4.2.10-py3-none-any.whl", hash = "sha256:48301115189d4879d0960baac5a8a2b2d31ce6129b2ce3b915000ed337284898"},
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.4.0"
+description = "Backported and Experimental Type Hints for Python 3.7+"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
+    {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
+]
+
+[[package]]
+name = "websockets"
+version = "10.4"
+description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)"
+category = "main"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "websockets-10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d58804e996d7d2307173d56c297cf7bc132c52df27a3efaac5e8d43e36c21c48"},
+    {file = "websockets-10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc0b82d728fe21a0d03e65f81980abbbcb13b5387f733a1a870672c5be26edab"},
+    {file = "websockets-10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ba089c499e1f4155d2a3c2a05d2878a3428cf321c848f2b5a45ce55f0d7d310c"},
+    {file = "websockets-10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33d69ca7612f0ddff3316b0c7b33ca180d464ecac2d115805c044bf0a3b0d032"},
+    {file = "websockets-10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e627f6b6d4aed919a2052efc408da7a545c606268d5ab5bfab4432734b82b4"},
+    {file = "websockets-10.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ea7b82bfcae927eeffc55d2ffa31665dc7fec7b8dc654506b8e5a518eb4d50"},
+    {file = "websockets-10.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e0cb5cc6ece6ffa75baccfd5c02cffe776f3f5c8bf486811f9d3ea3453676ce8"},
+    {file = "websockets-10.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae5e95cfb53ab1da62185e23b3130e11d64431179debac6dc3c6acf08760e9b1"},
+    {file = "websockets-10.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7c584f366f46ba667cfa66020344886cf47088e79c9b9d39c84ce9ea98aaa331"},
+    {file = "websockets-10.4-cp310-cp310-win32.whl", hash = "sha256:b029fb2032ae4724d8ae8d4f6b363f2cc39e4c7b12454df8df7f0f563ed3e61a"},
+    {file = "websockets-10.4-cp310-cp310-win_amd64.whl", hash = "sha256:8dc96f64ae43dde92530775e9cb169979f414dcf5cff670455d81a6823b42089"},
+    {file = "websockets-10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47a2964021f2110116cc1125b3e6d87ab5ad16dea161949e7244ec583b905bb4"},
+    {file = "websockets-10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e789376b52c295c4946403bd0efecf27ab98f05319df4583d3c48e43c7342c2f"},
+    {file = "websockets-10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7d3f0b61c45c3fa9a349cf484962c559a8a1d80dae6977276df8fd1fa5e3cb8c"},
+    {file = "websockets-10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f55b5905705725af31ccef50e55391621532cd64fbf0bc6f4bac935f0fccec46"},
+    {file = "websockets-10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00c870522cdb69cd625b93f002961ffb0c095394f06ba8c48f17eef7c1541f96"},
+    {file = "websockets-10.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f38706e0b15d3c20ef6259fd4bc1700cd133b06c3c1bb108ffe3f8947be15fa"},
+    {file = "websockets-10.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f2c38d588887a609191d30e902df2a32711f708abfd85d318ca9b367258cfd0c"},
+    {file = "websockets-10.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fe10ddc59b304cb19a1bdf5bd0a7719cbbc9fbdd57ac80ed436b709fcf889106"},
+    {file = "websockets-10.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:90fcf8929836d4a0e964d799a58823547df5a5e9afa83081761630553be731f9"},
+    {file = "websockets-10.4-cp311-cp311-win32.whl", hash = "sha256:b9968694c5f467bf67ef97ae7ad4d56d14be2751000c1207d31bf3bb8860bae8"},
+    {file = "websockets-10.4-cp311-cp311-win_amd64.whl", hash = "sha256:a7a240d7a74bf8d5cb3bfe6be7f21697a28ec4b1a437607bae08ac7acf5b4882"},
+    {file = "websockets-10.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:74de2b894b47f1d21cbd0b37a5e2b2392ad95d17ae983e64727e18eb281fe7cb"},
+    {file = "websockets-10.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3a686ecb4aa0d64ae60c9c9f1a7d5d46cab9bfb5d91a2d303d00e2cd4c4c5cc"},
+    {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d15c968ea7a65211e084f523151dbf8ae44634de03c801b8bd070b74e85033"},
+    {file = "websockets-10.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00213676a2e46b6ebf6045bc11d0f529d9120baa6f58d122b4021ad92adabd41"},
+    {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e23173580d740bf8822fd0379e4bf30aa1d5a92a4f252d34e893070c081050df"},
+    {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:dd500e0a5e11969cdd3320935ca2ff1e936f2358f9c2e61f100a1660933320ea"},
+    {file = "websockets-10.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4239b6027e3d66a89446908ff3027d2737afc1a375f8fd3eea630a4842ec9a0c"},
+    {file = "websockets-10.4-cp37-cp37m-win32.whl", hash = "sha256:8a5cc00546e0a701da4639aa0bbcb0ae2bb678c87f46da01ac2d789e1f2d2038"},
+    {file = "websockets-10.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a9f9a735deaf9a0cadc2d8c50d1a5bcdbae8b6e539c6e08237bc4082d7c13f28"},
+    {file = "websockets-10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5c1289596042fad2cdceb05e1ebf7aadf9995c928e0da2b7a4e99494953b1b94"},
+    {file = "websockets-10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0cff816f51fb33c26d6e2b16b5c7d48eaa31dae5488ace6aae468b361f422b63"},
+    {file = "websockets-10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dd9becd5fe29773d140d68d607d66a38f60e31b86df75332703757ee645b6faf"},
+    {file = "websockets-10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45ec8e75b7dbc9539cbfafa570742fe4f676eb8b0d3694b67dabe2f2ceed8aa6"},
+    {file = "websockets-10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f72e5cd0f18f262f5da20efa9e241699e0cf3a766317a17392550c9ad7b37d8"},
+    {file = "websockets-10.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185929b4808b36a79c65b7865783b87b6841e852ef5407a2fb0c03381092fa3b"},
+    {file = "websockets-10.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d27a7e34c313b3a7f91adcd05134315002aaf8540d7b4f90336beafaea6217c"},
+    {file = "websockets-10.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:884be66c76a444c59f801ac13f40c76f176f1bfa815ef5b8ed44321e74f1600b"},
+    {file = "websockets-10.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:931c039af54fc195fe6ad536fde4b0de04da9d5916e78e55405436348cfb0e56"},
+    {file = "websockets-10.4-cp38-cp38-win32.whl", hash = "sha256:db3c336f9eda2532ec0fd8ea49fef7a8df8f6c804cdf4f39e5c5c0d4a4ad9a7a"},
+    {file = "websockets-10.4-cp38-cp38-win_amd64.whl", hash = "sha256:48c08473563323f9c9debac781ecf66f94ad5a3680a38fe84dee5388cf5acaf6"},
+    {file = "websockets-10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:40e826de3085721dabc7cf9bfd41682dadc02286d8cf149b3ad05bff89311e4f"},
+    {file = "websockets-10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:56029457f219ade1f2fc12a6504ea61e14ee227a815531f9738e41203a429112"},
+    {file = "websockets-10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5fc088b7a32f244c519a048c170f14cf2251b849ef0e20cbbb0fdf0fdaf556f"},
+    {file = "websockets-10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fc8709c00704194213d45e455adc106ff9e87658297f72d544220e32029cd3d"},
+    {file = "websockets-10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0154f7691e4fe6c2b2bc275b5701e8b158dae92a1ab229e2b940efe11905dff4"},
+    {file = "websockets-10.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c6d2264f485f0b53adf22697ac11e261ce84805c232ed5dbe6b1bcb84b00ff0"},
+    {file = "websockets-10.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bc42e8402dc5e9905fb8b9649f57efcb2056693b7e88faa8fb029256ba9c68c"},
+    {file = "websockets-10.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:edc344de4dac1d89300a053ac973299e82d3db56330f3494905643bb68801269"},
+    {file = "websockets-10.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:84bc2a7d075f32f6ed98652db3a680a17a4edb21ca7f80fe42e38753a58ee02b"},
+    {file = "websockets-10.4-cp39-cp39-win32.whl", hash = "sha256:c94ae4faf2d09f7c81847c63843f84fe47bf6253c9d60b20f25edfd30fb12588"},
+    {file = "websockets-10.4-cp39-cp39-win_amd64.whl", hash = "sha256:bbccd847aa0c3a69b5f691a84d2341a4f8a629c6922558f2a70611305f902d74"},
+    {file = "websockets-10.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:82ff5e1cae4e855147fd57a2863376ed7454134c2bf49ec604dfe71e446e2193"},
+    {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d210abe51b5da0ffdbf7b43eed0cfdff8a55a1ab17abbec4301c9ff077dd0342"},
+    {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:942de28af58f352a6f588bc72490ae0f4ccd6dfc2bd3de5945b882a078e4e179"},
+    {file = "websockets-10.4-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9b27d6c1c6cd53dc93614967e9ce00ae7f864a2d9f99fe5ed86706e1ecbf485"},
+    {file = "websockets-10.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3d3cac3e32b2c8414f4f87c1b2ab686fa6284a980ba283617404377cd448f631"},
+    {file = "websockets-10.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:da39dd03d130162deb63da51f6e66ed73032ae62e74aaccc4236e30edccddbb0"},
+    {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:389f8dbb5c489e305fb113ca1b6bdcdaa130923f77485db5b189de343a179393"},
+    {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09a1814bb15eff7069e51fed0826df0bc0702652b5cb8f87697d469d79c23576"},
+    {file = "websockets-10.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff64a1d38d156d429404aaa84b27305e957fd10c30e5880d1765c9480bea490f"},
+    {file = "websockets-10.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b343f521b047493dc4022dd338fc6db9d9282658862756b4f6fd0e996c1380e1"},
+    {file = "websockets-10.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:932af322458da7e4e35df32f050389e13d3d96b09d274b22a7aa1808f292fee4"},
+    {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a4162139374a49eb18ef5b2f4da1dd95c994588f5033d64e0bbfda4b6b6fcf"},
+    {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c57e4c1349fbe0e446c9fa7b19ed2f8a4417233b6984277cce392819123142d3"},
+    {file = "websockets-10.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b627c266f295de9dea86bd1112ed3d5fafb69a348af30a2422e16590a8ecba13"},
+    {file = "websockets-10.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:05a7233089f8bd355e8cbe127c2e8ca0b4ea55467861906b80d2ebc7db4d6b72"},
+    {file = "websockets-10.4.tar.gz", hash = "sha256:eef610b23933c54d5d921c92578ae5f89813438fded840c2e9809d378dc765d3"},
+]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.10"
+content-hash = "5dd4901f6fcc4e85eae432d07c2b096ddbfc5736e8ecb9ff80ff7e3c79e64943"

+ 33 - 0
pyproject.toml

@@ -0,0 +1,33 @@
+[tool.poetry]
+name = "jito_searcher_client"
+version = "0.0.7"
+description = "Jito Labs Python Searcher Client"
+authors = ["Jito Labs <support@jito.wtf>"]
+readme = "README.md"
+packages = [{ include = "jito_searcher_client" }]
+
+[tool.poetry.dependencies]
+python = "^3.10"
+grpcio = "^1.51.1"
+protobuf = "^4.21.12"
+solders = "^0.14.2"
+click = "^8.1.3"
+solana = "^0.29.0"
+isort = "^5.11.4"
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry-grpc-plugin]
+proto_path = "mev-protos"
+python_out = "./jito_searcher_client/generated"
+
+[tool.black]
+line-length = 120
+target-version = ['py37']
+include = '\.pyi?$'
+
+[tool.isort]
+profile = "black"
+extend_skip_glob = ["jito_searcher_client/generated/*"]

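The [tool.poetry-grpc-plugin] table points code generation at the mev-protos submodule and emits into jito_searcher_client/generated. As a hedged sketch only (this is not the project's actual tooling), a roughly equivalent manual regeneration step with grpcio-tools and mypy-protobuf installed would be:

# Run from the repo root; paths mirror the pyproject settings above.
import glob

import pkg_resources
from grpc_tools import protoc

# Include path for google/protobuf/*.proto bundled with grpcio-tools.
WELL_KNOWN = pkg_resources.resource_filename("grpc_tools", "_proto")

protoc.main(
    [
        "grpc_tools.protoc",
        "-Imev-protos",
        "-I" + WELL_KNOWN,
        "--python_out=jito_searcher_client/generated",
        "--grpc_python_out=jito_searcher_client/generated",
        "--mypy_out=jito_searcher_client/generated",        # .pyi stubs via mypy-protobuf
        "--mypy_grpc_out=jito_searcher_client/generated",
        *glob.glob("mev-protos/*.proto"),
    ]
)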
Some files were not shown because too many files changed in this diff