diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d1a52badb3..8cac3ca906 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -30,7 +30,6 @@ jobs: repository: ${{ github.event.inputs.avalanchegoRepo }} ref: ${{ github.event.inputs.avalanchegoBranch }} path: avalanchego - token: ${{ secrets.AVALANCHE_PAT }} - uses: actions/setup-go@v5 with: go-version-file: "go.mod" @@ -72,7 +71,6 @@ jobs: repository: ${{ github.event.inputs.avalanchegoRepo }} ref: ${{ github.event.inputs.avalanchegoBranch }} path: avalanchego - token: ${{ secrets.AVALANCHE_PAT }} - uses: actions/setup-go@v5 with: go-version-file: "go.mod" @@ -128,10 +126,61 @@ jobs: - uses: actions/setup-go@v5 with: go-version-file: "go.mod" + - name: Build AvalancheGo and update Coreth dependency + run: ./scripts/build_avalanchego_with_coreth.sh - name: Run e2e tests uses: ava-labs/avalanchego/.github/actions/run-monitored-tmpnet-cmd@1a40195dc447a7b3b0303b03cd0af8e3a9389635 with: run: ./scripts/tests.e2e.sh + run_env: AVALANCHEGO_CLONE_PATH=avalanchego + prometheus_username: ${{ secrets.PROMETHEUS_ID || '' }} + prometheus_password: ${{ secrets.PROMETHEUS_PASSWORD || '' }} + loki_username: ${{ secrets.LOKI_ID || '' }} + loki_password: ${{ secrets.LOKI_PASSWORD || '' }} + e2e_warp: + name: e2e warp tests + runs-on: ubuntu-latest + steps: + - name: Git checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Set up Go + uses: actions/setup-go@v5 + with: + go-version-file: "go.mod" + - name: check out ${{ github.event.inputs.avalanchegoRepo }} ${{ github.event.inputs.avalanchegoBranch }} + if: ${{ github.event_name == 'workflow_dispatch' }} + uses: actions/checkout@v4 + with: + repository: ${{ github.event.inputs.avalanchegoRepo }} + ref: ${{ github.event.inputs.avalanchegoBranch }} + path: avalanchego + - name: Move AvalancheGo + if: ${{ github.event_name == 'workflow_dispatch' }} + run: mv avalanchego /tmp/e2e/warp/avalanchego + - name: Build AvalancheGo and update Coreth dependency + run: ./scripts/build_avalanchego_with_coreth.sh + env: + AVALANCHEGO_CLONE_PATH: /tmp/e2e/warp/avalanchego + - name: Clone Subnet-EVM + uses: actions/checkout@v4 + with: + repository: ava-labs/subnet-evm + ref: master + path: subnet-evm + - name: Move Subnet-EVM + run: mv subnet-evm /tmp/e2e/warp/subnet-evm + - name: Build Subnet-EVM + run: | + cd /tmp/e2e/warp/subnet-evm + ./scripts/build.sh + - name: Run Warp E2E Tests + uses: ava-labs/avalanchego/.github/actions/run-monitored-tmpnet-cmd@1a40195dc447a7b3b0303b03cd0af8e3a9389635 + with: + run: ./scripts/run_ginkgo_warp.sh + run_env: AVALANCHEGO_BUILD_PATH=/tmp/e2e/warp/avalanchego/build + artifact_prefix: warp prometheus_username: ${{ secrets.PROMETHEUS_ID || '' }} prometheus_password: ${{ secrets.PROMETHEUS_PASSWORD || '' }} loki_username: ${{ secrets.LOKI_ID || '' }} diff --git a/.gitignore b/.gitignore index 136b5da90f..c99388413b 100644 --- a/.gitignore +++ b/.gitignore @@ -50,3 +50,6 @@ build/ avalanchego .direnv + +cmd/simulator/.simulator/* +cmd/simulator/simulator diff --git a/bin/ginkgo b/bin/ginkgo new file mode 100755 index 0000000000..45fa2bd976 --- /dev/null +++ b/bin/ginkgo @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Ensure the go command is run from the root of the repository so that its go.mod file is used +REPO_ROOT=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + cd .. 
&& pwd
+)
+cd "${REPO_ROOT}"
+
+# If an explicit version is not specified, go run uses the ginkgo version from go.mod
+go run github.com/onsi/ginkgo/v2/ginkgo "${@}"
diff --git a/cmd/simulator/.simulator/keys/0x8db97C7cEcE249c2b98bDC0226Cc4C2A57BF52FC b/cmd/simulator/.simulator/keys/0x8db97C7cEcE249c2b98bDC0226Cc4C2A57BF52FC
new file mode 100644
index 0000000000..289ffc12f2
--- /dev/null
+++ b/cmd/simulator/.simulator/keys/0x8db97C7cEcE249c2b98bDC0226Cc4C2A57BF52FC
@@ -0,0 +1 @@
+56289e99c94b6912bfc12adc093c9b51124f0dc54ac7a766b2bc5ccf558d8027
diff --git a/cmd/simulator/README.md b/cmd/simulator/README.md
new file mode 100644
index 0000000000..1f602f61fa
--- /dev/null
+++ b/cmd/simulator/README.md
@@ -0,0 +1,62 @@
+# Load Simulator
+
+When building or developing your own blockchain using `coreth`, you may want to analyze how your fee parameterization behaves and/or how many resources your VM uses under different load patterns. For this reason, we developed `cmd/simulator`. `cmd/simulator` lets you drive arbitrary load across any number of [endpoints] with a user-specified `keys` directory (insecure), `timeout`, `workers`, `max-fee-cap`, and `max-tip-cap`.
+
+## Building the Load Simulator
+
+To build the load simulator, navigate to the base of the simulator directory:
+
+```bash
+cd $GOPATH/src/github.com/ava-labs/coreth/cmd/simulator
+```
+
+Build the simulator:
+
+```bash
+go build -o ./simulator main/*.go
+```
+
+To confirm that you built successfully, run the simulator and print the version:
+
+```bash
+./simulator --version
+```
+
+This should give the following output:
+
+```
+v0.1.1
+```
+
+To run the load simulator, you must first start an EVM-based network. The load simulator works on both the C-Chain and Subnet-EVM, so we will start a single-node network and run the load simulator on the C-Chain.
+
+To start a single-node network, follow the instructions from the AvalancheGo [README](https://github.com/ava-labs/avalanchego#building-avalanchego) to build from source.
+
+Once you've built AvalancheGo, open the AvalancheGo directory in a separate terminal window and run a single-node, non-staking network with the following command:
+
+```bash
+./build/avalanchego --sybil-protection-enabled=false --network-id=local
+```
+
+WARNING:
+
+The `--sybil-protection-enabled=false` flag is only suitable for local testing. Disabling staking serves two functions, explicitly for testing purposes:
+
+1. Ignores stake weight on the P-Chain and counts each connected peer as having a stake weight of 1
+2. Automatically opts in to validate every Subnet
+
+Once you have AvalancheGo running locally, it will be running an HTTP server on the default port `9650`. This means that the RPC endpoint for the C-Chain will be http://127.0.0.1:9650/ext/bc/C/rpc and ws://127.0.0.1:9650/ext/bc/C/ws for WebSocket connections.
+
+Now, we can run the simulator command to simulate some load on the local C-Chain with a 1 minute timeout:
+
+```bash
+./simulator --timeout=1m --workers=1 --max-fee-cap=300 --max-tip-cap=10 --txs-per-worker=50
+```
+
+## Command Line Flags
+
+To see all of the command line flag options, run:
+
+```bash
+./simulator --help
+```
diff --git a/cmd/simulator/config/flags.go b/cmd/simulator/config/flags.go
new file mode 100644
index 0000000000..a9916f4ca6
--- /dev/null
+++ b/cmd/simulator/config/flags.go
@@ -0,0 +1,129 @@
+// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved.
+// See the file LICENSE for licensing terms.
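As a quick sanity check before pointing the simulator at the endpoints the README describes, a minimal sketch like the following (not part of this diff, and assuming a local node serving ws://127.0.0.1:9650/ext/bc/C/ws) dials the C-Chain with coreth's `ethclient` and prints the chain ID and current height:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/ava-labs/coreth/ethclient"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	// Endpoint assumed from the README above; adjust if your node differs.
	client, err := ethclient.Dial("ws://127.0.0.1:9650/ext/bc/C/ws")
	if err != nil {
		log.Fatalf("failed to dial C-Chain endpoint: %v", err)
	}

	chainID, err := client.ChainID(ctx)
	if err != nil {
		log.Fatalf("failed to fetch chain ID: %v", err)
	}
	height, err := client.BlockNumber(ctx)
	if err != nil {
		log.Fatalf("failed to fetch block number: %v", err)
	}
	fmt.Printf("connected: chainID=%s height=%d\n", chainID, height)
}
```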
+ +package config + +import ( + "errors" + "fmt" + "strings" + "time" + + "github.com/spf13/pflag" + "github.com/spf13/viper" +) + +const Version = "v0.1.1" + +const ( + ConfigFilePathKey = "config-file" + LogLevelKey = "log-level" + EndpointsKey = "endpoints" + MaxFeeCapKey = "max-fee-cap" + MaxTipCapKey = "max-tip-cap" + WorkersKey = "workers" + TxsPerWorkerKey = "txs-per-worker" + KeyDirKey = "key-dir" + VersionKey = "version" + TimeoutKey = "timeout" + BatchSizeKey = "batch-size" + MetricsPortKey = "metrics-port" + MetricsOutputKey = "metrics-output" +) + +var ( + ErrNoEndpoints = errors.New("must specify at least one endpoint") + ErrNoWorkers = errors.New("must specify non-zero number of workers") + ErrNoTxs = errors.New("must specify non-zero number of txs-per-worker") +) + +type Config struct { + Endpoints []string `json:"endpoints"` + MaxFeeCap int64 `json:"max-fee-cap"` + MaxTipCap int64 `json:"max-tip-cap"` + Workers int `json:"workers"` + TxsPerWorker uint64 `json:"txs-per-worker"` + KeyDir string `json:"key-dir"` + Timeout time.Duration `json:"timeout"` + BatchSize uint64 `json:"batch-size"` + MetricsPort uint64 `json:"metrics-port"` + MetricsOutput string `json:"metrics-output"` +} + +func BuildConfig(v *viper.Viper) (Config, error) { + c := Config{ + Endpoints: v.GetStringSlice(EndpointsKey), + MaxFeeCap: v.GetInt64(MaxFeeCapKey), + MaxTipCap: v.GetInt64(MaxTipCapKey), + Workers: v.GetInt(WorkersKey), + TxsPerWorker: v.GetUint64(TxsPerWorkerKey), + KeyDir: v.GetString(KeyDirKey), + Timeout: v.GetDuration(TimeoutKey), + BatchSize: v.GetUint64(BatchSizeKey), + MetricsPort: v.GetUint64(MetricsPortKey), + MetricsOutput: v.GetString(MetricsOutputKey), + } + if len(c.Endpoints) == 0 { + return c, ErrNoEndpoints + } + if c.Workers == 0 { + return c, ErrNoWorkers + } + if c.TxsPerWorker == 0 { + return c, ErrNoTxs + } + // Note: it's technically valid for the fee/tip cap to be 0, but cannot + // be less than 0. 
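To illustrate how the configuration layer above is meant to be driven, here is a hedged sketch that parses flags with `BuildFlagSet`/`BuildViper` and validates the result with `BuildConfig`; the endpoint value is only an assumption matching the package defaults, and environment variables prefixed with `EVM_SIMULATOR_` would override any of these flags:

```go
package main

import (
	"fmt"
	"log"

	"github.com/ava-labs/coreth/cmd/simulator/config"
)

func main() {
	fs := config.BuildFlagSet()

	// Flags can also be overridden via env vars, e.g. EVM_SIMULATOR_WORKERS=4,
	// because BuildViper sets the "evm_simulator" prefix and maps '-' to '_'.
	v, err := config.BuildViper(fs, []string{
		"--endpoints=ws://127.0.0.1:9650/ext/bc/C/ws",
		"--workers=2",
		"--txs-per-worker=10",
	})
	if err != nil {
		log.Fatalf("failed to build viper: %v", err)
	}

	cfg, err := config.BuildConfig(v)
	if err != nil {
		log.Fatalf("invalid config: %v", err)
	}
	fmt.Printf("workers=%d txs-per-worker=%d endpoints=%v\n",
		cfg.Workers, cfg.TxsPerWorker, cfg.Endpoints)
}
```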
+ if c.MaxFeeCap < 0 { + return c, fmt.Errorf("invalid max fee cap %d < 0", c.MaxFeeCap) + } + if c.MaxTipCap < 0 { + return c, fmt.Errorf("invalid max tip cap %d <= 0", c.MaxTipCap) + } + return c, nil +} + +func BuildViper(fs *pflag.FlagSet, args []string) (*viper.Viper, error) { + if err := fs.Parse(args); err != nil { + return nil, err + } + + v := viper.New() + v.AutomaticEnv() + v.SetEnvKeyReplacer(strings.NewReplacer("-", "_")) + v.SetEnvPrefix("evm_simulator") + if err := v.BindPFlags(fs); err != nil { + return nil, err + } + + if v.IsSet(ConfigFilePathKey) { + v.SetConfigFile(v.GetString(ConfigFilePathKey)) + if err := v.ReadInConfig(); err != nil { + return nil, err + } + } + return v, nil +} + +// BuildFlagSet returns a complete set of flags for simulator +func BuildFlagSet() *pflag.FlagSet { + fs := pflag.NewFlagSet("simulator", pflag.ContinueOnError) + addSimulatorFlags(fs) + return fs +} + +func addSimulatorFlags(fs *pflag.FlagSet) { + fs.Bool(VersionKey, false, "Print the version and exit") + fs.String(ConfigFilePathKey, "", "Specify the config path to use to load a YAML config for the simulator") + fs.StringSlice(EndpointsKey, []string{"ws://127.0.0.1:9650/ext/bc/C/ws"}, "Specify a comma separated list of RPC Websocket Endpoints (minimum of 1 endpoint)") + fs.Int64(MaxFeeCapKey, 50, "Specify the maximum fee cap to use for transactions denominated in GWei (must be > 0)") + fs.Int64(MaxTipCapKey, 1, "Specify the max tip cap for transactions denominated in GWei (must be >= 0)") + fs.Uint64(TxsPerWorkerKey, 100, "Specify the number of transactions to create per worker (must be > 0)") + fs.Int(WorkersKey, 1, "Specify the number of workers to create for the simulator (must be > 0)") + fs.String(KeyDirKey, ".simulator/keys", "Specify the directory to save private keys in (INSECURE: only use for testing)") + fs.Duration(TimeoutKey, 5*time.Minute, "Specify the timeout for the simulator to complete (0 indicates no timeout)") + fs.String(LogLevelKey, "info", "Specify the log level to use in the simulator") + fs.Uint64(BatchSizeKey, 100, "Specify the batchsize for the worker to issue and confirm txs") + fs.Uint64(MetricsPortKey, 8082, "Specify the port to use for the metrics server") + fs.String(MetricsOutputKey, "", "Specify the file to write metrics in json format, or empty to write to stdout (defaults to stdout)") +} diff --git a/cmd/simulator/key/key.go b/cmd/simulator/key/key.go new file mode 100644 index 0000000000..c4d1c6266f --- /dev/null +++ b/cmd/simulator/key/key.go @@ -0,0 +1,85 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package key + +import ( + "context" + "crypto/ecdsa" + "fmt" + "os" + "path/filepath" + + "github.com/ava-labs/libevm/common" + ethcrypto "github.com/ava-labs/libevm/crypto" +) + +type Key struct { + PrivKey *ecdsa.PrivateKey + Address common.Address +} + +func CreateKey(pk *ecdsa.PrivateKey) *Key { + return &Key{pk, ethcrypto.PubkeyToAddress(pk.PublicKey)} +} + +// Load attempts to open a [Key] stored at [file]. +func Load(file string) (*Key, error) { + pk, err := ethcrypto.LoadECDSA(file) + if err != nil { + return nil, fmt.Errorf("problem loading private key from %s: %w", file, err) + } + return CreateKey(pk), nil +} + +// LoadAll loads all keys in [dir]. 
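A small sketch of the key lifecycle provided by the `key` package above; the `.simulator/keys` path mirrors the simulator default and is only an assumption here. It generates a key, persists it under its hex address, and reads the directory back with `LoadAll`:

```go
package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"github.com/ava-labs/coreth/cmd/simulator/key"
)

func main() {
	dir := ".simulator/keys" // same default the simulator uses; any writable dir works

	// Ensure the directory exists before saving (LoadAll would also create it).
	if err := os.MkdirAll(dir, 0o755); err != nil {
		log.Fatalf("failed to create key dir: %v", err)
	}

	// Generate a fresh key and persist it as <dir>/<hex address>.
	k, err := key.Generate()
	if err != nil {
		log.Fatalf("failed to generate key: %v", err)
	}
	if err := k.Save(dir); err != nil {
		log.Fatalf("failed to save key: %v", err)
	}

	// LoadAll returns every key stored in the directory.
	keys, err := key.LoadAll(context.Background(), dir)
	if err != nil {
		log.Fatalf("failed to load keys: %v", err)
	}
	for _, loaded := range keys {
		fmt.Println("loaded key for", loaded.Address.Hex())
	}
}
```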
+func LoadAll(ctx context.Context, dir string) ([]*Key, error) { + if _, err := os.Stat(dir); os.IsNotExist(err) { + if err := os.MkdirAll(dir, 0o755); err != nil { + return nil, fmt.Errorf("unable to create %s: %w", dir, err) + } + + return nil, nil + } + + var files []string + + err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { + if path == dir { + return nil + } + + files = append(files, path) + return nil + }) + if err != nil { + return nil, fmt.Errorf("could not walk %s: %w", dir, err) + } + + ks := make([]*Key, len(files)) + for i, file := range files { + k, err := Load(file) + if err != nil { + return nil, fmt.Errorf("could not load key at %s: %w", file, err) + } + + ks[i] = k + } + return ks, nil +} + +// Save persists a [Key] to [dir] (where the filename is the hex-encoded +// address). +func (k *Key) Save(dir string) error { + fp := filepath.Join(dir, k.Address.Hex()) + return ethcrypto.SaveECDSA(fp, k.PrivKey) +} + +// Generate creates a new [Key] and returns it. +func Generate() (*Key, error) { + pk, err := ethcrypto.GenerateKey() + if err != nil { + return nil, fmt.Errorf("%w: cannot generate key", err) + } + return CreateKey(pk), nil +} diff --git a/cmd/simulator/load/funder.go b/cmd/simulator/load/funder.go new file mode 100644 index 0000000000..7719e8a338 --- /dev/null +++ b/cmd/simulator/load/funder.go @@ -0,0 +1,125 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package load + +import ( + "context" + "crypto/ecdsa" + "fmt" + "math/big" + + "github.com/ava-labs/coreth/cmd/simulator/key" + "github.com/ava-labs/coreth/cmd/simulator/metrics" + "github.com/ava-labs/coreth/cmd/simulator/txs" + "github.com/ava-labs/coreth/ethclient" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/core/types" + "github.com/ava-labs/libevm/log" + ethparams "github.com/ava-labs/libevm/params" +) + +// DistributeFunds ensures that each address in keys has at least [minFundsPerAddr] by sending funds +// from the key with the highest starting balance. +// This function returns a set of at least [numKeys] keys, each having a minimum balance [minFundsPerAddr]. +func DistributeFunds(ctx context.Context, client *ethclient.Client, keys []*key.Key, numKeys int, minFundsPerAddr *big.Int, m *metrics.Metrics) ([]*key.Key, error) { + if len(keys) < numKeys { + return nil, fmt.Errorf("insufficient number of keys %d < %d", len(keys), numKeys) + } + fundedKeys := make([]*key.Key, 0, numKeys) + // TODO: clean up fund distribution. 
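The fund distribution helper above can also be called directly. The sketch below assumes a local node at ws://127.0.0.1:9650/ext/bc/C/ws and a key directory that already contains at least three keys, one of which holds enough balance to seed the others; the minimum-balance figure is purely illustrative:

```go
package main

import (
	"context"
	"log"
	"math/big"

	"github.com/ava-labs/coreth/cmd/simulator/key"
	"github.com/ava-labs/coreth/cmd/simulator/load"
	"github.com/ava-labs/coreth/cmd/simulator/metrics"
	"github.com/ava-labs/coreth/ethclient"
)

func main() {
	ctx := context.Background()

	client, err := ethclient.Dial("ws://127.0.0.1:9650/ext/bc/C/ws")
	if err != nil {
		log.Fatalf("failed to dial endpoint: %v", err)
	}

	// Load the simulator key directory; at least one key is expected to hold
	// enough funds to seed the others.
	keys, err := key.LoadAll(ctx, ".simulator/keys")
	if err != nil {
		log.Fatalf("failed to load keys: %v", err)
	}

	minFundsPerAddr := big.NewInt(1_000_000_000_000_000) // 0.001 native token units, illustrative only
	funded, err := load.DistributeFunds(ctx, client, keys, 3, minFundsPerAddr, metrics.NewDefaultMetrics())
	if err != nil {
		log.Fatalf("failed to distribute funds: %v", err)
	}
	log.Printf("have %d funded keys", len(funded))
}
```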
+ needFundsKeys := make([]*key.Key, 0) + needFundsAddrs := make([]common.Address, 0) + + maxFundsKey := keys[0] + maxFundsBalance := common.Big0 + log.Info("Checking balance of each key to distribute funds") + for _, key := range keys { + balance, err := client.BalanceAt(ctx, key.Address, nil) + if err != nil { + return nil, fmt.Errorf("failed to fetch balance for addr %s: %w", key.Address, err) + } + + if balance.Cmp(minFundsPerAddr) < 0 { + needFundsKeys = append(needFundsKeys, key) + needFundsAddrs = append(needFundsAddrs, key.Address) + } else { + fundedKeys = append(fundedKeys, key) + } + + if balance.Cmp(maxFundsBalance) > 0 { + maxFundsKey = key + maxFundsBalance = balance + } + } + requiredFunds := new(big.Int).Mul(minFundsPerAddr, big.NewInt(int64(numKeys))) + if maxFundsBalance.Cmp(requiredFunds) < 0 { + return nil, fmt.Errorf("insufficient funds to distribute %d < %d", maxFundsBalance, requiredFunds) + } + log.Info("Found max funded key", "address", maxFundsKey.Address, "balance", maxFundsBalance, "numFundAddrs", len(needFundsAddrs)) + if len(fundedKeys) >= numKeys { + return fundedKeys[:numKeys], nil + } + + // If there are not enough funded keys, cut [needFundsAddrs] to the number of keys that + // must be funded to reach [numKeys] required. + fundKeysCutLen := numKeys - len(fundedKeys) + needFundsKeys = needFundsKeys[:fundKeysCutLen] + needFundsAddrs = needFundsAddrs[:fundKeysCutLen] + + chainID, err := client.ChainID(ctx) + if err != nil { + return nil, fmt.Errorf("failed to fetch chainID: %w", err) + } + gasFeeCap, err := client.EstimateBaseFee(ctx) + if err != nil { + return nil, fmt.Errorf("failed to fetch estimated base fee: %w", err) + } + gasTipCap, err := client.SuggestGasTipCap(ctx) + if err != nil { + return nil, fmt.Errorf("failed to fetch suggested gas tip: %w", err) + } + signer := types.LatestSignerForChainID(chainID) + + // Generate a sequence of transactions to distribute the required funds. + log.Info("Generating distribution transactions...") + i := 0 + txGenerator := func(key *ecdsa.PrivateKey, nonce uint64) (*types.Transaction, error) { + tx, err := types.SignNewTx(key, signer, &types.DynamicFeeTx{ + ChainID: chainID, + Nonce: nonce, + GasTipCap: gasTipCap, + GasFeeCap: gasFeeCap, + Gas: ethparams.TxGas, + To: &needFundsAddrs[i], + Data: nil, + Value: requiredFunds, + }) + if err != nil { + return nil, err + } + i++ + return tx, nil + } + + numTxs := uint64(len(needFundsAddrs)) + txSequence, err := txs.GenerateTxSequence(ctx, txGenerator, client, maxFundsKey.PrivKey, numTxs, false) + if err != nil { + return nil, fmt.Errorf("failed to generate fund distribution sequence from %s of length %d", maxFundsKey.Address, len(needFundsAddrs)) + } + worker := NewSingleAddressTxWorker(ctx, client, maxFundsKey.Address) + txFunderAgent := txs.NewIssueNAgent[*types.Transaction](txSequence, worker, numTxs, m) + + if err := txFunderAgent.Execute(ctx); err != nil { + return nil, err + } + for _, addr := range needFundsAddrs { + balance, err := client.BalanceAt(ctx, addr, nil) + if err != nil { + return nil, fmt.Errorf("failed to fetch balance for addr %s: %w", addr, err) + } + log.Info("Funded address has balance", "addr", addr, "balance", balance) + } + fundedKeys = append(fundedKeys, needFundsKeys...) + return fundedKeys, nil +} diff --git a/cmd/simulator/load/loader.go b/cmd/simulator/load/loader.go new file mode 100644 index 0000000000..f4cf1be86b --- /dev/null +++ b/cmd/simulator/load/loader.go @@ -0,0 +1,245 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. 
All rights reserved. +// See the file LICENSE for licensing terms. + +package load + +import ( + "context" + "crypto/ecdsa" + "fmt" + "math/big" + "os" + "os/signal" + "strconv" + "syscall" + "time" + + "github.com/ava-labs/coreth/cmd/simulator/config" + "github.com/ava-labs/coreth/cmd/simulator/key" + "github.com/ava-labs/coreth/cmd/simulator/metrics" + "github.com/ava-labs/coreth/cmd/simulator/txs" + "github.com/ava-labs/coreth/ethclient" + "github.com/ava-labs/coreth/params" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/core/types" + ethcrypto "github.com/ava-labs/libevm/crypto" + "github.com/ava-labs/libevm/log" + ethparams "github.com/ava-labs/libevm/params" + "golang.org/x/sync/errgroup" +) + +const ( + MetricsEndpoint = "/metrics" // Endpoint for the Prometheus Metrics Server +) + +// Loader executes a series of worker/tx sequence pairs. +// Each worker/txSequence pair issues [batchSize] transactions, confirms all +// of them as accepted, and then moves to the next batch until the txSequence +// is exhausted. +type Loader[T txs.THash] struct { + clients []txs.Worker[T] + txSequences []txs.TxSequence[T] + batchSize uint64 + metrics *metrics.Metrics +} + +func New[T txs.THash]( + clients []txs.Worker[T], + txSequences []txs.TxSequence[T], + batchSize uint64, + metrics *metrics.Metrics, +) *Loader[T] { + return &Loader[T]{ + clients: clients, + txSequences: txSequences, + batchSize: batchSize, + metrics: metrics, + } +} + +func (l *Loader[T]) Execute(ctx context.Context) error { + log.Info("Constructing tx agents...", "numAgents", len(l.txSequences)) + agents := make([]txs.Agent[T], 0, len(l.txSequences)) + for i := 0; i < len(l.txSequences); i++ { + agents = append(agents, txs.NewIssueNAgent(l.txSequences[i], l.clients[i], l.batchSize, l.metrics)) + } + + log.Info("Starting tx agents...") + eg := errgroup.Group{} + for _, agent := range agents { + eg.Go(func() error { + return agent.Execute(ctx) + }) + } + + log.Info("Waiting for tx agents...") + if err := eg.Wait(); err != nil { + return err + } + log.Info("Tx agents completed successfully.") + return nil +} + +// ConfirmReachedTip finds the max height any client has reached and then ensures every client +// reaches at least that height. +// +// This allows the network to continue to roll forward and creates a synchronization point to ensure +// that every client in the loader has reached at least the max height observed of any client at +// the time this function was called. 
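To show how the pieces above fit together, the following hedged sketch (a small helper, not a full program from this diff) wires pre-built workers and transaction sequences into a `Loader`, runs every worker/sequence pair in batches, and then uses `ConfirmReachedTip` as a synchronization point across clients:

```go
package sketch

import (
	"context"
	"fmt"

	"github.com/ava-labs/coreth/cmd/simulator/load"
	"github.com/ava-labs/coreth/cmd/simulator/metrics"
	"github.com/ava-labs/coreth/cmd/simulator/txs"
	"github.com/ava-labs/libevm/core/types"
)

// runLoad wires pre-built workers and transaction sequences into a Loader,
// executes every worker/sequence pair in batches of batchSize, and then waits
// until all clients have caught up to the highest observed block height.
// Callers are expected to produce matching slices of workers and sequences,
// for example via NewSingleAddressTxWorker and GenerateTxSequences.
func runLoad(
	ctx context.Context,
	workers []txs.Worker[*types.Transaction],
	sequences []txs.TxSequence[*types.Transaction],
	batchSize uint64,
	m *metrics.Metrics,
) error {
	loader := load.New(workers, sequences, batchSize, m)
	if err := loader.Execute(ctx); err != nil {
		return fmt.Errorf("load execution failed: %w", err)
	}
	// ConfirmReachedTip acts as a synchronization point across clients.
	if err := loader.ConfirmReachedTip(ctx); err != nil {
		return fmt.Errorf("clients failed to reach tip: %w", err)
	}
	return nil
}
```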
+func (l *Loader[T]) ConfirmReachedTip(ctx context.Context) error { + maxHeight := uint64(0) + for i, client := range l.clients { + latestHeight, err := client.LatestHeight(ctx) + if err != nil { + return fmt.Errorf("client %d failed to get latest height: %w", i, err) + } + if latestHeight > maxHeight { + maxHeight = latestHeight + } + } + + eg := errgroup.Group{} + for i, client := range l.clients { + eg.Go(func() error { + for { + latestHeight, err := client.LatestHeight(ctx) + if err != nil { + return fmt.Errorf("failed to get latest height from client %d: %w", i, err) + } + if latestHeight >= maxHeight { + return nil + } + select { + case <-ctx.Done(): + return fmt.Errorf("failed to get latest height from client %d: %w", i, ctx.Err()) + case <-time.After(time.Second): + } + } + }) + } + + return eg.Wait() +} + +// ExecuteLoader creates txSequences from [config] and has txAgents execute the specified simulation. +func ExecuteLoader(ctx context.Context, config config.Config) error { + if config.Timeout > 0 { + var cancel context.CancelFunc + ctx, cancel = context.WithTimeout(ctx, config.Timeout) + defer cancel() + } + + // Create buffered sigChan to receive SIGINT notifications + sigChan := make(chan os.Signal, 1) + signal.Notify(sigChan, syscall.SIGINT) + + // Create context with cancel + ctx, cancel := context.WithCancel(ctx) + + go func() { + // Blocks until we receive a SIGINT notification or if parent context is done + select { + case <-sigChan: + case <-ctx.Done(): + } + + // Cancel the child context and end all processes + cancel() + }() + + m := metrics.NewDefaultMetrics() + metricsCtx := context.Background() + ms := m.Serve(metricsCtx, strconv.Itoa(int(config.MetricsPort)), MetricsEndpoint) + defer ms.Shutdown() + + // Construct the arguments for the load simulator + clients := make([]*ethclient.Client, 0, len(config.Endpoints)) + for i := 0; i < config.Workers; i++ { + clientURI := config.Endpoints[i%len(config.Endpoints)] + client, err := ethclient.Dial(clientURI) + if err != nil { + return fmt.Errorf("failed to dial client at %s: %w", clientURI, err) + } + clients = append(clients, client) + } + + keys, err := key.LoadAll(ctx, config.KeyDir) + if err != nil { + return err + } + // Ensure there are at least [config.Workers] keys and save any newly generated ones. + if len(keys) < config.Workers { + for i := 0; len(keys) < config.Workers; i++ { + newKey, err := key.Generate() + if err != nil { + return fmt.Errorf("failed to generate %d new key: %w", i, err) + } + if err := newKey.Save(config.KeyDir); err != nil { + return fmt.Errorf("failed to save %d new key: %w", i, err) + } + keys = append(keys, newKey) + } + } + + // Each address needs: params.GWei * MaxFeeCap * ethparams.TxGas * TxsPerWorker total wei + // to fund gas for all of their transactions. 
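For intuition on the sizing formula in the comment above, the short program below reproduces it with the simulator's default flag values (max-fee-cap of 50 GWei and 100 basic transfers per worker); the result is 105,000,000 GWei, i.e. 0.105 of the native token per worker address:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/ava-labs/coreth/params"
	ethparams "github.com/ava-labs/libevm/params"
)

func main() {
	// Mirror the sizing comment above using the simulator's default flags:
	// max-fee-cap = 50 GWei and txs-per-worker = 100 basic transfers.
	maxFeeCapGwei := int64(50)
	txsPerWorker := uint64(100)

	maxFeeCapWei := new(big.Int).Mul(big.NewInt(params.GWei), big.NewInt(maxFeeCapGwei))
	minFundsPerAddr := new(big.Int).Mul(maxFeeCapWei, new(big.Int).SetUint64(txsPerWorker*ethparams.TxGas))

	// 50 GWei * 21,000 gas * 100 txs = 105,000,000 GWei = 0.105 native token units.
	fmt.Println("min funds per worker address (wei):", minFundsPerAddr)
}
```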
+ maxFeeCap := new(big.Int).Mul(big.NewInt(params.GWei), big.NewInt(config.MaxFeeCap)) + minFundsPerAddr := new(big.Int).Mul(maxFeeCap, big.NewInt(int64(config.TxsPerWorker*ethparams.TxGas))) + fundStart := time.Now() + log.Info("Distributing funds", "numTxsPerWorker", config.TxsPerWorker, "minFunds", minFundsPerAddr) + keys, err = DistributeFunds(ctx, clients[0], keys, config.Workers, minFundsPerAddr, m) + if err != nil { + return err + } + log.Info("Distributed funds successfully", "time", time.Since(fundStart)) + + pks := make([]*ecdsa.PrivateKey, 0, len(keys)) + senders := make([]common.Address, 0, len(keys)) + for _, key := range keys { + pks = append(pks, key.PrivKey) + senders = append(senders, key.Address) + } + + bigGwei := big.NewInt(params.GWei) + gasTipCap := new(big.Int).Mul(bigGwei, big.NewInt(config.MaxTipCap)) + gasFeeCap := new(big.Int).Mul(bigGwei, big.NewInt(config.MaxFeeCap)) + client := clients[0] + chainID, err := client.ChainID(ctx) + if err != nil { + return fmt.Errorf("failed to fetch chainID: %w", err) + } + signer := types.LatestSignerForChainID(chainID) + + log.Info("Creating transaction sequences...") + txGenerator := func(key *ecdsa.PrivateKey, nonce uint64) (*types.Transaction, error) { + addr := ethcrypto.PubkeyToAddress(key.PublicKey) + return types.SignNewTx(key, signer, &types.DynamicFeeTx{ + ChainID: chainID, + Nonce: nonce, + GasTipCap: gasTipCap, + GasFeeCap: gasFeeCap, + Gas: ethparams.TxGas, + To: &addr, + Data: nil, + Value: common.Big0, + }) + } + txSequenceStart := time.Now() + txSequences, err := txs.GenerateTxSequences(ctx, txGenerator, clients[0], pks, config.TxsPerWorker, false) + if err != nil { + return err + } + log.Info("Created transaction sequences successfully", "time", time.Since(txSequenceStart)) + + workers := make([]txs.Worker[*types.Transaction], 0, len(clients)) + for i, client := range clients { + workers = append(workers, NewSingleAddressTxWorker(ctx, client, ethcrypto.PubkeyToAddress(pks[i].PublicKey))) + } + loader := New(workers, txSequences, config.BatchSize, m) + err = loader.Execute(ctx) + prerr := m.Print(config.MetricsOutput) // Print regardless of execution error + if prerr != nil { + log.Warn("Failed to print metrics", "error", prerr) + } + return err +} diff --git a/cmd/simulator/load/worker.go b/cmd/simulator/load/worker.go new file mode 100644 index 0000000000..003dc2bf38 --- /dev/null +++ b/cmd/simulator/load/worker.go @@ -0,0 +1,122 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package load + +import ( + "context" + "fmt" + "time" + + "github.com/ava-labs/coreth/ethclient" + ethereum "github.com/ava-labs/libevm" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/core/types" + "github.com/ava-labs/libevm/log" +) + +type ethereumTxWorker struct { + client *ethclient.Client + + acceptedNonce uint64 + address common.Address + + sub ethereum.Subscription + newHeads chan *types.Header +} + +// NewSingleAddressTxWorker creates and returns a new ethereumTxWorker that confirms transactions by checking the latest +// nonce of [address] and assuming any transaction with a lower nonce was already accepted. 
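The CLI path ends in `ExecuteLoader`, which can also be invoked programmatically. Below is a hedged sketch that fills in a `Config` by hand; it assumes a local C-Chain WebSocket endpoint and a key directory whose first key is already funded, exactly what the flow above relies on:

```go
package main

import (
	"context"
	"log"
	"time"

	"github.com/ava-labs/coreth/cmd/simulator/config"
	"github.com/ava-labs/coreth/cmd/simulator/load"
)

func main() {
	// Programmatic equivalent of the CLI: fill in a Config directly and hand it
	// to ExecuteLoader, which funds keys, generates sequences, and runs agents.
	cfg := config.Config{
		Endpoints:     []string{"ws://127.0.0.1:9650/ext/bc/C/ws"},
		MaxFeeCap:     50,
		MaxTipCap:     1,
		Workers:       1,
		TxsPerWorker:  10,
		KeyDir:        ".simulator/keys",
		Timeout:       time.Minute,
		BatchSize:     10,
		MetricsPort:   8082,
		MetricsOutput: "", // print metrics to stdout at the end
	}
	if err := load.ExecuteLoader(context.Background(), cfg); err != nil {
		log.Fatalf("load execution failed: %v", err)
	}
}
```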
+func NewSingleAddressTxWorker(ctx context.Context, client *ethclient.Client, address common.Address) *ethereumTxWorker { + newHeads := make(chan *types.Header) + tw := ðereumTxWorker{ + client: client, + address: address, + newHeads: newHeads, + } + + sub, err := client.SubscribeNewHead(ctx, newHeads) + if err != nil { + log.Debug("failed to subscribe new heads, falling back to polling", "err", err) + } else { + tw.sub = sub + } + + return tw +} + +// NewTxReceiptWorker creates and returns a new ethereumTxWorker that confirms transactions by checking for the +// corresponding transaction receipt. +func NewTxReceiptWorker(ctx context.Context, client *ethclient.Client) *ethereumTxWorker { + newHeads := make(chan *types.Header) + tw := ðereumTxWorker{ + client: client, + newHeads: newHeads, + } + + sub, err := client.SubscribeNewHead(ctx, newHeads) + if err != nil { + log.Debug("failed to subscribe new heads, falling back to polling", "err", err) + } else { + tw.sub = sub + } + + return tw +} + +func (tw *ethereumTxWorker) IssueTx(ctx context.Context, tx *types.Transaction) error { + return tw.client.SendTransaction(ctx, tx) +} + +func (tw *ethereumTxWorker) ConfirmTx(ctx context.Context, tx *types.Transaction) error { + if tw.address == (common.Address{}) { + return tw.confirmTxByReceipt(ctx, tx) + } + return tw.confirmTxByNonce(ctx, tx) +} + +func (tw *ethereumTxWorker) confirmTxByNonce(ctx context.Context, tx *types.Transaction) error { + txNonce := tx.Nonce() + + for { + acceptedNonce, err := tw.client.NonceAt(ctx, tw.address, nil) + if err != nil { + return fmt.Errorf("failed to await tx %s nonce %d: %w", tx.Hash(), txNonce, err) + } + tw.acceptedNonce = acceptedNonce + + log.Debug("confirming tx", "txHash", tx.Hash(), "txNonce", txNonce, "acceptedNonce", tw.acceptedNonce) + // If the is less than what has already been accepted, the transaction is confirmed + if txNonce < tw.acceptedNonce { + return nil + } + + select { + case <-tw.newHeads: + case <-time.After(time.Second): + case <-ctx.Done(): + return fmt.Errorf("failed to await tx %s nonce %d: %w", tx.Hash(), txNonce, ctx.Err()) + } + } +} + +func (tw *ethereumTxWorker) confirmTxByReceipt(ctx context.Context, tx *types.Transaction) error { + for { + _, err := tw.client.TransactionReceipt(ctx, tx.Hash()) + if err == nil { + return nil + } + log.Debug("no tx receipt", "txHash", tx.Hash(), "nonce", tx.Nonce(), "err", err) + + select { + case <-tw.newHeads: + case <-time.After(time.Second): + case <-ctx.Done(): + return fmt.Errorf("failed to await tx %s nonce %d: %w", tx.Hash(), tx.Nonce(), ctx.Err()) + } + } +} + +func (tw *ethereumTxWorker) LatestHeight(ctx context.Context) (uint64, error) { + return tw.client.BlockNumber(ctx) +} diff --git a/cmd/simulator/main/main.go b/cmd/simulator/main/main.go new file mode 100644 index 0000000000..bb0c5a1105 --- /dev/null +++ b/cmd/simulator/main/main.go @@ -0,0 +1,58 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
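The two constructors above correspond to the worker's two confirmation strategies. A brief sketch (assuming the usual local WebSocket endpoint) builds one of each; `ConfirmTx` dispatches on whether the worker was given a sender address:

```go
package main

import (
	"context"
	"log"

	"github.com/ava-labs/coreth/cmd/simulator/load"
	"github.com/ava-labs/coreth/ethclient"
	ethcrypto "github.com/ava-labs/libevm/crypto"
)

func main() {
	ctx := context.Background()
	client, err := ethclient.Dial("ws://127.0.0.1:9650/ext/bc/C/ws")
	if err != nil {
		log.Fatalf("failed to dial endpoint: %v", err)
	}

	// Nonce-based confirmation: cheap when a single sender issues all txs,
	// because one NonceAt call confirms every tx with a lower nonce.
	pk, err := ethcrypto.GenerateKey()
	if err != nil {
		log.Fatalf("failed to generate key: %v", err)
	}
	nonceWorker := load.NewSingleAddressTxWorker(ctx, client, ethcrypto.PubkeyToAddress(pk.PublicKey))

	// Receipt-based confirmation: works for any sender, at the cost of one
	// TransactionReceipt lookup per tx.
	receiptWorker := load.NewTxReceiptWorker(ctx, client)

	h1, err := nonceWorker.LatestHeight(ctx)
	if err != nil {
		log.Fatalf("failed to fetch latest height: %v", err)
	}
	h2, err := receiptWorker.LatestHeight(ctx)
	if err != nil {
		log.Fatalf("failed to fetch latest height: %v", err)
	}
	log.Printf("latest accepted height: %d (nonce worker) / %d (receipt worker)", h1, h2)
}
```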
+ +package main + +import ( + "context" + "errors" + "fmt" + "os" + + "github.com/ava-labs/coreth/cmd/simulator/config" + "github.com/ava-labs/coreth/cmd/simulator/load" + "github.com/ava-labs/coreth/log" + gethlog "github.com/ava-labs/libevm/log" + "github.com/spf13/pflag" +) + +func main() { + fs := config.BuildFlagSet() + v, err := config.BuildViper(fs, os.Args[1:]) + if errors.Is(err, pflag.ErrHelp) { + os.Exit(0) + } + + if err != nil { + fmt.Printf("couldn't build viper: %s\n", err) + os.Exit(1) + } + + if err != nil { + fmt.Printf("couldn't configure flags: %s\n", err) + os.Exit(1) + } + + if v.GetBool(config.VersionKey) { + fmt.Printf("%s\n", config.Version) + os.Exit(0) + } + + logLevel, err := log.LvlFromString(v.GetString(config.LogLevelKey)) + if err != nil { + fmt.Printf("couldn't parse log level: %s\n", err) + os.Exit(1) + } + gethLogger := gethlog.NewLogger(log.NewTerminalHandlerWithLevel(os.Stderr, logLevel, true)) + gethlog.SetDefault(gethLogger) + + config, err := config.BuildConfig(v) + if err != nil { + fmt.Printf("%s\n", err) + os.Exit(1) + } + if err := load.ExecuteLoader(context.Background(), config); err != nil { + fmt.Printf("load execution failed: %s\n", err) + os.Exit(1) + } +} diff --git a/cmd/simulator/metrics/metrics.go b/cmd/simulator/metrics/metrics.go new file mode 100644 index 0000000000..ff3e59e5be --- /dev/null +++ b/cmd/simulator/metrics/metrics.go @@ -0,0 +1,139 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package metrics + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "net/http" + "os" + + "github.com/ava-labs/libevm/log" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" +) + +type Metrics struct { + reg *prometheus.Registry + // Summary of the quantiles of Individual Issuance Tx Times + IssuanceTxTimes prometheus.Summary + // Summary of the quantiles of Individual Confirmation Tx Times + ConfirmationTxTimes prometheus.Summary + // Summary of the quantiles of Individual Issuance To Confirmation Tx Times + IssuanceToConfirmationTxTimes prometheus.Summary +} + +func NewDefaultMetrics() *Metrics { + registry := prometheus.NewRegistry() + return NewMetrics(registry) +} + +// NewMetrics creates and returns a Metrics and registers it with a Collector +func NewMetrics(reg *prometheus.Registry) *Metrics { + m := &Metrics{ + reg: reg, + IssuanceTxTimes: prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "tx_issuance_time", + Help: "Individual Tx Issuance Times for a Load Test", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }), + ConfirmationTxTimes: prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "tx_confirmation_time", + Help: "Individual Tx Confirmation Times for a Load Test", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }), + IssuanceToConfirmationTxTimes: prometheus.NewSummary(prometheus.SummaryOpts{ + Name: "tx_issuance_to_confirmation_time", + Help: "Individual Tx Issuance To Confirmation Times for a Load Test", + Objectives: map[float64]float64{0.5: 0.05, 0.9: 0.01, 0.99: 0.001}, + }), + } + reg.MustRegister(m.IssuanceTxTimes) + reg.MustRegister(m.ConfirmationTxTimes) + reg.MustRegister(m.IssuanceToConfirmationTxTimes) + return m +} + +type MetricsServer struct { + metricsPort string + 
metricsEndpoint string + + cancel context.CancelFunc + stopCh chan struct{} +} + +func (m *Metrics) Serve(ctx context.Context, metricsPort string, metricsEndpoint string) *MetricsServer { + ctx, cancel := context.WithCancel(ctx) + // Create a prometheus server to expose individual tx metrics + server := &http.Server{ + Addr: fmt.Sprintf(":%s", metricsPort), + } + + // Start up go routine to listen for SIGINT notifications to gracefully shut down server + go func() { + // Blocks until signal is received + <-ctx.Done() + + if err := server.Shutdown(ctx); err != nil { + log.Error("Metrics server error: %v", err) + } + log.Info("Received a SIGINT signal: Gracefully shutting down metrics server") + }() + + // Start metrics server + ms := &MetricsServer{ + metricsPort: metricsPort, + metricsEndpoint: metricsEndpoint, + stopCh: make(chan struct{}), + cancel: cancel, + } + go func() { + defer close(ms.stopCh) + + http.Handle(metricsEndpoint, promhttp.HandlerFor(m.reg, promhttp.HandlerOpts{Registry: m.reg})) + log.Info(fmt.Sprintf("Metrics Server: localhost:%s%s", metricsPort, metricsEndpoint)) + if err := server.ListenAndServe(); !errors.Is(err, http.ErrServerClosed) { + log.Error("Metrics server error: %v", err) + } + }() + + return ms +} + +func (ms *MetricsServer) Shutdown() { + ms.cancel() + <-ms.stopCh +} + +func (m *Metrics) Print(outputFile string) error { + metrics, err := m.reg.Gather() + if err != nil { + return err + } + + if outputFile == "" { + // Printout to stdout + fmt.Println("*** Metrics ***") + for _, mf := range metrics { + for _, m := range mf.GetMetric() { + fmt.Printf("Type: %s, Name: %s, Description: %s, Values: %s\n", mf.GetType().String(), mf.GetName(), mf.GetHelp(), m.String()) + } + } + fmt.Println("***************") + } else { + jsonFile, err := os.Create(outputFile) + if err != nil { + return err + } + defer jsonFile.Close() + + if err := json.NewEncoder(jsonFile).Encode(metrics); err != nil { + return err + } + } + + return nil +} diff --git a/cmd/simulator/txs/agent.go b/cmd/simulator/txs/agent.go new file mode 100644 index 0000000000..0abef90172 --- /dev/null +++ b/cmd/simulator/txs/agent.go @@ -0,0 +1,142 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package txs + +import ( + "context" + "errors" + "fmt" + "time" + + "github.com/ava-labs/coreth/cmd/simulator/metrics" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/log" +) + +type THash interface { + Hash() common.Hash +} + +// TxSequence provides an interface to return a channel of transactions. +// The sequence is responsible for closing the channel when there are no further +// transactions. +type TxSequence[T THash] interface { + Chan() <-chan T +} + +// Worker defines the interface for issuance and confirmation of transactions. +// The caller is responsible for calling Close to cleanup resources used by the +// worker at the end of the simulation. +type Worker[T THash] interface { + IssueTx(ctx context.Context, tx T) error + ConfirmTx(ctx context.Context, tx T) error + LatestHeight(ctx context.Context) (uint64, error) +} + +// Execute the work of the given agent. +type Agent[T THash] interface { + Execute(ctx context.Context) error +} + +// issueNAgent issues and confirms a batch of N transactions at a time. 
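The `Worker`, `TxSequence`, and `Agent` interfaces above are what make the load generator pluggable. As a self-contained sketch (not part of this diff), the following implements a no-op worker and a channel-backed sequence for a placeholder transaction type, drives them through `NewIssueNAgent` in batches of two, and prints the gathered summaries with the metrics helpers from this change:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/ava-labs/coreth/cmd/simulator/metrics"
	"github.com/ava-labs/coreth/cmd/simulator/txs"
	"github.com/ava-labs/libevm/common"
	"github.com/prometheus/client_golang/prometheus"
)

// noopTx satisfies the THash constraint without being a real transaction.
type noopTx struct{ hash common.Hash }

func (t noopTx) Hash() common.Hash { return t.hash }

// chanSequence adapts a pre-filled channel to txs.TxSequence.
type chanSequence struct{ ch chan noopTx }

func (s chanSequence) Chan() <-chan noopTx { return s.ch }

// dryRunWorker implements txs.Worker without touching a node: issuance and
// confirmation succeed immediately, and the "height" is the issued count.
type dryRunWorker struct{ issued uint64 }

func (w *dryRunWorker) IssueTx(_ context.Context, _ noopTx) error {
	w.issued++
	return nil
}

func (w *dryRunWorker) ConfirmTx(_ context.Context, _ noopTx) error { return nil }

func (w *dryRunWorker) LatestHeight(_ context.Context) (uint64, error) {
	return w.issued, nil
}

func main() {
	m := metrics.NewMetrics(prometheus.NewRegistry())

	seq := chanSequence{ch: make(chan noopTx, 5)}
	for i := byte(0); i < 5; i++ {
		seq.ch <- noopTx{hash: common.BytesToHash([]byte{i})}
	}
	close(seq.ch)

	worker := &dryRunWorker{}
	agent := txs.NewIssueNAgent[noopTx](seq, worker, 2, m)
	if err := agent.Execute(context.Background()); err != nil {
		log.Fatalf("agent execution failed: %v", err)
	}
	fmt.Println("issued", worker.issued, "placeholder txs")

	// Print the gathered issuance/confirmation summaries to stdout.
	if err := m.Print(""); err != nil {
		log.Fatalf("failed to print metrics: %v", err)
	}
}
```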
+type issueNAgent[T THash] struct { + sequence TxSequence[T] + worker Worker[T] + n uint64 + metrics *metrics.Metrics +} + +// NewIssueNAgent creates a new issueNAgent +func NewIssueNAgent[T THash](sequence TxSequence[T], worker Worker[T], n uint64, metrics *metrics.Metrics) Agent[T] { + return &issueNAgent[T]{ + sequence: sequence, + worker: worker, + n: n, + metrics: metrics, + } +} + +// Execute issues txs in batches of N and waits for them to confirm +func (a issueNAgent[T]) Execute(ctx context.Context) error { + if a.n == 0 { + return errors.New("batch size n cannot be equal to 0") + } + + txChan := a.sequence.Chan() + confirmedCount := 0 + batchI := 0 + m := a.metrics + txMap := make(map[common.Hash]time.Time) + + // Tracks the total amount of time waiting for issuing and confirming txs + var ( + totalIssuedTime time.Duration + totalConfirmedTime time.Duration + ) + + // Start time for execution + start := time.Now() + for { + var ( + txs = make([]T, 0, a.n) + tx T + moreTxs bool + ) + // Start issuance batch + issuedStart := time.Now() + L: + for i := uint64(0); i < a.n; i++ { + select { + case <-ctx.Done(): + return ctx.Err() + case tx, moreTxs = <-txChan: + if !moreTxs { + break L + } + issuanceIndividualStart := time.Now() + txMap[tx.Hash()] = issuanceIndividualStart + if err := a.worker.IssueTx(ctx, tx); err != nil { + return fmt.Errorf("failed to issue transaction %d: %w", len(txs), err) + } + issuanceIndividualDuration := time.Since(issuanceIndividualStart) + m.IssuanceTxTimes.Observe(issuanceIndividualDuration.Seconds()) + txs = append(txs, tx) + } + } + // Get the batch's issuance time and add it to totalIssuedTime + issuedDuration := time.Since(issuedStart) + log.Info("Issuance Batch Done", "batch", batchI, "time", issuedDuration.Seconds()) + totalIssuedTime += issuedDuration + + // Wait for txs in this batch to confirm + confirmedStart := time.Now() + for i, tx := range txs { + confirmedIndividualStart := time.Now() + if err := a.worker.ConfirmTx(ctx, tx); err != nil { + return fmt.Errorf("failed to await transaction %d: %w", i, err) + } + confirmationIndividualDuration := time.Since(confirmedIndividualStart) + issuanceToConfirmationIndividualDuration := time.Since(txMap[tx.Hash()]) + m.ConfirmationTxTimes.Observe(confirmationIndividualDuration.Seconds()) + m.IssuanceToConfirmationTxTimes.Observe(issuanceToConfirmationIndividualDuration.Seconds()) + delete(txMap, tx.Hash()) + confirmedCount++ + } + // Get the batch's confirmation time and add it to totalConfirmedTime + confirmedDuration := time.Since(confirmedStart) + log.Info("Confirmed Batch Done", "batch", batchI, "time", confirmedDuration.Seconds()) + totalConfirmedTime += confirmedDuration + + // Check if this is the last batch, if so write the final log and return + if !moreTxs { + totalTime := time.Since(start).Seconds() + log.Info("Execution complete", "totalTxs", confirmedCount, "totalTime", totalTime, "TPS", float64(confirmedCount)/totalTime, + "issuanceTime", totalIssuedTime.Seconds(), "confirmedTime", totalConfirmedTime.Seconds()) + + return nil + } + + batchI++ + } +} diff --git a/cmd/simulator/txs/tx_generator.go b/cmd/simulator/txs/tx_generator.go new file mode 100644 index 0000000000..a88bf3402f --- /dev/null +++ b/cmd/simulator/txs/tx_generator.go @@ -0,0 +1,90 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
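Pre-built transactions can also be fed to an agent without a generator, via `ConvertTxSliceToSequence`. The sketch below signs a few self-transfers locally (the chain ID and gas caps are illustrative assumptions; no node is needed to build the sequence, only to issue it later) and drains the resulting channel:

```go
package main

import (
	"fmt"
	"log"
	"math/big"

	"github.com/ava-labs/coreth/cmd/simulator/txs"
	"github.com/ava-labs/libevm/core/types"
	ethcrypto "github.com/ava-labs/libevm/crypto"
	ethparams "github.com/ava-labs/libevm/params"
)

func main() {
	// Sign a handful of self-transfers locally; no node is required to build
	// the sequence itself, only to issue it later.
	pk, err := ethcrypto.GenerateKey()
	if err != nil {
		log.Fatalf("failed to generate key: %v", err)
	}
	addr := ethcrypto.PubkeyToAddress(pk.PublicKey)
	chainID := big.NewInt(43112) // illustrative local chain ID
	signer := types.LatestSignerForChainID(chainID)

	signed := make([]*types.Transaction, 0, 3)
	for nonce := uint64(0); nonce < 3; nonce++ {
		tx, err := types.SignNewTx(pk, signer, &types.DynamicFeeTx{
			ChainID:   chainID,
			Nonce:     nonce,
			GasTipCap: big.NewInt(1_000_000_000),  // 1 GWei
			GasFeeCap: big.NewInt(50_000_000_000), // 50 GWei
			Gas:       ethparams.TxGas,
			To:        &addr,
			Value:     big.NewInt(0),
		})
		if err != nil {
			log.Fatalf("failed to sign tx: %v", err)
		}
		signed = append(signed, tx)
	}

	// Wrap the slice so it can be handed to an issueNAgent via NewIssueNAgent.
	seq := txs.ConvertTxSliceToSequence(signed)
	for tx := range seq.Chan() {
		fmt.Println("queued", tx.Hash().Hex(), "nonce", tx.Nonce())
	}
}
```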
+ +package txs + +import ( + "context" + "crypto/ecdsa" + "fmt" + + "github.com/ava-labs/coreth/ethclient" + "github.com/ava-labs/libevm/core/types" + ethcrypto "github.com/ava-labs/libevm/crypto" +) + +var _ TxSequence[*types.Transaction] = (*txSequence)(nil) + +type CreateTx func(key *ecdsa.PrivateKey, nonce uint64) (*types.Transaction, error) + +func GenerateTxSequence(ctx context.Context, generator CreateTx, client *ethclient.Client, key *ecdsa.PrivateKey, numTxs uint64, async bool) (TxSequence[*types.Transaction], error) { + sequence := &txSequence{ + txChan: make(chan *types.Transaction, numTxs), + } + + if async { + go func() { + defer close(sequence.txChan) + + if err := addTxs(ctx, sequence, generator, client, key, numTxs); err != nil { + panic(err) + } + }() + } else { + if err := addTxs(ctx, sequence, generator, client, key, numTxs); err != nil { + return nil, err + } + close(sequence.txChan) + } + + return sequence, nil +} + +func GenerateTxSequences(ctx context.Context, generator CreateTx, client *ethclient.Client, keys []*ecdsa.PrivateKey, txsPerKey uint64, async bool) ([]TxSequence[*types.Transaction], error) { + txSequences := make([]TxSequence[*types.Transaction], len(keys)) + for i, key := range keys { + txs, err := GenerateTxSequence(ctx, generator, client, key, txsPerKey, async) + if err != nil { + return nil, fmt.Errorf("failed to generate tx sequence at index %d: %w", i, err) + } + txSequences[i] = txs + } + return txSequences, nil +} + +func addTxs(ctx context.Context, txSequence *txSequence, generator CreateTx, client *ethclient.Client, key *ecdsa.PrivateKey, numTxs uint64) error { + address := ethcrypto.PubkeyToAddress(key.PublicKey) + startingNonce, err := client.NonceAt(ctx, address, nil) + if err != nil { + return err + } + for i := uint64(0); i < numTxs; i++ { + tx, err := generator(key, startingNonce+i) + if err != nil { + return err + } + txSequence.txChan <- tx + } + + return nil +} + +type txSequence struct { + txChan chan *types.Transaction +} + +func ConvertTxSliceToSequence(txs []*types.Transaction) TxSequence[*types.Transaction] { + txChan := make(chan *types.Transaction, len(txs)) + for _, tx := range txs { + txChan <- tx + } + close(txChan) + + return &txSequence{ + txChan: txChan, + } +} + +func (t *txSequence) Chan() <-chan *types.Transaction { + return t.txChan +} diff --git a/go.mod b/go.mod index f4d6c44a36..818a304c68 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/davecgh/go-spew v1.1.1 github.com/deckarep/golang-set/v2 v2.1.0 github.com/fjl/gencodec v0.1.1 + github.com/go-cmd/cmd v1.4.3 github.com/google/go-cmp v0.7.0 github.com/gorilla/rpc v1.2.0 github.com/gorilla/websocket v1.5.0 @@ -19,6 +20,7 @@ require ( github.com/holiman/uint256 v1.2.4 github.com/mattn/go-colorable v0.1.13 github.com/mattn/go-isatty v0.0.17 + github.com/onsi/ginkgo/v2 v2.13.1 github.com/prometheus/client_golang v1.16.0 github.com/prometheus/client_model v0.3.0 github.com/spf13/cast v1.5.0 @@ -61,6 +63,7 @@ require ( github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect github.com/dlclark/regexp2 v1.7.0 // indirect github.com/dop251/goja 
v0.0.0-20230806174421-c933cf95e127 // indirect + github.com/emicklei/go-restful/v3 v3.11.0 // indirect github.com/ethereum/c-kzg-4844 v0.4.0 // indirect github.com/fsnotify/fsnotify v1.6.0 // indirect github.com/garslo/gogen v0.0.0-20170306192744-1d203ffc1f61 // indirect @@ -70,38 +73,57 @@ require ( github.com/go-logr/logr v1.4.1 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-ole/go-ole v1.3.0 // indirect + github.com/go-openapi/jsonpointer v0.19.6 // indirect + github.com/go-openapi/jsonreference v0.20.2 // indirect + github.com/go-openapi/swag v0.22.3 // indirect github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect + github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect github.com/gofrs/flock v0.8.1 // indirect github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect + github.com/google/btree v1.1.2 // indirect + github.com/google/gnostic-models v0.6.8 // indirect + github.com/google/gofuzz v1.2.0 // indirect github.com/google/pprof v0.0.0-20230207041349-798e818bf904 // indirect github.com/google/renameio/v2 v2.0.0 // indirect github.com/google/uuid v1.6.0 // indirect + github.com/gorilla/mux v1.8.0 // indirect github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 // indirect github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/huin/goupnp v1.3.0 // indirect + github.com/imdario/mergo v0.3.16 // indirect github.com/jackpal/go-nat-pmp v1.0.2 // indirect + github.com/josharian/intern v1.0.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.15.15 // indirect github.com/kr/pretty v0.3.1 // indirect github.com/kr/text v0.2.0 // indirect github.com/magiconair/properties v1.8.6 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/mattn/go-runewidth v0.0.13 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/mitchellh/pointerstructure v1.2.0 // indirect github.com/mmcloughlin/addchain v0.4.0 // indirect + github.com/moby/spdystream v0.2.0 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mr-tron/base58 v1.2.0 // indirect + github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f // indirect 
github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/pelletier/go-toml v1.9.5 // indirect github.com/pelletier/go-toml/v2 v2.0.5 // indirect + github.com/pires/go-proxyproto v0.6.2 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/prometheus/common v0.42.0 // indirect github.com/prometheus/procfs v0.10.1 // indirect github.com/rivo/uniseg v0.2.0 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect + github.com/rs/cors v1.7.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/spf13/afero v1.8.2 // indirect @@ -126,6 +148,7 @@ require ( go.uber.org/zap v1.26.0 // indirect golang.org/x/mod v0.22.0 // indirect golang.org/x/net v0.38.0 // indirect + golang.org/x/oauth2 v0.21.0 // indirect golang.org/x/sys v0.31.0 // indirect golang.org/x/term v0.30.0 // indirect golang.org/x/text v0.23.0 // indirect @@ -133,8 +156,18 @@ require ( google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20240827150818-7e3bb234dfed // indirect google.golang.org/grpc v1.66.0 // indirect + gopkg.in/inf.v0 v0.9.1 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect + k8s.io/api v0.29.0 // indirect + k8s.io/apimachinery v0.29.0 // indirect + k8s.io/client-go v0.29.0 // indirect + k8s.io/klog/v2 v2.110.1 // indirect + k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 // indirect + k8s.io/utils v0.0.0-20230726121419-3b25d923346b // indirect rsc.io/tmplfunc v0.0.3 // indirect + sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd // indirect + sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect + sigs.k8s.io/yaml v1.3.0 // indirect ) diff --git a/go.sum b/go.sum index 080361da95..e9ff8729b1 100644 --- a/go.sum +++ b/go.sum @@ -58,6 +58,8 @@ github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156 h1:eMwmnE/GDgah4HI848JfFxHt+iPb26b4zyfspmqY0/8= github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM= github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/ava-labs/avalanchego v1.13.2-0.20250611151756-1a40195dc447 h1:ZMuCKMeb47pQujFVgCJNgbYxqvml7nLC0CHJ4mQPXvY= github.com/ava-labs/avalanchego v1.13.2-0.20250611151756-1a40195dc447/go.mod h1:mTa01sHk2kpDh73GP7v06vhJ9+udQ6vw4ffdlX9khUM= github.com/ava-labs/libevm v1.13.14-0.3.0.rc.1 h1:vBMYo+Iazw0rGTr+cwjkBdh5eadLPlv4ywI4lKye3CA= @@ -163,6 +165,8 @@ github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA 
github.com/dop251/goja_nodejs v0.0.0-20211022123610-8dd9abb0616d/go.mod h1:DngW8aVqWbuLRMHItjPUyqdj+HWPvnQe8V8y1nDpIbM= github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= +github.com/emicklei/go-restful/v3 v3.11.0 h1:rAQeMHw1c7zTmncogyy8VvRZwtkmkZ4FxERmMY4rD+g= +github.com/emicklei/go-restful/v3 v3.11.0/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= @@ -197,6 +201,8 @@ github.com/getsentry/sentry-go v0.18.0/go.mod h1:Kgon4Mby+FJ7ZWHFUAZgVaIa8sxHtnR github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= +github.com/go-cmd/cmd v1.4.3 h1:6y3G+3UqPerXvPcXvj+5QNPHT02BUw7p6PsqRxLNA7Y= +github.com/go-cmd/cmd v1.4.3/go.mod h1:u3hxg/ry+D5kwh8WvUkHLAMe2zQCaXd00t35WfQaOFk= github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= @@ -204,6 +210,7 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9 github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= +github.com/go-logr/logr v1.3.0/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/logr v1.4.1 h1:pKouT5E8xu9zeFC39JXRDukb6JFQPXM5p5I91188VAQ= github.com/go-logr/logr v1.4.1/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= @@ -212,9 +219,19 @@ github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AE github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-openapi/jsonpointer v0.19.6 h1:eCs3fxoIi3Wh6vtgmLTOjdhSpiqphQ+DaPn38N2ZdrE= 
+github.com/go-openapi/jsonpointer v0.19.6/go.mod h1:osyAmYz/mB/C3I+WsTTSgw1ONzaLJoLCyoi6/zppojs= +github.com/go-openapi/jsonreference v0.20.2 h1:3sVjiK66+uXK/6oQ8xgcRKcFgQ5KXa2KvnJRumpMGbE= +github.com/go-openapi/jsonreference v0.20.2/go.mod h1:Bl1zwGIM8/wsvqjsOQLJ/SH+En5Ap4rVB5KVcIDZG2k= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI= +github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls= +github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg= +github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= @@ -264,6 +281,10 @@ github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg github.com/gomodule/redigo v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.1.2 h1:xf4v41cLI2Z6FxbKm+8Bu+m8ifhj15JuZ9sa0jZCMUU= +github.com/google/btree v1.1.2/go.mod h1:qOPhT0dTNdNzV6Z/lhRX0YXUafgPLFUh+gZMl761Gm4= +github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= +github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= @@ -274,6 +295,7 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.9/go.mod 
h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= @@ -307,9 +329,12 @@ github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= github.com/googleapis/google-cloud-go-testing v0.0.0-20200911160855-bcd43fbb19e8/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/rpc v1.2.0 h1:WvvdC2lNeT1SP32zrIce5l0ECBfbAlmrmSBsuc57wfk= github.com/gorilla/rpc v1.2.0/go.mod h1:V4h9r+4sF5HnzqbwIez0fKSpANP0zlYd3qR7p36jkTQ= github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho= @@ -338,6 +363,8 @@ github.com/hydrogen18/memlistener v0.0.0-20200120041712-dcc25e7acd91/go.mod h1:q github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= +github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= +github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= @@ -349,9 +376,15 @@ github.com/jackpal/go-nat-pmp v1.0.2 h1:KzKSgb7qkJvOUTqYl9/Hg/me3pWgBmERKrTGD7Bd github.com/jackpal/go-nat-pmp v1.0.2/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/josharian/intern v1.0.0 
h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= +github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= @@ -388,6 +421,8 @@ github.com/leanovate/gopter v0.2.9/go.mod h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2 github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo= github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= @@ -419,13 +454,24 @@ github.com/mitchellh/pointerstructure v1.2.0/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8oh github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= +github.com/moby/spdystream v0.2.0 h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8= +github.com/moby/spdystream v0.2.0/go.mod h1:f7i0iNDQJ059oMTcWxx8MA/zKFIuD/lY+0GqbN2Wy8c= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= 
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= +github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus= +github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f/go.mod h1:ZdcZmHo+o7JKHSa8/e818NopupXU1YMK5fe1lsApnBw= github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= @@ -444,13 +490,16 @@ github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vv github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= +github.com/onsi/ginkgo/v2 v2.13.1 h1:LNGfMbR2OVGBfXjvRZIZ2YCTQdGKtPLvuI1rMCCj3OU= +github.com/onsi/ginkgo/v2 v2.13.1/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= -github.com/onsi/gomega v1.19.0 h1:4ieX6qQjPP/BfC3mpsAtIGGlxTWPeA3Inl/7DtXw1tw= github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= +github.com/onsi/gomega v1.29.0 h1:KIA/t2t5UBzoirT4H9tsML45GEbo3ouUnBHsCfD2tVg= +github.com/onsi/gomega v1.29.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= github.com/pelletier/go-toml v1.9.5 
h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8= github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= @@ -458,6 +507,8 @@ github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwb github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas= github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= +github.com/pires/go-proxyproto v0.6.2 h1:KAZ7UteSOt6urjme6ZldyFm4wDe/z0ZUP0Yv0Dos0d8= +github.com/pires/go-proxyproto v0.6.2/go.mod h1:Odh9VFOZJCf9G8cLW5o435Xf1J95Jw9Gw5rnCjcwzAY= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -482,6 +533,8 @@ github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4 github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik= +github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= @@ -515,14 +568,17 @@ github.com/status-im/keycard-go v0.2.0 h1:QDLFswOQu1r5jsycloeQh3bVU8n/NatHHaZobt github.com/status-im/keycard-go v0.2.0/go.mod h1:wlp8ZLbsmrF6g6WjugPAx+IzoLrkdf9+mHxBEeo3Hbg= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod 
h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.3.0 h1:mjC+YW8QpAdXibNi+vNWgzmgBH4+5l5dCXv8cNysBLI= @@ -710,6 +766,8 @@ golang.org/x/oauth2 v0.0.0-20200902213428-5d25da1a8d43/go.mod h1:KelEdhl1UZF7XfJ golang.org/x/oauth2 v0.0.0-20201109201403-9fd604954f58/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs= +golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -989,6 +1047,8 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= +gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= +gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.51.1/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= @@ -1000,6 +1060,7 @@ gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWD gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= @@ -1015,8 +1076,26 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +k8s.io/api v0.29.0 h1:NiCdQMY1QOp1H8lfRyeEf8eOwV6+0xA6XEE44ohDX2A= +k8s.io/api v0.29.0/go.mod h1:sdVmXoz2Bo/cb77Pxi71IPTSErEW32xa4aXwKH7gfBA= +k8s.io/apimachinery v0.29.0 h1:+ACVktwyicPz0oc6MTMLwa2Pw3ouLAfAon1wPLtG48o= +k8s.io/apimachinery v0.29.0/go.mod 
h1:eVBxQ/cwiJxH58eK/jd/vAk4mrxmVlnpBH5J2GbMeis= +k8s.io/client-go v0.29.0 h1:KmlDtFcrdUzOYrBhXHgKw5ycWzc3ryPX5mQe0SkG3y8= +k8s.io/client-go v0.29.0/go.mod h1:yLkXH4HKMAywcrD82KMSmfYg2DlE8mepPR4JGSo5n38= +k8s.io/klog/v2 v2.110.1 h1:U/Af64HJf7FcwMcXyKm2RPM22WZzyR7OSpYj5tg3cL0= +k8s.io/klog/v2 v2.110.1/go.mod h1:YGtd1984u+GgbuZ7e08/yBuAfKLSO0+uR1Fhi6ExXjo= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00 h1:aVUu9fTY98ivBPKR9Y5w/AuzbMm96cd3YHRTU83I780= +k8s.io/kube-openapi v0.0.0-20231010175941-2dd684a91f00/go.mod h1:AsvuZPBlUDVuCdzJ87iajxtXuR9oktsTctW/R9wwouA= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b h1:sgn3ZU783SCgtaSJjpcVVlRqd6GSnlTLKgpAAttJvpI= +k8s.io/utils v0.0.0-20230726121419-3b25d923346b/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU= rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= +sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= +sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= +sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo= +sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8= diff --git a/scripts/build_avalanchego_with_coreth.sh b/scripts/build_avalanchego_with_coreth.sh new file mode 100755 index 0000000000..e4d015188f --- /dev/null +++ b/scripts/build_avalanchego_with_coreth.sh @@ -0,0 +1,44 @@ +#!/bin/bash + +# This script builds a new AvalancheGo binary with the Coreth dependency pointing to the local Coreth path +# Usage: ./build_avalanchego_with_coreth.sh with optional AVALANCHEGO_VERSION and AVALANCHEGO_CLONE_PATH environment variables + +set -euo pipefail + +# Coreth root directory +CORETH_PATH=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + cd .. 
&& pwd +) + +# Allow configuring the clone path to point to an existing clone +AVALANCHEGO_CLONE_PATH="${AVALANCHEGO_CLONE_PATH:-avalanchego}" + +# Load the version +source "$CORETH_PATH"/scripts/versions.sh + +# Always return to the coreth path on exit +function cleanup { + cd "${CORETH_PATH}" +} +trap cleanup EXIT + +echo "checking out target AvalancheGo version ${AVALANCHE_VERSION}" +if [[ -d "${AVALANCHEGO_CLONE_PATH}" ]]; then + echo "updating existing clone" + cd "${AVALANCHEGO_CLONE_PATH}" + git fetch +else + echo "creating new clone" + git clone https://github.com/ava-labs/avalanchego.git "${AVALANCHEGO_CLONE_PATH}" + cd "${AVALANCHEGO_CLONE_PATH}" +fi +# Branch will be reset to $AVALANCHE_VERSION if it already exists +git checkout -B "test-${AVALANCHE_VERSION}" "${AVALANCHE_VERSION}" + +echo "updating coreth dependency to point to ${CORETH_PATH}" +go mod edit -replace "github.com/ava-labs/coreth=${CORETH_PATH}" +go mod tidy + +echo "building avalanchego" +./scripts/build.sh diff --git a/scripts/build_test.sh b/scripts/build_test.sh index c3fb241349..b2390e53ea 100755 --- a/scripts/build_test.sh +++ b/scripts/build_test.sh @@ -27,7 +27,7 @@ MAX_RUNS=4 for ((i = 1; i <= MAX_RUNS; i++)); do # shellcheck disable=SC2046 - go test -shuffle=on ${race:-} -timeout="${TIMEOUT:-600s}" -coverprofile=coverage.out -covermode=atomic "$@" ./... | tee test.out || command_status=$? + go test -shuffle=on ${race:-} -timeout="${TIMEOUT:-600s}" -coverprofile=coverage.out -covermode=atomic "$@" $(go list ./... | grep -v github.com/ava-labs/coreth/tests) | tee test.out || command_status=$? # If the test passed, exit if [[ ${command_status:-0} == 0 ]]; then diff --git a/scripts/run_ginkgo_warp.sh b/scripts/run_ginkgo_warp.sh new file mode 100755 index 0000000000..032b1243ef --- /dev/null +++ b/scripts/run_ginkgo_warp.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash + +set -euo pipefail + +CORETH_PATH=$( + cd "$(dirname "${BASH_SOURCE[0]}")" + cd .. 
&& pwd +) + +source "$CORETH_PATH"/scripts/constants.sh + +EXTRA_ARGS=() +AVALANCHEGO_BUILD_PATH="${AVALANCHEGO_BUILD_PATH:-}" +if [[ -n "${AVALANCHEGO_BUILD_PATH}" ]]; then + EXTRA_ARGS=("--avalanchego-path=${AVALANCHEGO_BUILD_PATH}/avalanchego") + echo "Running with extra args:" "${EXTRA_ARGS[@]}" +fi + +"${CORETH_PATH}"/bin/ginkgo -vv --label-filter="${GINKGO_LABEL_FILTER:-}" "${CORETH_PATH}"/tests/warp -- "${EXTRA_ARGS[@]}" diff --git a/scripts/tests.e2e.sh b/scripts/tests.e2e.sh index 84c5afa20e..95324aa40d 100755 --- a/scripts/tests.e2e.sh +++ b/scripts/tests.e2e.sh @@ -22,31 +22,14 @@ CORETH_PATH=$( # Allow configuring the clone path to point to an existing clone AVALANCHEGO_CLONE_PATH="${AVALANCHEGO_CLONE_PATH:-avalanchego}" -# Load the version -source "$CORETH_PATH"/scripts/versions.sh - # Always return to the coreth path on exit function cleanup { cd "${CORETH_PATH}" } -trap cleanup EXIT -echo "checking out target AvalancheGo version ${AVALANCHE_VERSION}" -if [[ -d "${AVALANCHEGO_CLONE_PATH}" ]]; then - echo "updating existing clone" - cd "${AVALANCHEGO_CLONE_PATH}" - git fetch -else - echo "creating new clone" - git clone https://github.com/ava-labs/avalanchego.git "${AVALANCHEGO_CLONE_PATH}" - cd "${AVALANCHEGO_CLONE_PATH}" -fi -# Branch will be reset to $AVALANCHE_VERSION if it already exists -git checkout -B "test-${AVALANCHE_VERSION}" "${AVALANCHE_VERSION}" +trap cleanup EXIT -echo "updating coreth dependency to point to ${CORETH_PATH}" -go mod edit -replace "github.com/ava-labs/coreth=${CORETH_PATH}" -go mod tidy +cd "${AVALANCHEGO_CLONE_PATH}" echo "running AvalancheGo e2e tests" ./scripts/run_task.sh test-e2e-ci -- --ginkgo.label-filter='c || uses-c' "${@}" diff --git a/tests/init.go b/tests/init.go index a878e3e6ac..39d8858994 100644 --- a/tests/init.go +++ b/tests/init.go @@ -30,7 +30,9 @@ package tests import ( "fmt" "math/big" + "os" "sort" + "strings" "github.com/ava-labs/coreth/params" "github.com/ava-labs/coreth/params/extras" @@ -404,3 +406,23 @@ type UnsupportedForkError struct { func (e UnsupportedForkError) Error() string { return fmt.Sprintf("unsupported fork %q", e.Name) } + +func GetRepoRootPath(suffix string) string { + // - When executed via a test binary, the working directory will be wherever + // the binary is executed from, but scripts should require execution from + // the repo root. + // + // - When executed via ginkgo (nicer for development + supports + // parallel execution) the working directory will always be the + // target path (e.g. [repo root]./tests/warp) and getting the repo + // root will require stripping the target path suffix. + // + // TODO(marun) Avoid relying on the current working directory to find test + // dependencies by embedding data where possible (e.g. for genesis) and + // explicitly configuring paths for execution. 
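+	// For example, when the warp suite is run via ginkgo the working directory
+	// is [repo root]/tests/warp, so GetRepoRootPath("tests/warp") trims that
+	// suffix and returns the repo root; when the same package is run as a test
+	// binary from the repo root, the suffix is absent and the working directory
+	// is returned unchanged.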
+ cwd, err := os.Getwd() + if err != nil { + panic(err) + } + return strings.TrimSuffix(cwd, suffix) +} diff --git a/tests/precompile/genesis/warp.json b/tests/precompile/genesis/warp.json new file mode 100644 index 0000000000..e4c17d05f0 --- /dev/null +++ b/tests/precompile/genesis/warp.json @@ -0,0 +1,45 @@ +{ + "config": { + "chainId": 99999, + "homesteadBlock": 0, + "eip150Block": 0, + "eip155Block": 0, + "eip158Block": 0, + "byzantiumBlock": 0, + "constantinopleBlock": 0, + "petersburgBlock": 0, + "istanbulBlock": 0, + "muirGlacierBlock": 0, + "feeConfig": { + "gasLimit": 20000000, + "minBaseFee": 1000000000, + "targetGas": 100000000, + "baseFeeChangeDenominator": 48, + "minBlockGasCost": 0, + "maxBlockGasCost": 10000000, + "targetBlockRate": 2, + "blockGasCostStep": 500000 + }, + "warpConfig": { + "blockTimestamp": 1607144400 + } + }, + "alloc": { + "8db97C7cEcE249c2b98bDC0226Cc4C2A57BF52FC": { + "balance": "0x52B7D2DCC80CD2E4000000" + }, + "0x0Fa8EA536Be85F32724D57A37758761B86416123": { + "balance": "0x52B7D2DCC80CD2E4000000" + } + }, + "nonce": "0x0", + "timestamp": "0x5FCB13D0", + "extraData": "0x00", + "gasLimit": "0x1312D00", + "difficulty": "0x0", + "mixHash": "0x0000000000000000000000000000000000000000000000000000000000000000", + "coinbase": "0x0000000000000000000000000000000000000000", + "number": "0x0", + "gasUsed": "0x0", + "parentHash": "0x0000000000000000000000000000000000000000000000000000000000000000" +} diff --git a/tests/utils/command.go b/tests/utils/command.go new file mode 100644 index 0000000000..34657938f7 --- /dev/null +++ b/tests/utils/command.go @@ -0,0 +1,124 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package utils + +import ( + "context" + "fmt" + "os" + "os/exec" + "strings" + "time" + + "github.com/ava-labs/avalanchego/api/health" + "github.com/ava-labs/libevm/log" + "github.com/go-cmd/cmd" + "github.com/onsi/ginkgo/v2" + "github.com/stretchr/testify/require" +) + +// RunCommand starts the command [bin] with the given [args] and returns the command to the caller +// TODO cmd package mentions we can do this more efficiently with cmd.NewCmdOptions rather than looping +// and calling Status(). +func RunCommand(bin string, args ...string) (*cmd.Cmd, error) { + log.Info("Executing", "cmd", fmt.Sprintf("%s %s", bin, strings.Join(args, " "))) + + curCmd := cmd.NewCmd(bin, args...) 
+ _ = curCmd.Start() + + // to stream outputs + ticker := time.NewTicker(10 * time.Millisecond) + go func() { + prevLine := "" + for range ticker.C { + status := curCmd.Status() + n := len(status.Stdout) + if n == 0 { + continue + } + + line := status.Stdout[n-1] + if prevLine != line && line != "" { + fmt.Println("[streaming output]", line) + } + + prevLine = line + } + }() + + return curCmd, nil +} + +func RegisterPingTest() { + require := require.New(ginkgo.GinkgoT()) + + ginkgo.It("ping the network", ginkgo.Label("ping"), func() { + client := health.NewClient(DefaultLocalNodeURI) + healthy, err := client.Readiness(context.Background(), nil) + require.NoError(err) + require.True(healthy.Healthy) + }) +} + +// RegisterNodeRun registers a before suite that starts an AvalancheGo process to use for the e2e tests +// and an after suite that stops the AvalancheGo process +func RegisterNodeRun() { + require := require.New(ginkgo.GinkgoT()) + + // BeforeSuite starts an AvalancheGo process to use for the e2e tests + var startCmd *cmd.Cmd + _ = ginkgo.BeforeSuite(func() { + ctx, cancel := context.WithTimeout(context.Background(), time.Minute) + defer cancel() + + wd, err := os.Getwd() + require.NoError(err) + log.Info("Starting AvalancheGo node", "wd", wd) + cmd, err := RunCommand("./scripts/run.sh") + startCmd = cmd + require.NoError(err) + + // Assumes that startCmd will launch a node with HTTP Port at [utils.DefaultLocalNodeURI] + healthClient := health.NewClient(DefaultLocalNodeURI) + healthy, err := health.AwaitReady(ctx, healthClient, HealthCheckTimeout, nil) + require.NoError(err) + require.True(healthy) + log.Info("AvalancheGo node is healthy") + }) + + ginkgo.AfterSuite(func() { + require.NotNil(startCmd) + require.NoError(startCmd.Stop()) + // TODO add a new node to bootstrap off of the existing node and ensure it can bootstrap all subnets + // created during the test + }) +} + +// RunHardhatTests runs the hardhat tests in the given [testPath] on the blockchain with [blockchainID] +// [execPath] is the path where the test command is executed +func RunHardhatTests(ctx context.Context, blockchainID string, execPath string, testPath string) { + chainURI := GetDefaultChainURI(blockchainID) + RunHardhatTestsCustomURI(ctx, chainURI, execPath, testPath) +} + +func RunHardhatTestsCustomURI(ctx context.Context, chainURI string, execPath string, testPath string) { + require := require.New(ginkgo.GinkgoT()) + + log.Info( + "Executing HardHat tests on blockchain", + "testPath", testPath, + "ChainURI", chainURI, + ) + + cmd := exec.Command("npx", "hardhat", "test", testPath, "--network", "local") + cmd.Dir = execPath + + log.Info("Sleeping to wait for test ping", "rpcURI", chainURI) + require.NoError(os.Setenv("RPC_URI", chainURI)) + log.Info("Running test command", "cmd", cmd.String()) + + out, err := cmd.CombinedOutput() + fmt.Printf("\nCombined output:\n\n%s\n", string(out)) + require.NoError(err) +} diff --git a/tests/utils/constants.go b/tests/utils/constants.go new file mode 100644 index 0000000000..e8f3f0e50b --- /dev/null +++ b/tests/utils/constants.go @@ -0,0 +1,16 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
+ +package utils + +import "time" + +const ( + // Timeout to boot the AvalancheGo node + BootAvalancheNodeTimeout = 5 * time.Minute + + // Timeout for the health API to check the AvalancheGo is ready + HealthCheckTimeout = 5 * time.Second + + DefaultLocalNodeURI = "http://127.0.0.1:9650" +) diff --git a/tests/utils/proposervm.go b/tests/utils/proposervm.go new file mode 100644 index 0000000000..6d7c81f6bf --- /dev/null +++ b/tests/utils/proposervm.go @@ -0,0 +1,69 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package utils + +import ( + "context" + "crypto/ecdsa" + "math/big" + "time" + + "github.com/ava-labs/coreth/accounts/abi/bind" + "github.com/ava-labs/coreth/ethclient" + "github.com/ava-labs/coreth/plugin/evm/upgrade/ap1" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/core/types" + "github.com/ava-labs/libevm/crypto" + "github.com/ava-labs/libevm/log" + ethparams "github.com/ava-labs/libevm/params" +) + +const expectedBlockHeight = 2 + +// IssueTxsToActivateProposerVMFork issues transactions at the current +// timestamp, which should be after the ProposerVM activation time (aka +// ApricotPhase4). This should generate a PostForkBlock because its parent block +// (genesis) has a timestamp (0) that is greater than or equal to the fork +// activation time of 0. Therefore, subsequent blocks should be built with +// BuildBlockWithContext. +func IssueTxsToActivateProposerVMFork( + ctx context.Context, chainID *big.Int, fundedKey *ecdsa.PrivateKey, + client *ethclient.Client, +) error { + addr := crypto.PubkeyToAddress(fundedKey.PublicKey) + nonce, err := client.NonceAt(ctx, addr, nil) + if err != nil { + return err + } + + gasPrice := big.NewInt(ap1.MinGasPrice) // should be pretty generous for c-chain and subnets + txSigner := types.LatestSignerForChainID(chainID) + + // Send exactly 2 transactions, waiting for each to be included in a block + for i := 0; i < expectedBlockHeight; i++ { + tx := types.NewTransaction( + nonce, addr, common.Big1, ethparams.TxGas, gasPrice, nil) + triggerTx, err := types.SignTx(tx, txSigner, fundedKey) + if err != nil { + return err + } + if err := client.SendTransaction(ctx, triggerTx); err != nil { + return err + } + + // Wait for this transaction to be included in a block + receiptCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + if _, err := bind.WaitMined(receiptCtx, client, triggerTx); err != nil { + return err + } + nonce++ + } + + log.Info( + "Built sufficient blocks to activate proposerVM fork", + "blockCount", expectedBlockHeight, + ) + return nil +} diff --git a/tests/utils/subnet.go b/tests/utils/subnet.go new file mode 100644 index 0000000000..cb3b3f8afa --- /dev/null +++ b/tests/utils/subnet.go @@ -0,0 +1,182 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
+ +package utils + +import ( + "context" + "encoding/json" + "fmt" + "os" + "path/filepath" + "strings" + "sync" + "time" + + "github.com/ava-labs/avalanchego/api/health" + "github.com/ava-labs/avalanchego/api/info" + "github.com/ava-labs/avalanchego/genesis" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/vms/secp256k1fx" + wallet "github.com/ava-labs/avalanchego/wallet/subnet/primary" + "github.com/ava-labs/coreth/core" + "github.com/ava-labs/libevm/log" + "github.com/go-cmd/cmd" + "github.com/onsi/ginkgo/v2" + "github.com/stretchr/testify/require" +) + +type SubnetSuite struct { + blockchainIDs map[string]string + lock sync.RWMutex +} + +func (s *SubnetSuite) GetBlockchainID(alias string) string { + s.lock.RLock() + defer s.lock.RUnlock() + return s.blockchainIDs[alias] +} + +func (s *SubnetSuite) SetBlockchainIDs(blockchainIDs map[string]string) { + s.lock.Lock() + defer s.lock.Unlock() + s.blockchainIDs = blockchainIDs +} + +// CreateSubnetsSuite creates subnets for given [genesisFiles], and registers a before suite that starts an AvalancheGo process to use for the e2e tests. +// genesisFiles is a map of test aliases to genesis file paths. +func CreateSubnetsSuite(genesisFiles map[string]string) *SubnetSuite { + require := require.New(ginkgo.GinkgoT()) + + // Keep track of the AvalancheGo external bash script, it is null for most + // processes except the first process that starts AvalancheGo + var startCmd *cmd.Cmd + + // This is used to pass the blockchain IDs from the SynchronizedBeforeSuite() to the tests + var globalSuite SubnetSuite + + // Our test suite runs in separate processes, ginkgo has + // SynchronizedBeforeSuite() which runs once, and its return value is passed + // over to each worker. + // + // Here an AvalancheGo node instance is started, and subnets are created for + // each test case. Each test case has its own subnet, therefore all tests + // can run in parallel without any issue. + // + _ = ginkgo.SynchronizedBeforeSuite(func() []byte { + ctx, cancel := context.WithTimeout(context.Background(), BootAvalancheNodeTimeout) + defer cancel() + + wd, err := os.Getwd() + require.NoError(err) + log.Info("Starting AvalancheGo node", "wd", wd) + cmd, err := RunCommand("./scripts/run.sh") + require.NoError(err) + startCmd = cmd + + // Assumes that startCmd will launch a node with HTTP Port at [utils.DefaultLocalNodeURI] + healthClient := health.NewClient(DefaultLocalNodeURI) + healthy, err := health.AwaitReady(ctx, healthClient, HealthCheckTimeout, nil) + require.NoError(err) + require.True(healthy) + log.Info("AvalancheGo node is healthy") + + blockchainIDs := make(map[string]string) + for alias, file := range genesisFiles { + blockchainIDs[alias] = CreateNewSubnet(ctx, file) + } + + blockchainIDsBytes, err := json.Marshal(blockchainIDs) + require.NoError(err) + return blockchainIDsBytes + }, func(ctx ginkgo.SpecContext, data []byte) { + blockchainIDs := make(map[string]string) + require.NoError(json.Unmarshal(data, &blockchainIDs)) + + globalSuite.SetBlockchainIDs(blockchainIDs) + }) + + // SynchronizedAfterSuite() takes two functions, the first runs after each test suite is done and the second + // function is executed once when all the tests are done. This function is used + // to gracefully shutdown the AvalancheGo node. 
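+	// Note that startCmd is only set in the first ginkgo process (where the
+	// first SynchronizedBeforeSuite function above runs), and the second
+	// callback below also runs only in that process, so the node is stopped
+	// exactly once.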
+ _ = ginkgo.SynchronizedAfterSuite(func() {}, func() { + require.NotNil(startCmd) + require.NoError(startCmd.Stop()) + }) + + return &globalSuite +} + +// CreateNewSubnet creates a new subnet and Subnet-EVM blockchain with the given genesis file. +// returns the ID of the new created blockchain. +func CreateNewSubnet(ctx context.Context, genesisFilePath string) string { + require := require.New(ginkgo.GinkgoT()) + + kc := secp256k1fx.NewKeychain(genesis.EWOQKey) + + // MakeWallet fetches the available UTXOs owned by [kc] on the network + // that [LocalAPIURI] is hosting. + wallet, err := wallet.MakeWallet(ctx, DefaultLocalNodeURI, kc, kc, wallet.WalletConfig{}) + require.NoError(err) + + pWallet := wallet.P() + + owner := &secp256k1fx.OutputOwners{ + Threshold: 1, + Addrs: []ids.ShortID{ + genesis.EWOQKey.PublicKey().Address(), + }, + } + + wd, err := os.Getwd() + require.NoError(err) + log.Info("Reading genesis file", "filePath", genesisFilePath, "wd", wd) + genesisBytes, err := os.ReadFile(genesisFilePath) + require.NoError(err) + + log.Info("Creating new subnet") + createSubnetTx, err := pWallet.IssueCreateSubnetTx(owner) + require.NoError(err) + + genesis := &core.Genesis{} + require.NoError(json.Unmarshal(genesisBytes, genesis)) + + log.Info("Creating new Subnet-EVM blockchain", "genesis", genesis) + createChainTx, err := pWallet.IssueCreateChainTx( + createSubnetTx.ID(), + genesisBytes, + subnetVMID, + nil, + "testChain", + ) + require.NoError(err) + createChainTxID := createChainTx.ID() + + // Confirm the new blockchain is ready by waiting for the readiness endpoint + infoClient := info.NewClient(DefaultLocalNodeURI) + bootstrapped, err := info.AwaitBootstrapped(ctx, infoClient, createChainTxID.String(), 2*time.Second) + require.NoError(err) + require.True(bootstrapped) + + // Return the blockchainID of the newly created blockchain + return createChainTxID.String() +} + +// GetDefaultChainURI returns the default chain URI for a given blockchainID +func GetDefaultChainURI(blockchainID string) string { + return fmt.Sprintf("%s/ext/bc/%s/rpc", DefaultLocalNodeURI, blockchainID) +} + +// GetFilesAndAliases returns a map of aliases to file paths in given [dir]. +func GetFilesAndAliases(dir string) (map[string]string, error) { + files, err := filepath.Glob(dir) + if err != nil { + return nil, err + } + aliasesToFiles := make(map[string]string) + for _, file := range files { + alias := strings.TrimSuffix(filepath.Base(file), filepath.Ext(file)) + aliasesToFiles[alias] = file + } + return aliasesToFiles, nil +} diff --git a/tests/utils/tmpnet.go b/tests/utils/tmpnet.go new file mode 100644 index 0000000000..4ab1a750e7 --- /dev/null +++ b/tests/utils/tmpnet.go @@ -0,0 +1,71 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
+ +package utils + +import ( + "encoding/json" + "os" + + "github.com/ava-labs/avalanchego/config" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/tests/fixture/tmpnet" +) + +var subnetVMID = ids.ID{'s', 'u', 'b', 'n', 'e', 't', 'e', 'v', 'm'} + +var DefaultChainConfig = map[string]any{ + "log-level": "debug", + "warp-api-enabled": true, + "local-txs-enabled": true, +} + +func NewTmpnetNetwork(owner string, nodes []*tmpnet.Node, flags tmpnet.FlagsMap, subnets ...*tmpnet.Subnet) *tmpnet.Network { + defaultFlags := tmpnet.FlagsMap{} + defaultFlags.SetDefaults(flags) + defaultFlags.SetDefaults(tmpnet.FlagsMap{ + config.ProposerVMUseCurrentHeightKey: "true", + }) + return &tmpnet.Network{ + Owner: owner, + DefaultFlags: defaultFlags, + Nodes: nodes, + Subnets: subnets, + } +} + +// Create the configuration that will enable creation and access to a +// subnet created on a temporary network. +func NewTmpnetSubnet(name string, genesisPath string, chainConfig map[string]any, nodes ...*tmpnet.Node) *tmpnet.Subnet { + if len(nodes) == 0 { + panic("a subnet must be validated by at least one node") + } + + validatorIDs := make([]ids.NodeID, len(nodes)) + for i, node := range nodes { + validatorIDs[i] = node.NodeID + } + + genesisBytes, err := os.ReadFile(genesisPath) + if err != nil { + panic(err) + } + + chainConfigBytes, err := json.Marshal(chainConfig) + if err != nil { + panic(err) + } + + return &tmpnet.Subnet{ + Name: name, + Chains: []*tmpnet.Chain{ + { + VMID: subnetVMID, + Genesis: genesisBytes, + Config: string(chainConfigBytes), + PreFundedKey: tmpnet.HardhatKey, + }, + }, + ValidatorIDs: validatorIDs, + } +} diff --git a/tests/warp/warp_test.go b/tests/warp/warp_test.go new file mode 100644 index 0000000000..83807e8163 --- /dev/null +++ b/tests/warp/warp_test.go @@ -0,0 +1,669 @@ +// Copyright (C) 2019-2025, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +// Implements solidity tests. 
+package warp + +import ( + "context" + "crypto/ecdsa" + "fmt" + "math/big" + "path/filepath" + "strings" + "testing" + "time" + + ginkgo "github.com/onsi/ginkgo/v2" + + "github.com/stretchr/testify/require" + + "github.com/ava-labs/coreth/accounts/abi/bind" + "github.com/ava-labs/coreth/warp/aggregator" + "github.com/ava-labs/libevm/common" + "github.com/ava-labs/libevm/crypto" + + "github.com/ava-labs/avalanchego/api/info" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/snow/validators" + "github.com/ava-labs/avalanchego/tests/fixture/e2e" + "github.com/ava-labs/avalanchego/tests/fixture/tmpnet" + "github.com/ava-labs/avalanchego/utils/constants" + "github.com/ava-labs/avalanchego/vms/platformvm" + "github.com/ava-labs/avalanchego/vms/platformvm/api" + avalancheWarp "github.com/ava-labs/avalanchego/vms/platformvm/warp" + "github.com/ava-labs/avalanchego/vms/platformvm/warp/payload" + + "github.com/ava-labs/coreth/cmd/simulator/key" + "github.com/ava-labs/coreth/cmd/simulator/load" + "github.com/ava-labs/coreth/cmd/simulator/metrics" + "github.com/ava-labs/coreth/cmd/simulator/txs" + "github.com/ava-labs/coreth/ethclient" + "github.com/ava-labs/coreth/params" + "github.com/ava-labs/coreth/precompile/contracts/warp" + "github.com/ava-labs/coreth/predicate" + "github.com/ava-labs/coreth/tests" + "github.com/ava-labs/coreth/tests/utils" + warpBackend "github.com/ava-labs/coreth/warp" + ethereum "github.com/ava-labs/libevm" + "github.com/ava-labs/libevm/core/types" +) + +const ( + subnetAName = "warp-subnet-a" +) + +var ( + flagVars *e2e.FlagVars + + repoRootPath = tests.GetRepoRootPath("tests/warp") + + genesisPath = filepath.Join(repoRootPath, "tests/precompile/genesis/warp.json") + + subnetA, cChainSubnetDetails *Subnet + + testPayload = []byte{1, 2, 3} +) + +func init() { + // Configures flags used to configure tmpnet (via SynchronizedBeforeSuite) + flagVars = e2e.RegisterFlags() +} + +// Subnet provides the basic details of a created subnet +type Subnet struct { + // SubnetID is the txID of the transaction that created the subnet + SubnetID ids.ID + // For simplicity assume a single blockchain per subnet + BlockchainID ids.ID + // Key funded in the genesis of the blockchain + PreFundedKey *ecdsa.PrivateKey + // ValidatorURIs are the base URIs for each participant of the Subnet + ValidatorURIs []string +} + +func TestE2E(t *testing.T) { + ginkgo.RunSpecs(t, "coreth warp e2e test") +} + +var _ = ginkgo.SynchronizedBeforeSuite(func() []byte { + // Run only once in the first ginkgo process + + tc := e2e.NewTestContext() + nodes := tmpnet.NewNodesOrPanic(tmpnet.DefaultNodeCount) + + env := e2e.NewTestEnvironment( + tc, + flagVars, + utils.NewTmpnetNetwork( + "coreth-warp-e2e", + nodes, + tmpnet.FlagsMap{}, + utils.NewTmpnetSubnet(subnetAName, genesisPath, utils.DefaultChainConfig, nodes...), + ), + ) + + return env.Marshal() +}, func(envBytes []byte) { + // Run in every ginkgo process + + require 
:= require.New(ginkgo.GinkgoT()) + tc := e2e.NewTestContext() + + // Initialize the local test environment from the global state + if len(envBytes) > 0 { + e2e.InitSharedTestEnvironment(tc, envBytes) + } + + network := e2e.GetEnv(tc).GetNetwork() + + // By default all nodes are validating all subnets + validatorURIs := make([]string, len(network.Nodes)) + for i, node := range network.Nodes { + validatorURIs[i] = node.URI + } + + tmpnetSubnetA := network.GetSubnet(subnetAName) + require.NotNil(tmpnetSubnetA) + subnetA = &Subnet{ + SubnetID: tmpnetSubnetA.SubnetID, + BlockchainID: tmpnetSubnetA.Chains[0].ChainID, + PreFundedKey: tmpnetSubnetA.Chains[0].PreFundedKey.ToECDSA(), + ValidatorURIs: validatorURIs, + } + + infoClient := info.NewClient(network.Nodes[0].URI) + cChainBlockchainID, err := infoClient.GetBlockchainID(tc.DefaultContext(), "C") + require.NoError(err) + + cChainSubnetDetails = &Subnet{ + SubnetID: constants.PrimaryNetworkID, + BlockchainID: cChainBlockchainID, + PreFundedKey: tmpnet.HardhatKey.ToECDSA(), + ValidatorURIs: validatorURIs, + } +}) + +var _ = ginkgo.Describe("[Warp]", func() { + testFunc := func(sendingSubnet *Subnet, receivingSubnet *Subnet) { + tc := e2e.NewTestContext() + w := newWarpTest(tc.DefaultContext(), sendingSubnet, receivingSubnet) + + ginkgo.GinkgoLogr.Info("Sending message from A to B") + w.sendMessageFromSendingSubnet() + + ginkgo.GinkgoLogr.Info("Aggregating signatures via API") + w.aggregateSignaturesViaAPI() + + ginkgo.GinkgoLogr.Info("Aggregating signatures via p2p aggregator") + w.aggregateSignatures() + + ginkgo.GinkgoLogr.Info("Delivering addressed call payload to receiving subnet") + w.deliverAddressedCallToReceivingSubnet() + + ginkgo.GinkgoLogr.Info("Delivering block hash payload to receiving subnet") + w.deliverBlockHashPayload() + + ginkgo.GinkgoLogr.Info("Executing warp load test") + w.warpLoad() + } + ginkgo.It("SubnetA -> C-Chain", func() { testFunc(subnetA, cChainSubnetDetails) }) + ginkgo.It("C-Chain -> SubnetA", func() { testFunc(cChainSubnetDetails, subnetA) }) + ginkgo.It("C-Chain -> C-Chain", func() { testFunc(cChainSubnetDetails, cChainSubnetDetails) }) +}) + +type warpTest struct { + // network-wide fields set in the constructor + networkID uint32 + + // sendingSubnet fields set in the constructor + sendingSubnet *Subnet + sendingSubnetURIs []string + sendingSubnetClients []*ethclient.Client + sendingSubnetFundedKey *ecdsa.PrivateKey + sendingSubnetFundedAddress common.Address + sendingSubnetChainID *big.Int + sendingSubnetSigner types.Signer + + // receivingSubnet fields set in the constructor + receivingSubnet *Subnet + receivingSubnetURIs []string + receivingSubnetClients []*ethclient.Client + receivingSubnetFundedKey *ecdsa.PrivateKey + receivingSubnetFundedAddress common.Address + receivingSubnetChainID *big.Int + receivingSubnetSigner types.Signer + + // Fields set throughout test execution + blockID ids.ID + blockPayload *payload.Hash + blockPayloadUnsignedMessage *avalancheWarp.UnsignedMessage + blockPayloadSignedMessage *avalancheWarp.Message + + addressedCallUnsignedMessage *avalancheWarp.UnsignedMessage + addressedCallSignedMessage *avalancheWarp.Message +} + +func newWarpTest(ctx context.Context, sendingSubnet *Subnet, receivingSubnet *Subnet) *warpTest { + require := require.New(ginkgo.GinkgoT()) + + sendingSubnetFundedKey := sendingSubnet.PreFundedKey + receivingSubnetFundedKey := receivingSubnet.PreFundedKey + + warpTest := &warpTest{ + sendingSubnet: sendingSubnet, + sendingSubnetURIs: 
sendingSubnet.ValidatorURIs, + receivingSubnet: receivingSubnet, + receivingSubnetURIs: receivingSubnet.ValidatorURIs, + sendingSubnetFundedKey: sendingSubnetFundedKey, + sendingSubnetFundedAddress: crypto.PubkeyToAddress(sendingSubnetFundedKey.PublicKey), + receivingSubnetFundedKey: receivingSubnetFundedKey, + receivingSubnetFundedAddress: crypto.PubkeyToAddress(receivingSubnetFundedKey.PublicKey), + } + infoClient := info.NewClient(sendingSubnet.ValidatorURIs[0]) + networkID, err := infoClient.GetNetworkID(ctx) + require.NoError(err) + warpTest.networkID = networkID + + warpTest.initClients() + + sendingClient := warpTest.sendingSubnetClients[0] + sendingSubnetChainID, err := sendingClient.ChainID(ctx) + require.NoError(err) + warpTest.sendingSubnetChainID = sendingSubnetChainID + warpTest.sendingSubnetSigner = types.LatestSignerForChainID(sendingSubnetChainID) + + receivingClient := warpTest.receivingSubnetClients[0] + receivingChainID, err := receivingClient.ChainID(ctx) + require.NoError(err) + // Issue transactions to activate ProposerVM on the receiving chain + require.NoError(utils.IssueTxsToActivateProposerVMFork(ctx, receivingChainID, receivingSubnetFundedKey, receivingClient)) + warpTest.receivingSubnetChainID = receivingChainID + warpTest.receivingSubnetSigner = types.LatestSignerForChainID(receivingChainID) + + return warpTest +} + +func (w *warpTest) initClients() { + require := require.New(ginkgo.GinkgoT()) + + w.sendingSubnetClients = make([]*ethclient.Client, 0, len(w.sendingSubnetClients)) + for _, uri := range w.sendingSubnet.ValidatorURIs { + wsURI := toWebsocketURI(uri, w.sendingSubnet.BlockchainID.String()) + ginkgo.GinkgoLogr.Info("Creating ethclient for blockchain A", "blockchainID", w.sendingSubnet.BlockchainID) + client, err := ethclient.Dial(wsURI) + require.NoError(err) + w.sendingSubnetClients = append(w.sendingSubnetClients, client) + } + + w.receivingSubnetClients = make([]*ethclient.Client, 0, len(w.receivingSubnetClients)) + for _, uri := range w.receivingSubnet.ValidatorURIs { + wsURI := toWebsocketURI(uri, w.receivingSubnet.BlockchainID.String()) + ginkgo.GinkgoLogr.Info("Creating ethclient for blockchain B", "blockchainID", w.receivingSubnet.BlockchainID) + client, err := ethclient.Dial(wsURI) + require.NoError(err) + w.receivingSubnetClients = append(w.receivingSubnetClients, client) + } +} + +func (w *warpTest) sendMessageFromSendingSubnet() { + tc := e2e.NewTestContext() + ctx := tc.DefaultContext() + require := require.New(ginkgo.GinkgoT()) + + client := w.sendingSubnetClients[0] + + startingNonce, err := client.NonceAt(ctx, w.sendingSubnetFundedAddress, nil) + require.NoError(err) + + packedInput, err := warp.PackSendWarpMessage(testPayload) + require.NoError(err) + tx := types.NewTx(&types.DynamicFeeTx{ + ChainID: w.sendingSubnetChainID, + Nonce: startingNonce, + To: &warp.Module.Address, + Gas: 200_000, + GasFeeCap: big.NewInt(225 * params.GWei), + GasTipCap: big.NewInt(params.GWei), + Value: common.Big0, + Data: packedInput, + }) + signedTx, err := types.SignTx(tx, w.sendingSubnetSigner, w.sendingSubnetFundedKey) + require.NoError(err) + ginkgo.GinkgoLogr.Info("Sending sendWarpMessage transaction", "txHash", signedTx.Hash()) + require.NoError(client.SendTransaction(ctx, signedTx)) + + ginkgo.GinkgoLogr.Info("Waiting for transaction to be accepted") + receiptCtx, cancel := context.WithTimeout(ctx, 30*time.Second) + defer cancel() + receipt, err := bind.WaitMined(receiptCtx, client, signedTx) + require.NoError(err) + blockHash := receipt.BlockHash 
+ blockNumber := receipt.BlockNumber.Uint64() + + ginkgo.GinkgoLogr.Info("Constructing warp block hash unsigned message", "blockHash", blockHash) + w.blockID = ids.ID(blockHash) // Set blockID to construct a warp message containing a block hash payload later + w.blockPayload, err = payload.NewHash(w.blockID) + require.NoError(err) + w.blockPayloadUnsignedMessage, err = avalancheWarp.NewUnsignedMessage(w.networkID, w.sendingSubnet.BlockchainID, w.blockPayload.Bytes()) + require.NoError(err) + + ginkgo.GinkgoLogr.Info("Fetching relevant warp logs from the newly produced block") + logs, err := client.FilterLogs(ctx, ethereum.FilterQuery{ + BlockHash: &blockHash, + Addresses: []common.Address{warp.Module.Address}, + }) + require.NoError(err) + require.Len(logs, 1) + + // Check for relevant warp log from subscription and ensure that it matches + // the log extracted from the last block. + txLog := logs[0] + ginkgo.GinkgoLogr.Info("Parsing logData as unsigned warp message") + unsignedMsg, err := warp.UnpackSendWarpEventDataToMessage(txLog.Data) + require.NoError(err) + + // Set local variables for the duration of the test + w.addressedCallUnsignedMessage = unsignedMsg + ginkgo.GinkgoLogr.Info("Parsed unsignedWarpMsg", "unsignedWarpMessageID", w.addressedCallUnsignedMessage.ID(), "unsignedWarpMessage", w.addressedCallUnsignedMessage) + + // Loop over each client on chain A to ensure they all have time to accept the block. + // Note: if we did not confirm this here, the next stage could be racy since it assumes every node + // has accepted the block. + for i, client := range w.sendingSubnetClients { + // Loop until each node has advanced to >= the height of the block that emitted the warp log + for { + receivedBlkNum, err := client.BlockNumber(ctx) + require.NoError(err) + if receivedBlkNum >= blockNumber { + ginkgo.GinkgoLogr.Info("client accepted the block containing SendWarpMessage", "client", i, "height", receivedBlkNum) + break + } + } + } +} + +func (w *warpTest) aggregateSignaturesViaAPI() { + require := require.New(ginkgo.GinkgoT()) + tc := e2e.NewTestContext() + ctx := tc.DefaultContext() + + warpAPIs := make(map[ids.NodeID]warpBackend.Client, len(w.sendingSubnetURIs)) + for _, uri := range w.sendingSubnetURIs { + client, err := warpBackend.NewClient(uri, w.sendingSubnet.BlockchainID.String()) + require.NoError(err) + + infoClient := info.NewClient(uri) + nodeID, _, err := infoClient.GetNodeID(ctx) + require.NoError(err) + warpAPIs[nodeID] = client + } + + pChainClient := platformvm.NewClient(w.sendingSubnetURIs[0]) + pChainHeight, err := pChainClient.GetHeight(ctx) + require.NoError(err) + // If the source subnet is the Primary Network, then we only need to aggregate signatures from the receiving + // subnet's validator set instead of the entire Primary Network. + // If the destination turns out to be the Primary Network as well, then this is a no-op. 
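+	// (In the C-Chain -> C-Chain case both subnet IDs are the Primary Network
+	// ID, so either branch below queries the same validator set.)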
+ var validators map[ids.NodeID]*validators.GetValidatorOutput + if w.sendingSubnet.SubnetID == constants.PrimaryNetworkID { + validators, err = pChainClient.GetValidatorsAt(ctx, w.receivingSubnet.SubnetID, api.Height(pChainHeight)) + } else { + validators, err = pChainClient.GetValidatorsAt(ctx, w.sendingSubnet.SubnetID, api.Height(pChainHeight)) + } + require.NoError(err) + require.NotZero(len(validators)) + + totalWeight := uint64(0) + warpValidators := make([]*avalancheWarp.Validator, 0, len(validators)) + for nodeID, validator := range validators { + warpValidators = append(warpValidators, &avalancheWarp.Validator{ + PublicKey: validator.PublicKey, + Weight: validator.Weight, + NodeIDs: []ids.NodeID{nodeID}, + }) + totalWeight += validator.Weight + } + + ginkgo.GinkgoLogr.Info("Aggregating signatures from validator set", "numValidators", len(warpValidators), "totalWeight", totalWeight) + apiSignatureGetter := warpBackend.NewAPIFetcher(warpAPIs) + signatureResult, err := aggregator.New(apiSignatureGetter, warpValidators, totalWeight).AggregateSignatures(ctx, w.addressedCallUnsignedMessage, 100) + require.NoError(err) + require.Equal(signatureResult.SignatureWeight, signatureResult.TotalWeight) + require.Equal(signatureResult.SignatureWeight, totalWeight) + + w.addressedCallSignedMessage = signatureResult.Message + + signatureResult, err = aggregator.New(apiSignatureGetter, warpValidators, totalWeight).AggregateSignatures(ctx, w.blockPayloadUnsignedMessage, 100) + require.NoError(err) + require.Equal(signatureResult.SignatureWeight, signatureResult.TotalWeight) + require.Equal(signatureResult.SignatureWeight, totalWeight) + w.blockPayloadSignedMessage = signatureResult.Message + + ginkgo.GinkgoLogr.Info("Aggregated signatures for warp messages", "addressedCallMessage", common.Bytes2Hex(w.addressedCallSignedMessage.Bytes()), "blockPayloadMessage", common.Bytes2Hex(w.blockPayloadSignedMessage.Bytes())) +} + +func (w *warpTest) aggregateSignatures() { + require := require.New(ginkgo.GinkgoT()) + tc := e2e.NewTestContext() + ctx := tc.DefaultContext() + + // Verify that the signature aggregation matches the results of manually constructing the warp message + client, err := warpBackend.NewClient(w.sendingSubnetURIs[0], w.sendingSubnet.BlockchainID.String()) + require.NoError(err) + + ginkgo.GinkgoLogr.Info("Fetching addressed call aggregate signature via p2p API") + subnetIDStr := "" + if w.sendingSubnet.SubnetID == constants.PrimaryNetworkID { + subnetIDStr = w.receivingSubnet.SubnetID.String() + } + signedWarpMessageBytes, err := client.GetMessageAggregateSignature(ctx, w.addressedCallSignedMessage.ID(), warp.WarpQuorumDenominator, subnetIDStr) + require.NoError(err) + require.Equal(w.addressedCallSignedMessage.Bytes(), signedWarpMessageBytes) + + ginkgo.GinkgoLogr.Info("Fetching block payload aggregate signature via p2p API") + signedWarpBlockBytes, err := client.GetBlockAggregateSignature(ctx, w.blockID, warp.WarpQuorumDenominator, subnetIDStr) + require.NoError(err) + require.Equal(w.blockPayloadSignedMessage.Bytes(), signedWarpBlockBytes) +} + +func (w *warpTest) deliverAddressedCallToReceivingSubnet() { + require := require.New(ginkgo.GinkgoT()) + tc := e2e.NewTestContext() + ctx := tc.DefaultContext() + + client := w.receivingSubnetClients[0] + + nonce, err := client.NonceAt(ctx, w.receivingSubnetFundedAddress, nil) + require.NoError(err) + + packedInput, err := warp.PackGetVerifiedWarpMessage(0) + require.NoError(err) + tx := predicate.NewPredicateTx( + w.receivingSubnetChainID, + 
nonce,
+		&warp.Module.Address,
+		5_000_000,
+		big.NewInt(225*params.GWei),
+		big.NewInt(params.GWei),
+		common.Big0,
+		packedInput,
+		types.AccessList{},
+		warp.ContractAddress,
+		w.addressedCallSignedMessage.Bytes(),
+	)
+	signedTx, err := types.SignTx(tx, w.receivingSubnetSigner, w.receivingSubnetFundedKey)
+	require.NoError(err)
+	txBytes, err := signedTx.MarshalBinary()
+	require.NoError(err)
+	ginkgo.GinkgoLogr.Info("Sending getVerifiedWarpMessage transaction", "txHash", signedTx.Hash(), "txBytes", common.Bytes2Hex(txBytes))
+	require.NoError(client.SendTransaction(ctx, signedTx))
+
+	ginkgo.GinkgoLogr.Info("Waiting for transaction to be accepted")
+	receiptCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
+	defer cancel()
+	receipt, err := bind.WaitMined(receiptCtx, client, signedTx)
+	require.NoError(err)
+	blockHash := receipt.BlockHash
+
+	ginkgo.GinkgoLogr.Info("Fetching relevant warp logs and receipts from new block")
+	logs, err := client.FilterLogs(ctx, ethereum.FilterQuery{
+		BlockHash: &blockHash,
+		Addresses: []common.Address{warp.Module.Address},
+	})
+	require.NoError(err)
+	// No warp logs are expected here: the getVerifiedWarpMessage transaction only reads the
+	// verified message supplied via the predicate, it does not send a new warp message.
+	require.Len(logs, 0)
+	require.Equal(types.ReceiptStatusSuccessful, receipt.Status)
+}
+
+func (w *warpTest) deliverBlockHashPayload() {
+	require := require.New(ginkgo.GinkgoT())
+	tc := e2e.NewTestContext()
+	ctx := tc.DefaultContext()
+
+	client := w.receivingSubnetClients[0]
+
+	nonce, err := client.NonceAt(ctx, w.receivingSubnetFundedAddress, nil)
+	require.NoError(err)
+
+	packedInput, err := warp.PackGetVerifiedWarpBlockHash(0)
+	require.NoError(err)
+	tx := predicate.NewPredicateTx(
+		w.receivingSubnetChainID,
+		nonce,
+		&warp.Module.Address,
+		5_000_000,
+		big.NewInt(225*params.GWei),
+		big.NewInt(params.GWei),
+		common.Big0,
+		packedInput,
+		types.AccessList{},
+		warp.ContractAddress,
+		w.blockPayloadSignedMessage.Bytes(),
+	)
+	signedTx, err := types.SignTx(tx, w.receivingSubnetSigner, w.receivingSubnetFundedKey)
+	require.NoError(err)
+	txBytes, err := signedTx.MarshalBinary()
+	require.NoError(err)
+	ginkgo.GinkgoLogr.Info("Sending getVerifiedWarpBlockHash transaction", "txHash", signedTx.Hash(), "txBytes", common.Bytes2Hex(txBytes))
+	require.NoError(client.SendTransaction(ctx, signedTx))
+
+	ginkgo.GinkgoLogr.Info("Waiting for transaction to be accepted")
+	receiptCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
+	defer cancel()
+	receipt, err := bind.WaitMined(receiptCtx, client, signedTx)
+	require.NoError(err)
+	blockHash := receipt.BlockHash
+	ginkgo.GinkgoLogr.Info("Fetching relevant warp logs and receipts from new block")
+	logs, err := client.FilterLogs(ctx, ethereum.FilterQuery{
+		BlockHash: &blockHash,
+		Addresses: []common.Address{warp.Module.Address},
+	})
+	require.NoError(err)
+	// As above, no warp logs are expected: the getVerifiedWarpBlockHash transaction only reads
+	// the verified block hash supplied via the predicate.
+	require.Len(logs, 0)
+	require.Equal(types.ReceiptStatusSuccessful, receipt.Status)
+}
+
+func (w *warpTest) warpLoad() {
+	require := require.New(ginkgo.GinkgoT())
+	tc := e2e.NewTestContext()
+	ctx := tc.DefaultContext()
+
+	var (
+		numWorkers = len(w.sendingSubnetClients)
+		txsPerWorker uint64 = 10
+		batchSize uint64 = 10
+		sendingClient = w.sendingSubnetClients[0]
+	)
+
+	chainAKeys, chainAPrivateKeys := generateKeys(w.sendingSubnetFundedKey, numWorkers)
+	chainBKeys, chainBPrivateKeys := generateKeys(w.receivingSubnetFundedKey, numWorkers)
+
+	loadMetrics := metrics.NewDefaultMetrics()
+
+	ginkgo.GinkgoLogr.Info("Distributing funds on sending subnet", "numKeys", len(chainAKeys))
+	chainAKeys, err := load.DistributeFunds(ctx, 
sendingClient, chainAKeys, len(chainAKeys), new(big.Int).Mul(big.NewInt(100), big.NewInt(params.Ether)), loadMetrics) + require.NoError(err) + + ginkgo.GinkgoLogr.Info("Distributing funds on receiving subnet", "numKeys", len(chainBKeys)) + _, err = load.DistributeFunds(ctx, w.receivingSubnetClients[0], chainBKeys, len(chainBKeys), new(big.Int).Mul(big.NewInt(100), big.NewInt(params.Ether)), loadMetrics) + require.NoError(err) + + ginkgo.GinkgoLogr.Info("Creating workers for each subnet...") + chainAWorkers := make([]txs.Worker[*types.Transaction], 0, len(chainAKeys)) + for i := range chainAKeys { + chainAWorkers = append(chainAWorkers, load.NewTxReceiptWorker(ctx, w.sendingSubnetClients[i])) + } + chainBWorkers := make([]txs.Worker[*types.Transaction], 0, len(chainBKeys)) + for i := range chainBKeys { + chainBWorkers = append(chainBWorkers, load.NewTxReceiptWorker(ctx, w.receivingSubnetClients[i])) + } + + ginkgo.GinkgoLogr.Info("Subscribing to warp send events on sending subnet") + logs := make(chan types.Log, numWorkers*int(txsPerWorker)) + sub, err := sendingClient.SubscribeFilterLogs(ctx, ethereum.FilterQuery{ + Addresses: []common.Address{warp.Module.Address}, + }, logs) + require.NoError(err) + defer func() { + sub.Unsubscribe() + require.NoError(<-sub.Err()) + }() + + ginkgo.GinkgoLogr.Info("Generating tx sequence to send warp messages...") + warpSendSequences, err := txs.GenerateTxSequences(ctx, func(key *ecdsa.PrivateKey, nonce uint64) (*types.Transaction, error) { + data, err := warp.PackSendWarpMessage([]byte(fmt.Sprintf("Jets %d-%d Dolphins", key.X.Int64(), nonce))) + if err != nil { + return nil, err + } + tx := types.NewTx(&types.DynamicFeeTx{ + ChainID: w.sendingSubnetChainID, + Nonce: nonce, + To: &warp.Module.Address, + Gas: 200_000, + GasFeeCap: big.NewInt(225 * params.GWei), + GasTipCap: big.NewInt(params.GWei), + Value: common.Big0, + Data: data, + }) + return types.SignTx(tx, w.sendingSubnetSigner, key) + }, w.sendingSubnetClients[0], chainAPrivateKeys, txsPerWorker, false) + require.NoError(err) + ginkgo.GinkgoLogr.Info("Executing warp send loader...") + warpSendLoader := load.New(chainAWorkers, warpSendSequences, batchSize, loadMetrics) + // TODO: execute send and receive loaders concurrently. 
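+	// Phase 1: drive the sendWarpMessage transactions on the sending chain and wait for every
+	// worker to reach tip. The warp logs emitted by these transactions are consumed from the
+	// subscription above and delivered to the receiving chain in phase 2 below.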
+	require.NoError(warpSendLoader.Execute(ctx))
+	require.NoError(warpSendLoader.ConfirmReachedTip(ctx))
+
+	warpClient, err := warpBackend.NewClient(w.sendingSubnetURIs[0], w.sendingSubnet.BlockchainID.String())
+	require.NoError(err)
+	subnetIDStr := ""
+	if w.sendingSubnet.SubnetID == constants.PrimaryNetworkID {
+		subnetIDStr = w.receivingSubnet.SubnetID.String()
+	}
+
+	ginkgo.GinkgoLogr.Info("Generating warp delivery tx sequences...")
+	warpDeliverSequences, err := txs.GenerateTxSequences(ctx, func(key *ecdsa.PrivateKey, nonce uint64) (*types.Transaction, error) {
+		// Wait for the next warp send log
+		warpLog := <-logs
+
+		unsignedMessage, err := warp.UnpackSendWarpEventDataToMessage(warpLog.Data)
+		if err != nil {
+			return nil, err
+		}
+		ginkgo.GinkgoLogr.Info("Fetching addressed call aggregate signature via p2p API")
+
+		signedWarpMessageBytes, err := warpClient.GetMessageAggregateSignature(ctx, unsignedMessage.ID(), warp.WarpDefaultQuorumNumerator, subnetIDStr)
+		if err != nil {
+			return nil, err
+		}
+
+		packedInput, err := warp.PackGetVerifiedWarpMessage(0)
+		if err != nil {
+			return nil, err
+		}
+		tx := predicate.NewPredicateTx(
+			w.receivingSubnetChainID,
+			nonce,
+			&warp.Module.Address,
+			5_000_000,
+			big.NewInt(225*params.GWei),
+			big.NewInt(params.GWei),
+			common.Big0,
+			packedInput,
+			types.AccessList{},
+			warp.ContractAddress,
+			signedWarpMessageBytes,
+		)
+		return types.SignTx(tx, w.receivingSubnetSigner, key)
+	}, w.receivingSubnetClients[0], chainBPrivateKeys, txsPerWorker, true)
+	require.NoError(err)
+
+	ginkgo.GinkgoLogr.Info("Executing warp delivery...")
+	warpDeliverLoader := load.New(chainBWorkers, warpDeliverSequences, batchSize, loadMetrics)
+	require.NoError(warpDeliverLoader.Execute(ctx))
+	require.NoError(warpDeliverLoader.ConfirmReachedTip(ctx))
+	ginkgo.GinkgoLogr.Info("Completed warp delivery successfully.")
+}
+
+// generateKeys returns numWorkers keys and their corresponding ECDSA private keys, using the
+// pre-funded key as the first entry and generating the remainder.
+func generateKeys(preFundedKey *ecdsa.PrivateKey, numWorkers int) ([]*key.Key, []*ecdsa.PrivateKey) {
+	keys := []*key.Key{
+		key.CreateKey(preFundedKey),
+	}
+	privateKeys := []*ecdsa.PrivateKey{
+		preFundedKey,
+	}
+	for i := 1; i < numWorkers; i++ {
+		newKey, err := key.Generate()
+		require.NoError(ginkgo.GinkgoT(), err)
+		keys = append(keys, newKey)
+		privateKeys = append(privateKeys, newKey.PrivKey)
+	}
+	return keys, privateKeys
+}
+
+// toWebsocketURI converts an HTTP node URI into the websocket endpoint for the given blockchain ID.
+func toWebsocketURI(uri string, blockchainID string) string {
+	return fmt.Sprintf("ws://%s/ext/bc/%s/ws", strings.TrimPrefix(uri, "http://"), blockchainID)
+}