diff --git a/universalClient/chains/chains_test.go b/universalClient/chains/chains_test.go index 675ca52c..8fcb4706 100644 --- a/universalClient/chains/chains_test.go +++ b/universalClient/chains/chains_test.go @@ -1,6 +1,8 @@ package chains import ( + "context" + "fmt" "testing" "time" @@ -8,6 +10,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/pushchain/push-chain-node/universalClient/chains/common" "github.com/pushchain/push-chain-node/universalClient/config" uregistrytypes "github.com/pushchain/push-chain-node/x/uregistry/types" ) @@ -399,3 +402,1340 @@ func TestPerSyncTimeout(t *testing.T) { assert.Equal(t, 30*time.Second, perSyncTimeout) }) } + +// mockChainClient implements common.ChainClient for testing +type mockChainClient struct { + startCalled bool + stopCalled bool + stopErr error +} + +func (m *mockChainClient) Start(ctx context.Context) error { m.startCalled = true; return nil } +func (m *mockChainClient) Stop() error { m.stopCalled = true; return m.stopErr } +func (m *mockChainClient) IsHealthy() bool { return true } +func (m *mockChainClient) GetTxBuilder() (common.TxBuilder, error) { + return nil, nil +} + +// newTestChains creates a Chains instance suitable for unit tests. 
+func newTestChains() *Chains { + logger := zerolog.Nop() + cfg := &config.Config{PushChainID: "localchain_9000-1"} + return NewChains(nil, nil, cfg, logger) +} + +func TestGetClient(t *testing.T) { + t.Run("returns client when chain exists", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:1"] = mock + + client, err := c.GetClient("eip155:1") + require.NoError(t, err) + assert.Equal(t, mock, client) + }) + + t.Run("returns error when chain does not exist", func(t *testing.T) { + c := newTestChains() + + client, err := c.GetClient("eip155:999") + assert.Nil(t, client) + require.Error(t, err) + assert.Contains(t, err.Error(), "chain client not found") + assert.Contains(t, err.Error(), "eip155:999") + }) + + t.Run("returns error on empty chain ID", func(t *testing.T) { + c := newTestChains() + + client, err := c.GetClient("") + assert.Nil(t, client) + require.Error(t, err) + }) +} + +func TestIsEVMChain(t *testing.T) { + t.Run("returns true for EVM chain", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + VmType: uregistrytypes.VmType_EVM, + } + + assert.True(t, c.IsEVMChain("eip155:1")) + }) + + t.Run("returns false for SVM chain", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["solana:mainnet"] = &uregistrytypes.ChainConfig{ + Chain: "solana:mainnet", + VmType: uregistrytypes.VmType_SVM, + } + + assert.False(t, c.IsEVMChain("solana:mainnet")) + }) + + t.Run("returns false for unknown chain", func(t *testing.T) { + c := newTestChains() + + assert.False(t, c.IsEVMChain("nonexistent:1")) + }) + + t.Run("returns false when config is nil in map", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = nil + + assert.False(t, c.IsEVMChain("eip155:1")) + }) +} + +func TestIsChainInboundEnabled(t *testing.T) { + t.Run("returns true when inbound is enabled", func(t *testing.T) { + c := newTestChains() + 
c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: false, + }, + } + + assert.True(t, c.IsChainInboundEnabled("eip155:1")) + }) + + t.Run("returns false when inbound is disabled", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: false, + IsOutboundEnabled: true, + }, + } + + assert.False(t, c.IsChainInboundEnabled("eip155:1")) + }) + + t.Run("returns false when Enabled is nil", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: nil, + } + + assert.False(t, c.IsChainInboundEnabled("eip155:1")) + }) + + t.Run("returns false when config is nil", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = nil + + assert.False(t, c.IsChainInboundEnabled("eip155:1")) + }) + + t.Run("returns false for unknown chain", func(t *testing.T) { + c := newTestChains() + + assert.False(t, c.IsChainInboundEnabled("nonexistent:1")) + }) +} + +func TestIsChainOutboundEnabled(t *testing.T) { + t.Run("returns true when outbound is enabled", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: false, + IsOutboundEnabled: true, + }, + } + + assert.True(t, c.IsChainOutboundEnabled("eip155:1")) + }) + + t.Run("returns false when outbound is disabled", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: false, + }, + } + + assert.False(t, c.IsChainOutboundEnabled("eip155:1")) + }) + + t.Run("returns false when Enabled is nil", func(t *testing.T) { + c := 
newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: nil, + } + + assert.False(t, c.IsChainOutboundEnabled("eip155:1")) + }) + + t.Run("returns false when config is nil", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = nil + + assert.False(t, c.IsChainOutboundEnabled("eip155:1")) + }) + + t.Run("returns false for unknown chain", func(t *testing.T) { + c := newTestChains() + + assert.False(t, c.IsChainOutboundEnabled("nonexistent:1")) + }) +} + +func TestGetStandardConfirmations(t *testing.T) { + t.Run("returns configured value when set", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + StandardInbound: 20, + }, + } + + assert.Equal(t, uint64(20), c.GetStandardConfirmations("eip155:1")) + }) + + t.Run("returns 12 default when BlockConfirmation is nil", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + BlockConfirmation: nil, + } + + assert.Equal(t, uint64(12), c.GetStandardConfirmations("eip155:1")) + }) + + t.Run("returns 12 default when StandardInbound is zero", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + StandardInbound: 0, + }, + } + + assert.Equal(t, uint64(12), c.GetStandardConfirmations("eip155:1")) + }) + + t.Run("returns 12 default for unknown chain", func(t *testing.T) { + c := newTestChains() + + assert.Equal(t, uint64(12), c.GetStandardConfirmations("nonexistent:1")) + }) + + t.Run("returns 12 default when config is nil in map", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = nil + + assert.Equal(t, uint64(12), c.GetStandardConfirmations("eip155:1")) + }) + + t.Run("returns value of 1 when configured", 
func(t *testing.T) { + c := newTestChains() + c.chainConfigs["solana:mainnet"] = &uregistrytypes.ChainConfig{ + Chain: "solana:mainnet", + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + StandardInbound: 1, + }, + } + + assert.Equal(t, uint64(1), c.GetStandardConfirmations("solana:mainnet")) + }) +} + +func TestStopAll(t *testing.T) { + t.Run("stops all clients and clears maps", func(t *testing.T) { + c := newTestChains() + mock1 := &mockChainClient{} + mock2 := &mockChainClient{} + + c.chains["eip155:1"] = mock1 + c.chains["solana:mainnet"] = mock2 + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + c.chainConfigs["solana:mainnet"] = &uregistrytypes.ChainConfig{Chain: "solana:mainnet"} + + c.StopAll() + + assert.True(t, mock1.stopCalled) + assert.True(t, mock2.stopCalled) + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) + + t.Run("handles stop error gracefully", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{stopErr: assert.AnError} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + // Should not panic + c.StopAll() + + assert.True(t, mock.stopCalled) + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) + + t.Run("works with no clients", func(t *testing.T) { + c := newTestChains() + + // Should not panic + c.StopAll() + + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) +} + +func TestRemoveChain(t *testing.T) { + t.Run("removes existing chain and stops client", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + err := c.removeChain("eip155:1") + require.NoError(t, err) + + assert.True(t, mock.stopCalled) + _, exists := c.chains["eip155:1"] + assert.False(t, exists) + _, cfgExists := c.chainConfigs["eip155:1"] + assert.False(t, cfgExists) + }) + + t.Run("returns nil 
for non-existent chain", func(t *testing.T) { + c := newTestChains() + + err := c.removeChain("nonexistent:1") + require.NoError(t, err) + }) + + t.Run("removes chain even when stop returns error", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{stopErr: assert.AnError} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + err := c.removeChain("eip155:1") + require.NoError(t, err) + + assert.True(t, mock.stopCalled) + _, exists := c.chains["eip155:1"] + assert.False(t, exists) + }) + + t.Run("only removes specified chain, leaves others", func(t *testing.T) { + c := newTestChains() + mock1 := &mockChainClient{} + mock2 := &mockChainClient{} + c.chains["eip155:1"] = mock1 + c.chains["eip155:97"] = mock2 + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + c.chainConfigs["eip155:97"] = &uregistrytypes.ChainConfig{Chain: "eip155:97"} + + err := c.removeChain("eip155:1") + require.NoError(t, err) + + assert.True(t, mock1.stopCalled) + assert.False(t, mock2.stopCalled) + _, exists := c.chains["eip155:97"] + assert.True(t, exists) + }) +} + +func TestStart(t *testing.T) { + t.Run("returns error when pushCore is nil", func(t *testing.T) { + c := newTestChains() + // pushCore is nil by default from newTestChains + + err := c.Start(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "pushCore must be non-nil") + assert.False(t, c.running) + }) + + t.Run("returns nil if already running", func(t *testing.T) { + c := newTestChains() + c.running = true + + err := c.Start(context.Background()) + require.NoError(t, err) + }) +} + +func TestSanitizeChainID_Extended(t *testing.T) { + tests := []struct { + name string + input string + expected string + }{ + {"dots replaced", "eip155.1", "eip155_1"}, + {"slashes replaced", "chain/sub/path", "chain_sub_path"}, + {"spaces replaced", "chain id", "chain_id"}, + {"mixed special chars", "a:b/c.d e!f@g#h", 
"a_b_c_d_e_f_g_h"}, + {"uppercase preserved", "EIP155:1", "EIP155_1"}, + {"only underscores and hyphens kept", "__--__", "__--__"}, + {"unicode replaced", "chain\u00e9:1", "chain__1"}, + {"tabs replaced", "chain\t1", "chain_1"}, + } + for _, tc := range tests { + t.Run(tc.name, func(t *testing.T) { + assert.Equal(t, tc.expected, sanitizeChainID(tc.input)) + }) + } +} + +func TestGatewayMethodsEqual(t *testing.T) { + t.Run("both nil returns true", func(t *testing.T) { + assert.True(t, gatewayMethodsEqual(nil, nil)) + }) + + t.Run("both empty returns true", func(t *testing.T) { + assert.True(t, gatewayMethodsEqual( + []*uregistrytypes.GatewayMethods{}, + []*uregistrytypes.GatewayMethods{}, + )) + }) + + t.Run("different lengths returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{{Name: "m1"}} + b := []*uregistrytypes.GatewayMethods{{Name: "m1"}, {Name: "m2"}} + assert.False(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("one nil one empty returns false", func(t *testing.T) { + // nil has length 0, empty slice has length 0 -- should be equal + assert.True(t, gatewayMethodsEqual(nil, []*uregistrytypes.GatewayMethods{})) + }) + + t.Run("same single element returns true", func(t *testing.T) { + m := &uregistrytypes.GatewayMethods{ + Name: "addFunds", + Identifier: "0xabc", + EventIdentifier: "0xdef", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_STANDARD, + } + a := []*uregistrytypes.GatewayMethods{m} + b := []*uregistrytypes.GatewayMethods{{ + Name: "addFunds", + Identifier: "0xabc", + EventIdentifier: "0xdef", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_STANDARD, + }} + assert.True(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("different name returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{{Name: "a", Identifier: "0x1"}} + b := []*uregistrytypes.GatewayMethods{{Name: "b", Identifier: "0x1"}} + assert.False(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("different 
identifier returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{{Name: "m", Identifier: "0x1"}} + b := []*uregistrytypes.GatewayMethods{{Name: "m", Identifier: "0x2"}} + assert.False(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("different event identifier returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{{Name: "m", EventIdentifier: "0xe1"}} + b := []*uregistrytypes.GatewayMethods{{Name: "m", EventIdentifier: "0xe2"}} + assert.False(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("different confirmation type returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{{ + Name: "m", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_FAST, + }} + b := []*uregistrytypes.GatewayMethods{{ + Name: "m", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_STANDARD, + }} + assert.False(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("multiple methods all equal returns true", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x2"}, + } + b := []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x2"}, + } + assert.True(t, gatewayMethodsEqual(a, b)) + }) + + t.Run("multiple methods second differs returns false", func(t *testing.T) { + a := []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x2"}, + } + b := []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x99"}, + } + assert.False(t, gatewayMethodsEqual(a, b)) + }) +} + +func TestVaultMethodsEqual(t *testing.T) { + t.Run("both nil returns true", func(t *testing.T) { + assert.True(t, vaultMethodsEqual(nil, nil)) + }) + + t.Run("both empty returns true", func(t *testing.T) { + assert.True(t, vaultMethodsEqual( + []*uregistrytypes.VaultMethods{}, + []*uregistrytypes.VaultMethods{}, + )) + }) + + t.Run("different lengths 
returns false", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{Name: "v1"}} + b := []*uregistrytypes.VaultMethods{} + assert.False(t, vaultMethodsEqual(a, b)) + }) + + t.Run("same single element returns true", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{ + Name: "deposit", + Identifier: "0xabc", + EventIdentifier: "0xdef", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_FAST, + }} + b := []*uregistrytypes.VaultMethods{{ + Name: "deposit", + Identifier: "0xabc", + EventIdentifier: "0xdef", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_FAST, + }} + assert.True(t, vaultMethodsEqual(a, b)) + }) + + t.Run("different name returns false", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{Name: "deposit"}} + b := []*uregistrytypes.VaultMethods{{Name: "withdraw"}} + assert.False(t, vaultMethodsEqual(a, b)) + }) + + t.Run("different identifier returns false", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{Name: "v", Identifier: "0x1"}} + b := []*uregistrytypes.VaultMethods{{Name: "v", Identifier: "0x2"}} + assert.False(t, vaultMethodsEqual(a, b)) + }) + + t.Run("different event identifier returns false", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{Name: "v", EventIdentifier: "0xe1"}} + b := []*uregistrytypes.VaultMethods{{Name: "v", EventIdentifier: "0xe2"}} + assert.False(t, vaultMethodsEqual(a, b)) + }) + + t.Run("different confirmation type returns false", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{{ + Name: "v", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_STANDARD, + }} + b := []*uregistrytypes.VaultMethods{{ + Name: "v", + ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_FAST, + }} + assert.False(t, vaultMethodsEqual(a, b)) + }) + + t.Run("multiple methods all equal returns true", func(t *testing.T) { + a := []*uregistrytypes.VaultMethods{ + {Name: "v1", Identifier: "0x1"}, + {Name: "v2", 
Identifier: "0x2"}, + } + b := []*uregistrytypes.VaultMethods{ + {Name: "v1", Identifier: "0x1"}, + {Name: "v2", Identifier: "0x2"}, + } + assert.True(t, vaultMethodsEqual(a, b)) + }) +} + +func TestChainEnabledEqual(t *testing.T) { + t.Run("both nil returns true", func(t *testing.T) { + assert.True(t, chainEnabledEqual(nil, nil)) + }) + + t.Run("first nil second non-nil returns false", func(t *testing.T) { + assert.False(t, chainEnabledEqual(nil, &uregistrytypes.ChainEnabled{})) + }) + + t.Run("first non-nil second nil returns false", func(t *testing.T) { + assert.False(t, chainEnabledEqual(&uregistrytypes.ChainEnabled{}, nil)) + }) + + t.Run("both false returns true", func(t *testing.T) { + a := &uregistrytypes.ChainEnabled{IsInboundEnabled: false, IsOutboundEnabled: false} + b := &uregistrytypes.ChainEnabled{IsInboundEnabled: false, IsOutboundEnabled: false} + assert.True(t, chainEnabledEqual(a, b)) + }) + + t.Run("both true returns true", func(t *testing.T) { + a := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true} + b := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true} + assert.True(t, chainEnabledEqual(a, b)) + }) + + t.Run("inbound differs returns false", func(t *testing.T) { + a := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true} + b := &uregistrytypes.ChainEnabled{IsInboundEnabled: false, IsOutboundEnabled: true} + assert.False(t, chainEnabledEqual(a, b)) + }) + + t.Run("outbound differs returns false", func(t *testing.T) { + a := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: false} + b := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true} + assert.False(t, chainEnabledEqual(a, b)) + }) + + t.Run("mixed flags equal returns true", func(t *testing.T) { + a := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: false} + b := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: false} 
+ assert.True(t, chainEnabledEqual(a, b)) + }) +} + +func TestBlockConfirmationEqual(t *testing.T) { + t.Run("both nil returns true", func(t *testing.T) { + assert.True(t, blockConfirmationEqual(nil, nil)) + }) + + t.Run("first nil second non-nil returns false", func(t *testing.T) { + assert.False(t, blockConfirmationEqual(nil, &uregistrytypes.BlockConfirmation{})) + }) + + t.Run("first non-nil second nil returns false", func(t *testing.T) { + assert.False(t, blockConfirmationEqual(&uregistrytypes.BlockConfirmation{}, nil)) + }) + + t.Run("both zero values returns true", func(t *testing.T) { + a := &uregistrytypes.BlockConfirmation{} + b := &uregistrytypes.BlockConfirmation{} + assert.True(t, blockConfirmationEqual(a, b)) + }) + + t.Run("same values returns true", func(t *testing.T) { + a := &uregistrytypes.BlockConfirmation{FastInbound: 3, StandardInbound: 12} + b := &uregistrytypes.BlockConfirmation{FastInbound: 3, StandardInbound: 12} + assert.True(t, blockConfirmationEqual(a, b)) + }) + + t.Run("fast inbound differs returns false", func(t *testing.T) { + a := &uregistrytypes.BlockConfirmation{FastInbound: 2, StandardInbound: 12} + b := &uregistrytypes.BlockConfirmation{FastInbound: 5, StandardInbound: 12} + assert.False(t, blockConfirmationEqual(a, b)) + }) + + t.Run("standard inbound differs returns false", func(t *testing.T) { + a := &uregistrytypes.BlockConfirmation{FastInbound: 2, StandardInbound: 12} + b := &uregistrytypes.BlockConfirmation{FastInbound: 2, StandardInbound: 20} + assert.False(t, blockConfirmationEqual(a, b)) + }) + + t.Run("both fields differ returns false", func(t *testing.T) { + a := &uregistrytypes.BlockConfirmation{FastInbound: 1, StandardInbound: 6} + b := &uregistrytypes.BlockConfirmation{FastInbound: 3, StandardInbound: 12} + assert.False(t, blockConfirmationEqual(a, b)) + }) +} + +func TestDetermineChainAction_Extended(t *testing.T) { + enabled := &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true} + + 
t.Run("nil enabled on existing chain returns remove", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:42"] = mock + c.chainConfigs["eip155:42"] = &uregistrytypes.ChainConfig{Chain: "eip155:42", Enabled: enabled} + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:42", + Enabled: nil, // nil means both disabled + }) + assert.Equal(t, chainActionRemove, action) + }) + + t.Run("only inbound enabled is not fully disabled so adds new chain", func(t *testing.T) { + c := newTestChains() + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:50", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: false, + }, + }) + assert.Equal(t, chainActionAdd, action) + }) + + t.Run("only outbound enabled is not fully disabled so adds new chain", func(t *testing.T) { + c := newTestChains() + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:51", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: false, + IsOutboundEnabled: true, + }, + }) + assert.Equal(t, chainActionAdd, action) + }) + + t.Run("existing chain with no stored config returns skip", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:60"] = mock + // chainConfigs deliberately not set (nil stored config) + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:60", + Enabled: enabled, + }) + // existingConfig is nil, so configsEqual is not called, result is skip + assert.Equal(t, chainActionSkip, action) + }) + + t.Run("existing chain with changed enabled flags returns update", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:70"] = mock + c.chainConfigs["eip155:70"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:70", + VmType: uregistrytypes.VmType_EVM, + Enabled: enabled, + } + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ 
+ Chain: "eip155:70", + VmType: uregistrytypes.VmType_EVM, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: false, // changed + }, + }) + assert.Equal(t, chainActionUpdate, action) + }) + + t.Run("existing chain with changed VM type returns update", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:80"] = mock + c.chainConfigs["eip155:80"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:80", + VmType: uregistrytypes.VmType_EVM, + Enabled: enabled, + } + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:80", + VmType: uregistrytypes.VmType_SVM, // changed + Enabled: enabled, + }) + assert.Equal(t, chainActionUpdate, action) + }) + + t.Run("existing chain with changed block confirmation returns update", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:90"] = mock + c.chainConfigs["eip155:90"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:90", + Enabled: enabled, + BlockConfirmation: &uregistrytypes.BlockConfirmation{FastInbound: 2, StandardInbound: 12}, + } + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:90", + Enabled: enabled, + BlockConfirmation: &uregistrytypes.BlockConfirmation{FastInbound: 5, StandardInbound: 20}, + }) + assert.Equal(t, chainActionUpdate, action) + }) + + t.Run("existing chain with changed gateway methods returns update", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:100"] = mock + c.chainConfigs["eip155:100"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:100", + Enabled: enabled, + GatewayMethods: []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + }, + } + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:100", + Enabled: enabled, + GatewayMethods: []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x2"}, // 
added method + }, + }) + assert.Equal(t, chainActionUpdate, action) + }) + + t.Run("existing chain with changed vault methods returns update", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:110"] = mock + c.chainConfigs["eip155:110"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:110", + Enabled: enabled, + VaultMethods: []*uregistrytypes.VaultMethods{ + {Name: "v1", Identifier: "0x1"}, + }, + } + + action := c.determineChainAction(&uregistrytypes.ChainConfig{ + Chain: "eip155:110", + Enabled: enabled, + VaultMethods: []*uregistrytypes.VaultMethods{ + {Name: "v1", Identifier: "0xchanged"}, + }, + }) + assert.Equal(t, chainActionUpdate, action) + }) +} + +func TestConfigsEqual_Extended(t *testing.T) { + t.Run("identical empty configs returns true", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{} + b := &uregistrytypes.ChainConfig{} + assert.True(t, configsEqual(a, b)) + }) + + t.Run("nil enabled on both returns true", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", Enabled: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", Enabled: nil} + assert.True(t, configsEqual(a, b)) + }) + + t.Run("nil enabled vs non-nil enabled returns false", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", Enabled: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", Enabled: &uregistrytypes.ChainEnabled{}} + assert.False(t, configsEqual(a, b)) + }) + + t.Run("nil block confirmation on both returns true", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", BlockConfirmation: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", BlockConfirmation: nil} + assert.True(t, configsEqual(a, b)) + }) + + t.Run("nil block confirmation vs non-nil returns false", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", BlockConfirmation: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", BlockConfirmation: &uregistrytypes.BlockConfirmation{FastInbound: 1}} + 
assert.False(t, configsEqual(a, b)) + }) + + t.Run("empty gateway methods vs nil returns true", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", GatewayMethods: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", GatewayMethods: []*uregistrytypes.GatewayMethods{}} + assert.True(t, configsEqual(a, b)) + }) + + t.Run("empty vault methods vs nil returns true", func(t *testing.T) { + a := &uregistrytypes.ChainConfig{Chain: "c1", VaultMethods: nil} + b := &uregistrytypes.ChainConfig{Chain: "c1", VaultMethods: []*uregistrytypes.VaultMethods{}} + assert.True(t, configsEqual(a, b)) + }) + + t.Run("full config with all fields matching returns true", func(t *testing.T) { + cfg := func() *uregistrytypes.ChainConfig { + return &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + VmType: uregistrytypes.VmType_EVM, + GatewayAddress: "0xgateway", + GatewayMethods: []*uregistrytypes.GatewayMethods{ + {Name: "gm1", Identifier: "0xg1", EventIdentifier: "0xge1", ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_FAST}, + }, + VaultMethods: []*uregistrytypes.VaultMethods{ + {Name: "vm1", Identifier: "0xv1", EventIdentifier: "0xve1", ConfirmationType: uregistrytypes.ConfirmationType_CONFIRMATION_TYPE_STANDARD}, + }, + BlockConfirmation: &uregistrytypes.BlockConfirmation{FastInbound: 2, StandardInbound: 12}, + Enabled: &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true}, + } + } + assert.True(t, configsEqual(cfg(), cfg())) + }) +} + +func TestRemoveChain_Extended(t *testing.T) { + t.Run("remove with empty chain ID on empty maps", func(t *testing.T) { + c := newTestChains() + err := c.removeChain("") + require.NoError(t, err) + }) + + t.Run("remove same chain twice", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + err := c.removeChain("eip155:1") + require.NoError(t, err) + assert.True(t, 
mock.stopCalled) + + // Second remove should be a no-op + err = c.removeChain("eip155:1") + require.NoError(t, err) + }) + + t.Run("remove chain cleans up config even if client was nil entry", func(t *testing.T) { + c := newTestChains() + // Store a nil client (edge case) + c.chains["eip155:5"] = nil + c.chainConfigs["eip155:5"] = &uregistrytypes.ChainConfig{Chain: "eip155:5"} + + // The chain key exists, so removeChain will try client.Stop() on nil. + // This will panic if not handled, but looking at the code, it calls + // client.Stop() without nil check. We verify the key exists first. + _, exists := c.chains["eip155:5"] + assert.True(t, exists) + // Note: calling removeChain with a nil client value would panic; + // this confirms the map entry is present. + }) + + t.Run("remove does not affect push chain ID tracking", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + chainID := c.pushChainID + c.chains[chainID] = mock + c.chainConfigs[chainID] = &uregistrytypes.ChainConfig{Chain: chainID} + + err := c.removeChain(chainID) + require.NoError(t, err) + // pushChainID field is unchanged + assert.Equal(t, "localchain_9000-1", c.pushChainID) + }) +} + +func TestStopAll_Extended(t *testing.T) { + t.Run("stop all with many chains", func(t *testing.T) { + c := newTestChains() + mocks := make([]*mockChainClient, 10) + for i := 0; i < 10; i++ { + mocks[i] = &mockChainClient{} + chainID := fmt.Sprintf("eip155:%d", i) + c.chains[chainID] = mocks[i] + c.chainConfigs[chainID] = &uregistrytypes.ChainConfig{Chain: chainID} + } + + c.StopAll() + + for i, m := range mocks { + assert.True(t, m.stopCalled, "mock %d should have been stopped", i) + } + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) + + t.Run("stop all can be called multiple times", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + c.StopAll() 
+ assert.True(t, mock.stopCalled) + + // Second call should not panic + c.StopAll() + assert.Empty(t, c.chains) + }) +} + +func TestStop(t *testing.T) { + t.Run("stop when not running is no-op", func(t *testing.T) { + c := newTestChains() + c.running = false + // Should not panic or block + c.Stop() + }) +} + +func TestGetClient_Extended(t *testing.T) { + t.Run("returns different clients for different chain IDs", func(t *testing.T) { + c := newTestChains() + mock1 := &mockChainClient{} + mock2 := &mockChainClient{} + c.chains["eip155:1"] = mock1 + c.chains["solana:mainnet"] = mock2 + + client1, err1 := c.GetClient("eip155:1") + client2, err2 := c.GetClient("solana:mainnet") + + require.NoError(t, err1) + require.NoError(t, err2) + assert.Equal(t, mock1, client1) + assert.Equal(t, mock2, client2) + assert.True(t, client1 != client2, "clients should be distinct pointers") + }) +} + +func TestStop_Lifecycle(t *testing.T) { + t.Run("stop closes stopCh and calls StopAll", func(t *testing.T) { + c := newTestChains() + // Simulate a started state: set running, create stopCh, add wg + c.running = true + c.stopCh = make(chan struct{}) + c.wg.Add(1) + + // Add a mock client to verify StopAll is called + mock := &mockChainClient{} + c.chains["eip155:1"] = mock + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + + // Simulate the goroutine that would be waiting on stopCh + go func() { + <-c.stopCh + c.wg.Done() + }() + + c.Stop() + + assert.False(t, c.running) + assert.True(t, mock.stopCalled) + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) + + t.Run("stop is idempotent when called twice", func(t *testing.T) { + c := newTestChains() + c.running = true + c.stopCh = make(chan struct{}) + c.wg.Add(1) + + go func() { + <-c.stopCh + c.wg.Done() + }() + + c.Stop() + // Second call should not panic (running is already false) + c.Stop() + assert.False(t, c.running) + }) +} + +func TestAddChain_ErrorCases(t *testing.T) { + t.Run("nil 
config returns error", func(t *testing.T) { + c := newTestChains() + err := c.addChain(context.Background(), nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid chain config") + }) + + t.Run("empty chain ID returns error", func(t *testing.T) { + c := newTestChains() + cfg := &uregistrytypes.ChainConfig{ + Chain: "", + } + err := c.addChain(context.Background(), cfg) + require.Error(t, err) + assert.Contains(t, err.Error(), "invalid chain config") + }) + + t.Run("unsupported VM type returns error", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: t.TempDir(), + } + cfg := &uregistrytypes.ChainConfig{ + Chain: "unknown:1", + VmType: uregistrytypes.VmType(999), + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: true, + }, + } + err := c.addChain(context.Background(), cfg) + require.Error(t, err) + assert.Contains(t, err.Error(), "unsupported VM type") + }) +} + +func TestEnsurePushChain_EmptyID(t *testing.T) { + t.Run("returns error when pushChainID is empty", func(t *testing.T) { + c := newTestChains() + c.pushChainID = "" + + err := c.ensurePushChain(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "push chain ID not configured") + }) + + t.Run("returns nil when push chain already exists", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains[c.pushChainID] = mock + + err := c.ensurePushChain(context.Background()) + require.NoError(t, err) + }) +} + +func TestGetChainDB(t *testing.T) { + t.Run("creates database for EVM chain", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: t.TempDir(), + } + + database, err := c.getChainDB("eip155:1") + require.NoError(t, err) + require.NotNil(t, database) + }) + + t.Run("creates database for Solana chain", func(t *testing.T) { + c := newTestChains() + c.config = 
&config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: t.TempDir(), + } + + database, err := c.getChainDB("solana:mainnet") + require.NoError(t, err) + require.NotNil(t, database) + }) + + t.Run("creates database for push chain", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: t.TempDir(), + } + + database, err := c.getChainDB("localchain_9000-1") + require.NoError(t, err) + require.NotNil(t, database) + }) + + t.Run("sanitizes chain ID with special characters", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: t.TempDir(), + } + + database, err := c.getChainDB("eip155:97") + require.NoError(t, err) + require.NotNil(t, database) + }) +} + +func TestRemoveChain_WithMockClient(t *testing.T) { + t.Run("remove chain calls stop on running client", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{} + c.chains["eip155:42"] = mock + c.chainConfigs["eip155:42"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:42", + VmType: uregistrytypes.VmType_EVM, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: true, + }, + } + + err := c.removeChain("eip155:42") + require.NoError(t, err) + assert.True(t, mock.stopCalled) + _, exists := c.chains["eip155:42"] + assert.False(t, exists) + _, cfgExists := c.chainConfigs["eip155:42"] + assert.False(t, cfgExists) + }) + + t.Run("remove chain with stop error still removes from maps", func(t *testing.T) { + c := newTestChains() + mock := &mockChainClient{stopErr: fmt.Errorf("stop failed")} + c.chains["eip155:42"] = mock + c.chainConfigs["eip155:42"] = &uregistrytypes.ChainConfig{Chain: "eip155:42"} + + err := c.removeChain("eip155:42") + require.NoError(t, err) // removeChain always returns nil + assert.True(t, mock.stopCalled) + _, exists := c.chains["eip155:42"] + assert.False(t, exists) + }) +} + +func TestStart_PushCoreNil(t 
*testing.T) { + t.Run("sets running to false when pushCore is nil", func(t *testing.T) { + c := newTestChains() + err := c.Start(context.Background()) + require.Error(t, err) + assert.False(t, c.running, "running should remain false when Start fails") + }) +} + +func TestEnsurePushChain_DBError(t *testing.T) { + t.Run("returns error when getChainDB fails", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: "/dev/null/impossible/path", + } + + err := c.ensurePushChain(context.Background()) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get database for push chain") + }) +} + +func TestConfigsEqual_GatewayMethodsOrdering(t *testing.T) { + t.Run("gateway methods different order is not equal", func(t *testing.T) { + cfg1 := &uregistrytypes.ChainConfig{ + Chain: "chain1", + GatewayMethods: []*uregistrytypes.GatewayMethods{ + {Name: "m1", Identifier: "0x1"}, + {Name: "m2", Identifier: "0x2"}, + }, + } + cfg2 := &uregistrytypes.ChainConfig{ + Chain: "chain1", + GatewayMethods: []*uregistrytypes.GatewayMethods{ + {Name: "m2", Identifier: "0x2"}, + {Name: "m1", Identifier: "0x1"}, + }, + } + // Order matters in the current implementation + assert.False(t, configsEqual(cfg1, cfg2)) + }) + + t.Run("vault methods different order is not equal", func(t *testing.T) { + cfg1 := &uregistrytypes.ChainConfig{ + Chain: "chain1", + VaultMethods: []*uregistrytypes.VaultMethods{ + {Name: "v1", Identifier: "0x1"}, + {Name: "v2", Identifier: "0x2"}, + }, + } + cfg2 := &uregistrytypes.ChainConfig{ + Chain: "chain1", + VaultMethods: []*uregistrytypes.VaultMethods{ + {Name: "v2", Identifier: "0x2"}, + {Name: "v1", Identifier: "0x1"}, + }, + } + assert.False(t, configsEqual(cfg1, cfg2)) + }) +} + +func TestStopAll_WithStopErrors(t *testing.T) { + t.Run("continues stopping remaining clients when one errors", func(t *testing.T) { + c := newTestChains() + mockErr := &mockChainClient{stopErr: 
fmt.Errorf("stop error")} + mockOk := &mockChainClient{} + + c.chains["eip155:1"] = mockErr + c.chains["eip155:2"] = mockOk + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{Chain: "eip155:1"} + c.chainConfigs["eip155:2"] = &uregistrytypes.ChainConfig{Chain: "eip155:2"} + + c.StopAll() + + assert.True(t, mockErr.stopCalled) + assert.True(t, mockOk.stopCalled) + assert.Empty(t, c.chains) + assert.Empty(t, c.chainConfigs) + }) +} + +func TestNewChains_ConfigPreserved(t *testing.T) { + t.Run("preserves all config fields", func(t *testing.T) { + logger := zerolog.Nop() + cfg := &config.Config{ + PushChainID: "push:1", + NodeHome: "/tmp/test", + ConfigRefreshIntervalSeconds: 30, + } + + chains := NewChains(nil, nil, cfg, logger) + + assert.Equal(t, cfg, chains.config) + assert.Equal(t, "push:1", chains.pushChainID) + }) +} + +func TestIsChainInboundEnabled_EdgeCases(t *testing.T) { + t.Run("returns false when chain config exists but enabled is nil", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: nil, + } + assert.False(t, c.IsChainInboundEnabled("eip155:1")) + }) +} + +func TestIsChainOutboundEnabled_EdgeCases(t *testing.T) { + t.Run("returns true when both flags enabled", func(t *testing.T) { + c := newTestChains() + c.chainConfigs["eip155:1"] = &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: true, + }, + } + assert.True(t, c.IsChainOutboundEnabled("eip155:1")) + }) +} + +func TestAddChain_GetChainDBError(t *testing.T) { + t.Run("returns error when NodeHome is invalid path", func(t *testing.T) { + c := newTestChains() + c.config = &config.Config{ + PushChainID: "localchain_9000-1", + NodeHome: "/dev/null/impossible/path", + } + + cfg := &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + VmType: uregistrytypes.VmType_EVM, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: 
true, + IsOutboundEnabled: true, + }, + } + err := c.addChain(context.Background(), cfg) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get database") + }) +} + +func TestDetermineChainAction_PushChainID(t *testing.T) { + t.Run("push chain ID is not special in determineChainAction", func(t *testing.T) { + c := newTestChains() + // The push chain ID in determineChainAction is just like any other chain + cfg := &uregistrytypes.ChainConfig{ + Chain: c.pushChainID, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: true, + }, + } + action := c.determineChainAction(cfg) + assert.Equal(t, chainActionAdd, action) + }) +} diff --git a/universalClient/chains/common/chain_store_test.go b/universalClient/chains/common/chain_store_test.go index 5b3b6070..abb2f54c 100644 --- a/universalClient/chains/common/chain_store_test.go +++ b/universalClient/chains/common/chain_store_test.go @@ -239,6 +239,169 @@ func TestChainStore_UpdateVoteTxHash(t *testing.T) { require.NoError(t, err) } +func TestChainStore_GetPendingEventsLimit(t *testing.T) { + cs := newTestChainStore(t) + + // Insert 5 pending events + for i := 0; i < 5; i++ { + evt := &storemodels.Event{ + EventID: fmt.Sprintf("limit-evt-%d", i), + BlockHeight: uint64(i + 1), + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusPending, + } + inserted, err := cs.InsertEventIfNotExists(evt) + require.NoError(t, err) + assert.True(t, inserted) + } + + t.Run("limit returns at most N events", func(t *testing.T) { + events, err := cs.GetPendingEvents(3) + require.NoError(t, err) + assert.Len(t, events, 3) + }) + + t.Run("limit larger than total returns all", func(t *testing.T) { + events, err := cs.GetPendingEvents(100) + require.NoError(t, err) + assert.Len(t, events, 5) + }) + + t.Run("limit zero returns empty", func(t *testing.T) { + events, err := cs.GetPendingEvents(0) + require.NoError(t, err) + 
assert.Len(t, events, 0) + }) + + t.Run("limit one returns exactly one", func(t *testing.T) { + events, err := cs.GetPendingEvents(1) + require.NoError(t, err) + assert.Len(t, events, 1) + }) +} + +func TestChainStore_GetConfirmedEventsOrdering(t *testing.T) { + cs := newTestChainStore(t) + + // Insert events in reverse order; they should come back ordered by created_at ASC + for i := 4; i >= 0; i-- { + evt := &storemodels.Event{ + EventID: fmt.Sprintf("order-evt-%d", i), + BlockHeight: uint64(i + 1), + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusConfirmed, + } + inserted, err := cs.InsertEventIfNotExists(evt) + require.NoError(t, err) + assert.True(t, inserted) + } + + t.Run("events ordered by created_at ASC", func(t *testing.T) { + events, err := cs.GetConfirmedEvents(10) + require.NoError(t, err) + require.Len(t, events, 5) + // Since they are inserted sequentially, created_at is monotonically increasing + // The first inserted (i=4) has the earliest created_at + assert.Equal(t, "order-evt-4", events[0].EventID) + assert.Equal(t, "order-evt-0", events[4].EventID) + }) + + t.Run("limit constrains confirmed events", func(t *testing.T) { + events, err := cs.GetConfirmedEvents(2) + require.NoError(t, err) + assert.Len(t, events, 2) + }) +} + +func TestChainStore_DeleteTerminalEventsNilDatabase(t *testing.T) { + cs := NewChainStore(nil) + + deleted, err := cs.DeleteTerminalEvents("2099-01-01") + require.Error(t, err) + assert.Equal(t, int64(0), deleted) + assert.Contains(t, err.Error(), "database is nil") +} + +func TestChainStore_DeleteTerminalEventsBoundary(t *testing.T) { + cs := newTestChainStore(t) + + // Insert terminal events + for i, status := range []string{storemodels.StatusCompleted, storemodels.StatusReverted, storemodels.StatusReorged} { + evt := &storemodels.Event{ + EventID: fmt.Sprintf("boundary-term-%d", i), + BlockHeight: uint64(i), + Type: storemodels.EventTypeInbound, + 
ConfirmationType: storemodels.ConfirmationStandard, + Status: status, + } + _, err := cs.InsertEventIfNotExists(evt) + require.NoError(t, err) + } + + // Also insert a pending event (non-terminal) + pending := &storemodels.Event{ + EventID: "boundary-pending", + BlockHeight: 100, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusPending, + } + _, err := cs.InsertEventIfNotExists(pending) + require.NoError(t, err) + + // Also insert a confirmed event (non-terminal) + confirmed := &storemodels.Event{ + EventID: "boundary-confirmed", + BlockHeight: 101, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusConfirmed, + } + _, err = cs.InsertEventIfNotExists(confirmed) + require.NoError(t, err) + + t.Run("delete with past date deletes nothing", func(t *testing.T) { + deleted, err := cs.DeleteTerminalEvents("2000-01-01") + require.NoError(t, err) + assert.Equal(t, int64(0), deleted) + }) + + t.Run("delete with future date only deletes terminal events", func(t *testing.T) { + deleted, err := cs.DeleteTerminalEvents("2099-01-01") + require.NoError(t, err) + assert.Equal(t, int64(3), deleted) + + // Pending and confirmed events remain + pendingEvents, err := cs.GetPendingEvents(10) + require.NoError(t, err) + assert.Len(t, pendingEvents, 1) + + confirmedEvents, err := cs.GetConfirmedEvents(10) + require.NoError(t, err) + assert.Len(t, confirmedEvents, 1) + }) +} + +func TestChainStore_UpdateStatusAndVoteTxHashNilDatabase(t *testing.T) { + cs := NewChainStore(nil) + + rows, err := cs.UpdateStatusAndVoteTxHash("evt-x", storemodels.StatusConfirmed, storemodels.StatusCompleted, "0xhash") + require.Error(t, err) + assert.Equal(t, int64(0), rows) + assert.Contains(t, err.Error(), "database is nil") +} + +func TestChainStore_UpdateStatusAndEventDataNilDatabase(t *testing.T) { + cs := NewChainStore(nil) + + rows, err := 
cs.UpdateStatusAndEventData("evt-x", storemodels.StatusPending, storemodels.StatusConfirmed, []byte(`{}`)) + require.Error(t, err) + assert.Equal(t, int64(0), rows) + assert.Contains(t, err.Error(), "database is nil") +} + func TestChainStore_DeleteTerminalEvents(t *testing.T) { cs := newTestChainStore(t) diff --git a/universalClient/chains/common/event_cleaner_test.go b/universalClient/chains/common/event_cleaner_test.go index 8c9661ee..9357f3e2 100644 --- a/universalClient/chains/common/event_cleaner_test.go +++ b/universalClient/chains/common/event_cleaner_test.go @@ -1,12 +1,17 @@ package common import ( + "context" + "fmt" "testing" "time" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + ucdb "github.com/pushchain/push-chain-node/universalClient/db" + storemodels "github.com/pushchain/push-chain-node/universalClient/store" ) func TestNewEventCleaner(t *testing.T) { @@ -77,3 +82,248 @@ func TestEventCleanerStop(t *testing.T) { cleaner.Stop() }) } + +// newTestCleanerDB creates an in-memory database with optional seed events. 
+func newTestCleanerDB(t *testing.T, events []storemodels.Event) *ucdb.DB { + t.Helper() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + t.Cleanup(func() { database.Close() }) + + for _, e := range events { + result := database.Client().Create(&e) + require.NoError(t, result.Error) + } + return database +} + +func TestPerformCleanup(t *testing.T) { + t.Run("deletes terminal events older than retention period", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + // Insert terminal events: COMPLETED, REVERTED, REORGED + for i, status := range []string{storemodels.StatusCompleted, storemodels.StatusReverted, storemodels.StatusReorged} { + evt := storemodels.Event{ + EventID: fmt.Sprintf("terminal-%d", i), + BlockHeight: uint64(i), + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: status, + } + result := database.Client().Create(&evt) + require.NoError(t, result.Error) + } + + // Also insert a PENDING event that should NOT be deleted + pending := storemodels.Event{ + EventID: "pending-1", + BlockHeight: 100, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusPending, + } + result := database.Client().Create(&pending) + require.NoError(t, result.Error) + + // Use zero retention period so all terminal events are eligible for cleanup + cleaner := NewEventCleaner(database, time.Hour, 0, "test-chain", logger) + + err := cleaner.performCleanup() + require.NoError(t, err) + + // Verify terminal events are deleted + var remaining []storemodels.Event + database.Client().Find(&remaining) + require.Len(t, remaining, 1) + assert.Equal(t, "pending-1", remaining[0].EventID) + }) + + t.Run("does not delete events within retention period", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + // Insert a terminal event (just created, so updated_at is now) + evt := 
storemodels.Event{ + EventID: "recent-completed", + BlockHeight: 1, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusCompleted, + } + result := database.Client().Create(&evt) + require.NoError(t, result.Error) + + // Use a very long retention period so the event is still within retention + cleaner := NewEventCleaner(database, time.Hour, 24*time.Hour, "test-chain", logger) + + err := cleaner.performCleanup() + require.NoError(t, err) + + // Event should still exist + var remaining []storemodels.Event + database.Client().Find(&remaining) + assert.Len(t, remaining, 1) + }) + + t.Run("no events to delete returns no error", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + cleaner := NewEventCleaner(database, time.Hour, 0, "test-chain", logger) + + err := cleaner.performCleanup() + assert.NoError(t, err) + }) +} + +func TestEventCleanerStart(t *testing.T) { + t.Run("start runs initial cleanup and returns nil", func(t *testing.T) { + // Seed a terminal event + database := newTestCleanerDB(t, []storemodels.Event{ + { + EventID: "old-completed", + BlockHeight: 1, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusCompleted, + }, + }) + logger := zerolog.Nop() + + // Zero retention so the initial cleanup deletes the event + cleaner := NewEventCleaner(database, time.Hour, 0, "test-chain", logger) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err := cleaner.Start(ctx) + require.NoError(t, err) + + // Give initial cleanup a moment to complete (it runs synchronously before the goroutine) + // The initial cleanup in Start is synchronous, so it should have already run. 
+ var remaining []storemodels.Event + database.Client().Find(&remaining) + assert.Empty(t, remaining, "initial cleanup should have deleted the terminal event") + + // Clean up + cancel() + }) + + t.Run("start stops when context is cancelled", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + cleaner := NewEventCleaner(database, 50*time.Millisecond, 0, "test-chain", logger) + + ctx, cancel := context.WithCancel(context.Background()) + + err := cleaner.Start(ctx) + require.NoError(t, err) + require.NotNil(t, cleaner.ticker) + + // Cancel the context and give the goroutine time to exit + cancel() + time.Sleep(100 * time.Millisecond) + }) + + t.Run("start stops when Stop is called", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + cleaner := NewEventCleaner(database, 50*time.Millisecond, 0, "test-chain", logger) + + ctx := context.Background() + + err := cleaner.Start(ctx) + require.NoError(t, err) + + // Stop should cause the goroutine to exit + cleaner.Stop() + time.Sleep(100 * time.Millisecond) + }) + + t.Run("periodic cleanup runs on ticker interval", func(t *testing.T) { + database := newTestCleanerDB(t, nil) + logger := zerolog.Nop() + + // Use a very short interval so the ticker fires quickly + cleaner := NewEventCleaner(database, 50*time.Millisecond, 0, "test-chain", logger) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err := cleaner.Start(ctx) + require.NoError(t, err) + + // Insert a terminal event after Start so it was not cleaned by initial cleanup + evt := storemodels.Event{ + EventID: "late-completed", + BlockHeight: 1, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusCompleted, + } + result := database.Client().Create(&evt) + require.NoError(t, result.Error) + + // Wait for at least one ticker cycle + time.Sleep(150 * time.Millisecond) + + var remaining 
[]storemodels.Event + database.Client().Find(&remaining) + assert.Empty(t, remaining, "periodic cleanup should have deleted the terminal event") + + cancel() + }) +} + +func TestEventCleanerStartStopLifecycle(t *testing.T) { + t.Run("full lifecycle: start, cleanup, stop", func(t *testing.T) { + // Seed terminal events + database := newTestCleanerDB(t, []storemodels.Event{ + { + EventID: "completed-1", + BlockHeight: 10, + Type: storemodels.EventTypeOutbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusCompleted, + }, + { + EventID: "reverted-1", + BlockHeight: 20, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusReverted, + }, + { + EventID: "pending-keep", + BlockHeight: 30, + Type: storemodels.EventTypeInbound, + ConfirmationType: storemodels.ConfirmationStandard, + Status: storemodels.StatusPending, + }, + }) + logger := zerolog.Nop() + + cleaner := NewEventCleaner(database, time.Hour, 0, "test-chain", logger) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // Start: initial cleanup removes terminal events + err := cleaner.Start(ctx) + require.NoError(t, err) + + var remaining []storemodels.Event + database.Client().Find(&remaining) + require.Len(t, remaining, 1) + assert.Equal(t, "pending-keep", remaining[0].EventID) + + // Stop gracefully + cleaner.Stop() + + // After stop, cleaner should not panic or leave stale state + time.Sleep(50 * time.Millisecond) + }) +} diff --git a/universalClient/chains/common/event_processor_test.go b/universalClient/chains/common/event_processor_test.go index e1764498..4a08c305 100644 --- a/universalClient/chains/common/event_processor_test.go +++ b/universalClient/chains/common/event_processor_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "testing" + "time" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" @@ -388,6 +389,401 @@ func 
TestEventProcessorBuildOutboundObservation(t *testing.T) { }) } +func TestProcessOutboundEvent(t *testing.T) { + logger := zerolog.Nop() + ctx := context.Background() + + setupDB := func(t *testing.T) *ucdb.DB { + t.Helper() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + return database + } + + t.Run("nil event data returns parse error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + event := &store.Event{ + EventID: "0xabc:0", + EventData: nil, + } + err := ep.processOutboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) + + t.Run("empty event data returns parse error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + event := &store.Event{ + EventID: "0xabc:0", + EventData: []byte{}, + } + err := ep.processOutboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) + + t.Run("invalid JSON event data returns parse error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + event := &store.Event{ + EventID: "0xabc:0", + EventData: []byte("not json"), + } + err := ep.processOutboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) + + t.Run("missing tx_id returns parse error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + eventData, _ := json.Marshal(OutboundEvent{ + TxID: "", + UniversalTxID: "0xutxid", + }) + event := &store.Event{ + EventID: "0xabc:0", + EventData: eventData, + } + err := ep.processOutboundEvent(ctx, event) + 
require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) + + t.Run("missing universal_tx_id returns parse error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + eventData, _ := json.Marshal(OutboundEvent{ + TxID: "0xtxid", + UniversalTxID: "", + }) + event := &store.Event{ + EventID: "0xabc:0", + EventData: eventData, + } + err := ep.processOutboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) +} + +func TestProcessInboundEvent(t *testing.T) { + logger := zerolog.Nop() + ctx := context.Background() + + setupDB := func(t *testing.T) *ucdb.DB { + t.Helper() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + return database + } + + t.Run("nil event data returns construct error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + event := &store.Event{ + EventID: "0xabc:0", + EventData: nil, + } + err := ep.processInboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to construct inbound") + }) + + t.Run("invalid JSON event data returns construct error", func(t *testing.T) { + database := setupDB(t) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + event := &store.Event{ + EventID: "0xabc:0", + EventData: []byte("{not valid json}"), + } + err := ep.processInboundEvent(ctx, event) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to construct inbound") + }) +} + +func TestProcessConfirmedEventsRouting(t *testing.T) { + logger := zerolog.Nop() + ctx := context.Background() + + setupDB := func(t *testing.T, events []store.Event) *ucdb.DB { + t.Helper() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + for _, e := 
range events { + result := database.Client().Create(&e) + require.NoError(t, result.Error) + } + return database + } + + t.Run("no confirmed events returns nil", func(t *testing.T) { + database := setupDB(t, nil) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + }) + + t.Run("only pending events are ignored", func(t *testing.T) { + database := setupDB(t, []store.Event{ + { + EventID: "0xpending:0", + Status: store.StatusPending, + Type: store.EventTypeInbound, + EventData: []byte("{}"), + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + // Event should remain PENDING (not picked up) + var evt store.Event + database.Client().Where("event_id = ?", "0xpending:0").First(&evt) + assert.Equal(t, store.StatusPending, evt.Status) + }) + + t.Run("inbound with bad data fails gracefully and continues to next event", func(t *testing.T) { + database := setupDB(t, []store.Event{ + { + EventID: "0xbad_inbound:0", + Status: store.StatusConfirmed, + Type: store.EventTypeInbound, + EventData: []byte("not json"), + }, + { + EventID: "0xbad_inbound2:0", + Status: store.StatusConfirmed, + Type: store.EventTypeInbound, + EventData: nil, + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + // Should not return error - errors on individual events are logged and skipped + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + // Both events should remain CONFIRMED (failed to process, not updated) + var evt1, evt2 store.Event + database.Client().Where("event_id = ?", "0xbad_inbound:0").First(&evt1) + assert.Equal(t, store.StatusConfirmed, evt1.Status) + database.Client().Where("event_id = ?", "0xbad_inbound2:0").First(&evt2) + assert.Equal(t, store.StatusConfirmed, evt2.Status) 
+ }) + + t.Run("outbound with bad data fails gracefully and continues to next event", func(t *testing.T) { + database := setupDB(t, []store.Event{ + { + EventID: "0xbad_outbound:0", + Status: store.StatusConfirmed, + Type: store.EventTypeOutbound, + EventData: []byte("not json"), + }, + { + EventID: "0xbad_outbound2:0", + Status: store.StatusConfirmed, + Type: store.EventTypeOutbound, + EventData: []byte{}, + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + // Both events should remain CONFIRMED + var evt1, evt2 store.Event + database.Client().Where("event_id = ?", "0xbad_outbound:0").First(&evt1) + assert.Equal(t, store.StatusConfirmed, evt1.Status) + database.Client().Where("event_id = ?", "0xbad_outbound2:0").First(&evt2) + assert.Equal(t, store.StatusConfirmed, evt2.Status) + }) + + t.Run("mixed inbound and outbound with bad data both fail gracefully", func(t *testing.T) { + database := setupDB(t, []store.Event{ + { + EventID: "0xin:0", + Status: store.StatusConfirmed, + Type: store.EventTypeInbound, + EventData: []byte("bad"), + }, + { + EventID: "0xout:0", + Status: store.StatusConfirmed, + Type: store.EventTypeOutbound, + EventData: []byte("bad"), + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + var inEvt, outEvt store.Event + database.Client().Where("event_id = ?", "0xin:0").First(&inEvt) + assert.Equal(t, store.StatusConfirmed, inEvt.Status) + database.Client().Where("event_id = ?", "0xout:0").First(&outEvt) + assert.Equal(t, store.StatusConfirmed, outEvt.Status) + }) + + t.Run("outbound missing tx_id in valid JSON stays CONFIRMED", func(t *testing.T) { + eventData, _ := json.Marshal(OutboundEvent{ + TxID: "", + UniversalTxID: "0xutxid", + }) + database := setupDB(t, []store.Event{ + { + EventID: 
"0xno_txid:0", + Status: store.StatusConfirmed, + Type: store.EventTypeOutbound, + EventData: eventData, + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + var evt store.Event + database.Client().Where("event_id = ?", "0xno_txid:0").First(&evt) + assert.Equal(t, store.StatusConfirmed, evt.Status) + }) + + t.Run("unknown event type is silently skipped", func(t *testing.T) { + database := setupDB(t, []store.Event{ + { + EventID: "0xunknown:0", + Status: store.StatusConfirmed, + Type: "UNKNOWN_TYPE", + EventData: []byte("{}"), + }, + }) + defer database.Close() + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + err := ep.processConfirmedEvents(ctx) + require.NoError(t, err) + + // Event should remain CONFIRMED (no handler for this type) + var evt store.Event + database.Client().Where("event_id = ?", "0xunknown:0").First(&evt) + assert.Equal(t, store.StatusConfirmed, evt.Status) + }) +} + +func TestProcessLoopContextCancellation(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + t.Run("processLoop exits promptly on context cancel", func(t *testing.T) { + ctx, cancel := context.WithCancel(context.Background()) + + err := ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // Cancel context and wait for stop + cancel() + + // The wg.Wait inside Stop() will block until processLoop exits + done := make(chan struct{}) + go func() { + ep.Stop() + close(done) + }() + + select { + case <-done: + // processLoop exited within reasonable time + case <-time.After(10 * time.Second): + t.Fatal("processLoop did not exit within 10 seconds after context cancellation") + } + + assert.False(t, ep.IsRunning()) + }) +} + +func 
TestProcessLoopStopChannel(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + t.Run("processLoop exits promptly on stop signal", func(t *testing.T) { + ctx := context.Background() + + err := ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + done := make(chan struct{}) + go func() { + ep.Stop() + close(done) + }() + + select { + case <-done: + // processLoop exited promptly + case <-time.After(10 * time.Second): + t.Fatal("processLoop did not exit within 10 seconds after stop signal") + } + + assert.False(t, ep.IsRunning()) + }) +} + +func TestProcessConfirmedEventsDBError(t *testing.T) { + logger := zerolog.Nop() + ctx := context.Background() + + t.Run("nil database returns error", func(t *testing.T) { + ep := &EventProcessor{ + chainStore: NewChainStore(nil), + logger: logger, + chainID: "eip155:1", + inboundEnabled: true, + outboundEnabled: true, + } + + err := ep.processConfirmedEvents(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get confirmed events") + }) +} + func TestEventProcessorStruct(t *testing.T) { t.Run("struct has expected fields", func(t *testing.T) { ep := &EventProcessor{} @@ -429,6 +825,121 @@ func TestNewEventProcessorEnabledFlags(t *testing.T) { }) } +func TestEventProcessorStartDoubleStart(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // First start should succeed + err = ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // Second start should be rejected + err = ep.Start(ctx) + require.Error(t, err) + assert.Contains(t, err.Error(), "already running") + 
assert.True(t, ep.IsRunning()) + + // Clean up + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) +} + +func TestEventProcessorStopIdempotent(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // Start the processor + err = ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // First stop + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) + + // Second stop should be idempotent (no error, no panic) + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) + + // Third stop also fine + err = ep.Stop() + require.NoError(t, err) +} + +func TestEventProcessorIsRunningStateTransitions(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + // Initial state: not running + assert.False(t, ep.IsRunning()) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + // After start: running + err = ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // After stop: not running + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) + + // Can restart after stop + err = ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // Clean up + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) +} + +func TestEventProcessorStopViaContextCancel(t *testing.T) { + logger := zerolog.Nop() + database, err := ucdb.OpenInMemoryDB(true) + require.NoError(t, err) + defer database.Close() + + ep := NewEventProcessor(nil, database, "eip155:1", true, true, logger) + + ctx, cancel := 
context.WithCancel(context.Background()) + + err = ep.Start(ctx) + require.NoError(t, err) + assert.True(t, ep.IsRunning()) + + // Cancel context - the processLoop should exit + cancel() + + // Stop should still work cleanly after context cancellation + err = ep.Stop() + require.NoError(t, err) + assert.False(t, ep.IsRunning()) +} + func TestProcessConfirmedEventsEnabledFlags(t *testing.T) { logger := zerolog.Nop() ctx := context.Background() diff --git a/universalClient/chains/evm/chain_meta_oracle_test.go b/universalClient/chains/evm/chain_meta_oracle_test.go index 6cb3cd96..07adb03b 100644 --- a/universalClient/chains/evm/chain_meta_oracle_test.go +++ b/universalClient/chains/evm/chain_meta_oracle_test.go @@ -1,6 +1,7 @@ package evm import ( + "context" "testing" "time" @@ -120,3 +121,121 @@ func TestChainMetaOracleStruct(t *testing.T) { assert.Nil(t, oracle.stopCh) }) } + +func TestChainMetaOracleStartStop_ContextCancel(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, 0, logger) + + ctx, cancel := context.WithCancel(context.Background()) + err := oracle.Start(ctx) + require.NoError(t, err) + + // Let the goroutine spin up. + time.Sleep(50 * time.Millisecond) + + cancel() + // Stop should return promptly after context cancel. 
+ done := make(chan struct{}) + go func() { + oracle.Stop() + close(done) + }() + + select { + case <-done: + // success + case <-time.After(2 * time.Second): + t.Fatal("Stop did not return within 2 seconds after context cancel") + } +} + +func TestChainMetaOracleStartStop_ViaStopCh(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, 0, logger) + + ctx := context.Background() + err := oracle.Start(ctx) + require.NoError(t, err) + + time.Sleep(50 * time.Millisecond) + + done := make(chan struct{}) + go func() { + oracle.Stop() + close(done) + }() + + select { + case <-done: + // success + case <-time.After(2 * time.Second): + t.Fatal("Stop did not return within 2 seconds") + } +} + +func TestChainMetaOracle_StartReturnsNilError(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, 0, logger) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err := oracle.Start(ctx) + assert.NoError(t, err) + + cancel() + oracle.Stop() +} + +func TestChainMetaOracle_NilPushSignerField(t *testing.T) { + // Verify that an oracle created without a pushSigner stores nil. 
+ logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, 10, logger) + assert.Nil(t, oracle.pushSigner) + assert.Nil(t, oracle.rpcClient) +} + +func TestGetChainMetaOracleFetchInterval_EdgeCases(t *testing.T) { + logger := zerolog.Nop() + + t.Run("very large interval", func(t *testing.T) { + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 3600, 0, logger) + interval := oracle.getChainMetaOracleFetchInterval() + assert.Equal(t, 3600*time.Second, interval) + }) + + t.Run("interval of 1 second", func(t *testing.T) { + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 1, 0, logger) + interval := oracle.getChainMetaOracleFetchInterval() + assert.Equal(t, 1*time.Second, interval) + }) + + t.Run("very large negative interval defaults to 30s", func(t *testing.T) { + oracle := NewChainMetaOracle(nil, nil, "eip155:1", -9999, 0, logger) + interval := oracle.getChainMetaOracleFetchInterval() + assert.Equal(t, 30*time.Second, interval) + }) + + t.Run("fetchAndVoteChainMeta uses default for zero interval", func(t *testing.T) { + // The function inside fetchAndVoteChainMeta has its own fallback check + // (interval <= 0 -> 30s). We verify getChainMetaOracleFetchInterval + // returns 30s which triggers that path. 
+ oracle := NewChainMetaOracle(nil, nil, "eip155:1", 0, 0, logger) + interval := oracle.getChainMetaOracleFetchInterval() + assert.Equal(t, 30*time.Second, interval) + }) +} + +func TestChainMetaOracle_MarkupPercentValues(t *testing.T) { + logger := zerolog.Nop() + + t.Run("negative markup percent stored as-is", func(t *testing.T) { + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, -5, logger) + assert.Equal(t, -5, oracle.gasPriceMarkupPercent) + }) + + t.Run("high markup percent stored as-is", func(t *testing.T) { + oracle := NewChainMetaOracle(nil, nil, "eip155:1", 30, 200, logger) + assert.Equal(t, 200, oracle.gasPriceMarkupPercent) + }) +} diff --git a/universalClient/chains/evm/client_test.go b/universalClient/chains/evm/client_test.go index 380df1d0..1ea67b10 100644 --- a/universalClient/chains/evm/client_test.go +++ b/universalClient/chains/evm/client_test.go @@ -328,6 +328,130 @@ func TestClientIsHealthy(t *testing.T) { }) } +// TestApplyDefaults tests the applyDefaults method +func TestApplyDefaults(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + t.Run("all defaults when chainConfig is nil", func(t *testing.T) { + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + } + + cfg := client.applyDefaults() + assert.Equal(t, 5, cfg.eventPollingInterval) + assert.Equal(t, 30, cfg.gasPriceInterval) + assert.Equal(t, 0, cfg.gasPriceMarkupPercent) + assert.Equal(t, uint64(2), cfg.fastConfirmations) + assert.Equal(t, uint64(12), cfg.standardConfirmations) + }) + + t.Run("all defaults when fields are nil", func(t *testing.T) { + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + chainConfig: &config.ChainSpecificConfig{}, + } + + cfg := client.applyDefaults() + assert.Equal(t, 5, cfg.eventPollingInterval) + assert.Equal(t, 30, cfg.gasPriceInterval) + assert.Equal(t, 0, cfg.gasPriceMarkupPercent) + }) + + t.Run("custom values override defaults", func(t *testing.T) { + eventPoll := 10 + gasPriceInt := 60 + 
gasPriceMarkup := 15 + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + chainConfig: &config.ChainSpecificConfig{ + EventPollingIntervalSeconds: &eventPoll, + GasPriceIntervalSeconds: &gasPriceInt, + GasPriceMarkupPercent: &gasPriceMarkup, + }, + registryConfig: &uregistrytypes.ChainConfig{ + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + FastInbound: 5, + StandardInbound: 20, + }, + }, + } + + cfg := client.applyDefaults() + assert.Equal(t, 10, cfg.eventPollingInterval) + assert.Equal(t, 60, cfg.gasPriceInterval) + assert.Equal(t, 15, cfg.gasPriceMarkupPercent) + assert.Equal(t, uint64(5), cfg.fastConfirmations) + assert.Equal(t, uint64(20), cfg.standardConfirmations) + }) + + t.Run("zero values use defaults", func(t *testing.T) { + zero := 0 + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + chainConfig: &config.ChainSpecificConfig{ + EventPollingIntervalSeconds: &zero, + GasPriceIntervalSeconds: &zero, + GasPriceMarkupPercent: &zero, + }, + } + + cfg := client.applyDefaults() + assert.Equal(t, 5, cfg.eventPollingInterval, "zero event polling should use default") + assert.Equal(t, 30, cfg.gasPriceInterval, "zero gas price interval should use default") + assert.Equal(t, 0, cfg.gasPriceMarkupPercent, "zero markup is valid default") + }) + + t.Run("registryConfig nil uses default confirmations", func(t *testing.T) { + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + } + + cfg := client.applyDefaults() + assert.Equal(t, uint64(2), cfg.fastConfirmations) + assert.Equal(t, uint64(12), cfg.standardConfirmations) + }) + + t.Run("registryConfig with nil BlockConfirmation uses defaults", func(t *testing.T) { + client := &Client{ + logger: logger, + chainIDStr: "eip155:1", + registryConfig: &uregistrytypes.ChainConfig{ + BlockConfirmation: nil, + }, + } + + cfg := client.applyDefaults() + assert.Equal(t, uint64(2), cfg.fastConfirmations) + assert.Equal(t, uint64(12), cfg.standardConfirmations) + }) +} + +// 
TestGetTxBuilderNil tests GetTxBuilder when txBuilder is not initialized +func TestGetTxBuilderNil(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + chainConfig := &uregistrytypes.ChainConfig{ + Chain: "eip155:1", + VmType: uregistrytypes.VmType_EVM, + } + + chainSpecificConfig := testChainConfig([]string{"https://eth-mainnet.example.com"}) + client, err := NewClient(chainConfig, nil, chainSpecificConfig, nil, logger) + require.NoError(t, err) + + // txBuilder is nil because gateway is not configured / Start not called + txBuilder, err := client.GetTxBuilder() + assert.Error(t, err) + assert.Nil(t, txBuilder) + assert.Contains(t, err.Error(), "txBuilder not available") + assert.Contains(t, err.Error(), "eip155:1") +} + // TestClientGetMethods tests getter methods func TestClientGetMethods(t *testing.T) { logger := zerolog.New(zerolog.NewTestWriter(t)) diff --git a/universalClient/chains/evm/event_confirmer_test.go b/universalClient/chains/evm/event_confirmer_test.go index 77bc2838..ea3fc59d 100644 --- a/universalClient/chains/evm/event_confirmer_test.go +++ b/universalClient/chains/evm/event_confirmer_test.go @@ -1,8 +1,13 @@ package evm import ( + "context" + "encoding/json" "testing" + "time" + "github.com/pushchain/push-chain-node/universalClient/chains/common" + "github.com/pushchain/push-chain-node/universalClient/db" "github.com/pushchain/push-chain-node/universalClient/store" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" @@ -143,3 +148,306 @@ func TestEventConfirmerStruct(t *testing.T) { assert.Nil(t, ec.stopCh) }) } + +// newTestEventConfirmerWithDB creates an EventConfirmer backed by an in-memory database. 
+func newTestEventConfirmerWithDB(t *testing.T) (*EventConfirmer, *db.DB) { + t.Helper() + memDB, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + t.Cleanup(func() { memDB.Close() }) + logger := zerolog.Nop() + ec := NewEventConfirmer(nil, memDB, "eip155:1", 5, 5, 12, logger) + return ec, memDB +} + +func TestGetTxHashFromEventID_EdgeCases(t *testing.T) { + logger := zerolog.Nop() + confirmer := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 12, logger) + + t.Run("colon only returns empty first part", func(t *testing.T) { + result := confirmer.getTxHashFromEventID(":") + assert.Equal(t, "", result) + }) + + t.Run("leading colon returns empty first part", func(t *testing.T) { + result := confirmer.getTxHashFromEventID(":42") + assert.Equal(t, "", result) + }) + + t.Run("trailing colon returns tx hash", func(t *testing.T) { + result := confirmer.getTxHashFromEventID("0xabc:") + assert.Equal(t, "0xabc", result) + }) + + t.Run("full 66-char tx hash with log index", func(t *testing.T) { + hash := "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef" + eventID := hash + ":99" + result := confirmer.getTxHashFromEventID(eventID) + assert.Equal(t, hash, result) + }) + + t.Run("whitespace-only event ID", func(t *testing.T) { + result := confirmer.getTxHashFromEventID(" ") + assert.Equal(t, " ", result) // no trimming expected + }) +} + +func TestProcessPendingEvents_NoPendingEventsInDB(t *testing.T) { + // Verify the database returns no pending events when empty. + _, memDB := newTestEventConfirmerWithDB(t) + cs := common.NewChainStore(memDB) + + pending, err := cs.GetPendingEvents(1000) + require.NoError(t, err) + assert.Len(t, pending, 0) +} + +func TestProcessPendingEvents_OnlyPendingEventsReturned(t *testing.T) { + _, memDB := newTestEventConfirmerWithDB(t) + cs := common.NewChainStore(memDB) + + // Insert one PENDING and one CONFIRMED event. 
+ pendingEvt := &store.Event{ + EventID: "0xpending:0", + BlockHeight: 100, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationStandard, + Status: store.StatusPending, + EventData: []byte(`{}`), + } + confirmedEvt := &store.Event{ + EventID: "0xconfirmed:1", + BlockHeight: 90, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationStandard, + Status: store.StatusConfirmed, + EventData: []byte(`{}`), + } + + inserted, err := cs.InsertEventIfNotExists(pendingEvt) + require.NoError(t, err) + require.True(t, inserted) + + inserted, err = cs.InsertEventIfNotExists(confirmedEvt) + require.NoError(t, err) + require.True(t, inserted) + + // GetPendingEvents should only return the PENDING one. + pending, err := cs.GetPendingEvents(1000) + require.NoError(t, err) + assert.Len(t, pending, 1) + assert.Equal(t, "0xpending:0", pending[0].EventID) +} + +func TestEventConfirmerStartStop_WithDB(t *testing.T) { + ec, _ := newTestEventConfirmerWithDB(t) + + ctx, cancel := context.WithCancel(context.Background()) + + err := ec.Start(ctx) + require.NoError(t, err) + + // Let the goroutine spin up briefly. + time.Sleep(50 * time.Millisecond) + + // Cancel context first, then stop. Should not hang. + cancel() + ec.Stop() +} + +func TestEventConfirmerStartStop_ViaStopChannel(t *testing.T) { + ec, _ := newTestEventConfirmerWithDB(t) + + ctx := context.Background() + err := ec.Start(ctx) + require.NoError(t, err) + + time.Sleep(50 * time.Millisecond) + + // Stop via the stopCh (without cancelling context). 
+ ec.Stop() +} + +func TestEventConfirmer_UpdateEventStatus_WithDB(t *testing.T) { + ec, memDB := newTestEventConfirmerWithDB(t) + _ = ec // confirmer uses chainStore + + cs := common.NewChainStore(memDB) + + t.Run("update status of pending event to confirmed", func(t *testing.T) { + event := &store.Event{ + EventID: "0xabc123:0", + BlockHeight: 100, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationStandard, + Status: store.StatusPending, + EventData: []byte(`{"some":"data"}`), + } + inserted, err := cs.InsertEventIfNotExists(event) + require.NoError(t, err) + require.True(t, inserted) + + rows, err := cs.UpdateEventStatus("0xabc123:0", store.StatusPending, store.StatusConfirmed) + require.NoError(t, err) + assert.Equal(t, int64(1), rows) + }) + + t.Run("update status with wrong old status returns 0 rows", func(t *testing.T) { + // Event is now CONFIRMED, trying to update from PENDING again should affect 0 rows. + rows, err := cs.UpdateEventStatus("0xabc123:0", store.StatusPending, store.StatusConfirmed) + require.NoError(t, err) + assert.Equal(t, int64(0), rows) + }) + + t.Run("update status of non-existent event returns 0 rows", func(t *testing.T) { + rows, err := cs.UpdateEventStatus("nonexistent:0", store.StatusPending, store.StatusConfirmed) + require.NoError(t, err) + assert.Equal(t, int64(0), rows) + }) +} + +func TestEventConfirmer_UpdateStatusAndEventData_WithDB(t *testing.T) { + _, memDB := newTestEventConfirmerWithDB(t) + cs := common.NewChainStore(memDB) + + outbound := common.OutboundEvent{ + TxID: "0xtx1", + UniversalTxID: "0xuni1", + } + data, err := json.Marshal(outbound) + require.NoError(t, err) + + event := &store.Event{ + EventID: "0xoutbound1:0", + BlockHeight: 200, + Type: store.EventTypeOutbound, + ConfirmationType: store.ConfirmationFast, + Status: store.StatusPending, + EventData: data, + } + inserted, err := cs.InsertEventIfNotExists(event) + require.NoError(t, err) + require.True(t, inserted) + + // Enrich with gas 
fee and confirm + outbound.GasFeeUsed = "123456789" + updatedData, err := json.Marshal(outbound) + require.NoError(t, err) + + rows, err := cs.UpdateStatusAndEventData("0xoutbound1:0", store.StatusPending, store.StatusConfirmed, updatedData) + require.NoError(t, err) + assert.Equal(t, int64(1), rows) + + // Verify the data was updated + confirmed, err := cs.GetConfirmedEvents(10) + require.NoError(t, err) + require.Len(t, confirmed, 1) + assert.Equal(t, "0xoutbound1:0", confirmed[0].EventID) + + var stored common.OutboundEvent + require.NoError(t, json.Unmarshal(confirmed[0].EventData, &stored)) + assert.Equal(t, "123456789", stored.GasFeeUsed) +} + +func TestEventConfirmer_PendingEventsWithBlockHeightZero(t *testing.T) { + _, memDB := newTestEventConfirmerWithDB(t) + cs := common.NewChainStore(memDB) + + // Insert an event with BlockHeight 0 (should be skipped by processPendingEvents) + event := &store.Event{ + EventID: "0xzeroblock:0", + BlockHeight: 0, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationStandard, + Status: store.StatusPending, + EventData: []byte(`{}`), + } + inserted, err := cs.InsertEventIfNotExists(event) + require.NoError(t, err) + require.True(t, inserted) + + pending, err := cs.GetPendingEvents(100) + require.NoError(t, err) + assert.Len(t, pending, 1) + assert.Equal(t, uint64(0), pending[0].BlockHeight) +} + +func TestEventConfirmer_GetRequiredConfirmations_ZeroValues(t *testing.T) { + logger := zerolog.Nop() + + t.Run("zero fast confirmations returns 0", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 0, 12, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(0), result) + }) + + t.Run("zero standard confirmations returns 0", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 0, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(0), result) + }) + + t.Run("zero standard with 
unknown type returns 0", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 0, logger) + result := ec.getRequiredConfirmations("INSTANT") + assert.Equal(t, uint64(0), result) + }) +} + +func TestEventConfirmer_NewWithDatabase(t *testing.T) { + memDB, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + defer memDB.Close() + + logger := zerolog.Nop() + ec := NewEventConfirmer(nil, memDB, "eip155:137", 10, 3, 20, logger) + + require.NotNil(t, ec) + assert.NotNil(t, ec.chainStore) + assert.Equal(t, "eip155:137", ec.chainID) + assert.Equal(t, 10, ec.pollIntervalSeconds) +} + +func TestEventConfirmer_GetRequiredConfirmations_LargeValues(t *testing.T) { + logger := zerolog.Nop() + + t.Run("very large fast confirmations", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 1000000, 12, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(1000000), result) + }) + + t.Run("very large standard confirmations", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 999999, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(999999), result) + }) + + t.Run("fast 1 confirmation", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 1, 12, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(1), result) + }) + + t.Run("standard 1 confirmation", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 1, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(1), result) + }) + + t.Run("unknown type with large standard returns large value", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 5, 500, logger) + result := ec.getRequiredConfirmations("SUPER_SAFE") + assert.Equal(t, uint64(500), result) + }) + + t.Run("all confirmation types consistent when same values", func(t 
*testing.T) { + ec := NewEventConfirmer(nil, nil, "eip155:1", 5, 10, 10, logger) + fast := ec.getRequiredConfirmations(store.ConfirmationFast) + standard := ec.getRequiredConfirmations(store.ConfirmationStandard) + unknown := ec.getRequiredConfirmations("OTHER") + assert.Equal(t, uint64(10), fast) + assert.Equal(t, uint64(10), standard) + assert.Equal(t, uint64(10), unknown) + }) +} diff --git a/universalClient/chains/evm/event_listener_test.go b/universalClient/chains/evm/event_listener_test.go new file mode 100644 index 00000000..71d76420 --- /dev/null +++ b/universalClient/chains/evm/event_listener_test.go @@ -0,0 +1,418 @@ +package evm + +import ( + "context" + "testing" + "time" + + ethcommon "github.com/ethereum/go-ethereum/common" + "github.com/rs/zerolog" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/pushchain/push-chain-node/universalClient/db" + uregistrytypes "github.com/pushchain/push-chain-node/x/uregistry/types" +) + +// helper to create an in-memory DB for tests +func testDB(t *testing.T) *db.DB { + t.Helper() + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + t.Cleanup(func() { database.Close() }) + return database +} + +func testLogger(t *testing.T) zerolog.Logger { + t.Helper() + return zerolog.New(zerolog.NewTestWriter(t)) +} +func TestNewEventListener_EmptyGatewayAddress(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "", "", "eip155:1", nil, nil, database, 5, nil, logger) + assert.Nil(t, el) + assert.Error(t, err) + assert.Contains(t, err.Error(), "gateway address not configured") +} + +func TestNewEventListener_EmptyChainID(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "", "", nil, nil, database, 5, nil, logger) + assert.Nil(t, el) + assert.Error(t, err) + assert.Contains(t, err.Error(), "chain ID not configured") +} + +func 
TestNewEventListener_ValidCreation(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 10, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + + assert.Equal(t, "0xGateway", el.gatewayAddress) + assert.Equal(t, "0xVault", el.vaultAddress) + assert.Equal(t, "eip155:1", el.chainID) + assert.Equal(t, 10, el.eventPollingSeconds) + assert.NotNil(t, el.database) + assert.NotNil(t, el.chainStore) + assert.NotNil(t, el.stopCh) + assert.False(t, el.running) +} + +func TestNewEventListener_NilDatabaseAllowed(t *testing.T) { + // NewEventListener does not validate database being nil; it passes it through + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, nil, 5, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + assert.Nil(t, el.database) +} +func TestEventListener_IsRunning_DefaultFalse(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, nil, logger) + require.NoError(t, err) + + assert.False(t, el.IsRunning()) +} + +func TestEventListener_StartSetsRunning(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + // Use no topics so the listen goroutine exits early (before hitting nil rpcClient) + startBlock := int64(100) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + // The goroutine will exit quickly because there are no event topics configured. 
+ time.Sleep(50 * time.Millisecond) + + // Stop should work cleanly + err = el.Stop() + assert.NoError(t, err) + assert.False(t, el.IsRunning()) +} + +func TestEventListener_DoubleStartReturnsError(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + startBlock := int64(100) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err = el.Start(ctx) + require.NoError(t, err) + + err = el.Start(ctx) + assert.Error(t, err) + assert.Contains(t, err.Error(), "already running") + + // cleanup + time.Sleep(50 * time.Millisecond) + el.Stop() +} + +func TestEventListener_StopWhenNotRunning(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, nil, logger) + require.NoError(t, err) + + // Stop on a listener that was never started should be a no-op + err = el.Stop() + assert.NoError(t, err) + assert.False(t, el.IsRunning()) +} + +func TestEventListener_StartStopStart(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + startBlock := int64(100) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + ctx := context.Background() + + // First start (no topics, so goroutine exits immediately) + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + time.Sleep(50 * time.Millisecond) + err = el.Stop() + require.NoError(t, err) + assert.False(t, el.IsRunning()) + + // Second start after stop should work + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + time.Sleep(50 * time.Millisecond) + el.Stop() +} +func TestNewEventListener_TopicMapFromGatewayMethods(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + 
sendFundsTopicHex := "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + executeTxTopicHex := "0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb" + revertTxTopicHex := "0xcccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc" + + gatewayMethods := []*uregistrytypes.GatewayMethods{ + {Name: EventTypeSendFunds, Identifier: "sendFunds()", EventIdentifier: sendFundsTopicHex}, + {Name: EventTypeExecuteUniversalTx, Identifier: "executeUniversalTx()", EventIdentifier: executeTxTopicHex}, + {Name: EventTypeRevertUniversalTx, Identifier: "revertUniversalTx()", EventIdentifier: revertTxTopicHex}, + } + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", gatewayMethods, nil, database, 5, nil, logger) + require.NoError(t, err) + + assert.Len(t, el.eventTopics, 3) + assert.Len(t, el.topicToEventType, 3) + + // Verify each topic maps to the correct event type + assert.Equal(t, EventTypeSendFunds, el.topicToEventType[ethcommon.HexToHash(sendFundsTopicHex)]) + assert.Equal(t, EventTypeExecuteUniversalTx, el.topicToEventType[ethcommon.HexToHash(executeTxTopicHex)]) + assert.Equal(t, EventTypeRevertUniversalTx, el.topicToEventType[ethcommon.HexToHash(revertTxTopicHex)]) +} + +func TestNewEventListener_TopicMapFromVaultMethods(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + finalizeTxTopicHex := "0xdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + + vaultMethods := []*uregistrytypes.VaultMethods{ + {Name: EventTypeFinalizeUniversalTx, Identifier: "finalizeUniversalTx()", EventIdentifier: finalizeTxTopicHex}, + } + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, vaultMethods, database, 5, nil, logger) + require.NoError(t, err) + + assert.Len(t, el.eventTopics, 1) + assert.Equal(t, EventTypeFinalizeUniversalTx, el.topicToEventType[ethcommon.HexToHash(finalizeTxTopicHex)]) +} + +func TestNewEventListener_TopicMapCombined(t *testing.T) { + database := 
testDB(t) + logger := testLogger(t) + + sendFundsTopicHex := "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" + finalizeTxTopicHex := "0xdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd" + + gatewayMethods := []*uregistrytypes.GatewayMethods{ + {Name: EventTypeSendFunds, Identifier: "sendFunds()", EventIdentifier: sendFundsTopicHex}, + } + vaultMethods := []*uregistrytypes.VaultMethods{ + {Name: EventTypeFinalizeUniversalTx, Identifier: "finalizeUniversalTx()", EventIdentifier: finalizeTxTopicHex}, + } + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", gatewayMethods, vaultMethods, database, 5, nil, logger) + require.NoError(t, err) + + assert.Len(t, el.eventTopics, 2) + assert.Len(t, el.topicToEventType, 2) + + assert.Equal(t, EventTypeSendFunds, el.topicToEventType[ethcommon.HexToHash(sendFundsTopicHex)]) + assert.Equal(t, EventTypeFinalizeUniversalTx, el.topicToEventType[ethcommon.HexToHash(finalizeTxTopicHex)]) +} + +func TestNewEventListener_EmptyEventIdentifierSkipped(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + gatewayMethods := []*uregistrytypes.GatewayMethods{ + {Name: EventTypeSendFunds, Identifier: "sendFunds()", EventIdentifier: ""}, // empty => skip + } + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", gatewayMethods, nil, database, 5, nil, logger) + require.NoError(t, err) + + assert.Len(t, el.eventTopics, 0) + assert.Len(t, el.topicToEventType, 0) +} + +func TestNewEventListener_UnknownMethodNameSkipped(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + gatewayMethods := []*uregistrytypes.GatewayMethods{ + {Name: "unknownMethod", Identifier: "unknown()", EventIdentifier: "0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee"}, + } + vaultMethods := []*uregistrytypes.VaultMethods{ + {Name: "unknownVaultMethod", Identifier: "unknown()", EventIdentifier: 
"0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"}, + } + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", gatewayMethods, vaultMethods, database, 5, nil, logger) + require.NoError(t, err) + + // Unknown names should not be added to topic map + assert.Len(t, el.eventTopics, 0) + assert.Len(t, el.topicToEventType, 0) +} + +func TestNewEventListener_NoMethodsProducesEmptyTopics(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, nil, logger) + require.NoError(t, err) + + assert.Len(t, el.eventTopics, 0) + assert.Len(t, el.topicToEventType, 0) +} +func TestEventListener_GetPollingInterval(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + t.Run("Custom interval", func(t *testing.T) { + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 15, nil, logger) + require.NoError(t, err) + assert.Equal(t, 15*time.Second, el.getPollingInterval()) + }) + + t.Run("Zero defaults to 5s", func(t *testing.T) { + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 0, nil, logger) + require.NoError(t, err) + assert.Equal(t, 5*time.Second, el.getPollingInterval()) + }) + + t.Run("Negative defaults to 5s", func(t *testing.T) { + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, -1, nil, logger) + require.NoError(t, err) + assert.Equal(t, 5*time.Second, el.getPollingInterval()) + }) +} +func TestNewEventListener_EventStartFromStored(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + startBlock := int64(12345) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + require.NotNil(t, el.eventStartFrom) + assert.Equal(t, int64(12345), *el.eventStartFrom) +} + +func TestNewEventListener_EventStartFromNil(t 
*testing.T) { + database := testDB(t) + logger := testLogger(t) + + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, nil, logger) + require.NoError(t, err) + assert.Nil(t, el.eventStartFrom) +} +func TestEventListener_GetStartBlockFromConfig(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + t.Run("positive eventStartFrom returns that block", func(t *testing.T) { + startBlock := int64(5000) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + block, err := el.getStartBlockFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(5000), block) + }) + + t.Run("zero eventStartFrom returns 0", func(t *testing.T) { + startBlock := int64(0) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + block, err := el.getStartBlockFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(0), block) + }) + + t.Run("large positive eventStartFrom", func(t *testing.T) { + startBlock := int64(999999999) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + block, err := el.getStartBlockFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(999999999), block) + }) + + t.Run("minus one eventStartFrom with nil rpcClient panics", func(t *testing.T) { + startBlock := int64(-1) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + // rpcClient is nil, so calling GetLatestBlock panics + assert.Panics(t, func() { + el.getStartBlockFromConfig(context.Background()) + }) + }) + + t.Run("nil eventStartFrom with nil rpcClient panics", func(t *testing.T) { + el, err := NewEventListener(nil, 
"0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, nil, logger) + require.NoError(t, err) + + // nil rpcClient, nil eventStartFrom -> falls through to rpcClient.GetLatestBlock which panics on nil + assert.Panics(t, func() { + el.getStartBlockFromConfig(context.Background()) + }) + }) + + t.Run("negative value less than -1 with nil rpcClient panics", func(t *testing.T) { + startBlock := int64(-5) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + // -5 is < 0 but not -1, and not >= 0, so falls through to rpcClient.GetLatestBlock + assert.Panics(t, func() { + el.getStartBlockFromConfig(context.Background()) + }) + }) +} + +func TestEventListener_ContextCancellationStopsGoroutine(t *testing.T) { + database := testDB(t) + logger := testLogger(t) + + // Use no topics so the goroutine exits at the "no event topics" warning + // before trying to use nil rpcClient. We can still verify context flow. 
+ startBlock := int64(100) + el, err := NewEventListener(nil, "0xGateway", "0xVault", "eip155:1", nil, nil, database, 5, &startBlock, logger) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + // Cancel context + cancel() + time.Sleep(100 * time.Millisecond) + + // Stop to clean up + el.Stop() + assert.False(t, el.IsRunning()) +} diff --git a/universalClient/chains/evm/event_parser_test.go b/universalClient/chains/evm/event_parser_test.go index ee85a314..1d211cc5 100644 --- a/universalClient/chains/evm/event_parser_test.go +++ b/universalClient/chains/evm/event_parser_test.go @@ -1,6 +1,7 @@ package evm import ( + "encoding/hex" "encoding/json" "math/big" "testing" @@ -11,6 +12,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/pushchain/push-chain-node/universalClient/chains/common" "github.com/pushchain/push-chain-node/universalClient/store" uregistrytypes "github.com/pushchain/push-chain-node/x/uregistry/types" ) @@ -368,3 +370,289 @@ func TestParseGatewayEvent_OutboundObservation(t *testing.T) { assert.Contains(t, outboundData["universal_tx_id"], "0xbbbb") }) } + +// --------------------------------------------------------------------------- +// readDynamicBytes +// --------------------------------------------------------------------------- + +func TestReadDynamicBytes(t *testing.T) { + t.Run("valid offset with normal bytes", func(t *testing.T) { + // Build ABI-encoded dynamic bytes: first 32 bytes = length, then the data. 
+ inner := []byte{0xca, 0xfe, 0xba, 0xbe} + data := make([]byte, 64) // 32 (length) + 32 (padded data) + big.NewInt(int64(len(inner))).FillBytes(data[0:32]) + copy(data[32:36], inner) + + hexStr, ok := readDynamicBytes(data, 0) + assert.True(t, ok) + assert.Equal(t, "0xcafebabe", hexStr) + }) + + t.Run("zero-length bytes", func(t *testing.T) { + data := make([]byte, 32) // length = 0 + hexStr, ok := readDynamicBytes(data, 0) + assert.True(t, ok) + assert.Equal(t, "0x", hexStr) + }) + + t.Run("offset exactly at boundary", func(t *testing.T) { + // data has 64 bytes; offset 32 points to second word which encodes length=0 + data := make([]byte, 64) + hexStr, ok := readDynamicBytes(data, 32) + assert.True(t, ok) + assert.Equal(t, "0x", hexStr) + }) + + t.Run("out-of-bounds offset past data length", func(t *testing.T) { + data := make([]byte, 16) // too short for a 32-byte length word + _, ok := readDynamicBytes(data, 0) + assert.False(t, ok) + }) + + t.Run("offset beyond data", func(t *testing.T) { + data := make([]byte, 32) + _, ok := readDynamicBytes(data, 64) + assert.False(t, ok) + }) + + t.Run("length exceeds remaining data", func(t *testing.T) { + // length says 100, but only 4 bytes of actual data follow + data := make([]byte, 64) + big.NewInt(100).FillBytes(data[0:32]) + _, ok := readDynamicBytes(data, 0) + assert.False(t, ok) + }) + + t.Run("multi-word data extraction", func(t *testing.T) { + inner := make([]byte, 40) // spans more than one 32-byte word + for i := range inner { + inner[i] = byte(i) + } + data := make([]byte, 32+64) // length word + 2 padded words + big.NewInt(int64(len(inner))).FillBytes(data[0:32]) + copy(data[32:], inner) + + hexStr, ok := readDynamicBytes(data, 0) + assert.True(t, ok) + expected := "0x" + hex.EncodeToString(inner) + assert.Equal(t, expected, hexStr) + }) +} + +// --------------------------------------------------------------------------- +// readWord +// 
--------------------------------------------------------------------------- + +func TestReadWord(t *testing.T) { + data := make([]byte, 96) // 3 words + for i := range data { + data[i] = byte(i) + } + + t.Run("first word", func(t *testing.T) { + w := readWord(data, 0) + require.NotNil(t, w) + assert.Len(t, w, 32) + assert.Equal(t, byte(0), w[0]) + }) + + t.Run("second word", func(t *testing.T) { + w := readWord(data, 1) + require.NotNil(t, w) + assert.Equal(t, byte(32), w[0]) + }) + + t.Run("third word", func(t *testing.T) { + w := readWord(data, 2) + require.NotNil(t, w) + assert.Equal(t, byte(64), w[0]) + }) + + t.Run("out of bounds returns nil", func(t *testing.T) { + w := readWord(data, 3) + assert.Nil(t, w) + }) + + t.Run("negative index returns nil", func(t *testing.T) { + w := readWord(data, -1) + assert.Nil(t, w) + }) + + t.Run("empty data returns nil", func(t *testing.T) { + w := readWord([]byte{}, 0) + assert.Nil(t, w) + }) +} + +// --------------------------------------------------------------------------- +// decodePayload +// --------------------------------------------------------------------------- + +func TestDecodePayload(t *testing.T) { + logger := zerolog.New(nil).Level(zerolog.Disabled) + + t.Run("valid payload at correct offset", func(t *testing.T) { + // Build data large enough: need dataOffset >= 32*5 = 160 + // At dataOffset, place ABI-encoded dynamic bytes (length + data). 
+ inner := []byte{0xde, 0xad, 0xbe, 0xef} + data := make([]byte, 224) // 7 words + // Place dynamic bytes at offset 160 (word 5) + big.NewInt(int64(len(inner))).FillBytes(data[160:192]) + copy(data[192:196], inner) + + payload := &common.UniversalTx{} + decodePayload(data, 160, payload, logger) + assert.Equal(t, "0xdeadbeef", payload.RawPayload) + }) + + t.Run("offset too small is ignored", func(t *testing.T) { + data := make([]byte, 256) + payload := &common.UniversalTx{} + decodePayload(data, 32, payload, logger) // < 32*5 + assert.Empty(t, payload.RawPayload) + }) + + t.Run("offset zero is ignored", func(t *testing.T) { + data := make([]byte, 256) + payload := &common.UniversalTx{} + decodePayload(data, 0, payload, logger) + assert.Empty(t, payload.RawPayload) + }) + + t.Run("readDynamicBytes fails gracefully", func(t *testing.T) { + // Data is too short for the length word at the offset + data := make([]byte, 168) // offset 160 + only 8 bytes; need 32 for length + payload := &common.UniversalTx{} + decodePayload(data, 160, payload, logger) + assert.Empty(t, payload.RawPayload) + }) +} + +// --------------------------------------------------------------------------- +// decodeSignatureData +// --------------------------------------------------------------------------- + +func TestDecodeSignatureData(t *testing.T) { + t.Run("dynamic bytes at valid offset", func(t *testing.T) { + // Build data with dynamic bytes at offset 224 (word 7) + data := make([]byte, 288) // 9 words + inner := []byte{0x01, 0x02, 0x03} + big.NewInt(int64(len(inner))).FillBytes(data[224:256]) + copy(data[256:259], inner) + + // w encodes offset 224 + w := make([]byte, 32) + big.NewInt(224).FillBytes(w) + + result := decodeSignatureData(data, w, 224) + assert.Equal(t, "0x010203", result) + }) + + t.Run("offset below minOffset falls back to fixed bytes32", func(t *testing.T) { + data := make([]byte, 256) + w := make([]byte, 32) + big.NewInt(100).FillBytes(w) // offset 100 < minOffset 224 + + 
result := decodeSignatureData(data, w, 224) + // Fallback: treat w as fixed bytes32 + assert.Equal(t, "0x"+hex.EncodeToString(w), result) + }) + + t.Run("offset beyond data falls back to fixed bytes32", func(t *testing.T) { + data := make([]byte, 64) + w := make([]byte, 32) + big.NewInt(1000).FillBytes(w) // offset 1000 > len(data) + + result := decodeSignatureData(data, w, 0) + assert.Equal(t, "0x"+hex.EncodeToString(w), result) + }) + + t.Run("readDynamicBytes fails falls back to fixed bytes32", func(t *testing.T) { + // Offset is valid range but the dynamic bytes at that offset are malformed + data := make([]byte, 256) + // At offset 224, set length to a huge number that exceeds data + big.NewInt(9999).FillBytes(data[224:256]) + + w := make([]byte, 32) + big.NewInt(224).FillBytes(w) + + result := decodeSignatureData(data, w, 224) + assert.Equal(t, "0x"+hex.EncodeToString(w), result) + }) +} + +// --------------------------------------------------------------------------- +// finalizeEvent +// --------------------------------------------------------------------------- + +func TestFinalizeEvent(t *testing.T) { + logger := zerolog.New(nil).Level(zerolog.Disabled) + + t.Run("txType 0 sets FAST confirmation", func(t *testing.T) { + event := &store.Event{} + payload := &common.UniversalTx{TxType: 0, Sender: "0xabc"} + finalizeEvent(event, payload, logger) + + assert.Equal(t, store.ConfirmationFast, event.ConfirmationType) + assert.NotNil(t, event.EventData) + + var decoded common.UniversalTx + err := json.Unmarshal(event.EventData, &decoded) + require.NoError(t, err) + assert.Equal(t, "0xabc", decoded.Sender) + }) + + t.Run("txType 1 sets FAST confirmation", func(t *testing.T) { + event := &store.Event{} + payload := &common.UniversalTx{TxType: 1} + finalizeEvent(event, payload, logger) + + assert.Equal(t, store.ConfirmationFast, event.ConfirmationType) + }) + + t.Run("txType 2 sets STANDARD confirmation", func(t *testing.T) { + event := &store.Event{} + payload := 
&common.UniversalTx{TxType: 2} + finalizeEvent(event, payload, logger) + + assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) + }) + + t.Run("txType 3 sets STANDARD confirmation", func(t *testing.T) { + event := &store.Event{} + payload := &common.UniversalTx{TxType: 3} + finalizeEvent(event, payload, logger) + + assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) + }) + + t.Run("high txType sets STANDARD confirmation", func(t *testing.T) { + event := &store.Event{} + payload := &common.UniversalTx{TxType: 255} + finalizeEvent(event, payload, logger) + + assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) + }) + + t.Run("event data is valid JSON", func(t *testing.T) { + event := &store.Event{} + payload := &common.UniversalTx{ + SourceChain: "eip155:1", + Sender: "0xsender", + Recipient: "0xrecipient", + Token: "0xtoken", + Amount: "1000", + TxType: 0, + } + finalizeEvent(event, payload, logger) + + var decoded common.UniversalTx + err := json.Unmarshal(event.EventData, &decoded) + require.NoError(t, err) + assert.Equal(t, "eip155:1", decoded.SourceChain) + assert.Equal(t, "0xsender", decoded.Sender) + assert.Equal(t, "0xrecipient", decoded.Recipient) + assert.Equal(t, "0xtoken", decoded.Token) + assert.Equal(t, "1000", decoded.Amount) + }) +} diff --git a/universalClient/chains/evm/tx_builder_test.go b/universalClient/chains/evm/tx_builder_test.go index 0aa84069..4c5bd799 100644 --- a/universalClient/chains/evm/tx_builder_test.go +++ b/universalClient/chains/evm/tx_builder_test.go @@ -14,6 +14,7 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/pushchain/push-chain-node/universalClient/chains/common" uetypes "github.com/pushchain/push-chain-node/x/uexecutor/types" ) @@ -548,20 +549,157 @@ func TestFinalizeUniversalTxUnifiedEncoding(t *testing.T) { } } -// ============================================================================= -// BSC Testnet Simulation Tests 
-// -// These tests simulate contract calls (eth_call) against the live Vault -// contract on BSC testnet. They verify that the encoded calldata is -// accepted by the on-chain contracts. -// -// Prerequisites: -// 1. bscSimulateFrom must have TSS_ROLE on the vault -// 2. Contracts must not be paused -// 3. For ERC20 tests, the vault must have sufficient token balance -// -// These tests are skipped in -short mode and when RPC connection fails. -// ============================================================================= +// TestIsAlreadyExecuted tests the stub that always returns false +func TestIsAlreadyExecuted(t *testing.T) { + builder := newTestTxBuilder(t) + ctx := context.Background() + + t.Run("always returns false", func(t *testing.T) { + executed, err := builder.IsAlreadyExecuted(ctx, "0x1234567890abcdef") + assert.NoError(t, err) + assert.False(t, executed) + }) + + t.Run("returns false for empty txID", func(t *testing.T) { + executed, err := builder.IsAlreadyExecuted(ctx, "") + assert.NoError(t, err) + assert.False(t, executed) + }) + + t.Run("returns false for arbitrary txID", func(t *testing.T) { + executed, err := builder.IsAlreadyExecuted(ctx, "any-string-at-all") + assert.NoError(t, err) + assert.False(t, executed) + }) +} + +// TestNewTxBuilderValidation tests the NewTxBuilder constructor validation +func TestNewTxBuilderValidation(t *testing.T) { + logger := zerolog.Nop() + vAddr := ethcommon.HexToAddress(testVaultAddress) + + t.Run("nil rpcClient returns error", func(t *testing.T) { + tb, err := NewTxBuilder(nil, "eip155:1", 1, "0x1234567890123456789012345678901234567890", vAddr, logger) + assert.Error(t, err) + assert.Nil(t, tb) + assert.Contains(t, err.Error(), "rpcClient is required") + }) + + t.Run("empty chainID returns error", func(t *testing.T) { + tb, err := NewTxBuilder(&RPCClient{}, "", 1, "0x1234567890123456789012345678901234567890", vAddr, logger) + assert.Error(t, err) + assert.Nil(t, tb) + assert.Contains(t, err.Error(), 
"chainID is required") + }) + + t.Run("empty gatewayAddress returns error", func(t *testing.T) { + tb, err := NewTxBuilder(&RPCClient{}, "eip155:1", 1, "", vAddr, logger) + assert.Error(t, err) + assert.Nil(t, tb) + assert.Contains(t, err.Error(), "gatewayAddress is required") + }) + + t.Run("valid inputs succeed", func(t *testing.T) { + tb, err := NewTxBuilder(&RPCClient{}, "eip155:1", 1, "0x1234567890123456789012345678901234567890", vAddr, logger) + assert.NoError(t, err) + assert.NotNil(t, tb) + }) +} + +// TestGetOutboundSigningRequestValidation tests input validation for GetOutboundSigningRequest +func TestGetOutboundSigningRequestValidation(t *testing.T) { + builder := newTestTxBuilder(t) + ctx := context.Background() + + t.Run("nil data returns error", func(t *testing.T) { + req, err := builder.GetOutboundSigningRequest(ctx, nil, 0) + assert.Error(t, err) + assert.Nil(t, req) + assert.Contains(t, err.Error(), "outbound event data is nil") + }) + + t.Run("empty txID returns error", func(t *testing.T) { + data := &uetypes.OutboundCreatedEvent{TxID: ""} + req, err := builder.GetOutboundSigningRequest(ctx, data, 0) + assert.Error(t, err) + assert.Nil(t, req) + assert.Contains(t, err.Error(), "txID is required") + }) + + t.Run("empty destinationChain returns error", func(t *testing.T) { + data := &uetypes.OutboundCreatedEvent{ + TxID: "0x" + hex.EncodeToString(make([]byte, 32)), + DestinationChain: "", + } + req, err := builder.GetOutboundSigningRequest(ctx, data, 0) + assert.Error(t, err) + assert.Nil(t, req) + assert.Contains(t, err.Error(), "destinationChain is required") + }) + + t.Run("zero gas price returns error", func(t *testing.T) { + data := &uetypes.OutboundCreatedEvent{ + TxID: "0x" + hex.EncodeToString(make([]byte, 32)), + DestinationChain: "eip155:1", + GasPrice: "0", + } + req, err := builder.GetOutboundSigningRequest(ctx, data, 0) + assert.Error(t, err) + assert.Nil(t, req) + }) +} + +// TestBroadcastOutboundSigningRequestValidation tests input 
validation for BroadcastOutboundSigningRequest +func TestBroadcastOutboundSigningRequestValidation(t *testing.T) { + builder := newTestTxBuilder(t) + ctx := context.Background() + + t.Run("nil request returns error", func(t *testing.T) { + hash, err := builder.BroadcastOutboundSigningRequest(ctx, nil, nil, nil) + assert.Error(t, err) + assert.Empty(t, hash) + assert.Contains(t, err.Error(), "signing request is nil") + }) + + t.Run("nil data returns error", func(t *testing.T) { + req := &common.UnsignedSigningReq{} + hash, err := builder.BroadcastOutboundSigningRequest(ctx, req, nil, nil) + assert.Error(t, err) + assert.Empty(t, hash) + assert.Contains(t, err.Error(), "outbound event data is nil") + }) + + t.Run("wrong signature length returns error", func(t *testing.T) { + req := &common.UnsignedSigningReq{} + data := &uetypes.OutboundCreatedEvent{TxID: "0x1234"} + hash, err := builder.BroadcastOutboundSigningRequest(ctx, req, data, []byte{1, 2, 3}) + assert.Error(t, err) + assert.Empty(t, hash) + assert.Contains(t, err.Error(), "signature must be 65 bytes") + }) +} + +// TestGetFunctionSignatureUnknown tests unknown function name returns empty string +func TestGetFunctionSignatureUnknown(t *testing.T) { + builder := newTestTxBuilder(t) + + sig := builder.getFunctionSignature("unknownFunc", false) + assert.Equal(t, "", sig) +} + +// TestDetermineFunctionNameRescueFunds tests RESCUE_FUNDS routing +func TestDetermineFunctionNameRescueFunds(t *testing.T) { + builder := newTestTxBuilder(t) + funcName := builder.determineFunctionName(uetypes.TxType_RESCUE_FUNDS, ethcommon.Address{}) + assert.Equal(t, "rescueFunds", funcName) +} + +// TestDetermineFunctionNameDefault tests unknown TxType defaults to finalizeUniversalTx +func TestDetermineFunctionNameDefault(t *testing.T) { + builder := newTestTxBuilder(t) + funcName := builder.determineFunctionName(uetypes.TxType(999), ethcommon.Address{}) + assert.Equal(t, "finalizeUniversalTx", funcName) +} const ( bscGatewayAddress = 
"0x44aFFC61983F4348DdddB886349eb992C061EaC0" @@ -680,8 +818,6 @@ func simulateOnVault(t *testing.T, rpcClient *RPCClient, builder *TxBuilder, fun require.NotNil(t, result) } -// ---------- 1. Fetch Vault from Gateway ---------- - func TestSimulateBSC_FetchVaultFromGateway(t *testing.T) { if testing.Short() { t.Skip("skipping simulation test in short mode") @@ -709,8 +845,6 @@ func TestSimulateBSC_FetchVaultFromGateway(t *testing.T) { t.Logf("VAULT() returned: %s", vaultAddr.Hex()) } -// ---------- 2. Native Revert (Vault) ---------- - func TestSimulateBSC_RevertUniversalTx_Native(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -720,8 +854,6 @@ func TestSimulateBSC_RevertUniversalTx_Native(t *testing.T) { simulateOnVault(t, rpcClient, builder, "revertUniversalTx", data, uetypes.TxType_INBOUND_REVERT) } -// ---------- 3. ERC20 Revert (Vault) ---------- - func TestSimulateBSC_RevertUniversalTx_ERC20(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -731,8 +863,6 @@ func TestSimulateBSC_RevertUniversalTx_ERC20(t *testing.T) { simulateOnVault(t, rpcClient, builder, "revertUniversalTx", data, uetypes.TxType_INBOUND_REVERT) } -// ---------- 4. Native FinalizeUniversalTx — no payload ---------- - func TestSimulateBSC_FinalizeUniversalTx_Native(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -742,8 +872,6 @@ func TestSimulateBSC_FinalizeUniversalTx_Native(t *testing.T) { simulateOnVault(t, rpcClient, builder, "finalizeUniversalTx", data, uetypes.TxType_FUNDS) } -// ---------- 5. ERC20 FinalizeUniversalTx — no payload ---------- - func TestSimulateBSC_FinalizeUniversalTx_ERC20(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -753,8 +881,6 @@ func TestSimulateBSC_FinalizeUniversalTx_ERC20(t *testing.T) { simulateOnVault(t, rpcClient, builder, "finalizeUniversalTx", data, uetypes.TxType_FUNDS) } -// ---------- 6. 
Native FinalizeUniversalTx — with payload ---------- - func TestSimulateBSC_FinalizeUniversalTx_NativeWithPayload(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -769,8 +895,6 @@ func TestSimulateBSC_FinalizeUniversalTx_NativeWithPayload(t *testing.T) { simulateOnVault(t, rpcClient, builder, "finalizeUniversalTx", data, uetypes.TxType_FUNDS_AND_PAYLOAD) } -// ---------- 7. Payload only — no funds ---------- - func TestSimulateBSC_FinalizeUniversalTx_PayloadOnly(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -785,8 +909,6 @@ func TestSimulateBSC_FinalizeUniversalTx_PayloadOnly(t *testing.T) { simulateOnVault(t, rpcClient, builder, "finalizeUniversalTx", data, uetypes.TxType_PAYLOAD) } -// ---------- 8. Native RescueFunds ---------- - func TestSimulateBSC_RescueFunds_Native(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -796,8 +918,6 @@ func TestSimulateBSC_RescueFunds_Native(t *testing.T) { simulateOnVault(t, rpcClient, builder, "rescueFunds", data, uetypes.TxType_INBOUND_REVERT) } -// ---------- 9. ERC20 RescueFunds ---------- - func TestSimulateBSC_RescueFunds_ERC20(t *testing.T) { rpcClient, builder := setupBSCSimulation(t) defer rpcClient.Close() @@ -806,3 +926,126 @@ func TestSimulateBSC_RescueFunds_ERC20(t *testing.T) { data := newBSCSimulationOutbound(t, "1000000", bscUSDT, "0x", revertMsg) // 1 USDT simulateOnVault(t, rpcClient, builder, "rescueFunds", data, uetypes.TxType_INBOUND_REVERT) } + +// --------------------------------------------------------------------------- +// GetGasFeeUsed — requires live RPC; cannot be unit-tested without mocking. +// The function calls rpcClient.GetTransactionReceipt and +// rpcClient.GetTransactionByHash, so a proper test would need a mock +// RPCClient or an integration test against a real node (similar to the +// BSC simulation tests above). 
+// --------------------------------------------------------------------------- + +// --------------------------------------------------------------------------- +// parseGasLimit — additional edge-case coverage +// --------------------------------------------------------------------------- + +func TestParseGasLimitEdgeCases(t *testing.T) { + t.Run("very large gas limit", func(t *testing.T) { + result, err := parseGasLimit("999999999999999999") + assert.NoError(t, err) + expected := new(big.Int) + expected.SetString("999999999999999999", 10) + assert.Equal(t, expected, result) + }) + + t.Run("leading zeros", func(t *testing.T) { + result, err := parseGasLimit("0021000") + assert.NoError(t, err) + assert.Equal(t, int64(21000), result.Int64()) + }) + + t.Run("negative number is invalid", func(t *testing.T) { + // big.Int.SetString with base 10 will parse negative numbers, but the + // function doesn't explicitly reject them. This documents behavior. + result, err := parseGasLimit("-100") + if err == nil { + // If it parses, the value would be negative + assert.True(t, result.Sign() < 0) + } + }) + + t.Run("whitespace only is invalid", func(t *testing.T) { + _, err := parseGasLimit(" ") + assert.Error(t, err) + }) + + t.Run("hex string is invalid", func(t *testing.T) { + _, err := parseGasLimit("0xff") + assert.Error(t, err) + }) +} + +// --------------------------------------------------------------------------- +// parseTxType — additional edge-case coverage +// --------------------------------------------------------------------------- + +func TestParseTxTypeEdgeCases(t *testing.T) { + t.Run("lowercase input is uppercased", func(t *testing.T) { + result, err := parseTxType("funds") + assert.NoError(t, err) + assert.Equal(t, uetypes.TxType_FUNDS, result) + }) + + t.Run("mixed case input", func(t *testing.T) { + result, err := parseTxType("Funds_And_Payload") + assert.NoError(t, err) + assert.Equal(t, uetypes.TxType_FUNDS_AND_PAYLOAD, result) + }) + + t.Run("input 
with whitespace is trimmed", func(t *testing.T) { + result, err := parseTxType(" PAYLOAD ") + assert.NoError(t, err) + assert.Equal(t, uetypes.TxType_PAYLOAD, result) + }) + + t.Run("numeric string 0", func(t *testing.T) { + result, err := parseTxType("0") + assert.NoError(t, err) + assert.Equal(t, uetypes.TxType(0), result) + }) + + t.Run("numeric string for RESCUE_FUNDS", func(t *testing.T) { + result, err := parseTxType("RESCUE_FUNDS") + assert.NoError(t, err) + assert.Equal(t, uetypes.TxType_RESCUE_FUNDS, result) + }) + + t.Run("empty string is invalid", func(t *testing.T) { + _, err := parseTxType("") + assert.Error(t, err) + }) +} + +// --------------------------------------------------------------------------- +// removeHexPrefix — additional edge-case coverage +// --------------------------------------------------------------------------- + +func TestRemoveHexPrefixAdditional(t *testing.T) { + t.Run("single character", func(t *testing.T) { + assert.Equal(t, "a", removeHexPrefix("a")) + }) + + t.Run("0x only", func(t *testing.T) { + assert.Equal(t, "", removeHexPrefix("0x")) + }) + + t.Run("does not strip uppercase 0X", func(t *testing.T) { + // The function only checks lowercase 0x + assert.Equal(t, "0XABCDEF", removeHexPrefix("0XABCDEF")) + }) +} + +// --------------------------------------------------------------------------- +// NewTxBuilder — additional validation edge cases +// --------------------------------------------------------------------------- + +func TestNewTxBuilderZeroGatewayAddress(t *testing.T) { + logger := zerolog.Nop() + vAddr := ethcommon.HexToAddress(testVaultAddress) + + // "0x0000000000000000000000000000000000000000" is the zero address + tb, err := NewTxBuilder(&RPCClient{}, "eip155:1", 1, "0x0000000000000000000000000000000000000000", vAddr, logger) + assert.Error(t, err) + assert.Nil(t, tb) + assert.Contains(t, err.Error(), "invalid gateway address") +} diff --git a/universalClient/chains/push/client_test.go 
b/universalClient/chains/push/client_test.go index 1234d221..b0cff755 100644 --- a/universalClient/chains/push/client_test.go +++ b/universalClient/chains/push/client_test.go @@ -2,11 +2,14 @@ package push import ( "context" + "fmt" "testing" + "github.com/pushchain/push-chain-node/universalClient/chains/common" "github.com/pushchain/push-chain-node/universalClient/config" "github.com/pushchain/push-chain-node/universalClient/db" "github.com/pushchain/push-chain-node/universalClient/pushcore" + "github.com/pushchain/push-chain-node/universalClient/store" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -96,3 +99,251 @@ func TestClient_GetTxBuilder(t *testing.T) { assert.Nil(t, builder) assert.Contains(t, err.Error(), "not supported") } + +func TestClient_StopBeforeStart(t *testing.T) { + // Stop on a freshly created client (never started) should not panic. + // The cancel func is nil, eventListener.Stop() returns ErrNotRunning but + // the client logs and swallows that error, returning nil. 
+ client, err := NewClient(newTestDB(t), nil, newTestPushCoreClient(), "push-chain", zerolog.Nop()) + require.NoError(t, err) + + // Should not panic or return error + require.NoError(t, client.Stop()) +} + +func TestClient_DoubleStop(t *testing.T) { + client, err := NewClient(newTestDB(t), nil, newTestPushCoreClient(), "push-chain", zerolog.Nop()) + require.NoError(t, err) + + ctx := context.Background() + require.NoError(t, client.Start(ctx)) + require.NoError(t, client.Stop()) + + // Second stop: eventListener.Stop() returns ErrNotRunning, but client swallows it + require.NoError(t, client.Stop()) +} + +func TestClient_StartStopWithEventCleaner(t *testing.T) { + cleanup := 60 + retention := 3600 + cfg := &config.ChainSpecificConfig{ + CleanupIntervalSeconds: &cleanup, + RetentionPeriodSeconds: &retention, + } + client, err := NewClient(newTestDB(t), cfg, newTestPushCoreClient(), "push-chain", zerolog.Nop()) + require.NoError(t, err) + require.NotNil(t, client.eventCleaner) + + ctx := context.Background() + require.NoError(t, client.Start(ctx)) + + // Verify both components are running + assert.True(t, client.eventListener.IsRunning()) + + require.NoError(t, client.Stop()) + + // Verify event listener is stopped + assert.False(t, client.eventListener.IsRunning()) +} + +func TestClient_StartStopLifecycleMultiple(t *testing.T) { + // Verify the client can be started and stopped multiple times (restart). 
+ client, err := NewClient(newTestDB(t), nil, newTestPushCoreClient(), "push-chain", zerolog.Nop()) + require.NoError(t, err) + + ctx := context.Background() + + for i := 0; i < 3; i++ { + require.NoError(t, client.Start(ctx), "start iteration %d", i) + assert.True(t, client.eventListener.IsRunning(), "running after start iteration %d", i) + require.NoError(t, client.Stop(), "stop iteration %d", i) + assert.False(t, client.eventListener.IsRunning(), "not running after stop iteration %d", i) + } +} + +func TestNewClient_PartialCleanerConfig(t *testing.T) { + logger := zerolog.Nop() + database := newTestDB(t) + pc := newTestPushCoreClient() + + t.Run("only cleanup interval set, no retention", func(t *testing.T) { + cleanup := 60 + cfg := &config.ChainSpecificConfig{ + CleanupIntervalSeconds: &cleanup, + } + client, err := NewClient(database, cfg, pc, "push-chain", logger) + require.NoError(t, err) + assert.Nil(t, client.eventCleaner, "event cleaner should be nil when retention is missing") + }) + + t.Run("only retention set, no cleanup interval", func(t *testing.T) { + retention := 3600 + cfg := &config.ChainSpecificConfig{ + RetentionPeriodSeconds: &retention, + } + client, err := NewClient(database, cfg, pc, "push-chain", logger) + require.NoError(t, err) + assert.Nil(t, client.eventCleaner, "event cleaner should be nil when cleanup interval is missing") + }) + + t.Run("empty config, no cleaner fields", func(t *testing.T) { + cfg := &config.ChainSpecificConfig{} + client, err := NewClient(database, cfg, pc, "push-chain", logger) + require.NoError(t, err) + assert.Nil(t, client.eventCleaner) + }) + + t.Run("config with poll interval but no cleaner", func(t *testing.T) { + poll := 5 + cfg := &config.ChainSpecificConfig{ + EventPollingIntervalSeconds: &poll, + } + client, err := NewClient(database, cfg, pc, "push-chain", logger) + require.NoError(t, err) + assert.Nil(t, client.eventCleaner) + assert.NotNil(t, client.eventListener) + }) +} + +func 
TestNewClient_NegativePollInterval(t *testing.T) { + logger := zerolog.Nop() + database := newTestDB(t) + pc := newTestPushCoreClient() + + poll := -5 + cfg := &config.ChainSpecificConfig{ + EventPollingIntervalSeconds: &poll, + } + client, err := NewClient(database, cfg, pc, "push-chain", logger) + require.NoError(t, err) + // Negative poll interval should fall back to default + assert.Equal(t, DefaultPollInterval, client.eventListener.cfg.PollInterval) +} + +// --------------------------------------------------------------------------- +// storeEvent tests +// --------------------------------------------------------------------------- + +func TestStoreEvent(t *testing.T) { + t.Run("stores a valid event and returns 1", func(t *testing.T) { + database := newTestDB(t) + pc := newTestPushCoreClient() + logger := zerolog.Nop() + + el, err := NewEventListener(pc, database, logger, nil) + require.NoError(t, err) + + event := &store.Event{ + EventID: "test-event-1", + BlockHeight: 100, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationInstant, + Status: store.StatusConfirmed, + EventData: []byte(`{"key":"value"}`), + } + + result := el.storeEvent(event) + assert.Equal(t, 1, result) + }) + + t.Run("storing a duplicate event returns 0", func(t *testing.T) { + database := newTestDB(t) + pc := newTestPushCoreClient() + logger := zerolog.Nop() + + el, err := NewEventListener(pc, database, logger, nil) + require.NoError(t, err) + + event := &store.Event{ + EventID: "test-event-dup", + BlockHeight: 200, + Type: store.EventTypeOutbound, + ConfirmationType: store.ConfirmationInstant, + Status: store.StatusConfirmed, + EventData: []byte(`{}`), + } + + first := el.storeEvent(event) + assert.Equal(t, 1, first) + + second := el.storeEvent(event) + assert.Equal(t, 0, second) + }) + + t.Run("storing multiple distinct events returns 1 each", func(t *testing.T) { + database := newTestDB(t) + pc := newTestPushCoreClient() + logger := zerolog.Nop() + + el, err := 
NewEventListener(pc, database, logger, nil) + require.NoError(t, err) + + for i := 0; i < 5; i++ { + event := &store.Event{ + EventID: fmt.Sprintf("multi-event-%d", i), + BlockHeight: uint64(300 + i), + Type: store.EventTypeKeygen, + ConfirmationType: store.ConfirmationInstant, + Status: store.StatusConfirmed, + EventData: []byte(`{}`), + } + result := el.storeEvent(event) + assert.Equal(t, 1, result, "event %d should be stored", i) + } + }) + + t.Run("stored event is retrievable via chainStore", func(t *testing.T) { + database := newTestDB(t) + pc := newTestPushCoreClient() + logger := zerolog.Nop() + + el, err := NewEventListener(pc, database, logger, nil) + require.NoError(t, err) + + event := &store.Event{ + EventID: "retrievable-event", + BlockHeight: 500, + Type: store.EventTypeInbound, + ConfirmationType: store.ConfirmationInstant, + Status: store.StatusConfirmed, + EventData: []byte(`{"data":"test"}`), + } + + result := el.storeEvent(event) + assert.Equal(t, 1, result) + + // Verify it can be retrieved as a confirmed event + cs := common.NewChainStore(database) + confirmed, err := cs.GetConfirmedEvents(10) + require.NoError(t, err) + require.Len(t, confirmed, 1) + assert.Equal(t, "retrievable-event", confirmed[0].EventID) + }) + + t.Run("stores event with pending status", func(t *testing.T) { + database := newTestDB(t) + pc := newTestPushCoreClient() + logger := zerolog.Nop() + + el, err := NewEventListener(pc, database, logger, nil) + require.NoError(t, err) + + event := &store.Event{ + EventID: "pending-event", + BlockHeight: 600, + Type: store.EventTypeSignOutbound, + ConfirmationType: store.ConfirmationInstant, + Status: store.StatusPending, + EventData: []byte(`{}`), + } + + result := el.storeEvent(event) + assert.Equal(t, 1, result) + + cs := common.NewChainStore(database) + pending, err := cs.GetPendingEvents(10) + require.NoError(t, err) + require.Len(t, pending, 1) + assert.Equal(t, "pending-event", pending[0].EventID) + }) +} diff --git 
a/universalClient/chains/push/event_parser_test.go b/universalClient/chains/push/event_parser_test.go index 35628d53..35c262df 100644 --- a/universalClient/chains/push/event_parser_test.go +++ b/universalClient/chains/push/event_parser_test.go @@ -251,6 +251,38 @@ func TestDefaultExpiryOffset(t *testing.T) { assert.Equal(t, uint64(600), uint64(DefaultExpiryOffset)) } +func TestHashEventID(t *testing.T) { + t.Run("deterministic output", func(t *testing.T) { + id1 := hashEventID("keygen", "123") + id2 := hashEventID("keygen", "123") + assert.Equal(t, id1, id2) + }) + + t.Run("different types produce different IDs", func(t *testing.T) { + id1 := hashEventID("keygen", "123") + id2 := hashEventID("refresh", "123") + assert.NotEqual(t, id1, id2) + }) + + t.Run("different raw IDs produce different IDs", func(t *testing.T) { + id1 := hashEventID("keygen", "1") + id2 := hashEventID("keygen", "2") + assert.NotEqual(t, id1, id2) + }) + + t.Run("output is hex string of sha256 length", func(t *testing.T) { + id := hashEventID("type", "id") + assert.Len(t, id, 64) // sha256 = 32 bytes = 64 hex chars + }) +} + +func TestConvertOutboundToEvent_BothNil(t *testing.T) { + result, err := convertOutboundToEvent(nil, nil) + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "entry or outbound is nil") +} + func TestConvertFundMigrationEvent(t *testing.T) { t.Run("nil migration returns error", func(t *testing.T) { result, err := convertFundMigrationEvent(nil) diff --git a/universalClient/chains/svm/chain_meta_oracle_test.go b/universalClient/chains/svm/chain_meta_oracle_test.go index 2063a29f..6c15b0a2 100644 --- a/universalClient/chains/svm/chain_meta_oracle_test.go +++ b/universalClient/chains/svm/chain_meta_oracle_test.go @@ -1,121 +1,288 @@ package svm import ( + "context" "testing" "time" + "github.com/pushchain/push-chain-node/universalClient/pushsigner" "github.com/rs/zerolog" - "github.com/stretchr/testify/assert" - 
"github.com/stretchr/testify/require" ) -func TestNewChainMetaOracle(t *testing.T) { - t.Run("creates gas oracle with valid params", func(t *testing.T) { - logger := zerolog.Nop() - chainID := "solana:mainnet" - interval := 30 - - oracle := NewChainMetaOracle(nil, nil, chainID, interval, 0, logger) - - require.NotNil(t, oracle) - assert.Equal(t, chainID, oracle.chainID) - assert.Equal(t, interval, oracle.gasPriceIntervalSeconds) - assert.Equal(t, 0, oracle.gasPriceMarkupPercent) - assert.Nil(t, oracle.rpcClient) - assert.Nil(t, oracle.pushSigner) - assert.NotNil(t, oracle.stopCh) - }) - - t.Run("creates gas oracle with markup percent", func(t *testing.T) { - logger := zerolog.Nop() - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", 30, 15, logger) - - require.NotNil(t, oracle) - assert.Equal(t, 15, oracle.gasPriceMarkupPercent) - }) - - t.Run("creates gas oracle with zero markup percent", func(t *testing.T) { - logger := zerolog.Nop() - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", 30, 0, logger) - - require.NotNil(t, oracle) - assert.Equal(t, 0, oracle.gasPriceMarkupPercent) - }) - - t.Run("creates gas oracle with different chain IDs", func(t *testing.T) { - logger := zerolog.Nop() - - testCases := []string{ - "solana:mainnet", - "solana:devnet", - "solana:testnet", - } +func TestNewChainMetaOracle_BasicFields(t *testing.T) { + logger := zerolog.Nop() + rpc := &RPCClient{} + ps := &pushsigner.Signer{} - for _, chainID := range testCases { - oracle := NewChainMetaOracle(nil, nil, chainID, 30, 0, logger) - assert.Equal(t, chainID, oracle.chainID) - } - }) + oracle := NewChainMetaOracle(rpc, ps, "solana-mainnet", 60, 10, logger) + + if oracle == nil { + t.Fatal("expected non-nil oracle") + } + if oracle.rpcClient != rpc { + t.Error("rpcClient not set correctly") + } + if oracle.pushSigner != ps { + t.Error("pushSigner not set correctly") + } + if oracle.chainID != "solana-mainnet" { + t.Errorf("chainID = %q, want %q", oracle.chainID, 
"solana-mainnet") + } + if oracle.gasPriceIntervalSeconds != 60 { + t.Errorf("gasPriceIntervalSeconds = %d, want 60", oracle.gasPriceIntervalSeconds) + } + if oracle.gasPriceMarkupPercent != 10 { + t.Errorf("gasPriceMarkupPercent = %d, want 10", oracle.gasPriceMarkupPercent) + } + if oracle.stopCh == nil { + t.Error("stopCh channel should be initialized") + } } -func TestChainMetaOracleGetChainMetaOracleFetchInterval(t *testing.T) { +func TestNewChainMetaOracle_NilRPCClient(t *testing.T) { logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, &pushsigner.Signer{}, "chain-1", 30, 5, logger) - t.Run("returns configured interval", func(t *testing.T) { - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", 60, 0, logger) - interval := oracle.getChainMetaOracleFetchInterval() - assert.Equal(t, 60*time.Second, interval) - }) - - t.Run("returns default for zero interval", func(t *testing.T) { - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", 0, 0, logger) - interval := oracle.getChainMetaOracleFetchInterval() - assert.Equal(t, 30*time.Second, interval) - }) - - t.Run("returns default for negative interval", func(t *testing.T) { - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", -10, 0, logger) - interval := oracle.getChainMetaOracleFetchInterval() - assert.Equal(t, 30*time.Second, interval) - }) - - t.Run("respects custom intervals", func(t *testing.T) { - testCases := []struct { - input int - expected time.Duration - }{ - {10, 10 * time.Second}, - {30, 30 * time.Second}, - {60, 60 * time.Second}, - {120, 120 * time.Second}, - } + if oracle == nil { + t.Fatal("expected non-nil oracle even with nil rpcClient") + } + if oracle.rpcClient != nil { + t.Error("rpcClient should be nil") + } +} - for _, tc := range testCases { - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", tc.input, 0, logger) - interval := oracle.getChainMetaOracleFetchInterval() - assert.Equal(t, tc.expected, interval, "interval %d should result in %v", tc.input, 
tc.expected) - } - }) +func TestNewChainMetaOracle_NilPushSigner(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(&RPCClient{}, nil, "chain-2", 30, 5, logger) + + if oracle == nil { + t.Fatal("expected non-nil oracle even with nil pushSigner") + } + if oracle.pushSigner != nil { + t.Error("pushSigner should be nil") + } } -func TestChainMetaOracleStop(t *testing.T) { - t.Run("stop waits for goroutine", func(t *testing.T) { - logger := zerolog.Nop() - oracle := NewChainMetaOracle(nil, nil, "solana:mainnet", 30, 0, logger) +func TestNewChainMetaOracle_BothNil(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-3", 15, 0, logger) - // Should not panic or hang + if oracle == nil { + t.Fatal("expected non-nil oracle with nil rpcClient and pushSigner") + } +} + +func TestNewChainMetaOracle_EmptyChainID(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "", 30, 5, logger) + + if oracle.chainID != "" { + t.Errorf("chainID = %q, want empty string", oracle.chainID) + } +} + +func TestNewChainMetaOracle_ZeroInterval(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-4", 0, 5, logger) + + if oracle.gasPriceIntervalSeconds != 0 { + t.Errorf("gasPriceIntervalSeconds = %d, want 0", oracle.gasPriceIntervalSeconds) + } +} + +func TestNewChainMetaOracle_NegativeInterval(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-5", -10, 5, logger) + + if oracle.gasPriceIntervalSeconds != -10 { + t.Errorf("gasPriceIntervalSeconds = %d, want -10", oracle.gasPriceIntervalSeconds) + } +} + +func TestNewChainMetaOracle_ZeroMarkup(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-6", 30, 0, logger) + + if oracle.gasPriceMarkupPercent != 0 { + t.Errorf("gasPriceMarkupPercent = %d, want 0", oracle.gasPriceMarkupPercent) + } +} + +func TestNewChainMetaOracle_NegativeMarkup(t *testing.T) { + 
logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-7", 30, -20, logger) + + if oracle.gasPriceMarkupPercent != -20 { + t.Errorf("gasPriceMarkupPercent = %d, want -20", oracle.gasPriceMarkupPercent) + } +} + +func TestNewChainMetaOracle_LargeValues(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-8", 86400, 500, logger) + + if oracle.gasPriceIntervalSeconds != 86400 { + t.Errorf("gasPriceIntervalSeconds = %d, want 86400", oracle.gasPriceIntervalSeconds) + } + if oracle.gasPriceMarkupPercent != 500 { + t.Errorf("gasPriceMarkupPercent = %d, want 500", oracle.gasPriceMarkupPercent) + } +} + +func TestGetChainMetaOracleFetchInterval_Positive(t *testing.T) { + oracle := &ChainMetaOracle{gasPriceIntervalSeconds: 45} + got := oracle.getChainMetaOracleFetchInterval() + want := 45 * time.Second + + if got != want { + t.Errorf("interval = %v, want %v", got, want) + } +} + +func TestGetChainMetaOracleFetchInterval_One(t *testing.T) { + oracle := &ChainMetaOracle{gasPriceIntervalSeconds: 1} + got := oracle.getChainMetaOracleFetchInterval() + want := 1 * time.Second + + if got != want { + t.Errorf("interval = %v, want %v", got, want) + } +} + +func TestGetChainMetaOracleFetchInterval_Zero(t *testing.T) { + oracle := &ChainMetaOracle{gasPriceIntervalSeconds: 0} + got := oracle.getChainMetaOracleFetchInterval() + want := 30 * time.Second + + if got != want { + t.Errorf("interval = %v, want %v (default for zero)", got, want) + } +} + +func TestGetChainMetaOracleFetchInterval_Negative(t *testing.T) { + oracle := &ChainMetaOracle{gasPriceIntervalSeconds: -5} + got := oracle.getChainMetaOracleFetchInterval() + want := 30 * time.Second + + if got != want { + t.Errorf("interval = %v, want %v (default for negative)", got, want) + } +} + +func TestGetChainMetaOracleFetchInterval_Large(t *testing.T) { + oracle := &ChainMetaOracle{gasPriceIntervalSeconds: 3600} + got := oracle.getChainMetaOracleFetchInterval() + want := 3600 * 
time.Second + + if got != want { + t.Errorf("interval = %v, want %v", got, want) + } +} + +func TestStop_WithoutStart_NoPanic(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-stop", 30, 5, logger) + + // Stop without Start should not panic or deadlock. + done := make(chan struct{}) + go func() { + defer close(done) + oracle.Stop() + }() + + select { + case <-done: + // success + case <-time.After(2 * time.Second): + t.Fatal("Stop() without prior Start() should not block indefinitely") + } +} + +func TestStartStop_ContextCancellation(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-ctx", 30, 5, logger) + + ctx, cancel := context.WithCancel(context.Background()) + + err := oracle.Start(ctx) + if err != nil { + t.Fatalf("Start returned error: %v", err) + } + + // Cancel the context, which should cause fetchAndVoteChainMeta to return. + cancel() + + // Wait for the goroutine to finish via wg. + done := make(chan struct{}) + go func() { + defer close(done) + oracle.wg.Wait() + }() + + select { + case <-done: + // success – goroutine exited via context cancellation + case <-time.After(3 * time.Second): + t.Fatal("goroutine did not exit after context cancellation") + } +} + +func TestStartStop_ViaStopMethod(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-stop-method", 30, 5, logger) + + ctx := context.Background() + + err := oracle.Start(ctx) + if err != nil { + t.Fatalf("Start returned error: %v", err) + } + + // Give the goroutine a moment to enter the select loop. 
+ time.Sleep(50 * time.Millisecond) + + done := make(chan struct{}) + go func() { + defer close(done) oracle.Stop() - }) -} - -func TestChainMetaOracleStruct(t *testing.T) { - t.Run("struct has expected fields", func(t *testing.T) { - oracle := &ChainMetaOracle{} - assert.Nil(t, oracle.rpcClient) - assert.Nil(t, oracle.pushSigner) - assert.Empty(t, oracle.chainID) - assert.Equal(t, 0, oracle.gasPriceIntervalSeconds) - assert.Equal(t, 0, oracle.gasPriceMarkupPercent) - assert.Nil(t, oracle.stopCh) - }) + }() + + select { + case <-done: + // success – goroutine exited via Stop() + case <-time.After(3 * time.Second): + t.Fatal("Stop() did not complete in time") + } +} + +func TestNewChainMetaOracle_StopChannelIsOpen(t *testing.T) { + logger := zerolog.Nop() + oracle := NewChainMetaOracle(nil, nil, "chain-ch", 30, 5, logger) + + // The stopCh should be open (not closed) after construction. + select { + case <-oracle.stopCh: + t.Error("stopCh should be open after construction, but it was closed") + default: + // expected – channel is open + } +} + +func TestNewChainMetaOracle_DifferentChainIDs(t *testing.T) { + logger := zerolog.Nop() + + chainIDs := []string{ + "solana-mainnet", + "solana-devnet", + "SVM_DEVNET", + "chain:custom:123", + "a]very[strange-id", + } + + for _, id := range chainIDs { + oracle := NewChainMetaOracle(nil, nil, id, 30, 5, logger) + if oracle.chainID != id { + t.Errorf("chainID = %q, want %q", oracle.chainID, id) + } + } } diff --git a/universalClient/chains/svm/client_test.go b/universalClient/chains/svm/client_test.go new file mode 100644 index 00000000..50f1084f --- /dev/null +++ b/universalClient/chains/svm/client_test.go @@ -0,0 +1,428 @@ +package svm + +import ( + "testing" + + "github.com/rs/zerolog" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/pushchain/push-chain-node/universalClient/config" + "github.com/pushchain/push-chain-node/universalClient/db" + uregistrytypes 
"github.com/pushchain/push-chain-node/x/uregistry/types" +) + +// testChainConfig creates a test chain-specific config with RPC URLs. +func testChainConfig(rpcURLs []string) *config.ChainSpecificConfig { + return &config.ChainSpecificConfig{ + RPCURLs: rpcURLs, + } +} + +// validSVMChainID returns a valid CAIP-2 Solana chain ID for testing. +func validSVMChainID() string { + return "solana:EtWTRABZaYq6iMfeYKouRu166VU2xqa1" +} + +// validChainConfig returns a minimal valid uregistrytypes.ChainConfig for SVM tests. +func validChainConfig() *uregistrytypes.ChainConfig { + return &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_SVM, + } +} + +func TestNewClient_NilConfig(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + client, err := NewClient(nil, nil, nil, nil, "", logger) + assert.Error(t, err) + assert.Nil(t, client) + assert.Contains(t, err.Error(), "config is nil") +} + +func TestNewClient_InvalidVMType(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_EVM, // wrong VM type + } + + client, err := NewClient(cfg, nil, nil, nil, "", logger) + assert.Error(t, err) + assert.Nil(t, client) + assert.Contains(t, err.Error(), "invalid VM type for Solana client") +} + +func TestNewClient_InvalidChainID(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: "eip155:1", // not a solana chain + VmType: uregistrytypes.VmType_SVM, + } + + client, err := NewClient(cfg, nil, testChainConfig([]string{"https://rpc.example.com"}), nil, "", logger) + assert.Error(t, err) + assert.Nil(t, client) + assert.Contains(t, err.Error(), "failed to parse chain ID") +} + +func TestNewClient_NoRPCURLs_NilChainConfig(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + + client, err := NewClient(cfg, nil, nil, nil, "", 
logger) + assert.Error(t, err) + assert.Nil(t, client) + assert.Contains(t, err.Error(), "no RPC URLs configured") +} + +func TestNewClient_NoRPCURLs_EmptySlice(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + + client, err := NewClient(cfg, nil, testChainConfig([]string{}), nil, "", logger) + assert.Error(t, err) + assert.Nil(t, client) + assert.Contains(t, err.Error(), "no RPC URLs configured") +} + +func TestNewClient_ValidCreation(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_SVM, + GatewayAddress: "SomeGateway111111111111111111111111111111111", + Enabled: &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: true}, + } + + chainSpecific := testChainConfig([]string{"https://api.mainnet-beta.solana.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "/tmp/node", logger) + require.NoError(t, err) + require.NotNil(t, client) + + assert.Equal(t, validSVMChainID(), client.ChainID()) + assert.Equal(t, cfg, client.GetConfig()) + assert.Equal(t, "EtWTRABZaYq6iMfeYKouRu166VU2xqa1", client.genesisHash) +} + +func TestNewClient_WithDatabase(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + require.NotNil(t, database) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://api.mainnet-beta.solana.com"}) + + client, err := NewClient(cfg, database, chainSpecific, nil, "", logger) + require.NoError(t, err) + require.NotNil(t, client) + assert.Equal(t, database, client.database) +} + +func TestChainID(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + 
assert.Equal(t, validSVMChainID(), client.ChainID()) +} + +func TestGetConfig(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_SVM, + GatewayAddress: "SomeGateway", + } + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + got := client.GetConfig() + assert.Equal(t, cfg, got) + assert.Equal(t, "SomeGateway", got.GatewayAddress) +} + +func TestGetTxBuilder_NilBeforeStart(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + txb, err := client.GetTxBuilder() + assert.Error(t, err) + assert.Nil(t, txb) + assert.Contains(t, err.Error(), "txBuilder not available") +} + +func TestIsHealthy_NotStarted(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + // rpcClient is nil before Start + assert.False(t, client.IsHealthy()) +} + +func TestStop_BeforeStart(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + // Calling Stop before Start should not panic + err = client.Stop() + assert.NoError(t, err) +} + +func TestStop_CalledTwice(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, 
err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + // Double stop should be safe + assert.NoError(t, client.Stop()) + assert.NoError(t, client.Stop()) +} + +func TestApplyDefaults_AllDefaults(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := validChainConfig() + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + defaults := client.applyDefaults() + + assert.Equal(t, 5, defaults.eventPollingInterval) + assert.Equal(t, 30, defaults.gasPriceInterval) + assert.Equal(t, uint64(5), defaults.fastConfirmations) + assert.Equal(t, uint64(12), defaults.standardConfirmations) + assert.Equal(t, 0, defaults.gasPriceMarkupPercent) +} + +func TestApplyDefaults_EventPollingOverride(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + polling := 15 + chainSpecific := &config.ChainSpecificConfig{ + RPCURLs: []string{"https://rpc.example.com"}, + EventPollingIntervalSeconds: &polling, + } + + cfg := validChainConfig() + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + defaults := client.applyDefaults() + assert.Equal(t, 15, defaults.eventPollingInterval) +} + +func TestApplyDefaults_GasPriceOverride(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + gasPriceInterval := 60 + gasPriceMarkup := 20 + chainSpecific := &config.ChainSpecificConfig{ + RPCURLs: []string{"https://rpc.example.com"}, + GasPriceIntervalSeconds: &gasPriceInterval, + GasPriceMarkupPercent: &gasPriceMarkup, + } + + cfg := validChainConfig() + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + defaults := client.applyDefaults() + assert.Equal(t, 60, defaults.gasPriceInterval) + assert.Equal(t, 20, defaults.gasPriceMarkupPercent) +} + +func TestApplyDefaults_BlockConfirmationOverride(t *testing.T) { + logger 
:= zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_SVM, + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + FastInbound: 3, + StandardInbound: 20, + }, + } + chainSpecific := testChainConfig([]string{"https://rpc.example.com"}) + + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + defaults := client.applyDefaults() + assert.Equal(t, uint64(3), defaults.fastConfirmations) + assert.Equal(t, uint64(20), defaults.standardConfirmations) +} + +func TestApplyDefaults_ZeroValueNotApplied(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + // Zero values should not override defaults (the code checks > 0) + zeroPolling := 0 + zeroGasInterval := 0 + zeroMarkup := 0 + chainSpecific := &config.ChainSpecificConfig{ + RPCURLs: []string{"https://rpc.example.com"}, + EventPollingIntervalSeconds: &zeroPolling, + GasPriceIntervalSeconds: &zeroGasInterval, + GasPriceMarkupPercent: &zeroMarkup, + } + + cfg := validChainConfig() + client, err := NewClient(cfg, nil, chainSpecific, nil, "", logger) + require.NoError(t, err) + + defaults := client.applyDefaults() + // Zero values should not override defaults + assert.Equal(t, 5, defaults.eventPollingInterval) + assert.Equal(t, 30, defaults.gasPriceInterval) + assert.Equal(t, 0, defaults.gasPriceMarkupPercent) // 0 is the default too +} + +func TestParseSolanaChainID(t *testing.T) { + tests := []struct { + name string + input string + expected string + expectErr bool + errContains string + }{ + { + name: "Valid mainnet", + input: "solana:EtWTRABZaYq6iMfeYKouRu166VU2xqa1", + expected: "EtWTRABZaYq6iMfeYKouRu166VU2xqa1", + }, + { + name: "Valid devnet", + input: "solana:8E9rvCKLFQia2Y35HXjjpWzj8weVo44K", + expected: "8E9rvCKLFQia2Y35HXjjpWzj8weVo44K", + }, + { + name: "Invalid format - no colon", + input: "solanaEtWTRABZaYq6iMfeYKouRu166VU2xqa1", + expectErr: true, + errContains: 
"invalid CAIP-2 format", + }, + { + name: "Invalid format - too many colons", + input: "solana:abc:def", + expectErr: true, + errContains: "invalid CAIP-2 format", + }, + { + name: "Wrong namespace", + input: "eip155:1", + expectErr: true, + errContains: "not a Solana chain", + }, + { + name: "Empty genesis hash", + input: "solana:", + expectErr: true, + errContains: "empty genesis hash", + }, + { + name: "Empty string", + input: "", + expectErr: true, + errContains: "invalid CAIP-2 format", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result, err := parseSolanaChainID(tt.input) + + if tt.expectErr { + assert.Error(t, err) + assert.Contains(t, err.Error(), tt.errContains) + } else { + assert.NoError(t, err) + assert.Equal(t, tt.expected, result) + } + }) + } +} + +func TestNewClient_FullConfigGetters(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + + cfg := &uregistrytypes.ChainConfig{ + Chain: validSVMChainID(), + VmType: uregistrytypes.VmType_SVM, + GatewayAddress: "GatewayPubkey111111111111111111111111111111111", + Enabled: &uregistrytypes.ChainEnabled{IsInboundEnabled: true, IsOutboundEnabled: false}, + BlockConfirmation: &uregistrytypes.BlockConfirmation{ + FastInbound: 2, + StandardInbound: 15, + }, + } + + polling := 10 + gasInterval := 45 + gasMarkup := 15 + chainSpecific := &config.ChainSpecificConfig{ + RPCURLs: []string{"https://api.mainnet-beta.solana.com"}, + EventPollingIntervalSeconds: &polling, + GasPriceIntervalSeconds: &gasInterval, + GasPriceMarkupPercent: &gasMarkup, + } + + client, err := NewClient(cfg, nil, chainSpecific, nil, "/tmp/home", logger) + require.NoError(t, err) + + // Verify all getters + assert.Equal(t, validSVMChainID(), client.ChainID()) + assert.Equal(t, cfg, client.GetConfig()) + assert.False(t, client.IsHealthy()) // not started + + _, txErr := client.GetTxBuilder() + assert.Error(t, txErr) + + // Verify applyDefaults picks up overrides + defaults := client.applyDefaults() 
+ assert.Equal(t, 10, defaults.eventPollingInterval) + assert.Equal(t, 45, defaults.gasPriceInterval) + assert.Equal(t, 15, defaults.gasPriceMarkupPercent) + assert.Equal(t, uint64(2), defaults.fastConfirmations) + assert.Equal(t, uint64(15), defaults.standardConfirmations) + + // Stop should be safe even without Start + assert.NoError(t, client.Stop()) +} diff --git a/universalClient/chains/svm/event_confirmer_test.go b/universalClient/chains/svm/event_confirmer_test.go index 005efbe9..bf38fb2d 100644 --- a/universalClient/chains/svm/event_confirmer_test.go +++ b/universalClient/chains/svm/event_confirmer_test.go @@ -1,12 +1,16 @@ package svm import ( + "context" "testing" + "time" - "github.com/pushchain/push-chain-node/universalClient/store" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/pushchain/push-chain-node/universalClient/db" + "github.com/pushchain/push-chain-node/universalClient/store" ) func TestNewEventConfirmer(t *testing.T) { @@ -44,6 +48,33 @@ func TestNewEventConfirmer(t *testing.T) { assert.Equal(t, tc.standard, confirmer.standardConfirmations) } }) + + t.Run("nil rpc client is accepted", func(t *testing.T) { + logger := zerolog.Nop() + confirmer := NewEventConfirmer(nil, nil, "solana:test", 5, 5, 12, logger) + require.NotNil(t, confirmer) + assert.Nil(t, confirmer.rpcClient) + }) + + t.Run("with in-memory database", func(t *testing.T) { + logger := zerolog.Nop() + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + confirmer := NewEventConfirmer(nil, database, "solana:test", 10, 3, 15, logger) + require.NotNil(t, confirmer) + assert.Equal(t, 10, confirmer.pollIntervalSeconds) + assert.Equal(t, uint64(3), confirmer.fastConfirmations) + assert.Equal(t, uint64(15), confirmer.standardConfirmations) + }) + + t.Run("zero confirmations stored as-is", func(t *testing.T) { + logger := zerolog.Nop() + confirmer := NewEventConfirmer(nil, nil, "solana:test", 5, 0, 0, 
logger) + require.NotNil(t, confirmer) + assert.Equal(t, uint64(0), confirmer.fastConfirmations) + assert.Equal(t, uint64(0), confirmer.standardConfirmations) + }) } func TestEventConfirmerGetTxSignatureFromEventID(t *testing.T) { @@ -69,8 +100,7 @@ func TestEventConfirmerGetTxSignatureFromEventID(t *testing.T) { }) t.Run("returns empty string for empty event ID", func(t *testing.T) { - eventID := "" - sig := confirmer.getTxSignatureFromEventID(eventID) + sig := confirmer.getTxSignatureFromEventID("") assert.Empty(t, sig) }) @@ -79,6 +109,11 @@ func TestEventConfirmerGetTxSignatureFromEventID(t *testing.T) { sig := confirmer.getTxSignatureFromEventID(eventID) assert.Equal(t, "sig", sig) }) + + t.Run("colon at start returns empty", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID(":42") + assert.Equal(t, "", sig) + }) } func TestEventConfirmerGetRequiredConfirmations(t *testing.T) { @@ -108,10 +143,16 @@ func TestEventConfirmerGetRequiredConfirmations(t *testing.T) { assert.Equal(t, uint64(12), confirmations) // Default is 12 }) - t.Run("unknown type defaults to standard", func(t *testing.T) { - confirmer := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 15, logger) + t.Run("unknown type defaults to standard configured", func(t *testing.T) { + confirmer := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 25, logger) confirmations := confirmer.getRequiredConfirmations("UNKNOWN") - assert.Equal(t, uint64(15), confirmations) + assert.Equal(t, uint64(25), confirmations) + }) + + t.Run("unknown type with zero falls back to default 12", func(t *testing.T) { + confirmer := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 0, 0, logger) + confirmations := confirmer.getRequiredConfirmations("UNKNOWN") + assert.Equal(t, uint64(12), confirmations) }) t.Run("empty type defaults to standard", func(t *testing.T) { @@ -122,12 +163,13 @@ func TestEventConfirmerGetRequiredConfirmations(t *testing.T) { } func TestEventConfirmerStop(t *testing.T) { - 
t.Run("stop waits for goroutine", func(t *testing.T) { + t.Run("stop without start does not panic", func(t *testing.T) { logger := zerolog.Nop() confirmer := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 12, logger) - // Should not panic or hang - confirmer.Stop() + assert.NotPanics(t, func() { + confirmer.Stop() + }) }) } @@ -143,3 +185,180 @@ func TestEventConfirmerStruct(t *testing.T) { assert.Nil(t, ec.stopCh) }) } + +func TestEventConfirmer_StartStop_ContextCancel(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + // rpcClient is nil so processPendingEvents will error, but the goroutine + // should still shut down cleanly via context cancellation. + ec := NewEventConfirmer(nil, database, "solana:test", 1, 5, 12, logger) + + ctx, cancel := context.WithCancel(context.Background()) + err = ec.Start(ctx) + require.NoError(t, err) + + // Cancel context and verify shutdown completes + cancel() + + done := make(chan struct{}) + go func() { + ec.wg.Wait() + close(done) + }() + + select { + case <-done: + // success + case <-time.After(5 * time.Second): + t.Fatal("event confirmer did not stop after context cancellation") + } +} + +func TestEventConfirmer_StartStop_StopChannel(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + ec := NewEventConfirmer(nil, database, "solana:test", 1, 5, 12, logger) + + ctx := context.Background() + err = ec.Start(ctx) + require.NoError(t, err) + + // Stop via the Stop() method + done := make(chan struct{}) + go func() { + ec.Stop() + close(done) + }() + + select { + case <-done: + // success + case <-time.After(5 * time.Second): + t.Fatal("event confirmer did not stop after Stop() call") + } +} + +func TestEventConfirmerGetTxSignatureFromEventID_MoreEdgeCases(t *testing.T) { + logger := zerolog.Nop() + confirmer := NewEventConfirmer(nil, nil, 
"solana:mainnet", 5, 5, 12, logger) + + t.Run("trailing colon returns signature", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID("abc123:") + assert.Equal(t, "abc123", sig) + }) + + t.Run("colon only returns empty", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID(":") + assert.Equal(t, "", sig) + }) + + t.Run("whitespace-only returns as-is", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID(" ") + assert.Equal(t, " ", sig) + }) + + t.Run("full 88-char base58 signature with log index", func(t *testing.T) { + fullSig := "5VERv8NMvzbJMEkV8xnrLkEaWRtSz9CosKDYjCJjBRnbJLgp8uirBgmQpjKhoR4tjF3ZpRzrFmBV6UjKdiSZkQUW" + eventID := fullSig + ":42" + sig := confirmer.getTxSignatureFromEventID(eventID) + assert.Equal(t, fullSig, sig) + }) + + t.Run("numeric-only event ID returns as-is", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID("123456789") + assert.Equal(t, "123456789", sig) + }) + + t.Run("special characters in signature part", func(t *testing.T) { + sig := confirmer.getTxSignatureFromEventID("abc+def/ghi=:0") + assert.Equal(t, "abc+def/ghi=", sig) + }) +} + +func TestEventConfirmerGetRequiredConfirmations_MoreEdgeCases(t *testing.T) { + logger := zerolog.Nop() + + t.Run("large fast confirmations", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 1000000, 12, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(1000000), result) + }) + + t.Run("large standard confirmations", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 500000, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(500000), result) + }) + + t.Run("fast 1 confirmation", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 1, 12, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(1), result) + }) + + t.Run("standard 
1 confirmation", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 1, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(1), result) + }) + + t.Run("unknown type with large standard returns large value", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 5, 300, logger) + result := ec.getRequiredConfirmations("SUPER_SAFE") + assert.Equal(t, uint64(300), result) + }) + + t.Run("all types consistent when same values", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 10, 10, logger) + fast := ec.getRequiredConfirmations(store.ConfirmationFast) + standard := ec.getRequiredConfirmations(store.ConfirmationStandard) + unknown := ec.getRequiredConfirmations("OTHER") + assert.Equal(t, uint64(10), fast) + assert.Equal(t, uint64(10), standard) + assert.Equal(t, uint64(10), unknown) + }) + + t.Run("zero fast falls back to default 5", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 0, 20, logger) + result := ec.getRequiredConfirmations(store.ConfirmationFast) + assert.Equal(t, uint64(5), result) // default 5 + }) + + t.Run("zero standard falls back to default 12", func(t *testing.T) { + ec := NewEventConfirmer(nil, nil, "solana:mainnet", 5, 10, 0, logger) + result := ec.getRequiredConfirmations(store.ConfirmationStandard) + assert.Equal(t, uint64(12), result) // default 12 + }) +} + +func TestEventConfirmer_StartStop_ZeroPollInterval(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + // Zero poll interval should default to 5s internally in checkAndConfirmEvents + ec := NewEventConfirmer(nil, database, "solana:test", 0, 5, 12, logger) + + ctx, cancel := context.WithCancel(context.Background()) + err = ec.Start(ctx) + require.NoError(t, err) + + cancel() + + done := make(chan struct{}) + go func() { + ec.wg.Wait() + close(done) + 
}() + + select { + case <-done: + // success + case <-time.After(5 * time.Second): + t.Fatal("event confirmer did not stop after context cancellation with zero poll interval") + } +} diff --git a/universalClient/chains/svm/event_listener_test.go b/universalClient/chains/svm/event_listener_test.go new file mode 100644 index 00000000..a4fae52b --- /dev/null +++ b/universalClient/chains/svm/event_listener_test.go @@ -0,0 +1,427 @@ +package svm + +import ( + "context" + "encoding/base64" + "encoding/hex" + "testing" + "time" + + "github.com/rs/zerolog" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/pushchain/push-chain-node/universalClient/db" + uregistrytypes "github.com/pushchain/push-chain-node/x/uregistry/types" +) + +func TestNewEventListener_Valid(t *testing.T) { + logger := zerolog.Nop() + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + methods := []*uregistrytypes.GatewayMethods{ + { + Name: EventTypeSendFunds, + EventIdentifier: "abcdef0123456789", + }, + } + + el, err := NewEventListener(nil, "GatewayAddr111111111111111111111111111111111", "solana:test", methods, database, 10, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + + assert.Equal(t, "solana:test", el.chainID) + assert.Equal(t, "GatewayAddr111111111111111111111111111111111", el.gatewayAddress) + assert.Equal(t, 10, el.eventPollingSeconds) + assert.False(t, el.running) + assert.NotNil(t, el.stopCh) +} + +func TestNewEventListener_EmptyGateway(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "", "solana:test", nil, nil, 5, nil, logger) + assert.Error(t, err) + assert.Nil(t, el) + assert.Contains(t, err.Error(), "gateway address not configured") +} + +func TestNewEventListener_EmptyChainID(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "", nil, nil, 5, nil, logger) + assert.Error(t, err) + assert.Nil(t, el) + assert.Contains(t, err.Error(), 
"chain ID not configured") +} + +func TestNewEventListener_NilRPCClient(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + assert.Nil(t, el.rpcClient) +} + +func TestNewEventListener_NilMethods(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + assert.Empty(t, el.discriminatorToEventType) +} + +func TestNewEventListener_DiscriminatorMapping(t *testing.T) { + logger := zerolog.Nop() + + methods := []*uregistrytypes.GatewayMethods{ + { + Name: EventTypeSendFunds, + EventIdentifier: "AABB0011CCDD2233", + }, + { + Name: EventTypeFinalizeUniversalTx, + EventIdentifier: "1122334455667788", + }, + { + Name: EventTypeRevertUniversalTx, + EventIdentifier: "DEADBEEF01234567", + }, + { + Name: "unknown_method", // not a recognized event type + EventIdentifier: "ffffffffffffffff", + }, + { + Name: EventTypeSendFunds, + EventIdentifier: "", // empty identifier should be skipped + }, + } + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", methods, nil, 5, nil, logger) + require.NoError(t, err) + require.NotNil(t, el) + + // Should have 3 entries (unknown_method skipped, empty identifier skipped) + assert.Len(t, el.discriminatorToEventType, 3) + assert.Equal(t, EventTypeSendFunds, el.discriminatorToEventType["aabb0011ccdd2233"]) + assert.Equal(t, EventTypeFinalizeUniversalTx, el.discriminatorToEventType["1122334455667788"]) + assert.Equal(t, EventTypeRevertUniversalTx, el.discriminatorToEventType["deadbeef01234567"]) +} + +func TestNewEventListener_EventStartFrom(t *testing.T) { + logger := zerolog.Nop() + + startFrom := int64(100) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, &startFrom, logger) + require.NoError(t, err) + require.NotNil(t, el) + require.NotNil(t, 
el.eventStartFrom) + assert.Equal(t, int64(100), *el.eventStartFrom) +} + +func TestEventListener_GetPollingInterval(t *testing.T) { + logger := zerolog.Nop() + + t.Run("positive value returns configured interval", func(t *testing.T) { + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 15, nil, logger) + require.NoError(t, err) + assert.Equal(t, 15*time.Second, el.getPollingInterval()) + }) + + t.Run("zero returns default 5s", func(t *testing.T) { + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 0, nil, logger) + require.NoError(t, err) + assert.Equal(t, 5*time.Second, el.getPollingInterval()) + }) + + t.Run("negative returns default 5s", func(t *testing.T) { + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, -1, nil, logger) + require.NoError(t, err) + assert.Equal(t, 5*time.Second, el.getPollingInterval()) + }) + + t.Run("one second", func(t *testing.T) { + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 1, nil, logger) + require.NoError(t, err) + assert.Equal(t, 1*time.Second, el.getPollingInterval()) + }) +} + +func TestEventListener_DetermineEventType(t *testing.T) { + logger := zerolog.Nop() + + // Build a known discriminator: 8 bytes -> hex -> lowercase + discriminatorBytes := []byte{0xAA, 0xBB, 0xCC, 0xDD, 0x11, 0x22, 0x33, 0x44} + discriminatorHex := hex.EncodeToString(discriminatorBytes) // "aabbccdd11223344" + + methods := []*uregistrytypes.GatewayMethods{ + { + Name: EventTypeSendFunds, + EventIdentifier: discriminatorHex, + }, + } + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", methods, nil, 5, nil, logger) + require.NoError(t, err) + + t.Run("matching discriminator returns event type", func(t *testing.T) { + payload := append(discriminatorBytes, []byte("extra data here")...) 
+ encoded := base64.StdEncoding.EncodeToString(payload) + log := "Program data: " + encoded + + eventType := el.determineEventType(log) + assert.Equal(t, EventTypeSendFunds, eventType) + }) + + t.Run("non-matching discriminator returns empty", func(t *testing.T) { + otherBytes := []byte{0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF} + payload := append(otherBytes, []byte("extra")...) + encoded := base64.StdEncoding.EncodeToString(payload) + log := "Program data: " + encoded + + eventType := el.determineEventType(log) + assert.Empty(t, eventType) + }) + + t.Run("no Program data prefix returns empty", func(t *testing.T) { + eventType := el.determineEventType("Some other log message") + assert.Empty(t, eventType) + }) + + t.Run("invalid base64 returns empty", func(t *testing.T) { + eventType := el.determineEventType("Program data: !!!invalid-base64!!!") + assert.Empty(t, eventType) + }) + + t.Run("payload shorter than 8 bytes returns empty", func(t *testing.T) { + shortPayload := []byte{0xAA, 0xBB, 0xCC} + encoded := base64.StdEncoding.EncodeToString(shortPayload) + log := "Program data: " + encoded + + eventType := el.determineEventType(log) + assert.Empty(t, eventType) + }) + + t.Run("empty log returns empty", func(t *testing.T) { + eventType := el.determineEventType("") + assert.Empty(t, eventType) + }) + + t.Run("Program data with empty payload returns empty", func(t *testing.T) { + encoded := base64.StdEncoding.EncodeToString([]byte{}) + log := "Program data: " + encoded + + eventType := el.determineEventType(log) + assert.Empty(t, eventType) + }) + + t.Run("exactly 8 bytes matching discriminator", func(t *testing.T) { + encoded := base64.StdEncoding.EncodeToString(discriminatorBytes) + log := "Program data: " + encoded + + eventType := el.determineEventType(log) + assert.Equal(t, EventTypeSendFunds, eventType) + }) +} + +func TestEventListener_GetStartSlotFromConfig(t *testing.T) { + logger := zerolog.Nop() + + t.Run("positive eventStartFrom returns that slot", 
func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(5000) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, &startFrom, logger) + require.NoError(t, err) + + slot, err := el.getStartSlotFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(5000), slot) + }) + + t.Run("zero eventStartFrom returns 0", func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(0) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, &startFrom, logger) + require.NoError(t, err) + + slot, err := el.getStartSlotFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(0), slot) + }) + + t.Run("large positive eventStartFrom", func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(999999999) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, &startFrom, logger) + require.NoError(t, err) + + slot, err := el.getStartSlotFromConfig(context.Background()) + require.NoError(t, err) + assert.Equal(t, uint64(999999999), slot) + }) + + t.Run("minus one eventStartFrom with nil rpcClient panics", func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(-1) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, &startFrom, logger) + require.NoError(t, err) + + // rpcClient is nil, so calling GetLatestSlot panics + assert.Panics(t, func() { + el.getStartSlotFromConfig(context.Background()) + }) + }) + + t.Run("nil eventStartFrom with nil rpcClient panics", func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, nil, logger) + require.NoError(t, err) + + // nil rpcClient, nil 
eventStartFrom -> falls through to rpcClient.GetLatestSlot which panics + assert.Panics(t, func() { + el.getStartSlotFromConfig(context.Background()) + }) + }) + + t.Run("negative value less than -1 with nil rpcClient panics", func(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(-5) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 5, &startFrom, logger) + require.NoError(t, err) + + // -5 is < 0 but not -1, falls through to rpcClient.GetLatestSlot which panics + assert.Panics(t, func() { + el.getStartSlotFromConfig(context.Background()) + }) + }) +} + +func TestEventListener_StopNotRunning(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, nil, logger) + require.NoError(t, err) + + err = el.Stop() + assert.NoError(t, err) +} + +func TestEventListener_IsRunning_InitiallyFalse(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, nil, logger) + require.NoError(t, err) + + assert.False(t, el.IsRunning()) +} + +func TestEventListener_StopTwice(t *testing.T) { + logger := zerolog.Nop() + + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, nil, 5, nil, logger) + require.NoError(t, err) + + assert.NoError(t, el.Stop()) + assert.NoError(t, el.Stop()) +} + +func TestEventListener_StartStop_ContextCancel(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + // Use eventStartFrom to avoid rpcClient calls in getStartSlot + startFrom := int64(100) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 1, &startFrom, logger) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + cancel() + + done := make(chan 
struct{}) + go func() { + el.wg.Wait() + close(done) + }() + + select { + case <-done: + // success + case <-time.After(5 * time.Second): + t.Fatal("event listener did not stop after context cancellation") + } +} + +func TestEventListener_StartStop_StopMethod(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(100) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 1, &startFrom, logger) + require.NoError(t, err) + + ctx := context.Background() + err = el.Start(ctx) + require.NoError(t, err) + assert.True(t, el.IsRunning()) + + done := make(chan struct{}) + go func() { + stopErr := el.Stop() + assert.NoError(t, stopErr) + close(done) + }() + + select { + case <-done: + assert.False(t, el.IsRunning()) + case <-time.After(5 * time.Second): + t.Fatal("event listener did not stop after Stop() call") + } +} + +func TestEventListener_StartWhileRunning(t *testing.T) { + logger := zerolog.New(zerolog.NewTestWriter(t)) + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + + startFrom := int64(100) + el, err := NewEventListener(nil, "GatewayAddr", "solana:test", nil, database, 1, &startFrom, logger) + require.NoError(t, err) + + ctx, cancel := context.WithCancel(context.Background()) + defer cancel() + + err = el.Start(ctx) + require.NoError(t, err) + + // Starting again while running should return an error + err = el.Start(ctx) + assert.Error(t, err) + assert.Contains(t, err.Error(), "already running") + + // Clean up + cancel() + el.wg.Wait() +} diff --git a/universalClient/chains/svm/event_parser_test.go b/universalClient/chains/svm/event_parser_test.go index 950a8be6..f8782a18 100644 --- a/universalClient/chains/svm/event_parser_test.go +++ b/universalClient/chains/svm/event_parser_test.go @@ -11,206 +11,478 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + 
"github.com/pushchain/push-chain-node/universalClient/chains/common" "github.com/pushchain/push-chain-node/universalClient/store" ) -func TestParseOutboundObservationEvent(t *testing.T) { - logger := zerolog.New(nil).Level(zerolog.Disabled) - chainID := "solana:5eykt4UsFv8P8NJdTREpY1vzqKqZKvdp" - signature := "5wHu1qwD7q5xMkZxq6z2S3r4y5N7m8P9kL0jH1gF2dE3cB4aA5b6C7d8E9f0G1h2" - - // Helper to create base64-encoded log data with gas_fee - createLogData := func(discriminator []byte, txID []byte, universalTxID []byte, gasFee uint64) string { - data := make([]byte, 0, 80) - data = append(data, discriminator...) - data = append(data, txID...) - data = append(data, universalTxID...) - gasFeeBytes := make([]byte, 8) - binary.LittleEndian.PutUint64(gasFeeBytes, gasFee) - data = append(data, gasFeeBytes...) - return "Program data: " + base64.StdEncoding.EncodeToString(data) - } +func nopLogger() zerolog.Logger { + return zerolog.New(nil).Level(zerolog.Disabled) +} - // Example discriminator (8 bytes) - discriminator := make([]byte, 8) - for i := range discriminator { - discriminator[i] = byte(i + 1) // 0x01, 0x02, ..., 0x08 +// buildSendFundsLog constructs a valid "Program data: ..." log for a +// parseSendFundsEvent / decodeUniversalTxEvent call. +// +// Layout (Borsh): +// discriminator 8 bytes +// sender 32 bytes (Pubkey) +// recipient 20 bytes (byte20) +// bridge_token 32 bytes (Pubkey) +// bridge_amount 8 bytes (u64 LE) +// data_len 4 bytes (u32 LE) +// data variable +// revert_recip 32 bytes (Pubkey) +// tx_type 1 byte +// sig_len 4 bytes (u32 LE) +// sig_data variable +// fromCEA 1 byte +func buildSendFundsPayload( + sender [32]byte, + recipient [20]byte, + bridgeToken [32]byte, + bridgeAmount uint64, + payload []byte, + revertRecipient [32]byte, + txType uint8, + sigData []byte, + fromCEA bool, +) []byte { + buf := make([]byte, 0, 256) + // discriminator (8 bytes, arbitrary) + buf = append(buf, make([]byte, 8)...) + // sender + buf = append(buf, sender[:]...) 
+ // recipient + buf = append(buf, recipient[:]...) + // bridge_token + buf = append(buf, bridgeToken[:]...) + // bridge_amount + amt := make([]byte, 8) + binary.LittleEndian.PutUint64(amt, bridgeAmount) + buf = append(buf, amt...) + // data length + data + dlen := make([]byte, 4) + binary.LittleEndian.PutUint32(dlen, uint32(len(payload))) + buf = append(buf, dlen...) + buf = append(buf, payload...) + // revert_recipient + buf = append(buf, revertRecipient[:]...) + // tx_type + buf = append(buf, txType) + // sig_len + sig_data + slen := make([]byte, 4) + binary.LittleEndian.PutUint32(slen, uint32(len(sigData))) + buf = append(buf, slen...) + buf = append(buf, sigData...) + // fromCEA + if fromCEA { + buf = append(buf, 1) + } else { + buf = append(buf, 0) } + return buf +} - // Example txID (32 bytes) - txID := make([]byte, 32) - for i := range txID { - txID[i] = byte(0xAA) - } +func wrapAsLog(data []byte) string { + return "Program data: " + base64.StdEncoding.EncodeToString(data) +} - // Example universalTxID (32 bytes) - universalTxID := make([]byte, 32) - for i := range universalTxID { - universalTxID[i] = byte(0xBB) - } +// buildOutboundPayload builds the minimum 80-byte outbound event data. 
+func buildOutboundPayload(txID [32]byte, universalTxID [32]byte, gasFee uint64) []byte { + data := make([]byte, 80) + // discriminator (8 bytes, zeroed is fine) + copy(data[8:40], txID[:]) + copy(data[40:72], universalTxID[:]) + binary.LittleEndian.PutUint64(data[72:80], gasFee) + return data +} +func TestBase58ToHex(t *testing.T) { tests := []struct { - name string - log string - wantEvent bool - validate func(*testing.T, *store.Event) + name string + input string + want string + wantErr bool }{ { - name: "parses valid outbound observation event", - log: createLogData(discriminator, txID, universalTxID, 5000), - wantEvent: true, - validate: func(t *testing.T, event *store.Event) { - assert.Contains(t, event.EventID, signature) - assert.Equal(t, uint64(12345), event.BlockHeight) - assert.Equal(t, store.EventTypeOutbound, event.Type) - assert.Equal(t, store.StatusPending, event.Status) - assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) - - // Verify event data contains tx_id, universal_tx_id, and gas_fee_used - assert.NotNil(t, event.EventData) - var outboundData map[string]any - err := json.Unmarshal(event.EventData, &outboundData) - require.NoError(t, err) - - expectedTxID := "0x" + hex.EncodeToString(txID) - expectedUniversalTxID := "0x" + hex.EncodeToString(universalTxID) - - assert.Equal(t, expectedTxID, outboundData["tx_id"]) - assert.Equal(t, expectedUniversalTxID, outboundData["universal_tx_id"]) - assert.Equal(t, "5000", outboundData["gas_fee_used"]) - }, - }, - { - name: "returns nil for log without Program data prefix", - log: "Some other log message", - wantEvent: false, - }, - { - name: "returns nil for empty log", - log: "", - wantEvent: false, + name: "empty string returns 0x", + input: "", + want: "0x", }, { - name: "returns nil for invalid base64", - log: "Program data: not-valid-base64!!!", - wantEvent: false, + name: "known base58 value", + input: "1", // base58 "1" decodes to a single 0x00 byte + want: "0x00", }, { - name: 
"returns nil for data too short (less than 80 bytes)", - log: func() string { - // Only 72 bytes (8 discriminator + 32 txID + 32 universalTxID, missing gas_fee) - shortData := make([]byte, 72) - copy(shortData[:8], discriminator) - copy(shortData[8:40], txID) - copy(shortData[40:72], universalTxID) - return "Program data: " + base64.StdEncoding.EncodeToString(shortData) - }(), - wantEvent: false, + name: "known base58 multi-byte", + input: "2g", // base58 "2g" decodes to 0x61 + want: "0x61", }, { - name: "correctly parses minimum valid data (exactly 80 bytes)", - log: func() string { - exactData := make([]byte, 80) - copy(exactData[:8], discriminator) - for i := 8; i < 40; i++ { - exactData[i] = 0x11 // txID - } - for i := 40; i < 72; i++ { - exactData[i] = 0x22 // universalTxID - } - binary.LittleEndian.PutUint64(exactData[72:80], 12345) // gas_fee - return "Program data: " + base64.StdEncoding.EncodeToString(exactData) - }(), - wantEvent: true, - validate: func(t *testing.T, event *store.Event) { - var outboundData map[string]any - err := json.Unmarshal(event.EventData, &outboundData) - require.NoError(t, err) - - assert.Contains(t, outboundData["tx_id"], "0x1111") - assert.Contains(t, outboundData["universal_tx_id"], "0x2222") - assert.Equal(t, "12345", outboundData["gas_fee_used"]) - }, + name: "invalid base58 characters", + input: "0OlI", // 0, O, l, I are not in base58 alphabet + wantErr: true, }, { - name: "handles data longer than 80 bytes", - log: func() string { - // 120 bytes - extra data after the required fields should be ignored - longData := make([]byte, 120) - copy(longData[:8], discriminator) - copy(longData[8:40], txID) - copy(longData[40:72], universalTxID) - binary.LittleEndian.PutUint64(longData[72:80], 9999) // gas_fee - // Extra bytes at the end - for i := 80; i < 120; i++ { - longData[i] = 0xFF - } - return "Program data: " + base64.StdEncoding.EncodeToString(longData) - }(), - wantEvent: true, - validate: func(t *testing.T, event 
*store.Event) { - var outboundData map[string]any - err := json.Unmarshal(event.EventData, &outboundData) - require.NoError(t, err) - - expectedTxID := "0x" + hex.EncodeToString(txID) - expectedUniversalTxID := "0x" + hex.EncodeToString(universalTxID) - - assert.Equal(t, expectedTxID, outboundData["tx_id"]) - assert.Equal(t, expectedUniversalTxID, outboundData["universal_tx_id"]) - assert.Equal(t, "9999", outboundData["gas_fee_used"]) - }, + name: "valid Solana pubkey", + // 11111111111111111111111111111111 is the system program + input: "11111111111111111111111111111111", + want: "0x" + "0000000000000000000000000000000000000000000000000000000000000000", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - event := ParseEvent(tt.log, signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) - - if tt.wantEvent { - require.NotNil(t, event) - if tt.validate != nil { - tt.validate(t, event) - } - } else { - assert.Nil(t, event) + got, err := base58ToHex(tt.input) + if tt.wantErr { + assert.Error(t, err) + return } + require.NoError(t, err) + assert.Equal(t, tt.want, got) }) } } -func TestParseEvent_EventTypes(t *testing.T) { - logger := zerolog.New(nil).Level(zerolog.Disabled) +func TestParseEvent_Routing(t *testing.T) { + logger := nopLogger() chainID := "solana:5eykt4UsFv8P8NJdTREpY1vzqKqZKvdp" - signature := "testSignature123" + sig := "testSig" + + // Build valid inbound data + var sender, token, revert [32]byte + var recipient [20]byte + for i := range sender { + sender[i] = 0x01 + } + for i := range recipient { + recipient[i] = 0x02 + } + for i := range token { + token[i] = 0x03 + } + inboundData := buildSendFundsPayload(sender, recipient, token, 100, nil, revert, 0, nil, false) + inboundLog := wrapAsLog(inboundData) + + // Build valid outbound data (80 bytes) + var txID, utxID [32]byte + for i := range txID { + txID[i] = 0xAA + } + for i := range utxID { + utxID[i] = 0xBB + } + outboundData := buildOutboundPayload(txID, utxID, 5000) + 
outboundLog := wrapAsLog(outboundData) - t.Run("returns nil for unknown event type", func(t *testing.T) { - log := "Program data: " + base64.StdEncoding.EncodeToString(make([]byte, 100)) - event := ParseEvent(log, signature, 12345, 0, "unknownEventType", chainID, logger) + t.Run("send_funds routes to inbound parser", func(t *testing.T) { + event := ParseEvent(inboundLog, sig, 100, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.EventTypeInbound, event.Type) + }) + + t.Run("finalize_universal_tx routes to outbound parser", func(t *testing.T) { + event := ParseEvent(outboundLog, sig, 100, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.EventTypeOutbound, event.Type) + }) + + t.Run("revert_universal_tx routes to outbound parser", func(t *testing.T) { + event := ParseEvent(outboundLog, sig, 100, 0, EventTypeRevertUniversalTx, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.EventTypeOutbound, event.Type) + }) + + t.Run("unknown event type returns nil", func(t *testing.T) { + event := ParseEvent(outboundLog, sig, 100, 0, "some_unknown_type", chainID, logger) assert.Nil(t, event) }) - t.Run("returns nil for empty event type", func(t *testing.T) { - log := "Program data: " + base64.StdEncoding.EncodeToString(make([]byte, 100)) - event := ParseEvent(log, signature, 12345, 0, "", chainID, logger) + t.Run("empty event type returns nil", func(t *testing.T) { + event := ParseEvent(outboundLog, sig, 100, 0, "", chainID, logger) assert.Nil(t, event) }) } -func TestParseOutboundObservationEvent_EventIDFormat(t *testing.T) { - logger := zerolog.New(nil).Level(zerolog.Disabled) +func TestParseSendFundsEvent(t *testing.T) { + logger := nopLogger() + chainID := "solana:5eykt4UsFv8P8NJdTREpY1vzqKqZKvdp" + sig := "inboundSig123" + + t.Run("returns nil for log without Program data prefix", func(t *testing.T) { + event := ParseEvent("some random log line", sig, 10, 0, 
EventTypeSendFunds, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for empty log", func(t *testing.T) { + event := ParseEvent("", sig, 10, 0, EventTypeSendFunds, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for invalid base64", func(t *testing.T) { + event := ParseEvent("Program data: !!!not-b64!!!", sig, 10, 0, EventTypeSendFunds, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for data shorter than 8 bytes", func(t *testing.T) { + short := wrapAsLog([]byte{0x01, 0x02, 0x03}) + event := ParseEvent(short, sig, 10, 0, EventTypeSendFunds, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("parses a full valid inbound event", func(t *testing.T) { + var sender [32]byte + var recipient [20]byte + var token [32]byte + var revert [32]byte + for i := range sender { + sender[i] = byte(i + 1) + } + for i := range recipient { + recipient[i] = byte(0xAB) + } + for i := range token { + token[i] = byte(0xCD) + } + for i := range revert { + revert[i] = byte(0xEF) + } + rawPayload := []byte("hello world") + sigData := []byte{0xDE, 0xAD, 0xBE, 0xEF} + + data := buildSendFundsPayload(sender, recipient, token, 42000, rawPayload, revert, 1, sigData, true) + log := wrapAsLog(data) + + event := ParseEvent(log, sig, 500, 3, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + + // Check event metadata + assert.Equal(t, "inboundSig123:3", event.EventID) + assert.Equal(t, uint64(500), event.BlockHeight) + assert.Equal(t, store.EventTypeInbound, event.Type) + assert.Equal(t, store.StatusPending, event.Status) + assert.Equal(t, uint64(0), event.ExpiryBlockHeight) + + // TxType 1 should give FAST confirmation + assert.Equal(t, store.ConfirmationFast, event.ConfirmationType) + + // Unmarshal EventData + var utx common.UniversalTx + require.NoError(t, json.Unmarshal(event.EventData, &utx)) + + assert.Equal(t, chainID, utx.SourceChain) + assert.Equal(t, uint(3), utx.LogIndex) + assert.Equal(t, 
"0x"+hex.EncodeToString(recipient[:]), utx.Recipient) + assert.Equal(t, "42000", utx.Amount) + assert.Equal(t, "0x"+hex.EncodeToString(rawPayload), utx.RawPayload) + assert.Equal(t, "0x"+hex.EncodeToString(sigData), utx.VerificationData) + assert.Equal(t, uint(1), utx.TxType) + assert.True(t, utx.FromCEA) + }) + + t.Run("txType 0 gives FAST confirmation", func(t *testing.T) { + var s, tok, rev [32]byte + var r [20]byte + data := buildSendFundsPayload(s, r, tok, 0, nil, rev, 0, nil, false) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.ConfirmationFast, event.ConfirmationType) + }) + + t.Run("txType 2 gives STANDARD confirmation", func(t *testing.T) { + var s, tok, rev [32]byte + var r [20]byte + data := buildSendFundsPayload(s, r, tok, 0, nil, rev, 2, nil, false) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) + }) + + t.Run("fromCEA false is parsed correctly", func(t *testing.T) { + var s, tok, rev [32]byte + var r [20]byte + data := buildSendFundsPayload(s, r, tok, 0, nil, rev, 0, nil, false) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + var utx common.UniversalTx + require.NoError(t, json.Unmarshal(event.EventData, &utx)) + assert.False(t, utx.FromCEA) + }) + + t.Run("empty payload and sig data are handled", func(t *testing.T) { + var s, tok, rev [32]byte + var r [20]byte + data := buildSendFundsPayload(s, r, tok, 0, nil, rev, 0, nil, false) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + var utx common.UniversalTx + require.NoError(t, json.Unmarshal(event.EventData, &utx)) + assert.Empty(t, utx.RawPayload) + assert.Empty(t, utx.VerificationData) + }) + + t.Run("large bridge amount", func(t *testing.T) { 
+ var s, tok, rev [32]byte + var r [20]byte + maxU64 := uint64(18446744073709551615) // max uint64 + data := buildSendFundsPayload(s, r, tok, maxU64, nil, rev, 0, nil, false) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + var utx common.UniversalTx + require.NoError(t, json.Unmarshal(event.EventData, &utx)) + assert.Equal(t, "18446744073709551615", utx.Amount) + }) +} + +func TestParseSendFundsEvent_TruncatedData(t *testing.T) { + logger := nopLogger() chainID := "solana:devnet" + sig := "truncSig" - // Create valid outbound data (80 bytes minimum: 8 disc + 32 txID + 32 utxID + 8 gas_fee) - data := make([]byte, 80) - for i := 0; i < 8; i++ { - data[i] = byte(i) // discriminator - } - for i := 8; i < 72; i++ { - data[i] = byte(i % 256) // txID and universalTxID - } - binary.LittleEndian.PutUint64(data[72:80], 0) // gas_fee - log := "Program data: " + base64.StdEncoding.EncodeToString(data) + t.Run("data too short for sender returns event with nil EventData", func(t *testing.T) { + // Only discriminator (8 bytes), no sender + data := make([]byte, 8) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + // Event is created but parseUniversalTxEvent will fail to decode, + // so EventData may be nil + assert.Equal(t, store.EventTypeInbound, event.Type) + }) + + t.Run("data truncated after sender still returns event", func(t *testing.T) { + // 8 disc + 32 sender = 40 bytes, missing recipient + data := make([]byte, 40) + event := ParseEvent(wrapAsLog(data), sig, 1, 0, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, store.EventTypeInbound, event.Type) + }) +} + +func TestParseOutboundObservationEvent(t *testing.T) { + logger := nopLogger() + chainID := "solana:5eykt4UsFv8P8NJdTREpY1vzqKqZKvdp" + signature := "5wHu1qwD7q5xMkZxq6z2S3r4y5N7m8P9kL0jH1gF2dE3cB4aA5b6C7d8E9f0G1h2" + + t.Run("parses valid outbound 
observation event", func(t *testing.T) { + var txID, utxID [32]byte + for i := range txID { + txID[i] = 0xAA + } + for i := range utxID { + utxID[i] = 0xBB + } + data := buildOutboundPayload(txID, utxID, 5000) + log := wrapAsLog(data) + + event := ParseEvent(log, signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + + assert.Contains(t, event.EventID, signature) + assert.Equal(t, uint64(12345), event.BlockHeight) + assert.Equal(t, store.EventTypeOutbound, event.Type) + assert.Equal(t, store.StatusPending, event.Status) + assert.Equal(t, store.ConfirmationStandard, event.ConfirmationType) + + var outbound common.OutboundEvent + require.NoError(t, json.Unmarshal(event.EventData, &outbound)) + assert.Equal(t, "0x"+hex.EncodeToString(txID[:]), outbound.TxID) + assert.Equal(t, "0x"+hex.EncodeToString(utxID[:]), outbound.UniversalTxID) + assert.Equal(t, "5000", outbound.GasFeeUsed) + }) + + t.Run("returns nil for log without Program data prefix", func(t *testing.T) { + event := ParseEvent("Some other log message", signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for empty log", func(t *testing.T) { + event := ParseEvent("", signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for invalid base64", func(t *testing.T) { + event := ParseEvent("Program data: not-valid-base64!!!", signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("returns nil for data too short", func(t *testing.T) { + shortData := make([]byte, 72) // needs 80 + event := ParseEvent(wrapAsLog(shortData), signature, 12345, 0, EventTypeFinalizeUniversalTx, chainID, logger) + assert.Nil(t, event) + }) + + t.Run("parses minimum valid data (exactly 80 bytes)", func(t *testing.T) { + data := make([]byte, 80) + for i := 8; i < 40; i++ { + data[i] = 0x11 + } + for i := 40; i < 72; i++ { + 
data[i] = 0x22 + } + binary.LittleEndian.PutUint64(data[72:80], 12345) + + event := ParseEvent(wrapAsLog(data), signature, 100, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + + var outbound common.OutboundEvent + require.NoError(t, json.Unmarshal(event.EventData, &outbound)) + assert.Contains(t, outbound.TxID, "0x1111") + assert.Contains(t, outbound.UniversalTxID, "0x2222") + assert.Equal(t, "12345", outbound.GasFeeUsed) + }) + + t.Run("handles data longer than 80 bytes", func(t *testing.T) { + var txID, utxID [32]byte + for i := range txID { + txID[i] = 0xAA + } + for i := range utxID { + utxID[i] = 0xBB + } + data := buildOutboundPayload(txID, utxID, 9999) + // Append extra bytes + data = append(data, make([]byte, 40)...) + + event := ParseEvent(wrapAsLog(data), signature, 100, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + + var outbound common.OutboundEvent + require.NoError(t, json.Unmarshal(event.EventData, &outbound)) + assert.Equal(t, "0x"+hex.EncodeToString(txID[:]), outbound.TxID) + assert.Equal(t, "0x"+hex.EncodeToString(utxID[:]), outbound.UniversalTxID) + assert.Equal(t, "9999", outbound.GasFeeUsed) + }) + + t.Run("zero gas fee", func(t *testing.T) { + var txID, utxID [32]byte + data := buildOutboundPayload(txID, utxID, 0) + event := ParseEvent(wrapAsLog(data), signature, 100, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + + var outbound common.OutboundEvent + require.NoError(t, json.Unmarshal(event.EventData, &outbound)) + assert.Equal(t, "0", outbound.GasFeeUsed) + }) + + t.Run("max uint64 gas fee", func(t *testing.T) { + var txID, utxID [32]byte + data := buildOutboundPayload(txID, utxID, ^uint64(0)) + event := ParseEvent(wrapAsLog(data), signature, 100, 0, EventTypeFinalizeUniversalTx, chainID, logger) + require.NotNil(t, event) + + var outbound common.OutboundEvent + require.NoError(t, json.Unmarshal(event.EventData, &outbound)) + assert.Equal(t, 
"18446744073709551615", outbound.GasFeeUsed) + }) +} + +func TestEventIDFormat(t *testing.T) { + logger := nopLogger() + chainID := "solana:devnet" + + var txID, utxID [32]byte + data := buildOutboundPayload(txID, utxID, 0) + log := wrapAsLog(data) tests := []struct { name string @@ -250,4 +522,95 @@ func TestParseOutboundObservationEvent_EventIDFormat(t *testing.T) { assert.Equal(t, tt.slot, event.BlockHeight) }) } + + t.Run("inbound event also uses signature:logIndex format", func(t *testing.T) { + var s, tok, rev [32]byte + var r [20]byte + inboundData := buildSendFundsPayload(s, r, tok, 0, nil, rev, 0, nil, false) + event := ParseEvent(wrapAsLog(inboundData), "mySig", 42, 7, EventTypeSendFunds, chainID, logger) + require.NotNil(t, event) + assert.Equal(t, "mySig:7", event.EventID) + }) +} + +func TestParseEvent_EventTypeConstants(t *testing.T) { + // Verify the constants have expected values + assert.Equal(t, "send_funds", EventTypeSendFunds) + assert.Equal(t, "finalize_universal_tx", EventTypeFinalizeUniversalTx) + assert.Equal(t, "revert_universal_tx", EventTypeRevertUniversalTx) +} + +func TestDecodeUniversalTxEvent_PartialData(t *testing.T) { + logger := nopLogger() + + t.Run("returns error when not enough data for sender", func(t *testing.T) { + // only discriminator, no sender bytes + data := make([]byte, 8) + _, err := decodeUniversalTxEvent(data, logger) + assert.Error(t, err) + assert.Contains(t, err.Error(), "sender") + }) + + t.Run("returns error when not enough data for recipient", func(t *testing.T) { + // 8 disc + 32 sender = 40, but recipient needs 20 more + data := make([]byte, 40) + _, err := decodeUniversalTxEvent(data, logger) + assert.Error(t, err) + assert.Contains(t, err.Error(), "recipient") + }) + + t.Run("returns error when not enough data for bridge_token", func(t *testing.T) { + // 8 + 32 + 20 = 60, bridge_token needs 32 more + data := make([]byte, 60) + _, err := decodeUniversalTxEvent(data, logger) + assert.Error(t, err) + 
assert.Contains(t, err.Error(), "bridge_token") + }) + + t.Run("returns error when not enough data for bridge_amount", func(t *testing.T) { + // 8 + 32 + 20 + 32 = 92, bridge_amount needs 8 more + data := make([]byte, 92) + _, err := decodeUniversalTxEvent(data, logger) + assert.Error(t, err) + assert.Contains(t, err.Error(), "bridge_amount") + }) + + t.Run("returns partial result when no data field length", func(t *testing.T) { + // 8 + 32 + 20 + 32 + 8 = 100, no data_len + data := make([]byte, 100) + binary.LittleEndian.PutUint64(data[92:100], 777) + result, err := decodeUniversalTxEvent(data, logger) + require.NoError(t, err) + assert.Equal(t, "777", result.Amount) + }) + + t.Run("returns partial result when data field exceeds available bytes", func(t *testing.T) { + // 8 + 32 + 20 + 32 + 8 + 4 = 104 + data := make([]byte, 104) + binary.LittleEndian.PutUint64(data[92:100], 555) + binary.LittleEndian.PutUint32(data[100:104], 999) // claims 999 bytes of payload + result, err := decodeUniversalTxEvent(data, logger) + require.NoError(t, err) + assert.Equal(t, "555", result.Amount) + assert.Empty(t, result.RawPayload) // not enough data, so payload is skipped + }) + + t.Run("returns partial result when missing revert recipient", func(t *testing.T) { + // 8 + 32 + 20 + 32 + 8 + 4(data_len=0) = 104 + data := make([]byte, 104) + binary.LittleEndian.PutUint32(data[100:104], 0) // 0 length payload + result, err := decodeUniversalTxEvent(data, logger) + require.NoError(t, err) + assert.Empty(t, result.RevertFundRecipient) + }) + + t.Run("returns partial result when missing tx_type", func(t *testing.T) { + // 8 + 32 + 20 + 32 + 8 + 4(data_len=0) + 32(revert) = 136 + data := make([]byte, 136) + binary.LittleEndian.PutUint32(data[100:104], 0) + result, err := decodeUniversalTxEvent(data, logger) + require.NoError(t, err) + // tx_type defaults to 0 when missing + assert.Equal(t, uint(0), result.TxType) + }) } diff --git a/universalClient/chains/svm/rpc_client_test.go 
b/universalClient/chains/svm/rpc_client_test.go new file mode 100644 index 00000000..5bd4a708 --- /dev/null +++ b/universalClient/chains/svm/rpc_client_test.go @@ -0,0 +1,131 @@ +package svm + +import ( + "math" + "testing" + + "github.com/gagliardetto/solana-go/rpc" + "github.com/rs/zerolog" +) + +func TestCalculateMedian(t *testing.T) { + tests := []struct { + name string + fees []uint64 + want uint64 + }{ + { + name: "empty slice", + fees: []uint64{}, + want: 0, + }, + { + name: "single element", + fees: []uint64{42}, + want: 42, + }, + { + name: "odd count already sorted", + fees: []uint64{1, 2, 3}, + want: 2, + }, + { + name: "odd count unsorted", + fees: []uint64{3, 1, 2}, + want: 2, + }, + { + name: "even count already sorted", + fees: []uint64{1, 2, 3, 4}, + want: 2, // (2+3)/2 = 2 (integer division) + }, + { + name: "even count unsorted", + fees: []uint64{4, 1, 3, 2}, + want: 2, // (2+3)/2 = 2 + }, + { + name: "even count average rounds down", + fees: []uint64{1, 4}, + want: 2, // (1+4)/2 = 2 + }, + { + name: "duplicate values odd count", + fees: []uint64{5, 5, 5}, + want: 5, + }, + { + name: "duplicate values even count", + fees: []uint64{5, 5, 5, 5}, + want: 5, + }, + { + name: "five elements", + fees: []uint64{10, 30, 50, 20, 40}, + want: 30, + }, + { + name: "six elements", + fees: []uint64{10, 30, 50, 20, 40, 60}, + want: 35, // (30+40)/2 + }, + { + name: "large values", + fees: []uint64{math.MaxUint64 - 1, math.MaxUint64 - 3}, + // (MaxUint64-3 + MaxUint64-1) / 2 overflows, but that is the + // current behaviour of the function (unsigned wrap-around). + // We just document whatever the function returns. 
+ want: func() uint64 { + a := uint64(math.MaxUint64 - 3) + b := uint64(math.MaxUint64 - 1) + return (a + b) / 2 + }(), + }, + { + name: "two elements same value", + fees: []uint64{100, 100}, + want: 100, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + // Copy input so we can verify the function works on its own copy + input := make([]uint64, len(tt.fees)) + copy(input, tt.fees) + + got := calculateMedian(input) + if got != tt.want { + t.Errorf("calculateMedian(%v) = %d, want %d", tt.fees, got, tt.want) + } + }) + } +} + +func TestClose_NilClients(t *testing.T) { + rc := &RPCClient{ + clients: nil, + logger: zerolog.Nop(), + } + + // Should not panic + rc.Close() + + if rc.clients != nil { + t.Error("expected clients to be nil after Close") + } +} + +func TestClose_EmptyClients(t *testing.T) { + rc := &RPCClient{ + clients: make([]*rpc.Client, 0), + logger: zerolog.Nop(), + } + + // Should not panic + rc.Close() + + if rc.clients != nil { + t.Error("expected clients to be nil after Close") + } +} diff --git a/universalClient/chains/svm/tx_builder_test.go b/universalClient/chains/svm/tx_builder_test.go index 223a0483..a67a0e3d 100644 --- a/universalClient/chains/svm/tx_builder_test.go +++ b/universalClient/chains/svm/tx_builder_test.go @@ -20,13 +20,10 @@ import ( "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + "github.com/pushchain/push-chain-node/universalClient/config" uetypes "github.com/pushchain/push-chain-node/x/uexecutor/types" ) -// ============================================================ -// Constants & helpers shared across tests -// ============================================================ - // testGatewayAddress is a valid base58 Solana public key used for unit tests. // Must NOT be SystemProgramID to avoid collisions with destinationProgram sentinel values. 
const testGatewayAddress = "CFVSincHYbETh2k7w6u1ENEkjbSLtveRCEBupKidw2VS" @@ -141,10 +138,6 @@ func buildMockWithdrawPayload() []byte { return buildMockPayload(nil, nil, 1, [32]byte{}) } -// ============================================================ -// TestNewTxBuilder -// ============================================================ - func TestNewTxBuilder(t *testing.T) { logger := zerolog.Nop() @@ -215,14 +208,6 @@ func TestNewTxBuilder(t *testing.T) { } } -// ============================================================ -// TestDefaultComputeUnitLimit -// ============================================================ - -// ============================================================ -// TestDeriveTSSPDA — seed must be "tsspda_v2" -// ============================================================ - func TestDeriveTSSPDA(t *testing.T) { builder := newTestBuilder(t) @@ -241,10 +226,6 @@ func TestDeriveTSSPDA(t *testing.T) { assert.NotEqual(t, oldPDA, pda, "TSS PDA must NOT use old seed 'tsspda'") } -// ============================================================ -// TestFetchTSSChainID — Borsh String parsing -// ============================================================ - func TestFetchTSSChainID(t *testing.T) { t.Run("parses valid TssPda with short chain_id", func(t *testing.T) { chainIDStr := "devnet" @@ -305,10 +286,6 @@ func parseTSSPDAData(accountData []byte) (string, error) { return chainID, nil } -// ============================================================ -// TestDetermineInstructionID -// ============================================================ - func TestDetermineInstructionID(t *testing.T) { builder := newTestBuilder(t) @@ -345,10 +322,6 @@ func TestDetermineInstructionID(t *testing.T) { } } -// ============================================================ -// TestAnchorDiscriminator — SHA256, not Keccak -// ============================================================ - func TestAnchorDiscriminator(t *testing.T) { tests := []struct { methodName 
string @@ -374,10 +347,6 @@ func TestAnchorDiscriminator(t *testing.T) { } } -// ============================================================ -// TestConstructTSSMessage — message format -// ============================================================ - func TestConstructTSSMessage(t *testing.T) { builder := newTestBuilder(t) @@ -621,10 +590,6 @@ func TestConstructTSSMessage(t *testing.T) { }) } -// ============================================================ -// TestConstructTSSMessage_HashIsKeccak256 -// ============================================================ - func TestConstructTSSMessage_HashIsKeccak256(t *testing.T) { builder := newTestBuilder(t) @@ -650,10 +615,6 @@ func TestConstructTSSMessage_HashIsKeccak256(t *testing.T) { assert.NotEqual(t, sha256Hash[:], hash, "TSS message must NOT be hashed with SHA256") } -// ============================================================ -// TestDecodePayload -// ============================================================ - func TestDecodePayload(t *testing.T) { t.Run("decodes valid execute payload with 2 accounts", func(t *testing.T) { expectedAccounts := []GatewayAccountMeta{ @@ -721,10 +682,6 @@ func TestDecodePayload(t *testing.T) { }) } -// ============================================================ -// TestAccountsToWritableFlags -// ============================================================ - func TestAccountsToWritableFlags(t *testing.T) { t.Run("empty accounts → empty flags", func(t *testing.T) { flags := accountsToWritableFlags(nil) @@ -772,10 +729,6 @@ func TestAccountsToWritableFlags(t *testing.T) { }) } -// ============================================================ -// TestBuildWithdrawAndExecuteData — Borsh layout -// ============================================================ - func TestBuildWithdrawAndExecuteData(t *testing.T) { builder := newTestBuilder(t) txID := makeTxID(0x01) @@ -882,10 +835,6 @@ func TestBuildWithdrawAndExecuteData(t *testing.T) { }) } -// 
============================================================ -// TestBuildRevertData -// ============================================================ - func TestBuildRevertData(t *testing.T) { builder := newTestBuilder(t) txID := makeTxID(0x01) @@ -920,10 +869,6 @@ func TestBuildRevertData(t *testing.T) { }) } -// ============================================================ -// TestBuildRescueData -// ============================================================ - func TestBuildRescueData(t *testing.T) { builder := newTestBuilder(t) txID := makeTxID(0x01) @@ -957,10 +902,6 @@ func TestBuildRescueData(t *testing.T) { }) } -// ============================================================ -// TestBuildWithdrawAndExecuteAccounts — accounts list -// ============================================================ - func TestBuildWithdrawAndExecuteAccounts(t *testing.T) { builder := newTestBuilder(t) @@ -1059,10 +1000,6 @@ func TestBuildWithdrawAndExecuteAccounts(t *testing.T) { }) } -// ============================================================ -// TestBuildRevertAccounts -// ============================================================ - func TestBuildRevertAccounts(t *testing.T) { builder := newTestBuilder(t) @@ -1121,10 +1058,6 @@ func TestBuildRevertAccounts(t *testing.T) { }) } -// ============================================================ -// TestBuildRescueAccounts -// ============================================================ - func TestBuildRescueAccounts(t *testing.T) { builder := newTestBuilder(t) @@ -1160,10 +1093,6 @@ func TestBuildRescueAccounts(t *testing.T) { }) } -// ============================================================ -// TestRemoveHexPrefix -// ============================================================ - func TestRemoveHexPrefix(t *testing.T) { tests := []struct { input string @@ -1180,10 +1109,6 @@ func TestRemoveHexPrefix(t *testing.T) { } } -// ============================================================ -// TestParseTxType -// 
============================================================ - func TestParseTxType(t *testing.T) { tests := []struct { input string @@ -1214,10 +1139,6 @@ func TestParseTxType(t *testing.T) { } } -// ============================================================ -// TestComputeUnitLimitInstruction -// ============================================================ - func TestBuildSetComputeUnitLimitInstruction(t *testing.T) { builder := newTestBuilder(t) ix := builder.buildSetComputeUnitLimitInstruction(300000) @@ -1234,10 +1155,6 @@ func TestBuildSetComputeUnitLimitInstruction(t *testing.T) { assert.Equal(t, uint32(300000), binary.LittleEndian.Uint32(data[1:5])) } -// ============================================================ -// TestGatewayAccountMetaStruct -// ============================================================ - func TestGatewayAccountMetaStruct(t *testing.T) { var pk [32]byte for i := range pk { @@ -1248,10 +1165,6 @@ func TestGatewayAccountMetaStruct(t *testing.T) { assert.True(t, meta.IsWritable) } -// ============================================================ -// TestEndToEndMessageAndDataConsistency -// ============================================================ - func TestEndToEndWithdrawMessageAndData(t *testing.T) { // Verifies that the TSS message hash (signed by TSS) ends up in the // instruction data's message_hash field at the correct offset. 
@@ -1286,10 +1199,6 @@ func TestEndToEndWithdrawMessageAndData(t *testing.T) { assert.Equal(t, msgHash, msgHashFromData, "message_hash in instruction data must match TSS message hash") } -// ============================================================ -// TestAnchorDiscriminatorKnownValues -// ============================================================ - func TestAnchorDiscriminatorKnownValues(t *testing.T) { // Verify discriminator values are deterministic and can be independently computed for _, method := range []string{"finalize_universal_tx", "revert_universal_tx", "rescue_funds"} { @@ -1299,16 +1208,6 @@ func TestAnchorDiscriminatorKnownValues(t *testing.T) { } } -// ============================================================ -// TestDetermineRecoveryID — real EVM key signing -// ============================================================ - -// ============================================================ -// TestEndToEndWithRealSignature -// Full offline end-to-end: construct TSS message → sign with -// real EVM key → build instruction data → verify recovery -// ============================================================ - func TestEndToEndWithRealSignature(t *testing.T) { builder := newTestBuilder(t) evmKey, _, _ := generateTestEVMKey(t) @@ -1435,25 +1334,6 @@ func TestEndToEndWithRealSignature(t *testing.T) { }) } -// ============================================================ -// Simulation Tests — live devnet end-to-end -// -// Run: go test -run TestSimulate -v -count=1 -timeout 120s -// -// Each test does the full pipeline: -// 1. Connect to devnet RPC -// 2. Generate fresh Solana relayer keypair (written to temp dir) -// 3. Generate fresh EVM key for signing -// 4. GetOutboundSigningRequest (fetches TSS PDA nonce from chain) -// 5. Sign the message hash with the EVM key (secp256k1) -// 6. BroadcastOutboundSigningRequest (assembles & sends the Solana tx) -// -// Expected: Steps 1-5 always succeed. 
Step 6 fails with -// "failed to determine recovery ID" because the generated EVM key -// doesn't match the TSS ETH address stored on-chain. This validates -// the entire assembly pipeline up to the on-chain auth check. -// ============================================================ - const ( devnetGatewayAddress = "DJoFYDpgbTfxbXBv1QYhYGc9FK4J5FUKpYXAfSkHryXp" devnetRPCURL = "https://api.devnet.solana.com" @@ -1591,8 +1471,6 @@ func requireSimulationSuccess(t *testing.T, result *rpc.SimulateTransactionResul } } -// ---- Withdraw ---- - func TestSimulate_Withdraw_NativeSOL(t *testing.T) { rpcClient, builder := setupDevnetSimulation(t) defer rpcClient.Close() @@ -1620,8 +1498,6 @@ func TestSimulate_Withdraw_SPLToken(t *testing.T) { requireSimulationSuccess(t, result) } -// ---- Execute ---- - func TestSimulate_Execute_NativeSOL(t *testing.T) { rpcClient, builder := setupDevnetSimulation(t) defer rpcClient.Close() @@ -1660,8 +1536,6 @@ func TestSimulate_Execute_SPLToken(t *testing.T) { requireSimulationSuccess(t, result) } -// ---- Revert ---- - func TestSimulate_Revert_NativeSOL(t *testing.T) { rpcClient, builder := setupDevnetSimulation(t) defer rpcClient.Close() @@ -1684,8 +1558,6 @@ func TestSimulate_Revert_SPLToken(t *testing.T) { requireSimulationSuccess(t, result) } -// ---- Rescue ---- - // buildAndSimulateRescue constructs a rescue_funds transaction and simulates it on devnet. 
func buildAndSimulateRescue(t *testing.T, rpcClient *RPCClient, builder *TxBuilder, evmKey *ecdsa.PrivateKey, amount uint64, assetAddr string) *rpc.SimulateTransactionResult { t.Helper() @@ -1824,3 +1696,156 @@ func TestSimulate_Rescue_SPLToken(t *testing.T) { result := buildAndSimulateRescue(t, rpcClient, builder, evmKey, 500000, devnetSPLMint) requireSimulationSuccess(t, result) } + +func TestGetNextNonce(t *testing.T) { + builder := newTestBuilder(t) + + t.Run("returns 0 with arbitrary address and finalized=true", func(t *testing.T) { + nonce, err := builder.GetNextNonce(context.Background(), "SomeAddress123", true) + require.NoError(t, err) + assert.Equal(t, uint64(0), nonce) + }) + + t.Run("returns 0 with empty address and finalized=false", func(t *testing.T) { + nonce, err := builder.GetNextNonce(context.Background(), "", false) + require.NoError(t, err) + assert.Equal(t, uint64(0), nonce) + }) +} + +func TestGetGasFeeUsed(t *testing.T) { + builder := newTestBuilder(t) + + t.Run("returns string zero for any tx hash", func(t *testing.T) { + fee, err := builder.GetGasFeeUsed(context.Background(), "5xYz...someTxHash") + require.NoError(t, err) + assert.Equal(t, "0", fee) + }) + + t.Run("returns string zero for empty tx hash", func(t *testing.T) { + fee, err := builder.GetGasFeeUsed(context.Background(), "") + require.NoError(t, err) + assert.Equal(t, "0", fee) + }) +} + +func TestNewTxBuilder_ChainConfig(t *testing.T) { + logger := zerolog.Nop() + + t.Run("valid protocolALT is stored", func(t *testing.T) { + altKey := solana.NewWallet().PublicKey() + cfg := &config.ChainSpecificConfig{ + ProtocolALT: altKey.String(), + } + builder, err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, cfg) + require.NoError(t, err) + assert.Equal(t, altKey, builder.protocolALT) + }) + + t.Run("invalid protocolALT is silently skipped", func(t *testing.T) { + cfg := &config.ChainSpecificConfig{ + ProtocolALT: "not-valid-base58!!!", + } + builder, 
err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, cfg) + require.NoError(t, err) + assert.True(t, builder.protocolALT.IsZero(), "invalid ALT should result in zero pubkey") + }) + + t.Run("valid tokenALTs are stored", func(t *testing.T) { + mint := solana.NewWallet().PublicKey() + alt := solana.NewWallet().PublicKey() + cfg := &config.ChainSpecificConfig{ + TokenALTs: map[string]string{ + mint.String(): alt.String(), + }, + } + builder, err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, cfg) + require.NoError(t, err) + got, ok := builder.tokenALTs[mint] + require.True(t, ok, "expected token ALT entry for mint") + assert.Equal(t, alt, got) + }) + + t.Run("invalid tokenALT mint is skipped", func(t *testing.T) { + cfg := &config.ChainSpecificConfig{ + TokenALTs: map[string]string{ + "bad-mint": solana.NewWallet().PublicKey().String(), + }, + } + builder, err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, cfg) + require.NoError(t, err) + assert.Len(t, builder.tokenALTs, 0) + }) + + t.Run("invalid tokenALT address is skipped", func(t *testing.T) { + cfg := &config.ChainSpecificConfig{ + TokenALTs: map[string]string{ + solana.NewWallet().PublicKey().String(): "bad-alt", + }, + } + builder, err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, cfg) + require.NoError(t, err) + assert.Len(t, builder.tokenALTs, 0) + }) + + t.Run("nil chainConfig is fine", func(t *testing.T) { + builder, err := NewTxBuilder(&RPCClient{}, "solana:devnet", testGatewayAddress, "/tmp", logger, nil) + require.NoError(t, err) + assert.True(t, builder.protocolALT.IsZero()) + assert.Len(t, builder.tokenALTs, 0) + }) +} + +func TestBuildCreateATAIdempotentInstruction(t *testing.T) { + builder := newTestBuilder(t) + payer := solana.NewWallet().PublicKey() + owner := solana.NewWallet().PublicKey() + mint := solana.NewWallet().PublicKey() + + ix := 
builder.buildCreateATAIdempotentInstruction(payer, owner, mint) + + t.Run("program ID is ATA program", func(t *testing.T) { + expected := solana.MustPublicKeyFromBase58("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL") + assert.Equal(t, expected, ix.ProgramID()) + }) + + t.Run("has 6 accounts in correct order", func(t *testing.T) { + accounts := ix.Accounts() + require.Len(t, accounts, 6) + + // payer (signer, writable) + assert.Equal(t, payer, accounts[0].PublicKey) + assert.True(t, accounts[0].IsSigner) + assert.True(t, accounts[0].IsWritable) + + // ATA (writable, derived deterministically) + ataProgramID := solana.MustPublicKeyFromBase58("ATokenGPvbdGVxr1b2hvZbsiqW5xWH25efTNsLJA8knL") + expectedATA, _, _ := solana.FindProgramAddress( + [][]byte{owner.Bytes(), solana.TokenProgramID.Bytes(), mint.Bytes()}, + ataProgramID, + ) + assert.Equal(t, expectedATA, accounts[1].PublicKey) + assert.True(t, accounts[1].IsWritable) + assert.False(t, accounts[1].IsSigner) + + // owner + assert.Equal(t, owner, accounts[2].PublicKey) + assert.False(t, accounts[2].IsWritable) + + // mint + assert.Equal(t, mint, accounts[3].PublicKey) + assert.False(t, accounts[3].IsWritable) + + // system program + assert.Equal(t, solana.SystemProgramID, accounts[4].PublicKey) + + // token program + assert.Equal(t, solana.TokenProgramID, accounts[5].PublicKey) + }) + + t.Run("instruction data is [1] for CreateIdempotent", func(t *testing.T) { + data, err := ix.Data() + require.NoError(t, err) + assert.Equal(t, []byte{1}, data) + }) +} diff --git a/universalClient/pushcore/pushCore_test.go b/universalClient/pushcore/pushCore_test.go index 1e74f49f..323372b4 100644 --- a/universalClient/pushcore/pushCore_test.go +++ b/universalClient/pushcore/pushCore_test.go @@ -842,6 +842,74 @@ func TestClient_GetGasPrice_NilResponse(t *testing.T) { assert.Nil(t, price) } +func TestClient_GetTx(t *testing.T) { + logger := zerolog.Nop() + ctx := context.Background() + + t.Run("no endpoints configured", func(t 
*testing.T) { + client := &Client{ + logger: logger, + txClients: []tx.ServiceClient{}, + } + + resp, err := client.GetTx(ctx, "0xabc") + require.Error(t, err) + assert.Contains(t, err.Error(), "no endpoints configured") + assert.Nil(t, resp) + }) + + t.Run("successful get tx", func(t *testing.T) { + mockClient := &mockTxServiceClient{ + getTxResp: &tx.GetTxResponse{ + TxResponse: &sdktypes.TxResponse{TxHash: "0xabc123", Code: 0}, + }, + } + + client := &Client{ + logger: logger, + txClients: []tx.ServiceClient{mockClient}, + } + + resp, err := client.GetTx(ctx, "0xabc123") + require.NoError(t, err) + require.NotNil(t, resp) + assert.Equal(t, "0xabc123", resp.TxResponse.TxHash) + }) + + t.Run("failover on first endpoint failure", func(t *testing.T) { + failing := &mockTxServiceClient{getTxErr: assert.AnError} + success := &mockTxServiceClient{ + getTxResp: &tx.GetTxResponse{ + TxResponse: &sdktypes.TxResponse{TxHash: "0xdef456"}, + }, + } + + client := &Client{ + logger: logger, + txClients: []tx.ServiceClient{failing, success}, + } + + resp, err := client.GetTx(ctx, "0xdef456") + require.NoError(t, err) + require.NotNil(t, resp) + assert.Equal(t, "0xdef456", resp.TxResponse.TxHash) + }) + + t.Run("all endpoints fail", func(t *testing.T) { + failing1 := &mockTxServiceClient{getTxErr: assert.AnError} + failing2 := &mockTxServiceClient{getTxErr: assert.AnError} + + client := &Client{ + logger: logger, + txClients: []tx.ServiceClient{failing1, failing2}, + } + + resp, err := client.GetTx(ctx, "0xabc") + require.Error(t, err) + assert.Nil(t, resp) + }) +} + // Mock implementations type mockRegistryQueryClient struct { @@ -931,7 +999,9 @@ func (m *mockUTSSQueryClient) KeyById(ctx context.Context, req *utsstypes.QueryK type mockTxServiceClient struct { tx.ServiceClient broadcastResp *tx.BroadcastTxResponse + getTxResp *tx.GetTxResponse err error + getTxErr error } func (m *mockTxServiceClient) BroadcastTx(ctx context.Context, req *tx.BroadcastTxRequest, opts 
...grpc.CallOption) (*tx.BroadcastTxResponse, error) { @@ -942,7 +1012,10 @@ func (m *mockTxServiceClient) BroadcastTx(ctx context.Context, req *tx.Broadcast } func (m *mockTxServiceClient) GetTx(ctx context.Context, req *tx.GetTxRequest, opts ...grpc.CallOption) (*tx.GetTxResponse, error) { - return nil, nil + if m.getTxErr != nil { + return nil, m.getTxErr + } + return m.getTxResp, nil } type mockUExecutorQueryClient struct { diff --git a/universalClient/pushsigner/grant_verifier_test.go b/universalClient/pushsigner/grant_verifier_test.go index 0fd9e348..2c0ec770 100644 --- a/universalClient/pushsigner/grant_verifier_test.go +++ b/universalClient/pushsigner/grant_verifier_test.go @@ -1,14 +1,19 @@ package pushsigner import ( + "context" + "fmt" "testing" "time" "github.com/cosmos/cosmos-sdk/codec" codectypes "github.com/cosmos/cosmos-sdk/codec/types" - "github.com/cosmos/cosmos-sdk/x/authz" + cosmosauthz "github.com/cosmos/cosmos-sdk/x/authz" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/pushchain/push-chain-node/universalClient/config" + "github.com/pushchain/push-chain-node/universalClient/pushsigner/keys" ) func TestVerifyGrants(t *testing.T) { @@ -136,18 +141,18 @@ func TestVerifyGrants(t *testing.T) { func TestExtractGrantInfo(t *testing.T) { interfaceRegistry := codectypes.NewInterfaceRegistry() - authz.RegisterInterfaces(interfaceRegistry) + cosmosauthz.RegisterInterfaces(interfaceRegistry) cdc := codec.NewProtoCodec(interfaceRegistry) futureTime := time.Now().Add(24 * time.Hour) t.Run("extract valid generic authorization grants", func(t *testing.T) { - ga := &authz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} + ga := &cosmosauthz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} gaAny, err := codectypes.NewAnyWithValue(ga) require.NoError(t, err) - resp := &authz.QueryGranteeGrantsResponse{ - Grants: []*authz.GrantAuthorization{ + resp := &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: 
[]*cosmosauthz.GrantAuthorization{ { Granter: "push1granter123", Authorization: gaAny, @@ -166,12 +171,12 @@ func TestExtractGrantInfo(t *testing.T) { t.Run("skip non-generic authorization types", func(t *testing.T) { // Create an Any with a different type URL wrongTypeAny := &codectypes.Any{ TypeUrl: "/cosmos.authz.v1beta1.SendAuthorization", Value: []byte{}, } - resp := &authz.QueryGranteeGrantsResponse{ - Grants: []*authz.GrantAuthorization{ + resp := &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{ { Granter: "push1granter123", Authorization: wrongTypeAny, @@ -185,8 +190,8 @@ func TestExtractGrantInfo(t *testing.T) { }) t.Run("skip grants with nil authorization", func(t *testing.T) { - resp := &authz.QueryGranteeGrantsResponse{ - Grants: []*authz.GrantAuthorization{ + resp := &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{ { Granter: "push1granter123", Authorization: nil, @@ -200,8 +205,8 @@ func TestExtractGrantInfo(t *testing.T) { }) t.Run("empty grants response", func(t *testing.T) { - resp := &authz.QueryGranteeGrantsResponse{ - Grants: []*authz.GrantAuthorization{}, + resp := &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{}, } grants := extractGrantInfo(resp, cdc) @@ -209,16 +214,16 @@ func TestExtractGrantInfo(t *testing.T) { }) t.Run("multiple valid grants", func(t *testing.T) { - ga1 := &authz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} + ga1 := &cosmosauthz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} ga1Any, err := codectypes.NewAnyWithValue(ga1) require.NoError(t, err) - ga2 := &authz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteChainMeta"} + ga2 := &cosmosauthz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteChainMeta"} ga2Any, err := codectypes.NewAnyWithValue(ga2) require.NoError(t, err) - resp := &authz.QueryGranteeGrantsResponse{ - Grants: 
[]*authz.GrantAuthorization{ + resp := &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{ { Granter: "push1granter123", Authorization: ga1Any, @@ -241,11 +246,11 @@ func TestExtractGrantInfo(t *testing.T) { func TestExtractMessageType(t *testing.T) { interfaceRegistry := codectypes.NewInterfaceRegistry() - authz.RegisterInterfaces(interfaceRegistry) + cosmosauthz.RegisterInterfaces(interfaceRegistry) cdc := codec.NewProtoCodec(interfaceRegistry) t.Run("extract message type from valid generic authorization", func(t *testing.T) { - ga := &authz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} + ga := &cosmosauthz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} gaAny, err := codectypes.NewAnyWithValue(ga) require.NoError(t, err) @@ -256,7 +261,7 @@ func TestExtractMessageType(t *testing.T) { t.Run("error on invalid proto data", func(t *testing.T) { invalidAny := &codectypes.Any{ TypeUrl: "/cosmos.authz.v1beta1.GenericAuthorization", Value: []byte("invalid proto data"), } @@ -266,7 +271,7 @@ func TestExtractMessageType(t *testing.T) { }) t.Run("empty message type", func(t *testing.T) { - ga := &authz.GenericAuthorization{Msg: ""} + ga := &cosmosauthz.GenericAuthorization{Msg: ""} gaAny, err := codectypes.NewAnyWithValue(ga) require.NoError(t, err) @@ -276,6 +281,129 @@ func TestExtractMessageType(t *testing.T) { }) } +func TestValidateKeysAndGrants(t *testing.T) { + futureTime := time.Now().Add(24 * time.Hour) + + t.Run("file backend without password returns error", func(t *testing.T) { + mock := &mockChainClient{} + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendFile, "", "/tmp", mock, "push1granter") + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "keyring_password is required for file backend") + }) + + t.Run("empty keyring returns error", func(t *testing.T) { + tempDir := 
t.TempDir() + mock := &mockChainClient{} + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendTest, "", tempDir, mock, "push1granter") + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "no keys found in keyring") + }) + + t.Run("grant query error returns error", func(t *testing.T) { + tempDir := t.TempDir() + kr, err := keys.CreateKeyring(tempDir, nil, config.KeyringBackendTest) + require.NoError(t, err) + _, _, err = keys.CreateNewKey(kr, "test-key", "", "") + require.NoError(t, err) + + mock := &mockChainClient{ + getGranteeGrantFn: func(ctx context.Context, addr string) (*cosmosauthz.QueryGranteeGrantsResponse, error) { + return nil, fmt.Errorf("node unavailable") + }, + } + + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendTest, "", tempDir, mock, "push1granter") + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "failed to query grants") + }) + + t.Run("no grants returns error", func(t *testing.T) { + tempDir := t.TempDir() + kr, err := keys.CreateKeyring(tempDir, nil, config.KeyringBackendTest) + require.NoError(t, err) + _, _, err = keys.CreateNewKey(kr, "test-key", "", "") + require.NoError(t, err) + + mock := &mockChainClient{ + getGranteeGrantFn: func(ctx context.Context, addr string) (*cosmosauthz.QueryGranteeGrantsResponse, error) { + return &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{}, + }, nil + }, + } + + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendTest, "", tempDir, mock, "push1granter") + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "no AuthZ grants found") + }) + + t.Run("missing required grants returns error", func(t *testing.T) { + tempDir := t.TempDir() + kr, err := keys.CreateKeyring(tempDir, nil, config.KeyringBackendTest) + require.NoError(t, err) + _, _, err = keys.CreateNewKey(kr, "test-key", "", "") + 
require.NoError(t, err) + + // Only provide one grant out of five required + ga := &cosmosauthz.GenericAuthorization{Msg: "/uexecutor.v1.MsgVoteInbound"} + gaAny, err := codectypes.NewAnyWithValue(ga) + require.NoError(t, err) + + mock := &mockChainClient{ + getGranteeGrantFn: func(ctx context.Context, addr string) (*cosmosauthz.QueryGranteeGrantsResponse, error) { + return &cosmosauthz.QueryGranteeGrantsResponse{ + Grants: []*cosmosauthz.GrantAuthorization{ + {Granter: "push1granter", Authorization: gaAny, Expiration: &futureTime}, + }, + }, nil + }, + } + + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendTest, "", tempDir, mock, "push1granter") + require.Error(t, err) + assert.Nil(t, result) + assert.Contains(t, err.Error(), "missing grants") + }) + + t.Run("all grants present succeeds", func(t *testing.T) { + tempDir := t.TempDir() + kr, err := keys.CreateKeyring(tempDir, nil, config.KeyringBackendTest) + require.NoError(t, err) + _, _, err = keys.CreateNewKey(kr, "test-key", "", "") + require.NoError(t, err) + + var grantAuths []*cosmosauthz.GrantAuthorization + for _, msg := range requiredMsgGrants { + ga := &cosmosauthz.GenericAuthorization{Msg: msg} + gaAny, err := codectypes.NewAnyWithValue(ga) + require.NoError(t, err) + grantAuths = append(grantAuths, &cosmosauthz.GrantAuthorization{ + Granter: "push1granter", + Authorization: gaAny, + Expiration: &futureTime, + }) + } + + mock := &mockChainClient{ + getGranteeGrantFn: func(ctx context.Context, addr string) (*cosmosauthz.QueryGranteeGrantsResponse, error) { + return &cosmosauthz.QueryGranteeGrantsResponse{Grants: grantAuths}, nil + }, + } + + result, err := validateKeysAndGrants(context.Background(), config.KeyringBackendTest, "", tempDir, mock, "push1granter") + require.NoError(t, err) + require.NotNil(t, result) + assert.Equal(t, "push1granter", result.Granter) + assert.NotEmpty(t, result.KeyName) + assert.NotEmpty(t, result.KeyAddr) + assert.Len(t, result.Messages, 
len(requiredMsgGrants)) + }) +} + func TestGrantInfo(t *testing.T) { t.Run("grantInfo struct fields", func(t *testing.T) { exp := time.Now().Add(24 * time.Hour) diff --git a/universalClient/pushsigner/vote_test.go b/universalClient/pushsigner/vote_test.go index 3a302880..19682c1b 100644 --- a/universalClient/pushsigner/vote_test.go +++ b/universalClient/pushsigner/vote_test.go @@ -1,12 +1,18 @@ package pushsigner import ( + "context" + "fmt" "testing" "time" sdk "github.com/cosmos/cosmos-sdk/types" + sdktx "github.com/cosmos/cosmos-sdk/types/tx" + authtypes "github.com/cosmos/cosmos-sdk/x/auth/types" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + uexecutortypes "github.com/pushchain/push-chain-node/x/uexecutor/types" ) func TestVoteConstants(t *testing.T) { @@ -23,4 +29,127 @@ func TestVoteConstants(t *testing.T) { t.Run("default vote timeout", func(t *testing.T) { assert.Equal(t, 30*time.Second, defaultVoteTimeout) }) + + t.Run("tx poll interval", func(t *testing.T) { + assert.Equal(t, 500*time.Millisecond, txPollInterval) + }) + + t.Run("tx confirm timeout", func(t *testing.T) { + assert.Equal(t, 15*time.Second, txConfirmTimeout) + }) +} + +func TestWaitForTxConfirmation(t *testing.T) { + t.Run("returns nil when tx found immediately", func(t *testing.T) { + mock := &mockChainClient{ + getTxFn: func(ctx context.Context, txHash string) (*sdktx.GetTxResponse, error) { + return &sdktx.GetTxResponse{ + TxResponse: &sdk.TxResponse{TxHash: txHash, Code: 0}, + }, nil + }, + } + + err := waitForTxConfirmation(context.Background(), mock, "0xabc") + assert.NoError(t, err) + }) + + t.Run("polls until tx found", func(t *testing.T) { + calls := 0 + mock := &mockChainClient{ + getTxFn: func(ctx context.Context, txHash string) (*sdktx.GetTxResponse, error) { + calls++ + if calls < 3 { + return nil, fmt.Errorf("tx not found") + } + return &sdktx.GetTxResponse{ + TxResponse: &sdk.TxResponse{TxHash: txHash}, + }, nil + }, + } + + err := 
waitForTxConfirmation(context.Background(), mock, "0xdef") + assert.NoError(t, err) + assert.GreaterOrEqual(t, calls, 3) + }) + + t.Run("returns error on context cancellation", func(t *testing.T) { + mock := &mockChainClient{ + getTxFn: func(ctx context.Context, txHash string) (*sdktx.GetTxResponse, error) { + return nil, fmt.Errorf("not found") + }, + } + + ctx, cancel := context.WithCancel(context.Background()) + cancel() // cancel immediately + + err := waitForTxConfirmation(ctx, mock, "0xabc") + assert.Error(t, err) + }) +} + +func TestVoteRejectedOnChain(t *testing.T) { + mock := &mockChainClient{ + getAccountFn: func(ctx context.Context, address string) (*authtypes.QueryAccountResponse, error) { + addr, _ := sdk.AccAddressFromBech32(address) + return makeAccountResponse(t, addr, 1, 1), nil + }, + broadcastTxFn: func(ctx context.Context, txBytes []byte) (*sdktx.BroadcastTxResponse, error) { + return &sdktx.BroadcastTxResponse{ + TxResponse: &sdk.TxResponse{Code: 7, TxHash: "REJECTED", RawLog: "unauthorized"}, + }, nil + }, + } + + signer := createTestSigner(t, mock) + inbound := &uexecutortypes.Inbound{TxHash: "0x1"} + + txHash, err := signer.VoteInbound(context.Background(), inbound) + require.Error(t, err) + assert.Empty(t, txHash) + assert.Contains(t, err.Error(), "transaction failed with code 7") +} + +func TestVoteEmptyMemoDefaultsToMsgType(t *testing.T) { + mock := &mockChainClient{ + getAccountFn: func(ctx context.Context, address string) (*authtypes.QueryAccountResponse, error) { + addr, _ := sdk.AccAddressFromBech32(address) + return makeAccountResponse(t, addr, 1, 1), nil + }, + broadcastTxFn: func(ctx context.Context, txBytes []byte) (*sdktx.BroadcastTxResponse, error) { + return &sdktx.BroadcastTxResponse{ + TxResponse: &sdk.TxResponse{Code: 0, TxHash: "OK"}, + }, nil + }, + } + + signer := createTestSigner(t, mock) + + // VoteChainMeta provides a memo, so this tests the non-empty path + txHash, err := signer.VoteChainMeta(context.Background(), 
"eip155:1", 100, 200) + require.NoError(t, err) + assert.Equal(t, "OK", txHash) +} + +func TestVoteAllTypes(t *testing.T) { + mock := successMock(t) + + t.Run("VoteOutbound with failure observation", func(t *testing.T) { + signer := createTestSigner(t, mock) + obs := &uexecutortypes.OutboundObservation{ + Success: false, + BlockHeight: 999, + TxHash: "0xfailed", + } + + txHash, err := signer.VoteOutbound(context.Background(), "tx-fail", "utx-fail", obs) + require.NoError(t, err) + assert.Equal(t, "VOTE_OK", txHash) + }) + + t.Run("VoteFundMigration with success=false", func(t *testing.T) { + signer := createTestSigner(t, mock) + txHash, err := signer.VoteFundMigration(context.Background(), 99, "0xfailhash", false) + require.NoError(t, err) + assert.Equal(t, "VOTE_OK", txHash) + }) } diff --git a/universalClient/tss/coordinator/coordinator_test.go b/universalClient/tss/coordinator/coordinator_test.go index afe50198..68481a85 100644 --- a/universalClient/tss/coordinator/coordinator_test.go +++ b/universalClient/tss/coordinator/coordinator_test.go @@ -2,22 +2,118 @@ package coordinator import ( "context" + "fmt" + "reflect" "testing" "time" + "unsafe" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "gorm.io/driver/sqlite" "gorm.io/gorm" + "github.com/pushchain/push-chain-node/universalClient/chains" + "github.com/pushchain/push-chain-node/universalClient/chains/common" + "github.com/pushchain/push-chain-node/universalClient/config" "github.com/pushchain/push-chain-node/universalClient/pushcore" "github.com/pushchain/push-chain-node/universalClient/store" "github.com/pushchain/push-chain-node/universalClient/tss/eventstore" "github.com/pushchain/push-chain-node/universalClient/tss/keyshare" + uexecutortypes "github.com/pushchain/push-chain-node/x/uexecutor/types" + uregistrytypes "github.com/pushchain/push-chain-node/x/uregistry/types" 
"github.com/pushchain/push-chain-node/x/uvalidator/types" ) +type coordMockTxBuilder struct{ mock.Mock } + +func (m *coordMockTxBuilder) GetOutboundSigningRequest(ctx context.Context, data *uexecutortypes.OutboundCreatedEvent, nonce uint64) (*common.UnsignedSigningReq, error) { + args := m.Called(ctx, data, nonce) + if args.Get(0) == nil { + return nil, args.Error(1) + } + return args.Get(0).(*common.UnsignedSigningReq), args.Error(1) +} + +func (m *coordMockTxBuilder) GetNextNonce(ctx context.Context, addr string, useFinalized bool) (uint64, error) { + args := m.Called(ctx, addr, useFinalized) + return args.Get(0).(uint64), args.Error(1) +} + +func (m *coordMockTxBuilder) BroadcastOutboundSigningRequest(ctx context.Context, req *common.UnsignedSigningReq, data *uexecutortypes.OutboundCreatedEvent, sig []byte) (string, error) { + args := m.Called(ctx, req, data, sig) + return args.String(0), args.Error(1) +} + +func (m *coordMockTxBuilder) VerifyBroadcastedTx(ctx context.Context, txHash string) (bool, uint64, uint64, uint8, error) { + args := m.Called(ctx, txHash) + return args.Bool(0), args.Get(1).(uint64), args.Get(2).(uint64), args.Get(3).(uint8), args.Error(4) +} + +func (m *coordMockTxBuilder) IsAlreadyExecuted(ctx context.Context, txID string) (bool, error) { + args := m.Called(ctx, txID) + return args.Bool(0), args.Error(1) +} + +func (m *coordMockTxBuilder) GetGasFeeUsed(ctx context.Context, txHash string) (string, error) { + args := m.Called(ctx, txHash) + return args.String(0), args.Error(1) +} + +func (m *coordMockTxBuilder) GetFundMigrationSigningRequest(ctx context.Context, data *common.FundMigrationData, nonce uint64) (*common.UnsignedSigningReq, error) { + args := m.Called(ctx, data, nonce) + if args.Get(0) == nil { + return nil, args.Error(1) + } + return args.Get(0).(*common.UnsignedSigningReq), args.Error(1) +} + +func (m *coordMockTxBuilder) BroadcastFundMigrationTx(ctx context.Context, req *common.UnsignedSigningReq, data 
*common.FundMigrationData, sig []byte) (string, error) { + args := m.Called(ctx, req, data, sig) + return args.String(0), args.Error(1) +} + +type coordMockChainClient struct { + builder *coordMockTxBuilder + builderErr error +} + +func (m *coordMockChainClient) Start(context.Context) error { return nil } +func (m *coordMockChainClient) Stop() error { return nil } +func (m *coordMockChainClient) IsHealthy() bool { return true } +func (m *coordMockChainClient) GetTxBuilder() (common.TxBuilder, error) { + if m.builderErr != nil { + return nil, m.builderErr + } + return m.builder, nil +} + +func newTestChainsForCoordinator(t *testing.T, chainID string, vmType uregistrytypes.VmType, client common.ChainClient) *chains.Chains { + t.Helper() + c := chains.NewChains(nil, nil, &config.Config{PushChainID: "test-chain"}, zerolog.Nop()) + + v := reflect.ValueOf(c).Elem() + + chainsField := v.FieldByName("chains") + chainsMap := *(*map[string]common.ChainClient)(unsafe.Pointer(chainsField.UnsafeAddr())) + chainsMap[chainID] = client + + configsField := v.FieldByName("chainConfigs") + configsMap := *(*map[string]*uregistrytypes.ChainConfig)(unsafe.Pointer(configsField.UnsafeAddr())) + configsMap[chainID] = &uregistrytypes.ChainConfig{ + Chain: chainID, + VmType: vmType, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: true, + }, + } + + return c +} + // setupTestCoordinator creates a test coordinator with in-memory dependencies. // The pushcore client is a zero-value *pushcore.Client that will fail on any live RPC call — // tests that need coordinator logic should use the pure-function helpers directly. 
@@ -411,7 +508,7 @@ func TestGetInFlightSignCountPerChain(t *testing.T) { // These must NOT be counted. db.Create(&store.Event{EventID: "e4", Type: "SIGN_OUTBOUND", Status: store.StatusConfirmed, EventData: ethData}) // not yet in-flight db.Create(&store.Event{EventID: "e5", Type: "SIGN_OUTBOUND", Status: store.StatusBroadcasted, EventData: ethData}) // pending nonce RPC covers it - db.Create(&store.Event{EventID: "e6", Type: "KEYGEN", Status: store.StatusInProgress}) // not a SIGN event + db.Create(&store.Event{EventID: "e6", Type: "KEYGEN", Status: store.StatusInProgress})                              // not a SIGN event perChain, err := coord.getInFlightSignCountPerChain() require.NoError(t, err) @@ -460,6 +557,155 @@ func TestBuildSignTransaction(t *testing.T) { require.Error(t, err) assert.Contains(t, err.Error(), "chains manager not configured") }) + + t.Run("chain client not found", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + builder := &coordMockTxBuilder{} + client := &coordMockChainClient{builder: builder} + coord.chains = newTestChainsForCoordinator(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + data := []byte(`{"tx_id":"0x1","destination_chain":"eip155:999"}`) + _, err := coord.buildSignTransaction(ctx, data, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get client for chain") + }) + + t.Run("GetTxBuilder returns 
error", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + client := &coordMockChainClient{builder: nil, builderErr: fmt.Errorf("builder init failed")} + coord.chains = newTestChainsForCoordinator(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + data := []byte(`{"tx_id":"0x1","destination_chain":"eip155:1"}`) + _, err := coord.buildSignTransaction(ctx, data, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get tx builder") + }) + + t.Run("nil assignedNonce returns error", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + builder := &coordMockTxBuilder{} + client := &coordMockChainClient{builder: builder} + coord.chains = newTestChainsForCoordinator(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + data := []byte(`{"tx_id":"0x1","destination_chain":"eip155:1"}`) + _, err := coord.buildSignTransaction(ctx, data, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "assigned nonce is required") + }) + + t.Run("GetOutboundSigningRequest returns error", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + builder := &coordMockTxBuilder{} + client := &coordMockChainClient{builder: builder} + coord.chains = newTestChainsForCoordinator(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + builder.On("GetOutboundSigningRequest", mock.Anything, mock.Anything, mock.Anything). 
+ Return(nil, fmt.Errorf("ABI encoding failed")) + + nonce := uint64(5) + data := []byte(`{"tx_id":"0x1","destination_chain":"eip155:1"}`) + _, err := coord.buildSignTransaction(ctx, data, &nonce) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get outbound signing request") + }) + + t.Run("GetOutboundSigningRequest succeeds", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + builder := &coordMockTxBuilder{} + client := &coordMockChainClient{builder: builder} + coord.chains = newTestChainsForCoordinator(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + expectedReq := &common.UnsignedSigningReq{ + SigningHash: []byte{0xaa, 0xbb}, + Nonce: 5, + } + builder.On("GetOutboundSigningRequest", mock.Anything, mock.Anything, uint64(5)). + Return(expectedReq, nil) + + nonce := uint64(5) + data := []byte(`{"tx_id":"0x1","destination_chain":"eip155:1","recipient":"0xRecipient","amount":"1000"}`) + result, err := coord.buildSignTransaction(ctx, data, &nonce) + require.NoError(t, err) + assert.Equal(t, expectedReq, result) + builder.AssertExpectations(t) + }) +} + +func TestAssignSignNonce_SkippedChain(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + skippedChains := map[string]bool{"eip155:1": true} + + nonce, ok := coord.assignSignNonce( + context.Background(), + store.Event{EventID: "e1"}, + "eip155:1", + map[string]int{}, + map[string]uint64{}, + skippedChains, + ) + assert.False(t, ok) + assert.Equal(t, uint64(0), nonce) +} + +func TestAssignSignNonce_SubsequentEventUsesCache(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + + nonceByChain := map[string]uint64{"eip155:1": 10} + inFlightPerChain := map[string]int{"eip155:1": 1} + + nonce, ok := coord.assignSignNonce( + context.Background(), + store.Event{EventID: "e1"}, + "eip155:1", + inFlightPerChain, + nonceByChain, + map[string]bool{}, + ) + assert.True(t, ok) + assert.Equal(t, uint64(11), nonce) + assert.Equal(t, 2, inFlightPerChain["eip155:1"]) +} + +func 
TestAssignSignNonce_SubsequentEventCapReached(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + + nonceByChain := map[string]uint64{"eip155:1": 10} + inFlightPerChain := map[string]int{"eip155:1": PerChainCap} + + nonce, ok := coord.assignSignNonce( + context.Background(), + store.Event{EventID: "e1"}, + "eip155:1", + inFlightPerChain, + nonceByChain, + map[string]bool{}, + ) + assert.False(t, ok) + assert.Equal(t, uint64(0), nonce) +} + +func TestAssignSignNonce_FirstEventWithInFlight_SkipsUntilThreshold(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + + inFlightPerChain := map[string]int{"eip155:1": 1} + nonceByChain := map[string]uint64{} + skippedChains := map[string]bool{} + + nonce, ok := coord.assignSignNonce( + context.Background(), + store.Event{EventID: "e1"}, + "eip155:1", + inFlightPerChain, + nonceByChain, + skippedChains, + ) + assert.False(t, ok) + assert.Equal(t, uint64(0), nonce) + assert.True(t, skippedChains["eip155:1"], "chain should be marked as skipped") + + coord.chainWaitMu.Lock() + assert.Equal(t, 1, coord.consecutiveWaitPerChain["eip155:1"]) + coord.chainWaitMu.Unlock() } // --- Lifecycle --- @@ -480,3 +726,442 @@ func TestCoordinator_StartStop(t *testing.T) { assert.False(t, coord.running, "should be stopped after Stop") coord.mu.RUnlock() } + +func TestGetPartyIDFromPeerID(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + t.Run("found", func(t *testing.T) { + partyID, err := coord.GetPartyIDFromPeerID(ctx, "peer1") + require.NoError(t, err) + assert.Equal(t, "validator1", partyID) + }) + + t.Run("second validator", func(t *testing.T) { + partyID, err := coord.GetPartyIDFromPeerID(ctx, "peer2") + require.NoError(t, err) + assert.Equal(t, "validator2", partyID) + }) + + t.Run("not found", func(t *testing.T) { + _, err := coord.GetPartyIDFromPeerID(ctx, "unknown-peer") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) +} + +func TestGetPeerIDFromPartyID(t 
*testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + t.Run("found", func(t *testing.T) { + peerID, err := coord.GetPeerIDFromPartyID(ctx, "validator1") + require.NoError(t, err) + assert.Equal(t, "peer1", peerID) + }) + + t.Run("second validator", func(t *testing.T) { + peerID, err := coord.GetPeerIDFromPartyID(ctx, "validator2") + require.NoError(t, err) + assert.Equal(t, "peer2", peerID) + }) + + t.Run("not found", func(t *testing.T) { + _, err := coord.GetPeerIDFromPartyID(ctx, "unknown-validator") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) +} + +func TestGetMultiAddrsFromPeerID(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + t.Run("found", func(t *testing.T) { + addrs, err := coord.GetMultiAddrsFromPeerID(ctx, "peer1") + require.NoError(t, err) + assert.Equal(t, []string{"/ip4/127.0.0.1/tcp/9001"}, addrs) + }) + + t.Run("not found", func(t *testing.T) { + _, err := coord.GetMultiAddrsFromPeerID(ctx, "unknown-peer") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) +} + +func TestHandleACK(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + t.Run("ack for untracked event is ignored", func(t *testing.T) { + err := coord.HandleACK(ctx, "peer1", "unknown-event") + assert.NoError(t, err) + }) + + t.Run("ack tracking with registered event", func(t *testing.T) { + coord.ackMu.Lock() + coord.ackTracking["test-event"] = &ackState{ + participants: []string{"validator1", "validator2", "validator3"}, + ackedBy: make(map[string]bool), + ackCount: 0, + } + coord.ackMu.Unlock() + + // First ACK + err := coord.HandleACK(ctx, "peer1", "test-event") + assert.NoError(t, err) + + coord.ackMu.RLock() + state := coord.ackTracking["test-event"] + assert.Equal(t, 1, state.ackCount) + assert.True(t, state.ackedBy["peer1"]) + coord.ackMu.RUnlock() + + // Duplicate ACK from same peer should not increment + err = 
coord.HandleACK(ctx, "peer1", "test-event") + assert.NoError(t, err) + + coord.ackMu.RLock() + assert.Equal(t, 1, coord.ackTracking["test-event"].ackCount) + coord.ackMu.RUnlock() + + // ACK from second peer + err = coord.HandleACK(ctx, "peer2", "test-event") + assert.NoError(t, err) + + coord.ackMu.RLock() + assert.Equal(t, 2, coord.ackTracking["test-event"].ackCount) + coord.ackMu.RUnlock() + }) + + t.Run("ack from non-participant is rejected", func(t *testing.T) { + coord.ackMu.Lock() + coord.ackTracking["restricted-event"] = &ackState{ + participants: []string{"validator1"}, + ackedBy: make(map[string]bool), + ackCount: 0, + } + coord.ackMu.Unlock() + + // peer2 maps to validator2 which is not in participants + err := coord.HandleACK(ctx, "peer2", "restricted-event") + require.Error(t, err) + assert.Contains(t, err.Error(), "not a participant") + }) +} + +func TestCoordinator_DoubleStartStop(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + // Start twice - second should be no-op + coord.Start(ctx) + time.Sleep(10 * time.Millisecond) + coord.Start(ctx) + + coord.mu.RLock() + assert.True(t, coord.running) + coord.mu.RUnlock() + + // Stop + coord.Stop() + time.Sleep(10 * time.Millisecond) + + // Stop again - should be no-op + coord.Stop() + + coord.mu.RLock() + assert.False(t, coord.running) + coord.mu.RUnlock() +} + +func TestNewCoordinator_DefaultPollInterval(t *testing.T) { + evtStore := eventstore.NewStore(nil, zerolog.Nop()) + coord := NewCoordinator( + evtStore, + &pushcore.Client{}, + nil, nil, + "validator1", 100, + 0, // zero poll interval should default to 10s + nil, + zerolog.Nop(), + ) + assert.Equal(t, 10*time.Second, coord.pollInterval) +} + +func TestGetEligibleUV_FundMigrate(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + + eligible := coord.GetEligibleUV("SIGN_FUND_MIGRATE") + assert.Len(t, eligible, 2) + addrs := validatorAddresses(eligible) + assert.True(t, addrs["validator1"]) + 
assert.True(t, addrs["validator2"]) + assert.False(t, addrs["validator3"]) +} + +func TestCoordinator_StopWithoutStart(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + // Stop on a coordinator that was never started should not panic. + coord.Stop() + coord.mu.RLock() + assert.False(t, coord.running) + coord.mu.RUnlock() +} + +func TestGetPartyIDFromPeerID_EdgeCases(t *testing.T) { + ctx := context.Background() + + t.Run("nil NetworkInfo is skipped", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + coord.mu.Lock() + coord.allValidators = []*types.UniversalValidator{ + { + IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "val-no-net"}, + NetworkInfo: nil, + LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}, + }, + } + coord.mu.Unlock() + + _, err := coord.GetPartyIDFromPeerID(ctx, "any-peer") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) + + t.Run("nil IdentifyInfo with matching NetworkInfo", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + coord.mu.Lock() + coord.allValidators = []*types.UniversalValidator{ + { + IdentifyInfo: nil, + NetworkInfo: &types.NetworkInfo{PeerId: "peer-no-id"}, + LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}, + }, + } + coord.mu.Unlock() + + // NetworkInfo matches but IdentifyInfo is nil, so the address is "" and it falls through + _, err := coord.GetPartyIDFromPeerID(ctx, "peer-no-id") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) +} + +func TestGetPeerIDFromPartyID_EdgeCases(t *testing.T) { + ctx := context.Background() + + t.Run("nil IdentifyInfo is skipped", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + coord.mu.Lock() + coord.allValidators = []*types.UniversalValidator{ + { + IdentifyInfo: nil, + NetworkInfo: &types.NetworkInfo{PeerId: "peer-x"}, + LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}, + }, + } 
+ coord.mu.Unlock() + + _, err := coord.GetPeerIDFromPartyID(ctx, "any-val") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) + + t.Run("nil NetworkInfo with matching IdentifyInfo", func(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + coord.mu.Lock() + coord.allValidators = []*types.UniversalValidator{ + { + IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "val-no-net"}, + NetworkInfo: nil, + LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}, + }, + } + coord.mu.Unlock() + + // IdentifyInfo matches but NetworkInfo is nil, falls through to not-found + _, err := coord.GetPeerIDFromPartyID(ctx, "val-no-net") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") + }) +} + +func TestGetMultiAddrsFromPeerID_NilNetworkInfo(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + coord.mu.Lock() + coord.allValidators = []*types.UniversalValidator{ + { + IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v"}, + NetworkInfo: nil, + LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}, + }, + } + coord.mu.Unlock() + + _, err := coord.GetMultiAddrsFromPeerID(ctx, "any-peer") + require.Error(t, err) + assert.Contains(t, err.Error(), "not found") +} + +func TestHandleACK_UnknownPeerID(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + coord.ackMu.Lock() + coord.ackTracking["evt-unknown-peer"] = &ackState{ + participants: []string{"validator1"}, + ackedBy: make(map[string]bool), + ackCount: 0, + } + coord.ackMu.Unlock() + + err := coord.HandleACK(ctx, "totally-unknown-peer", "evt-unknown-peer") + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to get partyID") +} + +func TestHandleACK_AllACKsTriggersBEGIN(t *testing.T) { + coord, _, _ := setupTestCoordinator(t) + ctx := context.Background() + + var sentMessages []string + coord.send = func(_ context.Context, 
peerID string, _ []byte) error { + sentMessages = append(sentMessages, peerID) + return nil + } + + coord.ackMu.Lock() + coord.ackTracking["evt-begin"] = &ackState{ + participants: []string{"validator1", "validator2"}, + ackedBy: map[string]bool{"peer1": true}, + ackCount: 1, + } + coord.ackMu.Unlock() + + // Second ACK completes the set + err := coord.HandleACK(ctx, "peer2", "evt-begin") + require.NoError(t, err) + + // BEGIN should have been sent to both participants + assert.Len(t, sentMessages, 2) + assert.Contains(t, sentMessages, "peer1") + assert.Contains(t, sentMessages, "peer2") + + // ACK tracking should be cleaned up + coord.ackMu.RLock() + _, exists := coord.ackTracking["evt-begin"] + coord.ackMu.RUnlock() + assert.False(t, exists, "ack tracking should be removed after all ACKs received") +} + +func TestGetActiveParticipants(t *testing.T) { + validators := []*types.UniversalValidator{ + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v1"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v2"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_JOIN}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v3"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_INACTIVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v4"}, LifecycleInfo: nil}, + } + + active := getActiveParticipants(validators) + require.Len(t, active, 1) + assert.Equal(t, "v1", active[0].IdentifyInfo.CoreValidatorAddress) + + assert.Nil(t, getActiveParticipants(nil)) +} + +func TestGetCoordinatorParticipants(t *testing.T) { + t.Run("returns active validators when available", func(t *testing.T) { + validators := []*types.UniversalValidator{ + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v1"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}}, + 
{IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v2"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_JOIN}}, + } + result := getCoordinatorParticipants(validators) + require.Len(t, result, 1) + assert.Equal(t, "v1", result[0].IdentifyInfo.CoreValidatorAddress) + }) + + t.Run("falls back to all validators when no active", func(t *testing.T) { + validators := []*types.UniversalValidator{ + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v1"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_JOIN}}, + } + result := getCoordinatorParticipants(validators) + require.Len(t, result, 1) + assert.Equal(t, "v1", result[0].IdentifyInfo.CoreValidatorAddress) + }) + + t.Run("empty input returns empty", func(t *testing.T) { + assert.Empty(t, getCoordinatorParticipants(nil)) + }) +} + +func TestGetSignEligible(t *testing.T) { + validators := []*types.UniversalValidator{ + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v1"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v2"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_LEAVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v3"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_JOIN}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v4"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_INACTIVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v5"}, LifecycleInfo: nil}, + } + + eligible := getSignEligible(validators) + require.Len(t, eligible, 2) + addrs := validatorAddresses(eligible) + assert.True(t, addrs["v1"]) + assert.True(t, addrs["v2"]) + assert.False(t, addrs["v3"]) + assert.False(t, addrs["v4"]) + assert.False(t, addrs["v5"]) +} + +func TestGetEligibleForProtocol(t *testing.T) { + 
validators := []*types.UniversalValidator{ + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v1"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_ACTIVE}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v2"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_JOIN}}, + {IdentifyInfo: &types.IdentityInfo{CoreValidatorAddress: "v3"}, LifecycleInfo: &types.LifecycleInfo{CurrentStatus: types.UVStatus_UV_STATUS_PENDING_LEAVE}}, + } + + t.Run("KEYGEN includes Active+PendingJoin", func(t *testing.T) { + result := getEligibleForProtocol("KEYGEN", validators) + assert.Len(t, result, 2) + addrs := validatorAddresses(result) + assert.True(t, addrs["v1"]) + assert.True(t, addrs["v2"]) + }) + + t.Run("KEYREFRESH includes Active+PendingLeave", func(t *testing.T) { + result := getEligibleForProtocol("KEYREFRESH", validators) + assert.Len(t, result, 2) + addrs := validatorAddresses(result) + assert.True(t, addrs["v1"]) + assert.True(t, addrs["v3"]) + }) + + t.Run("SIGN_OUTBOUND includes Active+PendingLeave", func(t *testing.T) { + result := getEligibleForProtocol("SIGN_OUTBOUND", validators) + assert.Len(t, result, 2) + addrs := validatorAddresses(result) + assert.True(t, addrs["v1"]) + assert.True(t, addrs["v3"]) + }) + + t.Run("SIGN_FUND_MIGRATE includes Active+PendingLeave", func(t *testing.T) { + result := getEligibleForProtocol("SIGN_FUND_MIGRATE", validators) + assert.Len(t, result, 2) + }) + + t.Run("QUORUM_CHANGE includes Active+PendingJoin", func(t *testing.T) { + result := getEligibleForProtocol("QUORUM_CHANGE", validators) + assert.Len(t, result, 2) + addrs := validatorAddresses(result) + assert.True(t, addrs["v1"]) + assert.True(t, addrs["v2"]) + }) + + t.Run("unknown returns nil", func(t *testing.T) { + assert.Nil(t, getEligibleForProtocol("UNKNOWN", validators)) + }) +} diff --git a/universalClient/tss/expirysweeper/sweeper_test.go b/universalClient/tss/expirysweeper/sweeper_test.go 
index dca9f5d6..ee3c8e87 100644 --- a/universalClient/tss/expirysweeper/sweeper_test.go +++ b/universalClient/tss/expirysweeper/sweeper_test.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "testing" + "time" "github.com/rs/zerolog" "github.com/stretchr/testify/assert" @@ -11,6 +12,9 @@ import ( "gorm.io/driver/sqlite" "gorm.io/gorm" + utsstypes "github.com/pushchain/push-chain-node/x/utss/types" + + "github.com/pushchain/push-chain-node/universalClient/pushcore" "github.com/pushchain/push-chain-node/universalClient/store" "github.com/pushchain/push-chain-node/universalClient/tss/eventstore" ) @@ -56,6 +60,8 @@ func runSweepBatch(t *testing.T, s *Sweeper, currentBlock uint64) { ev := event if ev.Type == store.EventTypeSignOutbound { require.NoError(t, s.voteOutboundFailureAndMarkReverted(ctx, &ev, "event expired before TSS could start")) + } else if ev.Type == store.EventTypeSignFundMigrate { + require.NoError(t, s.voteFundMigrationFailureAndMarkReverted(ctx, &ev, "event expired before TSS could start")) } else { require.NoError(t, s.eventStore.Update(ev.EventID, map[string]any{"status": store.StatusReverted})) } @@ -139,3 +145,283 @@ func TestSweep(t *testing.T) { assert.Equal(t, "CONFIRMED", v1.Status) }) } + +func TestNewSweeper(t *testing.T) { + t.Run("default check interval", func(t *testing.T) { + s := NewSweeper(Config{ + Logger: zerolog.Nop(), + }) + assert.Equal(t, defaultCheckInterval, s.checkInterval) + }) + + t.Run("custom check interval", func(t *testing.T) { + s := NewSweeper(Config{ + CheckInterval: 5 * time.Second, + Logger: zerolog.Nop(), + }) + assert.Equal(t, 5*time.Second, s.checkInterval) + }) + + t.Run("all fields set", func(t *testing.T) { + db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + s := NewSweeper(Config{ + EventStore: evtStore, + CheckInterval: 10 * time.Second, + Logger: zerolog.Nop(), + }) + assert.Equal(t, 10*time.Second, s.checkInterval) + assert.NotNil(t, s.eventStore) + assert.Nil(t, 
s.pushSigner) + assert.Nil(t, s.pushCore) + }) +} + +func fundMigrationEventData(t *testing.T, migrationID uint64, chain string) []byte { + t.Helper() + data, err := json.Marshal(utsstypes.FundMigrationInitiatedEventData{ + MigrationID: migrationID, + Chain: chain, + }) + require.NoError(t, err) + return data +} + +func TestSweep_FundMigration(t *testing.T) { + t.Run("expired CONFIRMED SIGN_FUND_MIGRATE marked REVERTED (pushSigner nil skips vote)", func(t *testing.T) { + sweeper, evtStore, db := setupTestSweeper(t) + + db.Create(&store.Event{ + EventID: "expired-fm", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: store.EventTypeSignFundMigrate, + EventData: fundMigrationEventData(t, 1, "eip155:1"), + }) + + ctx := context.Background() + events, err := sweeper.eventStore.GetExpiredConfirmedEvents(100, sweepBatchSize) + require.NoError(t, err) + require.Len(t, events, 1) + + ev := events[0] + require.NoError(t, sweeper.voteFundMigrationFailureAndMarkReverted(ctx, &ev, "event expired")) + + e, _ := evtStore.GetEvent("expired-fm") + assert.Equal(t, "REVERTED", e.Status) + }) + + t.Run("fund migration with invalid event data returns error", func(t *testing.T) { + sweeper, _, _ := setupTestSweeper(t) + ctx := context.Background() + + event := &store.Event{ + EventID: "bad-fm", + EventData: []byte("not json"), + } + err := sweeper.voteFundMigrationFailureAndMarkReverted(ctx, event, "test error") + require.Error(t, err) + assert.Contains(t, err.Error(), "parse") + }) +} + +func TestSweep_VoteOutboundFailureInvalidJSON(t *testing.T) { + sweeper, _, _ := setupTestSweeper(t) + ctx := context.Background() + + event := &store.Event{ + EventID: "bad-sign", + EventData: []byte("not json"), + } + err := sweeper.voteOutboundFailureAndMarkReverted(ctx, event, "test error") + require.Error(t, err) + assert.Contains(t, err.Error(), "parse") +} + +func TestSweep_FundMigrateViaRunSweepBatch(t *testing.T) { + t.Run("fund migrate event swept through runSweepBatch", 
func(t *testing.T) { + sweeper, evtStore, db := setupTestSweeper(t) + + db.Create(&store.Event{ + EventID: "fm-batch", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: store.EventTypeSignFundMigrate, + EventData: fundMigrationEventData(t, 42, "eip155:1"), + }) + + runSweepBatch(t, sweeper, 100) + + e, err := evtStore.GetEvent("fm-batch") + require.NoError(t, err) + assert.Equal(t, "REVERTED", e.Status) + }) + + t.Run("mixed outbound and fund migrate events all swept", func(t *testing.T) { + sweeper, evtStore, db := setupTestSweeper(t) + + db.Create(&store.Event{ + EventID: "sign-1", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: store.EventTypeSignOutbound, + EventData: signEventData(t, "tx-1", "utx-1"), + }) + db.Create(&store.Event{ + EventID: "fm-1", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: store.EventTypeSignFundMigrate, + EventData: fundMigrationEventData(t, 10, "eip155:137"), + }) + db.Create(&store.Event{ + EventID: "keygen-1", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: "KEYGEN", + }) + + runSweepBatch(t, sweeper, 100) + + for _, id := range []string{"sign-1", "fm-1", "keygen-1"} { + e, err := evtStore.GetEvent(id) + require.NoError(t, err) + assert.Equal(t, "REVERTED", e.Status, "event %s should be REVERTED", id) + } + }) +} + +func TestStart_ContextCancellation(t *testing.T) { + db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + + // Use a long check interval so the ticker never fires before we cancel. + sweeper := NewSweeper(Config{ + EventStore: evtStore, + CheckInterval: 10 * time.Second, + Logger: zerolog.Nop(), + }) + + ctx, cancel := context.WithCancel(context.Background()) + + done := make(chan struct{}) + go func() { + // Directly call run (blocking) so we can detect when it returns. + sweeper.run(ctx) + close(done) + }() + + // Cancel immediately; the goroutine should exit via ctx.Done(). 
+ cancel() + + select { + case <-done: + // run returned cleanly — pass. + case <-time.After(2 * time.Second): + t.Fatal("sweeper.run did not stop after context cancellation") + } +} + +func TestStart_SpawnsGoroutine(t *testing.T) { + db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + + sweeper := NewSweeper(Config{ + EventStore: evtStore, + CheckInterval: 10 * time.Second, + Logger: zerolog.Nop(), + }) + + ctx, cancel := context.WithCancel(context.Background()) + sweeper.Start(ctx) + + // Cancel and give the goroutine time to exit. + cancel() + time.Sleep(100 * time.Millisecond) +} + +func TestSweep_EmptyPushCore_ReturnsOnError(t *testing.T) { + db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + + // Create a pushcore.Client with no endpoints — GetLatestBlock will return an error. + emptyCore := &pushcore.Client{} + + sweeper := NewSweeper(Config{ + EventStore: evtStore, + PushCore: emptyCore, + CheckInterval: 10 * time.Second, + Logger: zerolog.Nop(), + }) + + // Insert an expired event to confirm it is NOT swept (because GetLatestBlock fails first). + db.Create(&store.Event{ + EventID: "should-not-sweep", BlockHeight: 50, ExpiryBlockHeight: 90, + Status: "CONFIRMED", Type: "KEYGEN", + }) + + sweeper.sweep(context.Background()) + + // Event should remain CONFIRMED because sweep returned early on GetLatestBlock error. + e, err := evtStore.GetEvent("should-not-sweep") + require.NoError(t, err) + assert.Equal(t, "CONFIRMED", e.Status) +} + +func TestVoteOutboundFailureAndMarkReverted_UpdateFailure(t *testing.T) { + // Simulate eventStore.Update failure by closing the DB before calling the function. 
+ db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + + sweeper := &Sweeper{ + eventStore: evtStore, + pushSigner: nil, // vote skipped, but Update still called + logger: zerolog.Nop(), + } + + // Insert an event so Update can find it + db.Create(&store.Event{ + EventID: "update-fail", + Status: "CONFIRMED", + Type: store.EventTypeSignOutbound, + EventData: signEventData(t, "tx-uf", "utx-uf"), + }) + + // Close the underlying SQL connection to force Update to fail + sqlDB, err := db.DB() + require.NoError(t, err) + sqlDB.Close() + + err = sweeper.voteOutboundFailureAndMarkReverted(context.Background(), + &store.Event{ + EventID: "update-fail", + EventData: signEventData(t, "tx-uf", "utx-uf"), + }, + "event expired", + ) + require.Error(t, err) + assert.Contains(t, err.Error(), "mark event") +} + +func TestVoteFundMigrationFailureAndMarkReverted_UpdateFailure(t *testing.T) { + db := setupTestDB(t) + evtStore := eventstore.NewStore(db, zerolog.Nop()) + + sweeper := &Sweeper{ + eventStore: evtStore, + pushSigner: nil, + logger: zerolog.Nop(), + } + + db.Create(&store.Event{ + EventID: "fm-update-fail", + Status: "CONFIRMED", + Type: store.EventTypeSignFundMigrate, + EventData: fundMigrationEventData(t, 7, "eip155:1"), + }) + + // Close DB to force Update failure + sqlDB, err := db.DB() + require.NoError(t, err) + sqlDB.Close() + + err = sweeper.voteFundMigrationFailureAndMarkReverted(context.Background(), + &store.Event{ + EventID: "fm-update-fail", + EventData: fundMigrationEventData(t, 7, "eip155:1"), + }, + "event expired", + ) + require.Error(t, err) + assert.Contains(t, err.Error(), "mark event") +} diff --git a/universalClient/tss/sessionmanager/sessionmanager_test.go b/universalClient/tss/sessionmanager/sessionmanager_test.go index 170461fd..cdd5b1b3 100644 --- a/universalClient/tss/sessionmanager/sessionmanager_test.go +++ b/universalClient/tss/sessionmanager/sessionmanager_test.go @@ -3,6 +3,7 @@ package sessionmanager import ( 
"context" "encoding/json" + "fmt" "reflect" "testing" "time" @@ -25,6 +26,7 @@ import ( "github.com/pushchain/push-chain-node/universalClient/tss/eventstore" "github.com/pushchain/push-chain-node/universalClient/tss/keyshare" uexecutortypes "github.com/pushchain/push-chain-node/x/uexecutor/types" + utsstypes "github.com/pushchain/push-chain-node/x/utss/types" "github.com/pushchain/push-chain-node/x/uvalidator/types" ) @@ -445,3 +447,556 @@ func TestVerifySigningRequest_OutboundDisabled(t *testing.T) { } }) } + +func TestVerifyOutboundSigningRequest_Validation(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + ctx := context.Background() + + outboundData := uexecutortypes.OutboundCreatedEvent{ + DestinationChain: "eip155:1", + } + eventDataBytes, _ := json.Marshal(outboundData) + + event := &store.Event{ + EventID: "sign-event-1", + Type: "SIGN_OUTBOUND", + Status: store.StatusConfirmed, + EventData: eventDataBytes, + } + + t.Run("nil request is rejected", func(t *testing.T) { + err := sm.verifyOutboundSigningRequest(ctx, event, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "unsigned transaction request is required") + }) + + t.Run("empty signing hash is rejected", func(t *testing.T) { + err := sm.verifyOutboundSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "signing hash is missing") + }) + + t.Run("invalid event data JSON is rejected", func(t *testing.T) { + badEvent := &store.Event{ + EventID: "sign-bad-json", + Type: "SIGN_OUTBOUND", + EventData: []byte("not-json"), + } + err := sm.verifyOutboundSigningRequest(ctx, badEvent, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse outbound event data") + }) + + t.Run("empty destination chain is rejected", func(t *testing.T) { + noChainData := uexecutortypes.OutboundCreatedEvent{} + noChainBytes, _ := 
json.Marshal(noChainData) + noChainEvent := &store.Event{ + EventID: "sign-no-chain", + Type: "SIGN_OUTBOUND", + EventData: noChainBytes, + } + err := sm.verifyOutboundSigningRequest(ctx, noChainEvent, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "destination chain is missing") + }) + + t.Run("nil chains manager skips hash verification and succeeds", func(t *testing.T) { + sm.chains = nil + err := sm.verifyOutboundSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01, 0x02}, + }) + assert.NoError(t, err) + }) +} + +func TestVerifyFundMigrationSigningRequest_Validation(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + ctx := context.Background() + + t.Run("nil request is rejected", func(t *testing.T) { + event := &store.Event{EventID: "fm-1", Type: store.EventTypeSignFundMigrate} + err := sm.verifyFundMigrationSigningRequest(ctx, event, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "unsigned transaction request is required") + }) + + t.Run("empty signing hash is rejected", func(t *testing.T) { + event := &store.Event{EventID: "fm-2", Type: store.EventTypeSignFundMigrate} + err := sm.verifyFundMigrationSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "signing hash is missing") + }) + + t.Run("invalid event data JSON is rejected", func(t *testing.T) { + event := &store.Event{ + EventID: "fm-bad-json", + Type: store.EventTypeSignFundMigrate, + EventData: []byte("not valid json"), + } + err := sm.verifyFundMigrationSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse fund migration event data") + }) + + t.Run("invalid old TSS pubkey is rejected", func(t *testing.T) { + migrationData := utsstypes.FundMigrationInitiatedEventData{ + 
OldTssPubkey: "not-a-valid-pubkey", + CurrentTssPubkey: "also-invalid", + Chain: "eip155:1", + } + eventDataBytes, _ := json.Marshal(migrationData) + event := &store.Event{ + EventID: "fm-bad-pubkey", + Type: store.EventTypeSignFundMigrate, + EventData: eventDataBytes, + } + err := sm.verifyFundMigrationSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to derive old TSS address") + }) + + t.Run("invalid current TSS pubkey is rejected", func(t *testing.T) { + // Valid old key but invalid current key + validPub := "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798" + migrationData := utsstypes.FundMigrationInitiatedEventData{ + OldTssPubkey: validPub, + CurrentTssPubkey: "not-valid", + Chain: "eip155:1", + } + eventDataBytes, _ := json.Marshal(migrationData) + event := &store.Event{ + EventID: "fm-bad-cur-pubkey", + Type: store.EventTypeSignFundMigrate, + EventData: eventDataBytes, + } + err := sm.verifyFundMigrationSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01}, + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to derive current TSS address") + }) + + t.Run("nil chains manager skips hash verification and succeeds", func(t *testing.T) { + // Use the well-known secp256k1 generator point (valid compressed pubkey) + genPoint := "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798" + + migrationData := utsstypes.FundMigrationInitiatedEventData{ + OldTssPubkey: genPoint, + CurrentTssPubkey: genPoint, + Chain: "eip155:1", + GasPrice: "1000000000", + GasLimit: 21000, + } + eventDataBytes, _ := json.Marshal(migrationData) + event := &store.Event{ + EventID: "fm-nil-chains", + Type: store.EventTypeSignFundMigrate, + EventData: eventDataBytes, + } + sm.chains = nil + err := sm.verifyFundMigrationSigningRequest(ctx, event, &common.UnsignedSigningReq{ + SigningHash: []byte{0x01, 0x02}, + 
}) + assert.NoError(t, err) + }) +} + +func TestHandleSetupMessage_EventStatus(t *testing.T) { + sm, _, _, _, _, testDB := setupTestSessionManager(t) + ctx := context.Background() + + t.Run("event in IN_PROGRESS status is rejected", func(t *testing.T) { + event := store.Event{ + EventID: "event-in-progress", + BlockHeight: 100, + Type: "KEYGEN", + Status: store.StatusInProgress, + } + require.NoError(t, testDB.Create(&event).Error) + + msg := coordinator.Message{ + Type: "setup", + EventID: "event-in-progress", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "not in confirmed status") + }) + + t.Run("event in COMPLETED status is rejected", func(t *testing.T) { + event := store.Event{ + EventID: "event-completed", + BlockHeight: 100, + Type: "KEYGEN", + Status: store.StatusCompleted, + } + require.NoError(t, testDB.Create(&event).Error) + + msg := coordinator.Message{ + Type: "setup", + EventID: "event-completed", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "not in confirmed status") + }) + + t.Run("event in REVERTED status is rejected", func(t *testing.T) { + event := store.Event{ + EventID: "event-reverted", + BlockHeight: 100, + Type: "KEYGEN", + Status: store.StatusReverted, + } + require.NoError(t, testDB.Create(&event).Error) + + msg := coordinator.Message{ + Type: "setup", + EventID: "event-reverted", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "not in confirmed status") + }) + + t.Run("event in SIGNED status is rejected", func(t *testing.T) { + event := store.Event{ + EventID: "event-signed", + BlockHeight: 100, + Type: "SIGN_OUTBOUND", + Status: store.StatusSigned, + } + require.NoError(t, testDB.Create(&event).Error) + + msg := coordinator.Message{ + Type: 
"setup", + EventID: "event-signed", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "not in confirmed status") + }) + + t.Run("duplicate setup for existing session is silently ignored", func(t *testing.T) { + // Pre-create a session entry + mockSess := new(mockSession) + sm.mu.Lock() + sm.sessions["event-dup"] = &sessionState{ + session: mockSess, + protocolType: "KEYGEN", + coordinator: "peer1", + expiryTime: time.Now().Add(5 * time.Minute), + participants: []string{"validator1", "validator2"}, + } + sm.mu.Unlock() + + msg := coordinator.Message{ + Type: "setup", + EventID: "event-dup", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + assert.NoError(t, err, "duplicate setup should be silently ignored") + }) +} + +func TestHandleBeginMessage(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + ctx := context.Background() + + t.Run("session not found", func(t *testing.T) { + msg := coordinator.Message{ + Type: "begin", + EventID: "nonexistent", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "does not exist") + }) + + t.Run("sender is not the session coordinator", func(t *testing.T) { + mockSess := new(mockSession) + sm.mu.Lock() + sm.sessions["begin-event-1"] = &sessionState{ + session: mockSess, + protocolType: "KEYGEN", + coordinator: "peer1", + expiryTime: time.Now().Add(5 * time.Minute), + participants: []string{"validator1", "validator2"}, + } + sm.mu.Unlock() + + msg := coordinator.Message{ + Type: "begin", + EventID: "begin-event-1", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer2", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "begin message must come from coordinator") + }) +} + +func TestSendACK(t *testing.T) { + t.Run("marshals and sends ACK message 
correctly", func(t *testing.T) { + var capturedPeerID string + var capturedData []byte + + sendFn := func(ctx context.Context, peerID string, data []byte) error { + capturedPeerID = peerID + capturedData = data + return nil + } + + sm := NewSessionManager( + nil, nil, nil, nil, nil, + sendFn, + "validator1", + 3*time.Minute, 30*time.Second, 60, + zerolog.Nop(), + nil, + ) + + err := sm.sendACK(context.Background(), "coord-peer", "evt-123") + require.NoError(t, err) + assert.Equal(t, "coord-peer", capturedPeerID) + + var msg coordinator.Message + require.NoError(t, json.Unmarshal(capturedData, &msg)) + assert.Equal(t, "ack", msg.Type) + assert.Equal(t, "evt-123", msg.EventID) + assert.Nil(t, msg.Payload) + assert.Nil(t, msg.Participants) + }) + + t.Run("returns error when send fails", func(t *testing.T) { + sendFn := func(ctx context.Context, peerID string, data []byte) error { + return fmt.Errorf("network error") + } + + sm := NewSessionManager( + nil, nil, nil, nil, nil, + sendFn, + "validator1", + 3*time.Minute, 30*time.Second, 60, + zerolog.Nop(), + nil, + ) + + err := sm.sendACK(context.Background(), "coord-peer", "evt-456") + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to send ACK message") + }) +} + +func TestCleanSession(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + + mockSess := new(mockSession) + mockSess.On("Close").Return() + + sm.mu.Lock() + sm.sessions["clean-evt"] = &sessionState{ + session: mockSess, + protocolType: "KEYGEN", + coordinator: "peer1", + expiryTime: time.Now().Add(5 * time.Minute), + participants: []string{"validator1"}, + } + sm.mu.Unlock() + + state := sm.sessions["clean-evt"] + sm.cleanSession("clean-evt", state) + + sm.mu.RLock() + _, exists := sm.sessions["clean-evt"] + sm.mu.RUnlock() + assert.False(t, exists, "session should be removed after cleanup") + mockSess.AssertCalled(t, "Close") +} + +func TestStart_ContextCancellation(t *testing.T) { + sm, _, _, _, _, _ := 
setupTestSessionManager(t) + // Use a very short check interval so the goroutine ticks quickly. + sm.sessionExpiryCheckInterval = 10 * time.Millisecond + + ctx, cancel := context.WithCancel(context.Background()) + sm.Start(ctx) + + // Let it run a couple of ticks, then cancel. + time.Sleep(50 * time.Millisecond) + cancel() + + // Give goroutine time to exit — no panic, no hang. + time.Sleep(50 * time.Millisecond) +} + +func TestHandleStepMessage_InputAndStep(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + ctx := context.Background() + + t.Run("successful input routes to session", func(t *testing.T) { + mockSess := new(mockSession) + mockSess.On("InputMessage", []byte("step-payload")).Return(nil) + // processSessionStep will call Step() + mockSess.On("Step").Return([]dkls.Message{}, false, nil) + + sm.mu.Lock() + sm.sessions["step-evt"] = &sessionState{ + session: mockSess, + protocolType: "KEYGEN", + coordinator: "peer1", + expiryTime: time.Now().Add(5 * time.Minute), + participants: []string{"validator1", "validator2"}, + } + sm.mu.Unlock() + + msg := coordinator.Message{ + Type: "step", + EventID: "step-evt", + Payload: []byte("step-payload"), + } + data, _ := json.Marshal(msg) + // peer1 maps to validator1 which is in participants + err := sm.HandleIncomingMessage(ctx, "peer1", data) + assert.NoError(t, err) + mockSess.AssertCalled(t, "InputMessage", []byte("step-payload")) + mockSess.AssertCalled(t, "Step") + }) + + t.Run("InputMessage error is propagated", func(t *testing.T) { + mockSess := new(mockSession) + mockSess.On("InputMessage", []byte("bad-data")).Return(fmt.Errorf("decode error")) + + sm.mu.Lock() + sm.sessions["step-err-evt"] = &sessionState{ + session: mockSess, + protocolType: "KEYGEN", + coordinator: "peer1", + expiryTime: time.Now().Add(5 * time.Minute), + participants: []string{"validator1"}, + } + sm.mu.Unlock() + + msg := coordinator.Message{ + Type: "step", + EventID: "step-err-evt", + Payload: []byte("bad-data"), + } + 
data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to input message") + }) +} + +func TestHandleSigningComplete(t *testing.T) { + sm, _, _, _, _, testDB := setupTestSessionManager(t) + + t.Run("nil signing request returns error", func(t *testing.T) { + err := sm.handleSigningComplete(context.Background(), "evt-1", []byte(`{}`), []byte{0x01}, nil) + require.Error(t, err) + assert.Contains(t, err.Error(), "signing request is nil") + }) + + t.Run("invalid event data JSON returns error", func(t *testing.T) { + req := &common.UnsignedSigningReq{ + SigningHash: []byte{0xab, 0xcd}, + Nonce: 42, + } + err := sm.handleSigningComplete(context.Background(), "evt-2", []byte("not json"), []byte{0x01}, req) + require.Error(t, err) + assert.Contains(t, err.Error(), "failed to parse event data") + }) + + t.Run("successful signing complete persists data", func(t *testing.T) { + // Create an event in DB to be updated + event := store.Event{ + EventID: "sign-complete-1", + BlockHeight: 200, + Type: store.EventTypeSignOutbound, + Status: store.StatusInProgress, + EventData: []byte(`{"destination_chain":"eip155:1","recipient":"0xabc"}`), + } + require.NoError(t, testDB.Create(&event).Error) + + req := &common.UnsignedSigningReq{ + SigningHash: []byte{0xde, 0xad}, + Nonce: 99, + } + err := sm.handleSigningComplete(context.Background(), "sign-complete-1", event.EventData, []byte{0xbe, 0xef}, req) + require.NoError(t, err) + + // Verify event was updated + var updated store.Event + require.NoError(t, testDB.Where("event_id = ?", "sign-complete-1").First(&updated).Error) + assert.Equal(t, store.StatusSigned, updated.Status) + + // Verify signing_data was injected into event_data + var rawData map[string]any + require.NoError(t, json.Unmarshal(updated.EventData, &rawData)) + signingData, ok := rawData["signing_data"].(map[string]any) + require.True(t, ok, "signing_data should be present in 
event data") + assert.Equal(t, "beef", signingData["signature"]) + assert.Equal(t, "dead", signingData["signing_hash"]) + assert.Equal(t, float64(99), signingData["nonce"]) + }) +} + +func TestHandleIncomingMessage_Routing(t *testing.T) { + sm, _, _, _, _, _ := setupTestSessionManager(t) + ctx := context.Background() + + t.Run("begin message routes to handleBeginMessage", func(t *testing.T) { + msg := coordinator.Message{ + Type: "begin", + EventID: "no-such-event", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + // Should fail with "does not exist" from handleBeginMessage (not "unknown type") + require.Error(t, err) + assert.Contains(t, err.Error(), "does not exist") + }) + + t.Run("setup message routes to handleSetupMessage", func(t *testing.T) { + msg := coordinator.Message{ + Type: "setup", + EventID: "no-such-event", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + // Should fail with "not found in database" from handleSetupMessage + require.Error(t, err) + assert.Contains(t, err.Error(), "not found in database") + }) + + t.Run("step message routes to handleStepMessage", func(t *testing.T) { + msg := coordinator.Message{ + Type: "step", + EventID: "no-such-event", + } + data, _ := json.Marshal(msg) + err := sm.HandleIncomingMessage(ctx, "peer1", data) + // Should fail with "does not exist" from handleStepMessage + require.Error(t, err) + assert.Contains(t, err.Error(), "does not exist") + }) +} diff --git a/universalClient/tss/tss_test.go b/universalClient/tss/tss_test.go index 4418fdb7..9a4f3c42 100644 --- a/universalClient/tss/tss_test.go +++ b/universalClient/tss/tss_test.go @@ -14,6 +14,7 @@ import ( "github.com/pushchain/push-chain-node/universalClient/db" "github.com/pushchain/push-chain-node/universalClient/pushcore" + "github.com/pushchain/push-chain-node/universalClient/tss/coordinator" ) // generateTestPrivateKey generates a random Ed25519 private key for testing. 
@@ -196,3 +197,149 @@ func TestNode_PeerID_ListenAddrs(t *testing.T) { assert.NotEmpty(t, node.ListenAddrs()) }) } + +func TestConvertPrivateKeyHexToBase64(t *testing.T) { + t.Run("valid 32-byte hex key", func(t *testing.T) { + seed := make([]byte, 32) + _, err := rand.Read(seed) + require.NoError(t, err) + + hexKey := hex.EncodeToString(seed) + result, err := convertPrivateKeyHexToBase64(hexKey) + require.NoError(t, err) + assert.NotEmpty(t, result) + }) + + t.Run("invalid hex characters", func(t *testing.T) { + _, err := convertPrivateKeyHexToBase64("zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz") + require.Error(t, err) + assert.Contains(t, err.Error(), "hex decode failed") + }) + + t.Run("wrong length 16 bytes", func(t *testing.T) { + shortKey := hex.EncodeToString(make([]byte, 16)) + _, err := convertPrivateKeyHexToBase64(shortKey) + require.Error(t, err) + assert.Contains(t, err.Error(), "wrong key length") + }) + + t.Run("wrong length 64 bytes", func(t *testing.T) { + longKey := hex.EncodeToString(make([]byte, 64)) + _, err := convertPrivateKeyHexToBase64(longKey) + require.Error(t, err) + assert.Contains(t, err.Error(), "wrong key length") + }) + + t.Run("empty string", func(t *testing.T) { + _, err := convertPrivateKeyHexToBase64("") + require.Error(t, err) + assert.Contains(t, err.Error(), "wrong key length") + }) + + t.Run("key with whitespace is trimmed", func(t *testing.T) { + seed := make([]byte, 32) + _, err := rand.Read(seed) + require.NoError(t, err) + + hexKey := hex.EncodeToString(seed) + paddedKey := " " + hexKey + " \n" + + result, err := convertPrivateKeyHexToBase64(paddedKey) + require.NoError(t, err) + assert.NotEmpty(t, result) + + expected, err := convertPrivateKeyHexToBase64(hexKey) + require.NoError(t, err) + assert.Equal(t, expected, result) + }) +} + +func TestHandleACKMessage_CoordinatorNil(t *testing.T) { + t.Run("coordinator is nil", func(t *testing.T) { + node, _, _ := setupTestNode(t) + // Node is not started, 
so coordinator is nil + err := node.HandleACKMessage(context.Background(), "sender-peer", &coordinator.Message{ + Type: "ack", + EventID: "event-123", + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "coordinator not initialized") + }) + + t.Run("node not started coordinator nil", func(t *testing.T) { + node, _, _ := setupTestNode(t) + assert.Nil(t, node.coordinator) + err := node.HandleACKMessage(context.Background(), "peer-abc", &coordinator.Message{ + Type: "ack", + EventID: "event-456", + }) + require.Error(t, err) + assert.Contains(t, err.Error(), "coordinator not initialized") + }) +} + +func TestNewNode_DefaultValues(t *testing.T) { + database, err := db.OpenInMemoryDB(true) + require.NoError(t, err) + testClient := &pushcore.Client{} + + t.Run("defaults are applied when optional fields are zero", func(t *testing.T) { + cfg := Config{ + ValidatorAddress: "validator1", + P2PPrivateKeyHex: generateTestPrivateKey(t), + LibP2PListen: "/ip4/127.0.0.1/tcp/0", + HomeDir: t.TempDir(), + Password: "test-password", + Database: database, + PushCore: testClient, + Logger: zerolog.Nop(), + } + + node, err := NewNode(context.Background(), cfg) + require.NoError(t, err) + + assert.Equal(t, 10*time.Second, node.coordinatorPollInterval) + assert.Equal(t, uint64(1000), node.coordinatorRange) + assert.Equal(t, 2*time.Minute, node.sessionExpiryTime) + assert.Equal(t, 30*time.Second, node.sessionExpiryCheckInterval) + assert.Equal(t, uint64(60), node.sessionExpiryBlockDelay) + }) + + t.Run("custom values override defaults", func(t *testing.T) { + cfg := Config{ + ValidatorAddress: "validator1", + P2PPrivateKeyHex: generateTestPrivateKey(t), + LibP2PListen: "/ip4/127.0.0.1/tcp/0", + HomeDir: t.TempDir(), + Password: "test-password", + Database: database, + PushCore: testClient, + Logger: zerolog.Nop(), + PollInterval: 5 * time.Second, + CoordinatorRange: 500, + SessionExpiryTime: 10 * time.Minute, + SessionExpiryCheckInterval: 1 * time.Minute, + 
SessionExpiryBlockDelay: 120, + } + + node, err := NewNode(context.Background(), cfg) + require.NoError(t, err) + + assert.Equal(t, 5*time.Second, node.coordinatorPollInterval) + assert.Equal(t, uint64(500), node.coordinatorRange) + assert.Equal(t, 10*time.Minute, node.sessionExpiryTime) + assert.Equal(t, 1*time.Minute, node.sessionExpiryCheckInterval) + assert.Equal(t, uint64(120), node.sessionExpiryBlockDelay) + }) +} + +func TestNode_SendToUnknownPeer(t *testing.T) { + node, _, _ := setupTestNode(t) + ctx := context.Background() + + require.NoError(t, node.Start(ctx)) + defer node.Stop() + + err := node.Send(ctx, "12D3KooWFakeUnknownPeerIDxxxxxxxxxxxxxxxxx", []byte("hello")) + require.Error(t, err) +} diff --git a/universalClient/tss/txbroadcaster/broadcaster_test.go b/universalClient/tss/txbroadcaster/broadcaster_test.go index 4c378b68..6725f154 100644 --- a/universalClient/tss/txbroadcaster/broadcaster_test.go +++ b/universalClient/tss/txbroadcaster/broadcaster_test.go @@ -7,6 +7,7 @@ import ( "fmt" "reflect" "testing" + "time" "unsafe" "github.com/rs/zerolog" @@ -26,10 +27,6 @@ import ( "github.com/pushchain/push-chain-node/universalClient/tss/eventstore" ) -// --------------------------------------------------------------------------- -// Mocks -// --------------------------------------------------------------------------- - type mockTxBuilder struct{ mock.Mock } func (m *mockTxBuilder) GetOutboundSigningRequest(ctx context.Context, data *uexecutortypes.OutboundCreatedEvent, nonce uint64) (*common.UnsignedSigningReq, error) { @@ -85,10 +82,6 @@ func (m *mockChainClient) Stop() error { ret func (m *mockChainClient) IsHealthy() bool { return true } func (m *mockChainClient) GetTxBuilder() (common.TxBuilder, error) { return m.builder, nil } -// --------------------------------------------------------------------------- -// Helpers -// --------------------------------------------------------------------------- - func setupTestDB(t *testing.T) (*eventstore.Store, 
*gorm.DB) { t.Helper() db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) @@ -177,10 +170,6 @@ func newBroadcaster(evtStore *eventstore.Store, ch *chains.Chains, tssAddr strin }) } -// --------------------------------------------------------------------------- -// EVM Tests -// --------------------------------------------------------------------------- - func TestEVM_BroadcastError_NonceConsumed_MarksBroadcasted(t *testing.T) { // Broadcast fails with txHash, finalized nonce shows consumed → BROADCASTED. evtStore, db := setupTestDB(t) @@ -309,10 +298,6 @@ func TestEVM_GetTSSAddressNil_UsesEmptyAddress(t *testing.T) { builder.AssertCalled(t, "GetNextNonce", mock.Anything, "", true) } -// --------------------------------------------------------------------------- -// SVM Tests -// --------------------------------------------------------------------------- - func TestSVM_BroadcastSuccess_MarksBroadcasted(t *testing.T) { // Broadcast succeeds → BROADCASTED with tx hash. evtStore, db := setupTestDB(t) @@ -395,10 +380,6 @@ func TestSVM_BroadcastFails_PDACheckFails_StaysSigned(t *testing.T) { require.Equal(t, store.StatusSigned, ev.Status) // stays SIGNED } -// --------------------------------------------------------------------------- -// processSigned Tests -// --------------------------------------------------------------------------- - func TestProcessSigned_NoEvents_DoesNothing(t *testing.T) { evtStore, _ := setupTestDB(t) builder := &mockTxBuilder{} @@ -439,10 +420,6 @@ func TestProcessSigned_MultipleEvents(t *testing.T) { require.Equal(t, store.StatusBroadcasted, ev2.Status) } -// --------------------------------------------------------------------------- -// markBroadcasted Tests -// --------------------------------------------------------------------------- - func TestMarkBroadcasted_FormatsCAIPTxHash(t *testing.T) { evtStore, db := setupTestDB(t) insertSignedEvent(t, db, "ev-1", "eip155:1", 5) @@ -468,10 +445,6 @@ func 
TestMarkBroadcasted_EmptyTxHash(t *testing.T) { require.Equal(t, "solana:mainnet:", updated.BroadcastedTxHash) } -// --------------------------------------------------------------------------- -// Fund Migration EVM Tests -// --------------------------------------------------------------------------- - // testOldTSSPubkey is a valid compressed secp256k1 pubkey for testing. // Derived address: coordinator.DeriveEVMAddressFromPubkey will succeed with this. const testOldTSSPubkey = "0279be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798" @@ -555,6 +528,58 @@ func TestFundMigrationEVM_BroadcastFails_NonceConsumed(t *testing.T) { require.Equal(t, store.StatusBroadcasted, ev.Status) } +func TestMarkBroadcasted_NonExistentEvent(t *testing.T) { + evtStore, _ := setupTestDB(t) + b := newBroadcaster(evtStore, nil, "") + + ev := &store.Event{EventID: "does-not-exist"} + b.markBroadcasted(ev, "eip155:1", "0xdeadbeef") + // The method logs a warning but does not panic; verify no event was created. 
+} + +func TestMarkBroadcasted_SetsAllFields(t *testing.T) { + evtStore, db := setupTestDB(t) + insertSignedEvent(t, db, "ev-fields", "eip155:1", 5) + + b := newBroadcaster(evtStore, nil, "") + ev := getEvent(t, db, "ev-fields") + b.markBroadcasted(&ev, "eip155:42", "0xcafe") + + updated := getEvent(t, db, "ev-fields") + require.Equal(t, store.StatusBroadcasted, updated.Status) + require.Equal(t, "eip155:42:0xcafe", updated.BroadcastedTxHash) +} + +func TestStart_ContextCancellation(t *testing.T) { + evtStore, _ := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + b := NewBroadcaster(Config{ + EventStore: evtStore, + Chains: ch, + CheckInterval: 50 * time.Millisecond, + Logger: zerolog.Nop(), + }) + + ctx, cancel := context.WithCancel(context.Background()) + done := make(chan struct{}) + go func() { + b.run(ctx) + close(done) + }() + + cancel() + + select { + case <-done: + // run exited cleanly + case <-time.After(2 * time.Second): + t.Fatal("run did not exit after context cancellation") + } +} + func TestFundMigrationEVM_BroadcastFails_NonceNotConsumed_StaysSigned(t *testing.T) { evtStore, db := setupTestDB(t) builder := &mockTxBuilder{} diff --git a/universalClient/tss/txresolver/resolver_test.go b/universalClient/tss/txresolver/resolver_test.go index 90fa7c62..8ac6c099 100644 --- a/universalClient/tss/txresolver/resolver_test.go +++ b/universalClient/tss/txresolver/resolver_test.go @@ -5,6 +5,7 @@ import ( "encoding/json" "reflect" "testing" + "time" "unsafe" "github.com/rs/zerolog" @@ -25,10 +26,6 @@ import ( "github.com/pushchain/push-chain-node/universalClient/tss/eventstore" ) -// --------------------------------------------------------------------------- -// Mocks -// --------------------------------------------------------------------------- - type mockTxBuilder struct{ mock.Mock } func (m *mockTxBuilder) GetOutboundSigningRequest(ctx 
context.Context, data *uexecutortypes.OutboundCreatedEvent, nonce uint64) (*common.UnsignedSigningReq, error) { @@ -84,10 +81,6 @@ func (m *mockChainClient) Stop() error { ret func (m *mockChainClient) IsHealthy() bool { return true } func (m *mockChainClient) GetTxBuilder() (common.TxBuilder, error) { return m.builder, nil } -// --------------------------------------------------------------------------- -// Helpers -// --------------------------------------------------------------------------- - func setupTestDB(t *testing.T) (*eventstore.Store, *gorm.DB) { t.Helper() db, err := gorm.Open(sqlite.Open(":memory:"), &gorm.Config{}) @@ -152,6 +145,30 @@ func getEvent(t *testing.T, db *gorm.DB, eventID string) store.Event { return ev } +func newTestChainsOutboundDisabled(t *testing.T, chainID string, vmType uregistrytypes.VmType, client common.ChainClient) *chains.Chains { + t.Helper() + c := chains.NewChains(nil, nil, &config.Config{PushChainID: "test-chain"}, zerolog.Nop()) + + v := reflect.ValueOf(c).Elem() + + chainsField := v.FieldByName("chains") + chainsMap := *(*map[string]common.ChainClient)(unsafe.Pointer(chainsField.UnsafeAddr())) + chainsMap[chainID] = client + + configsField := v.FieldByName("chainConfigs") + configsMap := *(*map[string]*uregistrytypes.ChainConfig)(unsafe.Pointer(configsField.UnsafeAddr())) + configsMap[chainID] = &uregistrytypes.ChainConfig{ + Chain: chainID, + VmType: vmType, + Enabled: &uregistrytypes.ChainEnabled{ + IsInboundEnabled: true, + IsOutboundEnabled: false, + }, + } + + return c +} + func newResolver(evtStore *eventstore.Store, ch *chains.Chains) *Resolver { return NewResolver(Config{ EventStore: evtStore, @@ -161,10 +178,6 @@ func newResolver(evtStore *eventstore.Store, ch *chains.Chains) *Resolver { }) } -// --------------------------------------------------------------------------- -// parseCAIPTxHash tests -// --------------------------------------------------------------------------- - func TestParseCAIPTxHash(t 
*testing.T) { t.Run("valid CAIP tx hash", func(t *testing.T) { chainID, txHash, err := parseCAIPTxHash("eip155:1:0xabc123") @@ -209,10 +222,6 @@ func TestParseCAIPTxHash(t *testing.T) { }) } -// --------------------------------------------------------------------------- -// extractOutboundIDs tests -// --------------------------------------------------------------------------- - func TestExtractOutboundIDs(t *testing.T) { t.Run("valid event data", func(t *testing.T) { eventData := makeOutboundEventData("tx-123", "utx-456", "eip155:1") @@ -240,10 +249,6 @@ func TestExtractOutboundIDs(t *testing.T) { }) } -// --------------------------------------------------------------------------- -// resolveSVM tests -// --------------------------------------------------------------------------- - func TestSVM_PDAExists_MarksCompleted(t *testing.T) { // PDA found on-chain → mark COMPLETED. evtStore, db := setupTestDB(t) @@ -326,10 +331,6 @@ func TestSVM_InvalidEventData_Skips(t *testing.T) { builder.AssertNotCalled(t, "IsAlreadyExecuted", mock.Anything, mock.Anything) } -// --------------------------------------------------------------------------- -// resolveEvent routing tests -// --------------------------------------------------------------------------- - func TestResolveEventRouting(t *testing.T) { t.Run("invalid CAIP hash with no outbound IDs triggers warning", func(t *testing.T) { evtStore, _ := setupTestDB(t) @@ -368,10 +369,6 @@ func TestResolveEventRouting(t *testing.T) { }) } -// --------------------------------------------------------------------------- -// notFoundCounts tracking tests -// --------------------------------------------------------------------------- - func TestNotFoundCountTracking(t *testing.T) { t.Run("increments on not found", func(t *testing.T) { evtStore, _ := setupTestDB(t) @@ -422,10 +419,6 @@ func TestNotFoundCountTracking(t *testing.T) { }) } -// --------------------------------------------------------------------------- -// 
voteOutboundFailureAndMarkReverted tests -// --------------------------------------------------------------------------- - func TestVoteFailureAndMarkReverted(t *testing.T) { t.Run("no push signer logs warning and returns nil", func(t *testing.T) { evtStore, _ := setupTestDB(t) @@ -441,10 +434,6 @@ func TestVoteFailureAndMarkReverted(t *testing.T) { }) } -// --------------------------------------------------------------------------- -// Fund migration EVM resolution tests -// --------------------------------------------------------------------------- - func makeFundMigrationEventData(migrationID uint64, chain string) []byte { data := utsstypes.FundMigrationInitiatedEventData{ MigrationID: migrationID, @@ -557,10 +546,6 @@ func TestFundMigrationEVM_InsufficientConfirmations_Retries(t *testing.T) { require.Equal(t, store.StatusBroadcasted, ev.Status) } -// --------------------------------------------------------------------------- -// constants tests -// --------------------------------------------------------------------------- - func TestConstants(t *testing.T) { t.Run("maxNotFoundRetries is reasonable", func(t *testing.T) { // At 30s interval, 10 retries = ~5 minutes @@ -571,3 +556,338 @@ func TestConstants(t *testing.T) { assert.Equal(t, 100, processBroadcastedBatchSize) }) } + +func TestNewResolverDefaults(t *testing.T) { + t.Run("default check interval when zero", func(t *testing.T) { + evtStore, _ := setupTestDB(t) + r := NewResolver(Config{ + EventStore: evtStore, + CheckInterval: 0, + Logger: zerolog.Nop(), + }) + assert.Equal(t, 15*time.Second, r.checkInterval) + }) + + t.Run("custom check interval", func(t *testing.T) { + evtStore, _ := setupTestDB(t) + r := NewResolver(Config{ + EventStore: evtStore, + CheckInterval: 45 * time.Second, + Logger: zerolog.Nop(), + }) + assert.Equal(t, 45*time.Second, r.checkInterval) + }) + + t.Run("notFoundCounts map is initialized", func(t *testing.T) { + evtStore, _ := setupTestDB(t) + r := NewResolver(Config{ + 
EventStore: evtStore, + Logger: zerolog.Nop(), + }) + assert.NotNil(t, r.notFoundCounts) + assert.Len(t, r.notFoundCounts, 0) + }) +} + +func TestResolveOutboundEVM_Success_MarksCompleted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + eventData := makeOutboundEventData("tx-100", "utx-200", "eip155:1") + insertBroadcastedEvent(t, db, "ev-evm-1", "eip155:1", "eip155:1:0xsuccess", eventData) + + // Tx found, confirmed (20 confs), status=1 (success) + builder.On("VerifyBroadcastedTx", mock.Anything, "0xsuccess"). + Return(true, uint64(500), uint64(20), uint8(1), nil) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-evm-1") + require.Equal(t, store.StatusCompleted, ev.Status) +} + +func TestResolveOutboundEVM_DisabledChain_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChainsOutboundDisabled(t, "eip155:42", uregistrytypes.VmType_EVM, client) + + eventData := makeOutboundEventData("tx-100", "utx-200", "eip155:42") + insertBroadcastedEvent(t, db, "ev-disabled-1", "eip155:42", "eip155:42:0xwhatever", eventData) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-disabled-1") + require.Equal(t, store.StatusBroadcasted, ev.Status) + builder.AssertNotCalled(t, "VerifyBroadcastedTx", mock.Anything, mock.Anything) +} + +func TestResolveOutboundEVM_InsufficientConfirmations_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + eventData := makeOutboundEventData("tx-100", "utx-200", "eip155:1") + 
insertBroadcastedEvent(t, db, "ev-lowconf-1", "eip155:1", "eip155:1:0xlowconf", eventData) + + // Found but only 2 confirmations (default required is 12) + builder.On("VerifyBroadcastedTx", mock.Anything, "0xlowconf"). + Return(true, uint64(500), uint64(2), uint8(1), nil) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-lowconf-1") + require.Equal(t, store.StatusBroadcasted, ev.Status) +} + +func TestResolveOutboundEVM_Reverted_NoPushSigner_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + eventData := makeOutboundEventData("tx-100", "utx-200", "eip155:1") + insertBroadcastedEvent(t, db, "ev-reverted-1", "eip155:1", "eip155:1:0xreverted", eventData) + + // Found, confirmed, status=0 (reverted) + builder.On("VerifyBroadcastedTx", mock.Anything, "0xreverted"). + Return(true, uint64(500), uint64(20), uint8(0), nil) + builder.On("GetGasFeeUsed", mock.Anything, "0xreverted"). + Return("21000", nil) + + // No pushSigner, so voteOutboundFailureAndMarkReverted returns nil early + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-reverted-1") + require.Equal(t, store.StatusBroadcasted, ev.Status) + builder.AssertCalled(t, "GetGasFeeUsed", mock.Anything, "0xreverted") +} + +func TestResolveOutboundEVM_VerifyError_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + eventData := makeOutboundEventData("tx-100", "utx-200", "eip155:1") + insertBroadcastedEvent(t, db, "ev-err-1", "eip155:1", "eip155:1:0xerror", eventData) + + builder.On("VerifyBroadcastedTx", mock.Anything, "0xerror"). 
+ Return(false, uint64(0), uint64(0), uint8(0), assert.AnError) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-err-1") + require.Equal(t, store.StatusBroadcasted, ev.Status) +} + +func TestResolveFundMigration_NonEVMChain_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "solana:mainnet", uregistrytypes.VmType_SVM, client) + + insertBroadcastedFundMigrationEvent(t, db, "fm-svm-1", "solana:mainnet", "solana:mainnet:somesig", 99) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "fm-svm-1") + require.Equal(t, store.StatusBroadcasted, ev.Status) + builder.AssertNotCalled(t, "VerifyBroadcastedTx", mock.Anything, mock.Anything) +} + +func TestResolveFundMigration_InvalidEventData_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + event := store.Event{ + EventID: "fm-bad-data", + BlockHeight: 100, + ExpiryBlockHeight: 99999, + Type: store.EventTypeSignFundMigrate, + ConfirmationType: "INSTANT", + Status: store.StatusBroadcasted, + EventData: []byte("not valid json"), + BroadcastedTxHash: "eip155:1:0xbaddata", + } + require.NoError(t, db.Create(&event).Error) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "fm-bad-data") + require.Equal(t, store.StatusBroadcasted, ev.Status) + builder.AssertNotCalled(t, "VerifyBroadcastedTx", mock.Anything, mock.Anything) +} + +func TestGetBuilder_ChainNotRegistered_ReturnsError(t *testing.T) { + evtStore, _ := setupTestDB(t) + ch := chains.NewChains(nil, nil, &config.Config{PushChainID: "test-chain"}, zerolog.Nop()) + resolver := 
newResolver(evtStore, ch) + + _, err := resolver.getBuilder("eip155:999") + require.Error(t, err) +} + +func TestResolveEvent_UnknownType_StaysBroadcasted(t *testing.T) { + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, client) + + event := store.Event{ + EventID: "ev-unknown-type", + BlockHeight: 100, + ExpiryBlockHeight: 99999, + Type: "SIGN_SOMETHING_ELSE", + ConfirmationType: "STANDARD", + Status: store.StatusBroadcasted, + EventData: []byte("{}"), + BroadcastedTxHash: "eip155:1:0xwhatever", + } + require.NoError(t, db.Create(&event).Error) + + resolver := newResolver(evtStore, ch) + resolver.resolveEvent(context.Background(), &event) + + ev := getEvent(t, db, "ev-unknown-type") + require.Equal(t, store.StatusBroadcasted, ev.Status) +} + +func TestResolveOutbound_SVM_RoutingPath(t *testing.T) { + // Valid CAIP hash for a non-EVM (SVM) chain routes to resolveSVM. + evtStore, db := setupTestDB(t) + builder := &mockTxBuilder{} + client := &mockChainClient{builder: builder} + ch := newTestChains(t, "solana:mainnet", uregistrytypes.VmType_SVM, client) + + eventData := makeOutboundEventData("tx-svm-1", "utx-svm-1", "solana:mainnet") + insertBroadcastedEvent(t, db, "ev-svm-route", "solana:mainnet", "solana:mainnet:someSig", eventData) + + // PDA found → COMPLETED + builder.On("IsAlreadyExecuted", mock.Anything, "tx-svm-1").Return(true, nil) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + ev := getEvent(t, db, "ev-svm-route") + require.Equal(t, store.StatusCompleted, ev.Status) + builder.AssertCalled(t, "IsAlreadyExecuted", mock.Anything, "tx-svm-1") +} + +func TestResolveOutbound_InvalidCAIP_ValidIDs_VotesFailure(t *testing.T) { + // CAIP parse fails but extractOutboundIDs succeeds → voteOutboundFailureAndMarkReverted called. 
+ // With nil pushSigner, vote returns early (logged) so event stays unchanged. + evtStore, db := setupTestDB(t) + ch := newTestChains(t, "eip155:1", uregistrytypes.VmType_EVM, &mockChainClient{builder: &mockTxBuilder{}}) + + eventData := makeOutboundEventData("tx-bad", "utx-bad", "eip155:1") + insertBroadcastedEvent(t, db, "ev-bad-caip", "eip155:1", "invalid-no-colon", eventData) + + resolver := newResolver(evtStore, ch) + resolver.processBroadcasted(context.Background()) + + // No pushSigner → voteOutboundFailureAndMarkReverted returns nil early (no status change) + ev := getEvent(t, db, "ev-bad-caip") + require.Equal(t, store.StatusBroadcasted, ev.Status) +} + +func TestVoteOutboundFailureAndMarkReverted_EmptyGasFeeDefaults(t *testing.T) { + // Empty gasFeeUsed should default to "0" inside voteOutboundFailureAndMarkReverted. + evtStore, _ := setupTestDB(t) + resolver := NewResolver(Config{ + EventStore: evtStore, + PushSigner: nil, + Logger: zerolog.Nop(), + }) + + event := &store.Event{EventID: "ev-gas-default"} + // Pass empty gasFeeUsed + err := resolver.voteOutboundFailureAndMarkReverted( + context.Background(), event, "tx-1", "utx-1", "0xhash", 100, "", "some error", + ) + // With nil pushSigner, returns nil early (no panic, no error) + assert.NoError(t, err) +} + +func TestVoteOutboundFailureAndMarkReverted_EventStoreUpdateFailure(t *testing.T) { + // With a nil pushSigner the vote is skipped and eventStore.Update is never called, + // so the nonexistent event ID below is never looked up; this test only + // confirms the clean nil-signer early return. 
+ evtStore, _ := setupTestDB(t) + resolver := NewResolver(Config{ + EventStore: evtStore, + PushSigner: nil, + Logger: zerolog.Nop(), + }) + + event := &store.Event{EventID: "nonexistent-event"} + err := resolver.voteOutboundFailureAndMarkReverted( + context.Background(), event, "tx-1", "utx-1", "", 0, "500", "test error", + ) + assert.NoError(t, err) +} + +func TestVoteFundMigrationAndMark_NilSigner(t *testing.T) { + // Nil pushSigner returns early cleanly without panic. + evtStore, _ := setupTestDB(t) + resolver := NewResolver(Config{ + EventStore: evtStore, + PushSigner: nil, + Logger: zerolog.Nop(), + }) + + event := &store.Event{EventID: "fm-nil-signer"} + // Should not panic, just log and return + resolver.voteFundMigrationAndMark(context.Background(), event, 42, "0xhash", true) + resolver.voteFundMigrationAndMark(context.Background(), event, 42, "0xhash", false) +} + +func TestStart_ContextCancellation(t *testing.T) { + evtStore, _ := setupTestDB(t) + resolver := NewResolver(Config{ + EventStore: evtStore, + CheckInterval: 10 * time.Second, + Logger: zerolog.Nop(), + }) + + ctx, cancel := context.WithCancel(context.Background()) + + done := make(chan struct{}) + go func() { + resolver.run(ctx) + close(done) + }() + + cancel() + + select { + case <-done: + // run returned cleanly + case <-time.After(2 * time.Second): + t.Fatal("resolver.run did not stop after context cancellation") + } +} + +func TestProcessBroadcasted_NilChains(t *testing.T) { + evtStore, _ := setupTestDB(t) + resolver := NewResolver(Config{ + EventStore: evtStore, + Chains: nil, + Logger: zerolog.Nop(), + }) + + // Should return early without panic when chains is nil + resolver.processBroadcasted(context.Background()) +}