refactor: speed up tests a bit #28

Merged
Kichiyaki merged 1 commit from refactor/speed-up-tests into master 2024-03-16 06:00:30 +00:00
4 changed files with 484 additions and 428 deletions
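Both Go test files below apply the same change: independent assert.EventuallyWithT / assert.EventuallyWithTf checks that previously ran back to back, each polling for up to its full timeout, are now started in their own goroutines and joined with a sync.WaitGroup, so the worst-case wait drops from roughly the sum of the timeouts to roughly the longest single one. A minimal sketch of the pattern, assuming testify's Eventually helpers as used in the diffs (checkServers and checkTribes are hypothetical stand-ins for the real per-entity assertions):

package example_test

import (
	"sync"
	"testing"
	"time"

	"github.com/stretchr/testify/assert"
)

// Hypothetical placeholders for the per-entity checks done in the real tests.
func checkServers(collect *assert.CollectT) { /* ... */ }
func checkTribes(collect *assert.CollectT) { /* ... */ }

func TestConcurrentEventually(t *testing.T) {
	var wg sync.WaitGroup

	// Each independent assertion polls in its own goroutine, so the checks
	// wait concurrently instead of sequentially.
	wg.Add(1)
	go func() {
		defer wg.Done()
		assert.EventuallyWithT(t, func(collect *assert.CollectT) {
			checkServers(collect)
		}, 60*time.Second, time.Second, "servers")
	}()

	wg.Add(1)
	go func() {
		defer wg.Done()
		assert.EventuallyWithT(t, func(collect *assert.CollectT) {
			checkTribes(collect)
		}, 60*time.Second, time.Second, "tribes")
	}()

	// The test returns only after every concurrent check has finished,
	// so all failures are still reported against it.
	wg.Wait()
}

Reporting through t from several goroutines is fine here because testing.T's Error and Log methods may be called concurrently, as long as the test function does not return before wg.Wait() unblocks.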

View File

@@ -25,6 +25,7 @@ steps:
     image: *go_image
     pull: true
     commands:
+      - go mod vendor
       - make generate
   test:

View File

@@ -12,6 +12,7 @@ import (
 	"os"
 	"os/signal"
 	"path"
+	"sync"
 	"syscall"
 	"testing"
 	"time"
@@ -197,8 +198,15 @@ func TestDataSync(t *testing.T) {
 			require.NoError(t, dataSyncSvc.Sync(ctx))
+			var wg sync.WaitGroup
 			var expectedServers []map[string]any
 			readJSONFile(t, filesys, path.Join("expected", "servers.json"), &expectedServers)
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
 			assert.EventuallyWithTf(t, func(collect *assert.CollectT) {
 				require.NoError(collect, ctx.Err())
@@ -274,9 +282,15 @@ func TestDataSync(t *testing.T) {
 					)
 				}
 			}, 60*time.Second, time.Second, "servers")
+			}()
 			var expectedTribes []map[string]any
 			readJSONFile(t, filesys, path.Join("expected", "tribes.json"), &expectedTribes)
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
 			assert.EventuallyWithT(t, func(collect *assert.CollectT) {
 				require.NoError(collect, ctx.Err())
@@ -341,9 +355,15 @@ func TestDataSync(t *testing.T) {
 					}
 				}
 			}, 60*time.Second, time.Second, "tribes")
+			}()
 			var expectedPlayers []map[string]any
 			readJSONFile(t, filesys, path.Join("expected", "players.json"), &expectedPlayers)
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
 			assert.EventuallyWithT(t, func(collect *assert.CollectT) {
 				require.NoError(collect, ctx.Err())
@@ -405,9 +425,15 @@ func TestDataSync(t *testing.T) {
 					}
 				}
 			}, 60*time.Second, time.Second, "players")
+			}()
 			var expectedVillages []map[string]any
 			readJSONFile(t, filesys, path.Join("expected", "villages.json"), &expectedVillages)
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
 			assert.EventuallyWithT(t, func(collect *assert.CollectT) {
 				require.NoError(collect, ctx.Err())
@@ -452,9 +478,15 @@ func TestDataSync(t *testing.T) {
 					assert.Equal(collect, expected["ProfileURL"], actual.ProfileURL().String(), msg)
 				}
 			}, 60*time.Second, time.Second, "villages")
+			}()
 			var expectedTribeChanges []map[string]any
 			readJSONFile(t, filesys, path.Join("expected", "tribe-changes.json"), &expectedTribeChanges)
+			wg.Add(1)
+			go func() {
+				defer wg.Done()
 			assert.EventuallyWithT(t, func(collect *assert.CollectT) {
 				require.NoError(collect, ctx.Err())
@@ -493,6 +525,9 @@ func TestDataSync(t *testing.T) {
 					assert.EqualValues(collect, expected["NewTribeID"], actual.NewTribeID(), msg)
 				}
 			}, 60*time.Second, time.Second, "tribe changes")
+			}()
+			wg.Wait()
 		})
 	}
 }

View File

@@ -6,6 +6,7 @@ import (
 	"os"
 	"os/signal"
 	"slices"
+	"sync"
 	"syscall"
 	"testing"
 	"time"
@@ -143,6 +144,12 @@ func TestSnapshotCreation(t *testing.T) {
 	require.NoError(t, snapshotSvc.Create(ctx))
+	var wg sync.WaitGroup
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
 	assert.EventuallyWithTf(t, func(collect *assert.CollectT) {
 		require.NoError(collect, ctx.Err())
@@ -172,6 +179,11 @@ func TestSnapshotCreation(t *testing.T) {
 			require.NoError(collect, listParams.SetCursor(res.Next()))
 		}
 	}, 30*time.Second, 500*time.Millisecond, "servers")
+	}()
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
 	assert.EventuallyWithTf(t, func(collect *assert.CollectT) {
 		require.NoError(collect, ctx.Err())
@@ -258,6 +270,11 @@ func TestSnapshotCreation(t *testing.T) {
 		//nolint:testifylint
 		assert.Equal(collect, len(allTribes), cnt)
 	}, 30*time.Second, 500*time.Millisecond, "tribes")
+	}()
+	wg.Add(1)
+	go func() {
+		defer wg.Done()
 	assert.EventuallyWithTf(t, func(collect *assert.CollectT) {
 		require.NoError(collect, ctx.Err())
@@ -342,4 +359,7 @@ func TestSnapshotCreation(t *testing.T) {
 		//nolint:testifylint
 		assert.Equal(collect, len(allPlayers), cnt)
 	}, 30*time.Second, 500*time.Millisecond, "players")
+	}()
+	wg.Wait()
 }

View File

@@ -159,7 +159,7 @@ func (rmq *RabbitMQ) Close() error {
 	return nil
 }
-type TopicNameGenerator func(topic string) string
+type TopicNameGenerator amqp.QueueNameGenerator
 func NewPubSub(
 	tb watermilltest.TestingTB,
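In the last file, TopicNameGenerator is redefined in terms of the AMQP package's queue-name generator type instead of spelling out the function signature itself. Assuming amqp here is the Watermill AMQP package, its QueueNameGenerator has the same underlying type, func(topic string) string, so plain function literals still convert to the redefined type. A small self-contained sketch (QueueNameGenerator is mirrored locally only so the snippet compiles on its own):

package example

// Mirrors the assumed library definition so the example stands alone.
type QueueNameGenerator func(topic string) string

// Same shape as the redefinition in this PR.
type TopicNameGenerator QueueNameGenerator

// Hypothetical generator: any func(topic string) string remains assignable,
// for example one that prefixes names to isolate test queues and topics.
var gen TopicNameGenerator = func(topic string) string {
	return "test." + topic
}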