[Robustness] Add first robustness tests

Add two tests:
- TestManySmallFiles: writes 100k files of size 4 KiB to a directory, snapshots the data tree, then restores it and validates the data.
- TestModifyWorkload: Loops over a simple randomized workload. Performs a series of random file writes to some random sub-directories, then takes a snapshot of the data tree. All snapshots taken during this test are restore-verified at the end.

A global test engine is instantiated in main_test.go, to be used in the robustness test suite across tests (saves time loading/saving metadata once per run instead of per test).
This commit is contained in:
Nick
2020-07-14 10:16:23 -07:00
committed by Jarek Kowalski
parent 82a2fa0ea5
commit ce5e6dcd13
4 changed files with 186 additions and 17 deletions

View File

@@ -0,0 +1,55 @@
// +build darwin linux
package robustness
import (
"context"
"errors"
"log"
"os"
"testing"
engine "github.com/kopia/kopia/tests/robustness/test_engine"
"github.com/kopia/kopia/tests/tools/fio"
"github.com/kopia/kopia/tests/tools/kopiarunner"
)
// eng is the global test engine shared by every test in the robustness
// suite. It is created once in TestMain so that engine metadata is
// loaded/saved once per run instead of once per test.
var eng *engine.Engine

const (
// Local filesystem locations used when no S3 bucket is configured.
fsDataPath = "/tmp/robustness-data"
fsMetadataPath = "/tmp/robustness-metadata"
// Repository path prefixes used when engine.S3BucketNameEnvKey is set.
s3DataPath = "robustness-data"
s3MetadataPath = "robustness-metadata"
)
// TestMain instantiates the shared robustness test engine, initializes its
// repositories (S3-backed when a bucket is configured, local filesystem
// otherwise), runs the test suite, and cleans the engine up afterwards.
func TestMain(m *testing.M) {
	var err error

	eng, err = engine.NewEngine()
	if err != nil {
		log.Println("skipping robustness tests:", err)

		// Missing prerequisites (kopia executable or fio environment not
		// configured) are an expected skip, not a test failure.
		if err == kopiarunner.ErrExeVariableNotSet || errors.Is(err, fio.ErrEnvNotSet) {
			os.Exit(0)
		}

		os.Exit(1)
	}

	switch {
	case os.Getenv(engine.S3BucketNameEnvKey) != "":
		err = eng.InitS3(context.Background(), s3DataPath, s3MetadataPath)
	default:
		err = eng.InitFilesystem(context.Background(), fsDataPath, fsMetadataPath)
	}

	// An engine that failed to initialize must not be used by the tests.
	if err != nil {
		log.Fatalln("error initializing engine:", err)
	}

	result := m.Run()

	// Cleanup persists engine metadata; a failure here may invalidate
	// subsequent runs, so surface it loudly.
	if err := eng.Cleanup(); err != nil {
		panic(err)
	}

	os.Exit(result)
}

View File

@@ -0,0 +1,79 @@
// +build darwin linux
package robustness
import (
	"context"
	"fmt"
	"io/ioutil"
	"math/rand"
	"os"
	"path/filepath"
	"testing"

	"github.com/kopia/kopia/tests/testenv"
	"github.com/kopia/kopia/tests/tools/fio"
)
// TestManySmallFiles writes a large number of small (4 KiB) files into the
// engine's data directory, snapshots the data tree, and verifies that the
// snapshot can be restored and validated.
func TestManySmallFiles(t *testing.T) {
	fileSize := int64(4096)
	numFiles := 100

	fioOpt := fio.Options{}.WithFileSize(fileSize).WithNumFiles(numFiles).WithBlockSize(4096)

	// Write into the root of the engine's local data directory.
	err := eng.FileWriter.WriteFiles("", fioOpt)
	testenv.AssertNoError(t, err)

	ctx := context.TODO()
	snapID, err := eng.Checker.TakeSnapshot(ctx, eng.FileWriter.LocalDataDir)
	testenv.AssertNoError(t, err)

	// Capture the restore output in a temp file, and remove it when the test
	// finishes so repeated runs don't accumulate leaked temp files.
	output, err := ioutil.TempFile("", t.Name())
	testenv.AssertNoError(t, err)

	defer func() {
		output.Close()           //nolint:errcheck
		os.Remove(output.Name()) //nolint:errcheck
	}()

	err = eng.Checker.RestoreSnapshot(ctx, snapID, output)
	testenv.AssertNoError(t, err)
}
// TestModifyWorkload loops over a simple randomized workload: each iteration
// performs between 1 and maxOpsPerMod random file writes against randomly
// chosen subdirectories, then snapshots the data tree. Every snapshot taken
// during the workload is restore-verified at the end of the test.
func TestModifyWorkload(t *testing.T) {
	const (
		numSnapshots = 10
		numDirs      = 10
		maxOpsPerMod = 5
	)

	numFiles := 10
	writeSize := int64(65536 * numFiles)
	fioOpt := fio.Options{}.
		WithDedupePercentage(35).
		WithRandRepeat(false).
		WithBlockSize(4096).
		WithFileSize(writeSize).
		WithNumFiles(numFiles)

	ctx := context.Background()

	snapIDs := make([]string, 0, numSnapshots)

	for i := 0; i < numSnapshots; i++ {
		// Apply a random number (at least one) of write operations, each
		// against a randomly selected subdirectory of this test's tree.
		for op, numOps := 0, rand.Intn(maxOpsPerMod)+1; op < numOps; op++ {
			targetDir := filepath.Join(t.Name(), fmt.Sprintf("dir%d", rand.Intn(numDirs)))
			testenv.AssertNoError(t, eng.FileWriter.WriteFiles(targetDir, fioOpt))
		}

		snapID, err := eng.Checker.TakeSnapshot(ctx, eng.FileWriter.LocalDataDir)
		testenv.AssertNoError(t, err)

		snapIDs = append(snapIDs, snapID)
	}

	// Restore-verify every snapshot collected during the workload.
	for _, id := range snapIDs {
		testenv.AssertNoError(t, eng.Checker.RestoreSnapshot(ctx, id, nil))
	}
}

View File

@@ -27,16 +27,16 @@
// WalkCompare is a checker.Comparer that utilizes the fswalker
// libraries to perform the data consistency check.
type WalkCompare struct {
GlobalFilterMatchers []string
GlobalFilterFuncs []func(string, fswalker.ActionData) bool
}
// NewWalkCompare instantiates a new WalkCompare and returns its pointer
func NewWalkCompare() *WalkCompare {
return &WalkCompare{
GlobalFilterMatchers: []string{
"ctime:",
"atime:",
"mtime:",
GlobalFilterFuncs: []func(string, fswalker.ActionData) bool{
filterFileTimeDiffs,
isRootDirectoryRename,
dirSizeMightBeOffByBlockSizeMultiple,
},
}
}
@@ -94,7 +94,7 @@ func (chk *WalkCompare) Compare(ctx context.Context, path string, data []byte, r
chk.filterReportDiffs(report)
err = validateReport(report)
if err != nil {
if err != nil && reportOut != nil {
printReportSummary(report, reportOut)
b, marshalErr := json.MarshalIndent(report, "", " ")
@@ -134,19 +134,12 @@ func (chk *WalkCompare) filterReportDiffs(report *fswalker.Report) {
DiffItemLoop:
for _, diffItem := range diffItems {
for _, filterStr := range chk.GlobalFilterMatchers {
if strings.Contains(diffItem, filterStr) {
log.Printf("Filtering %s due to filtered prefix %q\n", diffItem, filterStr)
for _, filterFunc := range chk.GlobalFilterFuncs {
if filterFunc(diffItem, mod) {
continue DiffItemLoop
}
}
// Filter the rename of the root directory
if isRootDirectoryRename(diffItem, mod) {
log.Println("Filtering", diffItem, "due to root directory rename")
continue DiffItemLoop
}
newDiffItemList = append(newDiffItemList, diffItem)
}
@@ -170,6 +163,24 @@ func isRootDirectoryRename(diffItem string, mod fswalker.ActionData) bool {
return mod.Before.Info.IsDir && filepath.Clean(mod.Before.Path) == "."
}
// dirSizeMightBeOffByBlockSizeMultiple reports whether a diff line is a
// directory size change whose delta is an exact multiple of the block size,
// which may be filtered out as benign.
func dirSizeMightBeOffByBlockSizeMultiple(str string, mod fswalker.ActionData) bool {
	const blockSize = 4096

	// Only directory "size:" diffs are candidates for this filter.
	isDirSizeDiff := mod.Before.Info.IsDir && strings.Contains(str, "size: ")
	if !isDirSizeDiff {
		return false
	}

	sizeDelta := mod.Before.Stat.Size - mod.After.Stat.Size

	return sizeDelta%blockSize == 0
}
// filterFileTimeDiffs reports whether a diff line describes only a file time
// change (ctime/atime/mtime), which is expected to differ after a restore.
func filterFileTimeDiffs(str string, mod fswalker.ActionData) bool {
	for _, timeField := range []string{"ctime:", "atime:", "mtime:"} {
		if strings.Contains(str, timeField) {
			return true
		}
	}

	return false
}
func validateReport(report *fswalker.Report) error {
if len(report.Modified) > 0 {
return errors.New("files were modified")

View File

@@ -8,6 +8,7 @@
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"github.com/google/fswalker"
@@ -183,8 +184,19 @@ type fields struct {
} {
t.Log(tt.name)
matchers := tt.fields.GlobalFilterMatchers
chk := &WalkCompare{
GlobalFilterMatchers: tt.fields.GlobalFilterMatchers,
GlobalFilterFuncs: []func(string, fswalker.ActionData) bool{
func(inputStr string, _ fswalker.ActionData) bool {
for _, filterStr := range matchers {
if strings.Contains(inputStr, filterStr) {
return true
}
}
return false
},
},
}
tmpDir, err := ioutil.TempDir("", "")
@@ -322,9 +334,21 @@ type fields struct {
} {
t.Log(tt.name)
matchers := tt.fields.GlobalFilterMatchers
chk := &WalkCompare{
GlobalFilterMatchers: tt.fields.GlobalFilterMatchers,
GlobalFilterFuncs: []func(string, fswalker.ActionData) bool{
func(inputStr string, _ fswalker.ActionData) bool {
for _, filterStr := range matchers {
if strings.Contains(inputStr, filterStr) {
return true
}
}
return false
},
},
}
chk.filterReportDiffs(tt.inputReport)
if want, got := tt.expModCount, len(tt.inputReport.Modified); want != got {