Stores: convert tests to not rely on slice labels (#7098)
Signed-off-by: Michael Hoffmann <[email protected]>
MichaHoffmann authored Jan 27, 2024
1 parent daa34a5 commit 1cf333e
Showing 5 changed files with 31 additions and 31 deletions.
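
The conversion is mechanical across all five files: slice-literal labels.Labels values become labels.FromStrings calls, and nil label sets (for example the external-labels argument to NewProxyStore or PrependLabels) become labels.EmptyLabels(). Presumably this keeps the tests compiling when labels.Labels is not backed by a plain slice, such as with Prometheus' stringlabels build. A minimal sketch of the two patterns, using only the Prometheus model/labels package the tests already import; the wrapper program is illustrative, not part of the change:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	// Before: labels.Labels{{Name: "cluster", Value: "a"}, {Name: "ext1", Value: "1"}}
	// After: build the label set through the constructor, not a slice literal.
	extLset := labels.FromStrings("cluster", "a", "ext1", "1")

	// Before: passing nil where an empty label set is meant.
	// After: an explicit empty value that makes no assumption about the
	// underlying representation.
	empty := labels.EmptyLabels()

	// Map() and Hash() behave the same on the constructed value; the tests
	// rely on them for block metadata and for the blockSets map key.
	fmt.Println(extLset.Map(), extLset.Hash(), empty.IsEmpty())
}
```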
2 changes: 1 addition & 1 deletion pkg/store/acceptance_test.go
@@ -1011,7 +1011,7 @@ func TestProxyStore_Acceptance(t *testing.T) {
storetestutil.TestClient{StoreClient: storepb.ServerAsClient(p2, 0)},
}

-return NewProxyStore(nil, nil, func() []Client { return clients }, component.Query, nil, 0*time.Second, RetrievalStrategy(EagerRetrieval))
+return NewProxyStore(nil, nil, func() []Client { return clients }, component.Query, labels.EmptyLabels(), 0*time.Second, RetrievalStrategy(EagerRetrieval))
}

testStoreAPIsAcceptance(t, startStore)
36 changes: 18 additions & 18 deletions pkg/store/bucket_test.go
@@ -709,19 +709,19 @@ func TestBucketStore_Sharding(t *testing.T) {
bkt := objstore.NewInMemBucket()
series := []labels.Labels{labels.FromStrings("a", "1", "b", "1")}

-id1, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.Labels{{Name: "cluster", Value: "a"}, {Name: "region", Value: "r1"}}, 0, metadata.NoneFunc)
+id1, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.FromStrings("cluster", "a", "region", "r1"), 0, metadata.NoneFunc)
testutil.Ok(t, err)
testutil.Ok(t, block.Upload(ctx, logger, bkt, filepath.Join(dir, id1.String()), metadata.NoneFunc))

-id2, err := e2eutil.CreateBlock(ctx, dir, series, 10, 1000, 2000, labels.Labels{{Name: "cluster", Value: "a"}, {Name: "region", Value: "r1"}}, 0, metadata.NoneFunc)
+id2, err := e2eutil.CreateBlock(ctx, dir, series, 10, 1000, 2000, labels.FromStrings("cluster", "a", "region", "r1"), 0, metadata.NoneFunc)
testutil.Ok(t, err)
testutil.Ok(t, block.Upload(ctx, logger, bkt, filepath.Join(dir, id2.String()), metadata.NoneFunc))

-id3, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.Labels{{Name: "cluster", Value: "b"}, {Name: "region", Value: "r1"}}, 0, metadata.NoneFunc)
+id3, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.FromStrings("cluster", "b", "region", "r1"), 0, metadata.NoneFunc)
testutil.Ok(t, err)
testutil.Ok(t, block.Upload(ctx, logger, bkt, filepath.Join(dir, id3.String()), metadata.NoneFunc))

-id4, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.Labels{{Name: "cluster", Value: "a"}, {Name: "region", Value: "r2"}}, 0, metadata.NoneFunc)
+id4, err := e2eutil.CreateBlock(ctx, dir, series, 10, 0, 1000, labels.FromStrings("cluster", "a", "region", "r2"), 0, metadata.NoneFunc)
testutil.Ok(t, err)
testutil.Ok(t, block.Upload(ctx, logger, bkt, filepath.Join(dir, id4.String()), metadata.NoneFunc))

@@ -1116,7 +1116,7 @@ func uploadTestBlock(t testing.TB, tmpDir string, bkt objstore.Bucket, series in
testutil.Ok(t, err)

_, err = metadata.InjectThanos(log.NewNopLogger(), filepath.Join(tmpDir, "tmp", id.String()), metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
IndexStats: metadata.IndexStats{SeriesMaxSize: stats.SeriesMaxSize, ChunkMaxSize: stats.ChunkMaxSize},
@@ -1383,7 +1383,7 @@ func benchBucketSeries(t testutil.TB, sampleType chunkenc.ValueType, skipChunk,
random = rand.New(rand.NewSource(120))
)

-extLset := labels.Labels{{Name: "ext1", Value: "1"}}
+extLset := labels.FromStrings("ext1", "1")
blockDir := filepath.Join(tmpDir, "tmp")

samplesPerSeriesPerBlock := samplesPerSeries / numOfBlocks
@@ -1582,7 +1582,7 @@ func TestBucketSeries_OneBlock_InMemIndexCacheSegfault(t *testing.T) {

logger := log.NewLogfmtLogger(os.Stderr)
thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -1696,7 +1696,7 @@ func TestBucketSeries_OneBlock_InMemIndexCacheSegfault(t *testing.T) {
indexReaderPool: indexheader.NewReaderPool(log.NewNopLogger(), false, 0, indexheader.NewReaderPoolMetrics(nil), indexheader.AlwaysEagerDownloadIndexHeader),
metrics: newBucketStoreMetrics(nil),
blockSets: map[uint64]*bucketBlockSet{
labels.Labels{{Name: "ext1", Value: "1"}}.Hash(): {blocks: [][]*bucketBlock{{b1, b2}}},
labels.FromStrings("ext1", "1").Hash(): {blocks: [][]*bucketBlock{{b1, b2}}},
},
blocks: map[ulid.ULID]*bucketBlock{
b1.meta.ULID: b1,
@@ -1964,7 +1964,7 @@ func TestSeries_BlockWithMultipleChunks(t *testing.T) {
blk := createBlockFromHead(t, headOpts.ChunkDirRoot, h)

thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -2215,7 +2215,7 @@ func uploadSeriesToBucket(t *testing.T, bkt *filesystem.Bucket, replica string,
blk := storetestutil.CreateBlockFromHead(t, headOpts.ChunkDirRoot, h)

thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: replica}}.Map(),
+Labels: labels.FromStrings("ext1", replica).Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -2269,7 +2269,7 @@ func createBlockWithOneSeriesWithStep(t testutil.TB, dir string, lbls labels.Lab
ref, err := app.Append(0, lbls, ts, random.Float64())
testutil.Ok(t, err)
for i := 1; i < totalSamples; i++ {
-_, err := app.Append(ref, nil, ts+step*int64(i), random.Float64())
+_, err := app.Append(ref, labels.EmptyLabels(), ts+step*int64(i), random.Float64())
testutil.Ok(t, err)
}
testutil.Ok(t, app.Commit())
@@ -2295,7 +2295,7 @@ func setupStoreForHintsTest(t *testing.T) (testutil.TB, *BucketStore, []*storepb
random = rand.New(rand.NewSource(120))
)

-extLset := labels.Labels{{Name: "ext1", Value: "1"}}
+extLset := labels.FromStrings("ext1", "1")
// Inject the Thanos meta to each block in the storage.
thanosMeta := metadata.Thanos{
Labels: extLset.Map(),
@@ -2527,7 +2527,7 @@ func TestSeries_ChunksHaveHashRepresentation(t *testing.T) {
blk := createBlockFromHead(t, headOpts.ChunkDirRoot, h)

thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -2657,7 +2657,7 @@ func BenchmarkBucketBlock_readChunkRange(b *testing.B) {

// Upload the block to the bucket.
thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -2719,15 +2719,15 @@ func prepareBucket(b *testing.B, resolutionLevel compact.ResolutionLevel) (*buck
SamplesPerSeries: 86400 / 15, // Simulate 1 day block with 15s scrape interval.
ScrapeInterval: 15 * time.Second,
Series: 1000,
-PrependLabels: nil,
+PrependLabels: labels.EmptyLabels(),
Random: rand.New(rand.NewSource(120)),
SkipChunks: true,
})
blockID := createBlockFromHead(b, tmpDir, head)

// Upload the block to the bucket.
thanosMeta := metadata.Thanos{
-Labels: labels.Labels{{Name: "ext1", Value: "1"}}.Map(),
+Labels: labels.FromStrings("ext1", "1").Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
@@ -3363,7 +3363,7 @@ func TestExpandedPostingsRace(t *testing.T) {
SamplesPerSeries: 10,
ScrapeInterval: 15 * time.Second,
Series: 1000,
-PrependLabels: nil,
+PrependLabels: labels.EmptyLabels(),
Random: rand.New(rand.NewSource(120)),
SkipChunks: true,
})
@@ -3376,7 +3376,7 @@

// Upload the block to the bucket.
thanosMeta := metadata.Thanos{
Labels: labels.Labels{{Name: "ext1", Value: fmt.Sprintf("%d", i)}}.Map(),
Labels: labels.FromStrings("ext1", fmt.Sprintf("%d", i)).Map(),
Downsample: metadata.ThanosDownsample{Resolution: 0},
Source: metadata.TestSource,
}
8 changes: 4 additions & 4 deletions pkg/store/proxy_heap_test.go
@@ -314,12 +314,12 @@ func labelsFromStrings(ss ...string) labels.Labels {
if len(ss)%2 != 0 {
panic("invalid number of strings")
}
-res := make(labels.Labels, 0, len(ss)/2)

+b := labels.NewScratchBuilder(len(ss) / 2)
for i := 0; i < len(ss); i += 2 {
-res = append(res, labels.Label{Name: ss[i], Value: ss[i+1]})
+b.Add(ss[i], ss[i+1])
}

-return res
+return b.Labels()
}

func BenchmarkSortWithoutLabels(b *testing.B) {
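
The labelsFromStrings helper in proxy_heap_test.go is the one spot that built a label set by appending labels.Label values; it now goes through labels.ScratchBuilder. A standalone sketch of that pattern; only the builder calls come from the diff, the surrounding program is illustrative:

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

// labelsFromStrings mirrors the converted test helper: name/value pairs are
// collected with a ScratchBuilder instead of being appended to a slice.
func labelsFromStrings(ss ...string) labels.Labels {
	if len(ss)%2 != 0 {
		panic("invalid number of strings")
	}
	b := labels.NewScratchBuilder(len(ss) / 2)
	for i := 0; i < len(ss); i += 2 {
		b.Add(ss[i], ss[i+1])
	}
	return b.Labels()
}

func main() {
	fmt.Println(labelsFromStrings("instance", "a", "job", "thanos"))
}
```

ScratchBuilder does not sort for you; callers that might pass names out of order would need a b.Sort() before b.Labels().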
10 changes: 5 additions & 5 deletions pkg/store/proxy_test.go
@@ -67,7 +67,7 @@ func TestProxyStore_Info(t *testing.T) {
nil,
func() []Client { return nil },
component.Query,
-nil, 0*time.Second, RetrievalStrategy(EagerRetrieval),
+labels.EmptyLabels(), 0*time.Second, RetrievalStrategy(EagerRetrieval),
)

resp, err := q.Info(ctx, &storepb.InfoRequest{})
@@ -96,7 +96,7 @@ func TestProxyStore_TSDBInfos(t *testing.T) {
}
q := NewProxyStore(nil, nil,
func() []Client { return stores },
-component.Query, nil, 0*time.Second, EagerRetrieval,
+component.Query, labels.EmptyLabels(), 0*time.Second, EagerRetrieval,
)

expected := []infopb.TSDBInfo{
@@ -1227,7 +1227,7 @@ func TestProxyStore_Series_RequestParamsProxied(t *testing.T) {
nil,
func() []Client { return cls },
component.Query,
-nil,
+labels.EmptyLabels(),
1*time.Second, EagerRetrieval,
)

@@ -1335,7 +1335,7 @@ func TestProxyStore_LabelValues(t *testing.T) {
nil,
func() []Client { return cls },
component.Query,
-nil,
+labels.EmptyLabels(),
0*time.Second, EagerRetrieval,
)

@@ -1535,7 +1535,7 @@ func TestProxyStore_LabelNames(t *testing.T) {
nil,
func() []Client { return tc.storeAPIs },
component.Query,
-nil,
+labels.EmptyLabels(),
5*time.Second, EagerRetrieval,
)

6 changes: 3 additions & 3 deletions pkg/store/tsdb_test.go
@@ -597,21 +597,21 @@ func benchTSDBStoreSeries(t testutil.TB, totalSamples, totalSeries int) {
// Add external labels & frame it.
s := r.GetSeries()
bytesLeftForChunks := store.maxBytesPerFrame
-lbls := make([]labelpb.ZLabel, 0, len(s.Labels)+len(extLabels))
+lbls := make([]labelpb.ZLabel, 0, len(s.Labels)+extLabels.Len())
for _, l := range s.Labels {
lbls = append(lbls, labelpb.ZLabel{
Name: l.Name,
Value: l.Value,
})
bytesLeftForChunks -= lbls[len(lbls)-1].Size()
}
-for _, l := range extLabels {
+extLabels.Range(func(l labels.Label) {
lbls = append(lbls, labelpb.ZLabel{
Name: l.Name,
Value: l.Value,
})
bytesLeftForChunks -= lbls[len(lbls)-1].Size()
-}
+})
sort.Slice(lbls, func(i, j int) bool {
return lbls[i].Name < lbls[j].Name
})
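
The tsdb_test.go hunk swaps direct slice operations on the external labels for the accessor methods: len(extLabels) becomes extLabels.Len() and the range loop becomes extLabels.Range. A minimal sketch of the same iteration, assuming only the labels package (the labelpb.ZLabel conversion from the test is omitted to keep it self-contained):

```go
package main

import (
	"fmt"

	"github.com/prometheus/prometheus/model/labels"
)

func main() {
	extLabels := labels.FromStrings("ext1", "1", "replica", "a")

	// Len() replaces len(extLabels) for the capacity hint.
	out := make([]string, 0, extLabels.Len())

	// Range() replaces `for _, l := range extLabels`; the callback receives
	// each label without assuming the underlying storage is a slice.
	extLabels.Range(func(l labels.Label) {
		out = append(out, l.Name+"="+l.Value)
	})

	fmt.Println(out)
}
```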
