Commit cd1294b

Update module github.com/prometheus/client_golang to v1.20.4 (main) (#9307)
* Update module github.com/prometheus/client_golang to v1.20.4

---------

Signed-off-by: Arve Knudsen <[email protected]>
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Arve Knudsen <[email protected]>
1 parent 320ca98 commit cd1294b


34 files changed: +978 −318 lines

go.mod

Lines changed: 1 addition & 5 deletions
@@ -34,7 +34,7 @@ require (
 	github.com/opentracing/opentracing-go v1.2.1-0.20220228012449-10b1cf09e00b
 	github.com/pkg/errors v0.9.1
 	github.com/prometheus/alertmanager v0.27.0
-	github.com/prometheus/client_golang v1.20.3
+	github.com/prometheus/client_golang v1.20.4
 	github.com/prometheus/client_model v0.6.1
 	github.com/prometheus/common v0.59.1
 	github.com/prometheus/prometheus v1.99.0
@@ -284,10 +284,6 @@ require (
 // Using a fork of Prometheus with Mimir-specific changes.
 replace github.com/prometheus/prometheus => github.com/grafana/mimir-prometheus v0.0.0-20241003114040-35ec40c3b27a
 
-// client_golang v1.20.3 has some data races in histogram exemplars.
-// Stick to v1.19.1 until they are fixed.
-replace github.com/prometheus/client_golang => github.com/prometheus/client_golang v1.19.1
-
 // Replace memberlist with our fork which includes some fixes that haven't been
 // merged upstream yet:
 // - https://github.com/hashicorp/memberlist/pull/260
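For context, the comment removed above refers to data races in client_golang's histogram exemplars; v1.20.4 addresses them, which is why the replace-directive pin to v1.19.1 can be dropped and the required v1.20.4 takes effect. The sketch below is illustrative only (the helper name and exemplar labels are not from Mimir); it shows the standard client_golang exemplar API whose concurrent use the fix concerns:

    package example

    import "github.com/prometheus/client_golang/prometheus"

    // recordDuration observes a histogram sample and, when supported, attaches a
    // trace ID as an exemplar. Concurrent ObserveWithExemplar calls on the same
    // histogram exercise the exemplar code path the upgrade is about.
    func recordDuration(hist prometheus.Histogram, seconds float64, traceID string) {
    	// Histograms created by prometheus.NewHistogram also implement
    	// prometheus.ExemplarObserver.
    	if eo, ok := hist.(prometheus.ExemplarObserver); ok {
    		eo.ObserveWithExemplar(seconds, prometheus.Labels{"trace_id": traceID})
    		return
    	}
    	hist.Observe(seconds)
    }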

go.sum

Lines changed: 43 additions & 8 deletions
Large diffs are not rendered by default.

pkg/compactor/blocks_cleaner_test.go

Lines changed: 1 addition & 6 deletions
@@ -464,12 +464,7 @@ func TestBlocksCleaner_ShouldNotCleanupUserThatDoesntBelongToShardAnymore(t *tes
 	require.ElementsMatch(t, []string{"user-1", "user-2"}, cleaner.lastOwnedUsers)
 
 	// But there are no metrics for any user, because we did not in fact clean them.
-	assert.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
-		# HELP cortex_bucket_blocks_count Total number of blocks in the bucket. Includes blocks marked for deletion, but not partial blocks.
-		# TYPE cortex_bucket_blocks_count gauge
-	`),
-		"cortex_bucket_blocks_count",
-	))
+	test.AssertGatherAndCompare(t, reg, "", "cortex_bucket_blocks_count")
 
 	// Running cleanUsers again will see that users are no longer owned.
 	require.NoError(t, cleaner.runCleanupWithErr(ctx))
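The test changes in this commit replace direct calls to client_golang's testutil.GatherAndCompare with a shared helper, AssertGatherAndCompare from pkg/util/test (imported as test or util_test). Its implementation is not shown in this diff; a plausible sketch, assuming it simply wraps prom_testutil.GatherAndCompare and treats an empty expected string as "the named metrics are absent":

    package test

    import (
    	"strings"
    	"testing"

    	"github.com/prometheus/client_golang/prometheus"
    	prom_testutil "github.com/prometheus/client_golang/prometheus/testutil"
    	"github.com/stretchr/testify/assert"
    )

    // AssertGatherAndCompare gathers from g and asserts that the metrics listed
    // in metricNames match the expected text-format exposition. An empty expected
    // string asserts that none of the named metrics are reported.
    // (Sketch only; the real helper in pkg/util/test may differ.)
    func AssertGatherAndCompare(t *testing.T, g prometheus.Gatherer, expected string, metricNames ...string) {
    	t.Helper()
    	assert.NoError(t, prom_testutil.GatherAndCompare(g, strings.NewReader(expected), metricNames...))
    }

Centralizing the assertion removes the repeated strings.NewReader/assert.NoError boilerplate and lets callers express "this metric should be gone" as an empty string, as in the compactor and frontend tests below.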

pkg/compactor/compactor_test.go

Lines changed: 2 additions & 2 deletions
@@ -967,7 +967,7 @@ func TestMultitenantCompactor_ShouldNotCompactBlocksForUsersMarkedForDeletion(t
 		"cortex_compactor_block_cleanup_started_total", "cortex_compactor_block_cleanup_completed_total", "cortex_compactor_block_cleanup_failed_total",
 		"cortex_bucket_blocks_count", "cortex_bucket_blocks_marked_for_deletion_count", "cortex_bucket_index_last_successful_update_timestamp_seconds",
 	}
-	assert.NoError(t, prom_testutil.GatherAndCompare(registry, strings.NewReader(`
+	testutil.AssertGatherAndCompare(t, registry, `
 		# TYPE cortex_compactor_runs_started_total counter
 		# HELP cortex_compactor_runs_started_total Total number of compaction runs started.
 		cortex_compactor_runs_started_total 1
@@ -1006,7 +1006,7 @@ func TestMultitenantCompactor_ShouldNotCompactBlocksForUsersMarkedForDeletion(t
 		# TYPE cortex_compactor_block_cleanup_failed_total counter
 		# HELP cortex_compactor_block_cleanup_failed_total Total number of blocks cleanup runs failed.
 		cortex_compactor_block_cleanup_failed_total 0
-	`), testedMetrics...))
+	`, testedMetrics...)
 }
 
 func TestMultitenantCompactor_ShouldCompactAllUsersOnShardingEnabledButOnlyOneInstanceRunning(t *testing.T) {

pkg/distributor/distributor_test.go

Lines changed: 11 additions & 32 deletions
@@ -377,7 +377,7 @@ func TestDistributor_MetricsCleanup(t *testing.T) {
 	d.latestSeenSampleTimestampPerUser.WithLabelValues("userA").Set(1111)
 	d.labelValuesWithNewlinesPerUser.WithLabelValues("userA").Inc()
 
-	require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
+	util_test.AssertGatherAndCompare(t, reg, `
 		# HELP cortex_distributor_deduped_samples_total The total number of deduplicated samples.
 		# TYPE cortex_distributor_deduped_samples_total counter
 		cortex_distributor_deduped_samples_total{cluster="cluster1",user="userA"} 1
@@ -420,23 +420,11 @@ func TestDistributor_MetricsCleanup(t *testing.T) {
 		# HELP cortex_distributor_label_values_with_newlines_total Total number of label values with newlines seen at ingestion time.
 		# TYPE cortex_distributor_label_values_with_newlines_total counter
 		cortex_distributor_label_values_with_newlines_total{user="userA"} 1
-	`), metrics...))
+	`, metrics...)
 
 	d.cleanupInactiveUser("userA")
 
-	require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
-		# HELP cortex_distributor_deduped_samples_total The total number of deduplicated samples.
-		# TYPE cortex_distributor_deduped_samples_total counter
-
-		# HELP cortex_distributor_latest_seen_sample_timestamp_seconds Unix timestamp of latest received sample per user.
-		# TYPE cortex_distributor_latest_seen_sample_timestamp_seconds gauge
-
-		# HELP cortex_distributor_metadata_in_total The total number of metadata the have come in to the distributor, including rejected.
-		# TYPE cortex_distributor_metadata_in_total counter
-
-		# HELP cortex_distributor_non_ha_samples_received_total The total number of received samples for a user that has HA tracking turned on, but the sample didn't contain both HA labels.
-		# TYPE cortex_distributor_non_ha_samples_received_total counter
-
+	util_test.AssertGatherAndCompare(t, reg, `
 		# HELP cortex_distributor_received_metadata_total The total number of received metadata, excluding rejected.
 		# TYPE cortex_distributor_received_metadata_total counter
 		cortex_distributor_received_metadata_total{user="userB"} 10
@@ -448,16 +436,7 @@ func TestDistributor_MetricsCleanup(t *testing.T) {
 		# HELP cortex_distributor_received_exemplars_total The total number of received exemplars, excluding rejected and deduped exemplars.
 		# TYPE cortex_distributor_received_exemplars_total counter
 		cortex_distributor_received_exemplars_total{user="userB"} 10
-
-		# HELP cortex_distributor_samples_in_total The total number of samples that have come in to the distributor, including rejected or deduped samples.
-		# TYPE cortex_distributor_samples_in_total counter
-
-		# HELP cortex_distributor_exemplars_in_total The total number of exemplars that have come in to the distributor, including rejected or deduped exemplars.
-		# TYPE cortex_distributor_exemplars_in_total counter
-
-		# HELP cortex_distributor_label_values_with_newlines_total Total number of label values with newlines seen at ingestion time.
-		# TYPE cortex_distributor_label_values_with_newlines_total counter
-	`), metrics...))
+	`, metrics...)
 }
 
 func TestDistributor_PushRequestRateLimiter(t *testing.T) {
@@ -1687,7 +1666,7 @@ func TestDistributor_ExemplarValidation(t *testing.T) {
 			}
 
 			assert.Equal(t, tc.expectedExemplars, tc.req.Timeseries)
-			assert.NoError(t, testutil.GatherAndCompare(regs[0], strings.NewReader(tc.expectedMetrics), "cortex_discarded_exemplars_total"))
+			util_test.AssertGatherAndCompare(t, regs[0], tc.expectedMetrics, "cortex_discarded_exemplars_total")
 		})
 	}
 }
@@ -7190,15 +7169,15 @@ func TestDistributor_StorageConfigMetrics(t *testing.T) {
 			happyIngesters: 3,
 			replicationFactor: 3,
 		})
-		assert.NoError(t, testutil.GatherAndCompare(regs[0], strings.NewReader(`
+		util_test.AssertGatherAndCompare(t, regs[0], `
 			# HELP cortex_distributor_replication_factor The configured replication factor.
 			# TYPE cortex_distributor_replication_factor gauge
 			cortex_distributor_replication_factor 3
 
 			# HELP cortex_distributor_ingest_storage_enabled Whether writes are being processed via ingest storage. Equal to 1 if ingest storage is enabled, 0 if disabled.
 			# TYPE cortex_distributor_ingest_storage_enabled gauge
 			cortex_distributor_ingest_storage_enabled 0
-		`), "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled"))
+		`, "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled")
 	})
 
 	t.Run("migration to ingest storage", func(t *testing.T) {
@@ -7211,15 +7190,15 @@ func TestDistributor_StorageConfigMetrics(t *testing.T) {
 			happyIngesters: 3,
 			replicationFactor: 3,
 		})
-		assert.NoError(t, testutil.GatherAndCompare(regs[0], strings.NewReader(`
+		util_test.AssertGatherAndCompare(t, regs[0], `
 			# HELP cortex_distributor_replication_factor The configured replication factor.
 			# TYPE cortex_distributor_replication_factor gauge
 			cortex_distributor_replication_factor 3
 
 			# HELP cortex_distributor_ingest_storage_enabled Whether writes are being processed via ingest storage. Equal to 1 if ingest storage is enabled, 0 if disabled.
 			# TYPE cortex_distributor_ingest_storage_enabled gauge
 			cortex_distributor_ingest_storage_enabled 1
-		`), "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled"))
+		`, "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled")
 	})
 
 	t.Run("ingest storage", func(t *testing.T) {
@@ -7231,11 +7210,11 @@ func TestDistributor_StorageConfigMetrics(t *testing.T) {
 			happyIngesters: 3,
 			replicationFactor: 3,
 		})
-		assert.NoError(t, testutil.GatherAndCompare(regs[0], strings.NewReader(`
+		util_test.AssertGatherAndCompare(t, regs[0], `
 			# HELP cortex_distributor_ingest_storage_enabled Whether writes are being processed via ingest storage. Equal to 1 if ingest storage is enabled, 0 if disabled.
 			# TYPE cortex_distributor_ingest_storage_enabled gauge
 			cortex_distributor_ingest_storage_enabled 1
-		`), "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled"))
+		`, "cortex_distributor_replication_factor", "cortex_distributor_ingest_storage_enabled")
 	})
 }
 
pkg/frontend/v1/frontend_test.go

Lines changed: 2 additions & 4 deletions
@@ -40,6 +40,7 @@ import (
 	"github.com/grafana/mimir/pkg/querier/stats"
 	querier_worker "github.com/grafana/mimir/pkg/querier/worker"
 	"github.com/grafana/mimir/pkg/scheduler/queue"
+	util_test "github.com/grafana/mimir/pkg/util/test"
 )
 
 const (
@@ -222,10 +223,7 @@ func TestFrontendMetricsCleanup(t *testing.T) {
 
 		fr.cleanupInactiveUserMetrics("1")
 
-		require.NoError(t, testutil.GatherAndCompare(reg, strings.NewReader(`
-			# HELP cortex_query_frontend_queue_length Number of queries in the queue.
-			# TYPE cortex_query_frontend_queue_length gauge
-		`), "cortex_query_frontend_queue_length"))
+		util_test.AssertGatherAndCompare(t, reg, "", "cortex_query_frontend_queue_length")
 	}
 
 	testFrontend(t, defaultFrontendConfig(), handler, test, nil, reg)
