
Commit

Fix issue where failed remote rule evaluation errors are logged without context such as trace IDs (#6789)

* Fix issue where failed remote rule evaluation errors are logged without context such as trace IDs.

* Add changelog entry.
charleskorn authored Dec 1, 2023
1 parent ff8a70a commit 874ea9c
Showing 2 changed files with 4 additions and 3 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -12,6 +12,7 @@
* [BUGFIX] Fix issue where concatenatingChunkIterator can obscure errors #6766
* [BUGFIX] Fix panic during tsdb Commit #6766
* [BUGFIX] tsdb/head: wlog exemplars after samples #6766
+* [BUGFIX] Ruler: fix issue where "failed to remotely evaluate query expression, will retry" messages are logged without context such as the trace ID and do not appear in trace events. #6789

### Mixin

6 changes: 3 additions & 3 deletions pkg/ruler/remotequerier.go
@@ -224,7 +224,7 @@ func (q *RemoteQuerier) query(ctx context.Context, query string, ts time.Time, l
ctx, cancel := context.WithTimeout(ctx, q.timeout)
defer cancel()

-resp, err := q.sendRequest(ctx, &req)
+resp, err := q.sendRequest(ctx, &req, logger)
if err != nil {
level.Warn(logger).Log("msg", "failed to remotely evaluate query expression", "err", err, "qs", query, "tm", ts)
return promql.Vector{}, err
@@ -282,7 +282,7 @@ func (q *RemoteQuerier) createRequest(ctx context.Context, query string, ts time
return req, nil
}

-func (q *RemoteQuerier) sendRequest(ctx context.Context, req *httpgrpc.HTTPRequest) (*httpgrpc.HTTPResponse, error) {
+func (q *RemoteQuerier) sendRequest(ctx context.Context, req *httpgrpc.HTTPRequest, logger log.Logger) (*httpgrpc.HTTPResponse, error) {
// Ongoing request may be cancelled during evaluation due to some transient error or server shutdown,
// so we'll keep retrying until we get a successful response or backoff is terminated.
retryConfig := backoff.Config{
@@ -300,7 +300,7 @@ func (q *RemoteQuerier) sendRequest(ctx context.Context, req *httpgrpc.HTTPReque
if !retry.Ongoing() {
return nil, err
}
-level.Warn(q.logger).Log("msg", "failed to remotely evaluate query expression, will retry", "err", err)
+level.Warn(logger).Log("msg", "failed to remotely evaluate query expression, will retry", "err", err)
retry.Wait()

// Avoid masking last known error if context was cancelled while waiting.
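The essence of the change: `query` already logs through its `logger` argument, which carries request-scoped fields such as the trace ID, while `sendRequest` was logging its retry warnings through `q.logger`, the querier's base logger, so those warnings lost that context. Passing the caller's logger into `sendRequest` keeps the retry warnings correlated with the request. Below is a minimal, self-contained go-kit/log sketch of the pattern, not the actual Mimir code; the `remoteQuerier` type, the manually attached `traceID` field, and the example error are illustrative assumptions.

```go
package main

import (
	"context"
	"errors"
	"os"

	"github.com/go-kit/log"
	"github.com/go-kit/log/level"
)

type remoteQuerier struct {
	logger log.Logger // process-wide base logger: no request context attached
}

// Before the fix: warnings went through q.logger, so they lacked the
// request-scoped fields (e.g. the trace ID).
func (q *remoteQuerier) sendRequestOld(ctx context.Context) error {
	err := errors.New("transient error")
	level.Warn(q.logger).Log("msg", "failed to remotely evaluate query expression, will retry", "err", err)
	return err
}

// After the fix: the caller's request-scoped logger is passed in, so the
// warning carries whatever fields the caller attached to it.
func (q *remoteQuerier) sendRequestNew(ctx context.Context, logger log.Logger) error {
	err := errors.New("transient error")
	level.Warn(logger).Log("msg", "failed to remotely evaluate query expression, will retry", "err", err)
	return err
}

func main() {
	base := log.NewLogfmtLogger(os.Stderr)
	q := &remoteQuerier{logger: base}

	// In Mimir the request logger is derived from the request context; here a
	// hypothetical trace ID is attached by hand for illustration.
	reqLogger := log.With(base, "traceID", "abc123")

	_ = q.sendRequestOld(context.Background())            // logged without traceID
	_ = q.sendRequestNew(context.Background(), reqLogger) // logged with traceID=abc123
}
```

Threading the logger through as a parameter, rather than reading it from the struct, is what lets the retry warning carry the trace ID and appear in the corresponding trace events.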
