add test
kkrik-es committed Dec 2, 2024
1 parent d74622c commit d64746b
Showing 2 changed files with 62 additions and 2 deletions.
DocumentMapper.java
@@ -28,6 +28,7 @@ public class DocumentMapper {

private static final AtomicBoolean lastErrorLogLock = new AtomicBoolean(false);
private static volatile long lastErrorLogEpochSecond = 0;
private static long errorThrottlingIntervalSeconds = ERROR_THROTTLING_INTERVAL_SECONDS;

private final String type;
private final CompressedXContent mappingSource;
@@ -37,6 +38,15 @@ public class DocumentMapper {
private final IndexVersion indexVersion;
private final Logger logger;

// For testing.
static void setErrorThrottlingIntervalSecondsForTesting() {
DocumentMapper.errorThrottlingIntervalSeconds = 0;
}

static void resetErrorThrottlingIntervalSeconds() {
DocumentMapper.errorThrottlingIntervalSeconds = ERROR_THROTTLING_INTERVAL_SECONDS;
}

/**
* Create a new {@link DocumentMapper} that holds empty mappings.
* @param mapperService the mapper service that holds the needed components
@@ -84,13 +94,13 @@ private void maybeLog(Exception ex) {
} else {
final long now = Instant.now().getEpochSecond();
// Check without locking first, to reduce lock contention.
-if (now - lastErrorLogEpochSecond > ERROR_THROTTLING_INTERVAL_SECONDS) {
+if (now - lastErrorLogEpochSecond > errorThrottlingIntervalSeconds) {
boolean shouldLog = false;
// Acquire spinlock.
while (lastErrorLogLock.compareAndSet(false, true)) {
}
// Repeat check under lock, so that only one message gets written per interval.
-if (now - lastErrorLogEpochSecond > ERROR_THROTTLING_INTERVAL_SECONDS) {
+if (now - lastErrorLogEpochSecond > errorThrottlingIntervalSeconds) {
shouldLog = true;
lastErrorLogEpochSecond = now;
}
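
Aside: the throttling in maybeLog is a double-checked pattern. An unlocked comparison against the last-log timestamp filters the common case cheaply, and a spin on an AtomicBoolean guards the re-check and update so that at most one caller logs per interval. The standalone sketch below illustrates the same idea; the class name, the interval value, the finally-block release, and the negated compareAndSet spin idiom are illustrative choices of this sketch, not a copy of the Elasticsearch code.

import java.time.Instant;
import java.util.concurrent.atomic.AtomicBoolean;

// Illustrative sketch of interval-throttled error logging (names and values hypothetical).
class ThrottledLogGate {
    private static final long INTERVAL_SECONDS = 60;
    private static final AtomicBoolean lock = new AtomicBoolean(false);
    private static volatile long lastLogEpochSecond = 0;

    // Returns true for at most one caller per INTERVAL_SECONDS.
    static boolean shouldLog() {
        final long now = Instant.now().getEpochSecond();
        // Cheap check without the lock, to keep contention off the hot path.
        if (now - lastLogEpochSecond <= INTERVAL_SECONDS) {
            return false;
        }
        // Spin until the lock is acquired (false -> true).
        while (lock.compareAndSet(false, true) == false) {
            Thread.onSpinWait();
        }
        try {
            // Re-check under the lock so only one thread wins the interval.
            if (now - lastLogEpochSecond > INTERVAL_SECONDS) {
                lastLogEpochSecond = now;
                return true;
            }
            return false;
        } finally {
            lock.set(false);
        }
    }
}
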
DocumentMapperTests.java
@@ -9,12 +9,17 @@

package org.elasticsearch.index.mapper;

import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.core.KeywordAnalyzer;
import org.apache.lucene.analysis.core.WhitespaceAnalyzer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.logging.MockAppender;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.IndexVersion;
@@ -24,6 +29,8 @@
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;
import org.junit.AfterClass;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.ArrayList;
@@ -47,6 +54,27 @@

public class DocumentMapperTests extends MapperServiceTestCase {

static MockAppender appender;
static Logger testLogger = LogManager.getLogger(DocumentMapper.class);
static Level originalLogLevel = testLogger.getLevel();

@BeforeClass
public static void init() throws IllegalAccessException {
appender = new MockAppender("mock_appender");
appender.start();
Loggers.addAppender(testLogger, appender);
Loggers.setLevel(testLogger, Level.ERROR);
DocumentMapper.setErrorThrottlingIntervalSecondsForTesting();
}

@AfterClass
public static void cleanup() {
Loggers.removeAppender(testLogger, appender);
appender.stop();
Loggers.setLevel(testLogger, originalLogLevel);
DocumentMapper.resetErrorThrottlingIntervalSeconds();
}

public void testAddFields() throws Exception {
DocumentMapper stage1 = createDocumentMapper(mapping(b -> b.startObject("name").field("type", "text").endObject()));
DocumentMapper stage2 = createDocumentMapper(mapping(b -> {
@@ -493,4 +521,26 @@ public void testDeeplyNestedMapping() throws Exception {
}
}
}

public void testParsingErrorLogging() throws Exception {
DocumentMapper doc = createDocumentMapper(mapping(b -> b.startObject("value").field("type", "integer").endObject()));

DocumentParsingException e = expectThrows(DocumentParsingException.class, () -> doc.parse(source(b -> b.field("value", "foo"))));
assertThat(e.getMessage(), containsString("failed to parse field [value] of type [integer] in document with id '1'"));
assertThat(appender.getLastEventAndReset().getMessage().getFormattedMessage(), containsString(e.getMessage()));

e = expectThrows(DocumentParsingException.class, () -> doc.parse(source(b -> b.field("value", "foo"))));
assertThat(e.getMessage(), containsString("failed to parse field [value] of type [integer] in document with id '1'"));
assertThat(appender.getLastEventAndReset(), nullValue());

Thread.sleep(1000); // Wait for throttling to back off.

e = expectThrows(DocumentParsingException.class, () -> doc.parse(source(b -> b.field("value", "foo"))));
assertThat(e.getMessage(), containsString("failed to parse field [value] of type [integer] in document with id '1'"));
assertThat(appender.getLastEventAndReset().getMessage().getFormattedMessage(), containsString(e.getMessage()));

e = expectThrows(DocumentParsingException.class, () -> doc.parse(source(b -> b.field("value", "foo"))));
assertThat(e.getMessage(), containsString("failed to parse field [value] of type [integer] in document with id '1'"));
assertThat(appender.getLastEventAndReset(), nullValue());
}
}
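
Aside: even with the interval overridden to 0 for tests, the throttle compares whole epoch seconds, so a second message is accepted only once Instant.now().getEpochSecond() has advanced; that is why the test sleeps for one second before expecting the next log event. A minimal, self-contained illustration (class name hypothetical):

import java.time.Instant;

// Demonstrates the whole-second granularity behind the Thread.sleep(1000) in the test above.
class EpochSecondBackoffDemo {
    public static void main(String[] args) throws InterruptedException {
        long last = Instant.now().getEpochSecond(); // as if a message was just logged
        System.out.println("immediately: " + (Instant.now().getEpochSecond() - last > 0)); // usually false
        Thread.sleep(1000); // cross the next second boundary
        System.out.println("after ~1s:   " + (Instant.now().getEpochSecond() - last > 0)); // true
    }
}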
