diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index ba49287d..6d79e451 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -90,6 +90,7 @@ import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyDetectorAction; import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyResultAction; import com.amazon.opendistroforelasticsearch.ad.rest.RestStatsAnomalyDetectorAction; +import com.amazon.opendistroforelasticsearch.ad.rest.RestValidateAnomalyDetectorAction; import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; import com.amazon.opendistroforelasticsearch.ad.stats.ADStat; @@ -210,6 +211,11 @@ public List getRestHandlers( anomalyDetectionIndices ); RestSearchAnomalyDetectorAction searchAnomalyDetectorAction = new RestSearchAnomalyDetectorAction(); + RestValidateAnomalyDetectorAction validateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( + settings, + anomalyDetectionIndices, + xContentRegistry + ); RestSearchAnomalyResultAction searchAnomalyResultAction = new RestSearchAnomalyResultAction(); RestDeleteAnomalyDetectorAction deleteAnomalyDetectorAction = new RestDeleteAnomalyDetectorAction(clusterService); RestExecuteAnomalyDetectorAction executeAnomalyDetectorAction = new RestExecuteAnomalyDetectorAction( @@ -232,6 +238,7 @@ public List getRestHandlers( .of( restGetAnomalyDetectorAction, restIndexAnomalyDetectorAction, + validateAnomalyDetectorAction, searchAnomalyDetectorAction, searchAnomalyResultAction, deleteAnomalyDetectorAction, diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index 4e6c2b7a..76ad8b3d 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -87,6 +87,7 @@ public class AnomalyDetector implements ToXContentObject { private final Map uiMetadata; private final Integer schemaVersion; private final Instant lastUpdateTime; + private final Boolean validation; /** * Constructor function. 
@@ -122,20 +123,62 @@ public AnomalyDetector( Integer schemaVersion, Instant lastUpdateTime ) { - if (Strings.isBlank(name)) { - throw new IllegalArgumentException("Detector name should be set"); - } - if (timeField == null) { - throw new IllegalArgumentException("Time field should be set"); - } - if (indices == null || indices.isEmpty()) { - throw new IllegalArgumentException("Indices should be set"); - } - if (detectionInterval == null) { - throw new IllegalArgumentException("Detection interval should be set"); - } - if (shingleSize != null && shingleSize < 1) { - throw new IllegalArgumentException("Shingle size must be a positive integer"); + this( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + false + ); + } + + public AnomalyDetector( + String detectorId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration detectionInterval, + TimeConfiguration windowDelay, + Integer shingleSize, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + Boolean validation + ) { + if (validation) { + if (indices == null || indices.isEmpty()) { + indices = null; + } + } else { + if (Strings.isBlank(name)) { + throw new IllegalArgumentException("Detector name should be set"); + } + if (timeField == null) { + throw new IllegalArgumentException("Time field should be set"); + } + if (indices == null || indices.isEmpty()) { + throw new IllegalArgumentException("Indices should be set"); + } + if (detectionInterval == null) { + throw new IllegalArgumentException("Detection interval should be set"); + } + if (shingleSize != null && shingleSize < 1) { + throw new IllegalArgumentException("Shingle size must be a positive integer"); + } } this.detectorId = detectorId; this.version = version; @@ -151,6 +194,7 @@ public AnomalyDetector( this.uiMetadata = uiMetadata; this.schemaVersion = schemaVersion; this.lastUpdateTime = lastUpdateTime; + this.validation = validation; } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { @@ -253,7 +297,6 @@ public static AnomalyDetector parse( while (parser.nextToken() != XContentParser.Token.END_OBJECT) { String fieldName = parser.currentName(); parser.nextToken(); - switch (fieldName) { case NAME_FIELD: name = parser.text(); @@ -327,6 +370,100 @@ public static AnomalyDetector parse( ); } + public static AnomalyDetector parseValidation(XContentParser parser, String detectorId, Long version, Integer defaultShingleSize) + throws IOException { + String name = null; + String description = null; + String timeField = null; + List indices = new ArrayList(); + QueryBuilder filterQuery = QueryBuilders.matchAllQuery(); + TimeConfiguration detectionInterval = null; + TimeConfiguration windowDelay = null; + List features = new ArrayList<>(); + int schemaVersion = 0; + Map uiMetadata = null; + Instant lastUpdateTime = null; + Integer shingleSize = defaultShingleSize; + + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + String fieldName = parser.currentName(); + parser.nextToken(); + + switch (fieldName) { + case NAME_FIELD: + name = parser.text(); + break; + case DESCRIPTION_FIELD: + description = parser.text(); + break; + case TIMEFIELD_FIELD: + timeField = parser.text(); + 
break; + case INDICES_FIELD: + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + indices.add(parser.text()); + } + break; + case UI_METADATA_FIELD: + uiMetadata = parser.map(); + break; + case SCHEMA_VERSION_FIELD: + schemaVersion = parser.intValue(); + break; + case FILTER_QUERY_FIELD: + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + try { + filterQuery = parseInnerQueryBuilder(parser); + } catch (IllegalArgumentException e) { + if (!e.getMessage().contains("empty clause")) { + throw e; + } + } + break; + case DETECTION_INTERVAL_FIELD: + detectionInterval = TimeConfiguration.parse(parser); + break; + case FEATURE_ATTRIBUTES_FIELD: + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + features.add(Feature.parse(parser)); + } + break; + case WINDOW_DELAY_FIELD: + windowDelay = TimeConfiguration.parse(parser); + break; + case SHINGLE_SIZE_FIELD: + shingleSize = parser.intValue(); + break; + case LAST_UPDATE_TIME_FIELD: + lastUpdateTime = ParseUtils.toInstant(parser); + break; + default: + parser.skipChildren(); + break; + } + } + return new AnomalyDetector( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + true + ); + } + public SearchSourceBuilder generateFeatureQuery() { SearchSourceBuilder generatedFeatureQuery = new SearchSourceBuilder().query(filterQuery); if (this.getFeatureAttributes() != null) { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java new file mode 100644 index 00000000..1138e725 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java @@ -0,0 +1,56 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import java.time.Instant; + +/** + * A DateTimeRange is used to represent start and end time for a timeRange + */ +public class DateTimeRange { + + private long start; + private long end; + + public DateTimeRange(long start, long end) { + this.start = start; + this.end = end; + } + + public static DateTimeRange rangeBasedOfInterval(long windowDelay, long intervalLength, int numOfIntervals) { + long dataEndTime = Instant.now().toEpochMilli() - windowDelay; + long dataStartTime = dataEndTime - ((long) (numOfIntervals) * intervalLength); + return new DateTimeRange(dataStartTime, dataEndTime); + + } + + public long getStart() { + return start; + } + + public long getEnd() { + return end; + } + + public void setStart(long start) { + this.start = start; + } + + public void setEnd(long end) { + this.end = end; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java new file mode 100644 index 00000000..edbcca34 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java @@ -0,0 +1,72 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */
+
+package com.amazon.opendistroforelasticsearch.ad.model;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+public class ValidateResponse implements ToXContentObject {
+    private Map<String, List<String>> failures;
+    private Map<String, List<String>> suggestedChanges;
+
+    public XContentBuilder toXContent(XContentBuilder builder) throws IOException {
+        return toXContent(builder, ToXContent.EMPTY_PARAMS);
+    }
+
+    public ValidateResponse() {
+        failures = null;
+        suggestedChanges = null;
+    }
+
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        XContentBuilder xContentBuilder = builder.startObject();
+
+        xContentBuilder.startObject("failures");
+        for (String key : failures.keySet()) {
+            xContentBuilder.field(key, failures.get(key));
+        }
+        xContentBuilder.endObject();
+
+        xContentBuilder.startObject("suggestedChanges");
+        for (String key : suggestedChanges.keySet()) {
+            xContentBuilder.field(key, suggestedChanges.get(key));
+        }
+        xContentBuilder.endObject();
+        return xContentBuilder.endObject();
+    }
+
+    public Map<String, List<String>> getFailures() {
+        return failures;
+    }
+
+    public Map<String, List<String>> getSuggestedChanges() {
+        return suggestedChanges;
+    }
+
+    public void setFailures(Map<String, List<String>> failures) {
+        this.failures = failures;
+    }
+
+    public void setSuggestedChanges(Map<String, List<String>> suggestedChanges) {
+        this.suggestedChanges = suggestedChanges;
+    }
+
+}
diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java
new file mode 100644
index 00000000..aecfbc3c
--- /dev/null
+++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License.
+ * A copy of the License is located at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * or in the "license" file accompanying this file. This file is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
+package com.amazon.opendistroforelasticsearch.ad.model;
+
+public enum ValidationFailures {
+    MISSING("missing"),
+    OTHERS("others"),
+    FIELD_TYPE("field_type"),
+    FORMAT("format"),
+    DUPLICATES("duplicates");
+
+    private String name;
+
+    /**
+     * Get failure name
+     *
+     * @return name
+     */
+    public String getName() {
+        return name;
+    }
+
+    ValidationFailures(String name) {
+        this.name = name;
+    }
+
+}
diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java
new file mode 100644
index 00000000..1b9cbeeb
--- /dev/null
+++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License.
+ * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +public enum ValidationSuggestedChanges { + OTHERS("others"), + FILTER_QUERY("filter_query"), + FEATURE_ATTRIBUTES("feature_attributes"), + DETECTION_INTERVAL("detection_interval"), + WINDOW_DELAY("window_delay"); + + private String name; + + /** + * Get stat name + * + * @return name + */ + public String getName() { + return name; + } + + ValidationSuggestedChanges(String name) { + this.name = name; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java new file mode 100644 index 00000000..ff29b5d6 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -0,0 +1,118 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad.rest; + +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_INTERVAL; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_WINDOW_DELAY; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_DETECTORS; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_FEATURES; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; +import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.VALIDATE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; +import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; +import com.google.common.collect.ImmutableList; + +/** + * This class consists of the REST handler to validate anomaly detector configurations. 
+ */ + +public class RestValidateAnomalyDetectorAction extends BaseRestHandler { + + private static final String VALIDATE_ANOMALY_DETECTOR_ACTION = "validate_anomaly_detector_action"; + private final AnomalyDetectionIndices anomalyDetectionIndices; + private final Logger logger = LogManager.getLogger(RestValidateAnomalyDetectorAction.class); + private final NamedXContentRegistry xContentRegistry; + + private volatile TimeValue requestTimeout; + private volatile TimeValue detectionInterval; + private volatile TimeValue detectionWindowDelay; + private volatile Integer maxAnomalyDetectors; + private volatile Integer maxAnomalyFeatures; + + public RestValidateAnomalyDetectorAction( + Settings settings, + AnomalyDetectionIndices anomalyDetectionIndices, + NamedXContentRegistry xContentRegistry + ) { + this.anomalyDetectionIndices = anomalyDetectionIndices; + this.detectionInterval = DETECTION_INTERVAL.get(settings); + this.detectionWindowDelay = DETECTION_WINDOW_DELAY.get(settings); + this.maxAnomalyDetectors = MAX_ANOMALY_DETECTORS.get(settings); + this.maxAnomalyFeatures = MAX_ANOMALY_FEATURES.get(settings); + this.requestTimeout = REQUEST_TIMEOUT.get(settings); + this.xContentRegistry = xContentRegistry; + } + + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + if (!EnabledSetting.isADPluginEnabled()) { + throw new IllegalStateException(CommonErrorMessages.DISABLED_ERR_MSG); + } + + String detectorId = AnomalyDetector.NO_ID; + XContentParser parser = request.contentParser(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null, DEFAULT_SHINGLE_SIZE); + + return channel -> new ValidateAnomalyDetectorActionHandler( + client, + channel, + anomalyDetectionIndices, + detector, + maxAnomalyDetectors, + maxAnomalyFeatures, + requestTimeout, + xContentRegistry + ).startValidation(); + } + + @Override + public String getName() { + return VALIDATE_ANOMALY_DETECTOR_ACTION; + } + + @Override + public List routes() { + return ImmutableList + .of( + // validate configs + new Route( + RestRequest.Method.POST, + String.format(Locale.ROOT, "%s/%s", AnomalyDetectorPlugin.AD_BASE_DETECTORS_URI, VALIDATE) + ) + ); + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java new file mode 100644 index 00000000..644196e2 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -0,0 +1,668 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad.rest.handler; + +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; + +import java.io.IOException; +import java.text.ParseException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.TreeMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.AdminClient; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; + +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.DateTimeRange; +import com.amazon.opendistroforelasticsearch.ad.model.Feature; +import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.model.TimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.model.ValidateResponse; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationFailures; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationSuggestedChanges; +import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; +import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; + +/** + * Anomaly detector REST action handler to process POST request. + * POST request is for validating anomaly detector. 
+ */
+public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler {
+
+    protected static final String AGG_NAME_MAX = "max_timefield";
+    protected static final int NUM_OF_INTERVAL_SAMPLES = 128;
+    protected static final int MAX_NUM_OF_SAMPLES_VIEWED = 128;
+    protected static final int NUM_OF_INTERVALS_CHECKED = 256;
+    protected static final double SAMPLE_SUCCESS_RATE = 0.75;
+    protected static final int FEATURE_VALIDATION_TIME_BACK_MINUTES = 10080;
+    protected static final int NUM_OF_INTERVALS_CHECKED_FILTER = 384;
+    protected static final long MAX_INTERVAL_LENGTH = (30L * 24 * 60 * 60 * 1000);
+    protected static final long HISTORICAL_CHECK_IN_MS = (90L * 24 * 60 * 60 * 1000);
+    protected static final String NAME_REGEX = "[a-zA-Z0-9._-]+";
+    protected static final double INTERVAL_RECOMMENDATION_MULTIPLIER = 1.2;
+    protected static final String[] numericType = { "long", "integer", "short", "double", "float" };
+
+    private final AnomalyDetectionIndices anomalyDetectionIndices;
+    private final AnomalyDetector anomalyDetector;
+    private final AdminClient adminClient;
+
+    private final Logger logger = LogManager.getLogger(ValidateAnomalyDetectorActionHandler.class);
+    private final Integer maxAnomalyDetectors;
+    private final Integer maxAnomalyFeatures;
+    private final TimeValue requestTimeout;
+    private final NamedXContentRegistry xContent;
+
+    private final ValidateResponse responseValidate;
+    private final Map<String, List<String>> failuresMap;
+    private final Map<String, List<String>> suggestedChangesMap;
+    private final AtomicBoolean inferAgain;
+
+    /**
+     * Constructor function.
+     *
+     * @param client ES node client that executes actions on the local node
+     * @param channel ES channel used to construct bytes / builder based outputs, and send responses
+     * @param anomalyDetectionIndices anomaly detector index manager
+     * @param anomalyDetector anomaly detector instance
+     */
+    public ValidateAnomalyDetectorActionHandler(
+        NodeClient client,
+        RestChannel channel,
+        AnomalyDetectionIndices anomalyDetectionIndices,
+        AnomalyDetector anomalyDetector,
+        Integer maxAnomalyDetectors,
+        Integer maxAnomalyFeatures,
+        TimeValue requestTimeout,
+        NamedXContentRegistry xContentRegistry
+    ) {
+        super(client, channel);
+        this.anomalyDetectionIndices = anomalyDetectionIndices;
+        this.anomalyDetector = anomalyDetector;
+        this.maxAnomalyDetectors = maxAnomalyDetectors;
+        this.maxAnomalyFeatures = maxAnomalyFeatures;
+        this.requestTimeout = requestTimeout;
+        this.responseValidate = new ValidateResponse();
+        this.xContent = xContentRegistry;
+        this.inferAgain = new AtomicBoolean(true);
+        this.failuresMap = new HashMap<>();
+        this.suggestedChangesMap = new HashMap<>();
+        this.adminClient = client.admin();
+    }
+
+    /**
+     * Start function to process the validate anomaly detector request.
+     * Checks whether the anomaly detector index exists first and adjusts the following validation steps accordingly.
+     *
+     * @throws IOException IOException thrown if the validation request cannot be processed
+     */
+    public void startValidation() throws IOException {
+        boolean indexExists = anomalyDetectionIndices.doesAnomalyDetectorIndexExist();
+        preDataValidationSteps(indexExists);
+    }
+
+    private void preDataValidationSteps(boolean indexExists) {
+        List<String> missingFields = new ArrayList<>();
+        List<String> formatErrors = new ArrayList<>();
+        if (StringUtils.isBlank(anomalyDetector.getName())) {
+            missingFields.add("name");
+        } else if (!anomalyDetector.getName().matches(NAME_REGEX)) {
+            formatErrors.add(anomalyDetector.getName());
+            failuresMap.put(ValidationFailures.FORMAT.getName(), formatErrors);
+        }
+        if (anomalyDetector.getTimeField() == null) {
+            missingFields.add("time_field");
+        }
+        if (anomalyDetector.getIndices() == null) {
+            missingFields.add("indices");
+        }
+        if (anomalyDetector.getWindowDelay() == null) {
+            missingFields.add("window_delay");
+        }
+        if (anomalyDetector.getDetectionInterval() == null) {
+            missingFields.add("detector_interval");
+        }
+        if (anomalyDetector.getFeatureAttributes().isEmpty()) {
+            missingFields.add("feature_attributes");
+        }
+        if (!missingFields.isEmpty()) {
+            failuresMap.put(ValidationFailures.MISSING.getName(), missingFields);
+        }
+        String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures);
+        if (StringUtils.isNotBlank(error)) {
+            List<String> dupErrorsFeatures = new ArrayList<>();
+            dupErrorsFeatures.addAll(Arrays.asList(error.split("\\r?\\n")));
+            failuresMap.put(ValidationFailures.DUPLICATES.getName(), dupErrorsFeatures);
+        }
+        if (!failuresMap.isEmpty()) {
+            sendAnomalyDetectorValidationResponse();
+        } else if (indexExists) {
+            validateNumberOfDetectors();
+        } else {
+            searchAdInputIndices(false);
+        }
+    }
+
+    private void validateNumberOfDetectors() {
+        try {
+            QueryBuilder query = QueryBuilders.matchAllQuery();
+            SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(query).size(0).timeout(requestTimeout);
+            SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder);
+            client.search(searchRequest, ActionListener.wrap(response -> onSearchAdResponse(response), exception -> onFailure(exception)));
+        } catch (Exception e) {
+            logger.error("Failed to create search request for validation", e);
+            onFailure(e);
+        }
+    }
+
+    private void onSearchAdResponse(SearchResponse response) {
+        if (response.getHits().getTotalHits().value >= maxAnomalyDetectors) {
+            suggestedChangesMap
+                .computeIfAbsent(ValidationSuggestedChanges.OTHERS.getName(), k -> new ArrayList<>())
+                .add("Can't create more than " + maxAnomalyDetectors + " anomaly detectors, please delete unused detectors");
+        } else {
+            searchAdInputIndices(true);
+        }
+    }
+
+    private void searchAdInputIndices(boolean indexExists) {
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
+            .query(QueryBuilders.matchAllQuery())
+            .size(0)
+            .timeout(requestTimeout);
+        SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder);
+        client
+            .search(
+                searchRequest,
+                ActionListener.wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, indexExists), exception -> {
+                    onFailure(exception);
+                    logger.error("Failed to create search request for validation", exception);
+                })
+            );
+    }
+
+    private void onSearchAdInputIndicesResponse(SearchResponse response, boolean indexExists) throws IOException {
+        if
(response.getHits().getTotalHits().value == 0) { + String errorMsg = String + .format("Can't create anomaly detector as no document found in indices: %s", anomalyDetector.getIndices()); + failuresMap.computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()).add(errorMsg); + sendAnomalyDetectorValidationResponse(); + } else if (indexExists) { + checkADNameExists(); + } else { + checkForHistoricalData(); + } + } + + private void checkADNameExists() { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(QueryBuilders.termQuery("name.keyword", anomalyDetector.getName())); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).timeout(requestTimeout); + SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); + client + .search( + searchRequest, + ActionListener.wrap(searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), exception -> { + onFailure(exception); + logger.error("Failed to create search request for validation", exception); + }) + ); + } + + private void onSearchADNameResponse(SearchResponse response, String name) { + if (response.getHits().getTotalHits().value > 0) { + failuresMap.computeIfAbsent(ValidationFailures.DUPLICATES.getName(), k -> new ArrayList<>()).add(name); + sendAnomalyDetectorValidationResponse(); + } else { + checkForHistoricalData(); + } + } + + public void checkForHistoricalData() { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); + SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); + client.search(searchRequest, ActionListener.wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { + onFailure(exception); + logger.error("Failed to create search request for validation", exception); + })); + } + + private void checkIfAnyHistoricalData(Optional lastTimeStamp) { + if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - HISTORICAL_CHECK_IN_MS > lastTimeStamp.get())) { + failuresMap + .computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()) + .add("No historical data for past 3 months"); + sendAnomalyDetectorValidationResponse(); + } else { + queryFilterValidation(); + } + } + + private Long timeConfigToMilliSec(TimeConfiguration config) { + return Optional.ofNullable((IntervalTimeConfiguration) config).map(t -> t.toDuration().toMillis()).orElse(0L); + } + + private DateTimeRange startEndTimeRangeWithIntervals(int numOfIntervals) { + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); + long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); + return DateTimeRange.rangeBasedOfInterval(delayMillis, detectorInterval, numOfIntervals); + } + + private void queryFilterValidation() { + DateTimeRange timeRange = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED_FILTER); + long dataStartTime = timeRange.getStart(); + long dataEndTime = timeRange.getEnd(); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis"); + BoolQueryBuilder internalFilterQuery = 
QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(internalFilterQuery) + .size(1) + .terminateAfter(1) + .timeout(requestTimeout); + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); + client.search(searchRequest, ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { + onFailure(exception); + logger.error("Failed to create data query search request for validation", exception); + })); + } + + private void onQueryFilterSearch(SearchResponse response) throws IOException, ParseException { + if (response.getHits().getTotalHits().value <= 0) { + List filterError = new ArrayList<>(); + filterError + .add( + "query filter is potentially wrong as no hits were found at all or no historical data in last " + + NUM_OF_INTERVALS_CHECKED_FILTER + + " intervals" + ); + suggestedChangesMap.put(ValidationSuggestedChanges.FILTER_QUERY.getName(), filterError); + sendAnomalyDetectorValidationResponse(); + } else { + featureQueryValidation(); + } + } + + private List parseAggregationRequest(XContentParser parser) throws IOException { + List fieldNames = new ArrayList<>(); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + final String field = parser.currentName(); + switch (field) { + case "field": + parser.nextToken(); + fieldNames.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; + } + } + } + return fieldNames; + } + + private List ForFieldMapping(XContentParser parser) throws IOException { + List fieldNameAggType = new ArrayList<>(); + XContentParser.Token token; + String agg = ""; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + final String field = parser.currentName(); + switch (field) { + case "max": + agg = parser.currentName(); + break; + case "avg": + agg = parser.currentName(); + break; + case "sum": + agg = parser.currentName(); + break; + case "field": + parser.nextToken(); + fieldNameAggType.add(agg); + fieldNameAggType.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; + } + } + } + return fieldNameAggType; + } + + private void checkFeatureAggregationType() throws IOException { + Map featureToField = new TreeMap<>(); + if (anomalyDetector.getFeatureAttributes() != null) { + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List aggTypeFieldName = ForFieldMapping(parser); + featureToField.put(feature.getName(), aggTypeFieldName.get(1)); + } + } + getFieldMapping(featureToField); + } + + private void getFieldMapping(Map featureToAgg) { + GetFieldMappingsRequest request = new GetFieldMappingsRequest().indices(anomalyDetector.getIndices().get(0)); + request.fields(featureToAgg.values().toArray(new String[0])); + adminClient + .indices() + .getFieldMappings( + request, + ActionListener + .wrap(response -> checkFieldIndex(response, featureToAgg.values().toArray(new String[0]), featureToAgg), exception -> { + onFailure(exception); + logger.error("Failed to get field 
mapping for validation", exception); + }) + ); + } + + private void checkFieldIndex(GetFieldMappingsResponse response, String[] fields, Map featuresToAgg) throws IOException { + List numericTypes = Arrays.asList(numericType); + for (int i = 0; i < fields.length; i++) { + Map>> mappings = response.mappings(); + final Map> fieldMappings = mappings + .get(anomalyDetector.getIndices().get(0)); + final GetFieldMappingsResponse.FieldMappingMetadata metadata = fieldMappings.get("_doc").get(fields[i]); + final Map source = metadata.sourceAsMap(); + String fieldTypeJSON = source.get(fields[i]).toString(); + String fieldType = fieldTypeJSON.substring(fieldTypeJSON.lastIndexOf('=') + 1, fieldTypeJSON.length() - 1).trim(); + if (!numericTypes.contains(fieldType)) { + failuresMap + .computeIfAbsent(ValidationFailures.FIELD_TYPE.getName(), k -> new ArrayList<>()) + .add( + "Field named: " + + fields[i] + + " can't be aggregated due to it being of type " + + fieldType + + " which isn't numeric, please use a different aggregation type" + ); + } + } + if (!failuresMap.isEmpty()) { + sendAnomalyDetectorValidationResponse(); + } else { + intervalValidation(); + } + } + + private DateTimeRange getFeatureQueryValidationDateRange() { + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); + DateTimeRange timeRange = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED_FILTER); + IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(FEATURE_VALIDATION_TIME_BACK_MINUTES, ChronoUnit.MINUTES); + long searchRangeTime = Optional.ofNullable(searchRange).map(t -> t.toDuration().toMillis()).orElse(0L); + long startTimeWithSetTime = timeRange.getEnd() - (searchRangeTime - delayMillis); + if (timeRange.getStart() > startTimeWithSetTime) { + timeRange.setStart(startTimeWithSetTime); + } + return timeRange; + } + + private void featureQueryValidation() throws IOException { + DateTimeRange timeRange = getFeatureQueryValidationDateRange(); + AtomicInteger featureCounter = new AtomicInteger(); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(timeRange.getStart()) + .to(timeRange.getEnd()) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); + if (anomalyDetector.getFeatureAttributes() != null) { + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser); + BoolQueryBuilder boolQuery = QueryBuilders + .boolQuery() + .filter(rangeQuery) + .filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery).size(1).terminateAfter(1); + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); + client.search(searchRequest, ActionListener.wrap(searchResponse -> { + featureCounter.incrementAndGet(); + onFeatureAggregationValidation(searchResponse, feature, featureCounter); + }, exception -> { + onFailure(exception); + logger.error("Failed to create feature search request for validation", exception); + })); + } + } + } + + private void onFeatureAggregationValidation(SearchResponse response, Feature feature, 
AtomicInteger counter) throws IOException {
+        if (response.getHits().getTotalHits().value <= 0) {
+            String errorMsg = feature.getName() + ": feature query is potentially wrong as no hits were found";
+            suggestedChangesMap
+                .computeIfAbsent(ValidationSuggestedChanges.FEATURE_ATTRIBUTES.getName(), k -> new ArrayList<>())
+                .add(errorMsg);
+        }
+        if (counter.get() == anomalyDetector.getFeatureAttributes().size()) {
+            if (!suggestedChangesMap.isEmpty()) {
+                sendAnomalyDetectorValidationResponse();
+            } else {
+                checkFeatureAggregationType();
+            }
+        }
+    }
+
+    // creates a new array of time ranges based on a different detector interval in order to validate
+    // the detector interval with a new range every time. Creates 128 new interval time ranges
+    private DateTimeRange[] createNewTimeRange(long detectorInterval) {
+        DateTimeRange[] timeRanges = new DateTimeRange[MAX_NUM_OF_SAMPLES_VIEWED];
+        long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay());
+        long dataEndTime = Instant.now().toEpochMilli() - delayMillis;
+        long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval);
+        for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) {
+            long newStartTime = dataStartTime + (i * detectorInterval);
+            long newEndTime = newStartTime + detectorInterval;
+            timeRanges[i] = new DateTimeRange(newStartTime, newEndTime);
+        }
+        return timeRanges;
+    }
+
+    private synchronized void intervalValidation() {
+        long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval());
+        for (long inferredDetectorInterval = detectorInterval; inferredDetectorInterval <= MAX_INTERVAL_LENGTH; inferredDetectorInterval *=
+            INTERVAL_RECOMMENDATION_MULTIPLIER) {
+            DateTimeRange[] timeRanges = createNewTimeRange(inferredDetectorInterval);
+            try {
+                if (inferAgain.get()) {
+                    verifyWithInterval(timeRanges, inferredDetectorInterval);
+                }
+                wait();
+            } catch (Exception ex) {
+                onFailure(ex);
+                logger.error(ex);
+            }
+        }
+    }
+
+    private List<String> getFieldNamesForFeature(Feature feature) throws IOException {
+        ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId());
+        XContentParser parser = XContentType.JSON
+            .xContent()
+            .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString());
+        parser.nextToken();
+        List<String> fieldNames = parseAggregationRequest(parser);
+        return fieldNames;
+    }
+
+    private List<String> getFeatureFieldNames() throws IOException {
+        List<String> featureFields = new ArrayList<>();
+        for (Feature feature : anomalyDetector.getFeatureAttributes()) {
+            featureFields.add(getFieldNamesForFeature(feature).get(0));
+        }
+        return featureFields;
+    }
+
+    private void verifyWithInterval(DateTimeRange[] timeRanges, long detectorInterval) throws IOException {
+        inferAgain.set(false);
+        List<String> featureFields = getFeatureFieldNames();
+        MultiSearchRequest sr = new MultiSearchRequest();
+        for (int i = 0; i < NUM_OF_INTERVAL_SAMPLES; i++) {
+            long rangeStart = timeRanges[i].getStart();
+            long rangeEnd = timeRanges[i].getEnd();
+            RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField())
+                .from(rangeStart)
+                .to(rangeEnd)
+                .format("epoch_millis")
+                .includeLower(true)
+                .includeUpper(false);
+            BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().filter(rangeQuery).filter(anomalyDetector.getFilterQuery());
+            for (int j = 0; j < featureFields.size(); j++) {
+                boolQuery.filter(QueryBuilders.existsQuery(featureFields.get(j)));
+            }
+            SearchSourceBuilder internalSearchSourceBuilder = new
SearchSourceBuilder().query(boolQuery).size(1).terminateAfter(1);
+            sr.add(new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder));
+        }
+        client.multiSearch(sr, ActionListener.wrap(searchResponse -> {
+            if (doneInferring(detectorInterval, searchResponse)) {
+                checkWindowDelay();
+            }
+        }, exception -> {
+            onFailure(exception);
+            logger.error("Failed to create multi search request for validation", exception);
+        }));
+    }
+
+    private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse) {
+        long originalInterval = Optional
+            .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval())
+            .map(t -> t.toDuration().toMillis())
+            .orElse(0L);
+        final AtomicInteger hitCounter = new AtomicInteger();
+        for (MultiSearchResponse.Item item : searchResponse) {
+            SearchResponse response = item.getResponse();
+            if (response.getHits().getTotalHits().value > 0) {
+                hitCounter.incrementAndGet();
+            }
+        }
+        inferAgain.set(true);
+        notify();
+        if (hitCounter.doubleValue() / (double) NUM_OF_INTERVAL_SAMPLES < SAMPLE_SUCCESS_RATE) {
+            if ((detectorInterval * INTERVAL_RECOMMENDATION_MULTIPLIER) >= MAX_INTERVAL_LENGTH) {
+                suggestedChangesMap
+                    .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>())
+                    .add("detector interval: failed to infer a working interval up to the max limit of " + MAX_INTERVAL_LENGTH);
+            } else {
+                return false;
+            }
+        } else if (detectorInterval != originalInterval) {
+            suggestedChangesMap
+                .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>())
+                .add(Long.toString(detectorInterval));
+        }
+        inferAgain.set(false);
+        return true;
+    }
+
+    private void checkWindowDelay() {
+        SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder()
+            .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField()))
+            .size(1)
+            .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC));
+        SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder);
+        client.search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> {
+            onFailure(exception);
+            logger.error("Failed to create search request for last data point", exception);
+        }));
+    }
+
+    private Optional<Long> getLatestDataTime(SearchResponse searchResponse) {
+        return Optional
+            .ofNullable(searchResponse)
+            .map(SearchResponse::getAggregations)
+            .map(aggs -> aggs.asMap())
+            .map(map -> (Max) map.get(AGG_NAME_MAX))
+            .map(agg -> (long) agg.getValue());
+    }
+
+    private void checkDelayResponse(Optional<Long> lastTimeStamp) {
+        long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay());
+        if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) {
+            long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get());
+            suggestedChangesMap
+                .computeIfAbsent(ValidationSuggestedChanges.WINDOW_DELAY.getName(), k -> new ArrayList<>())
+                .add(Long.toString(minutesSinceLastStamp));
+        }
+        sendAnomalyDetectorValidationResponse();
+    }
+
+    private void sendAnomalyDetectorValidationResponse() {
+        this.responseValidate.setFailures(failuresMap);
+        this.responseValidate.setSuggestedChanges(suggestedChangesMap);
+        try {
+            BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, responseValidate.toXContent(channel.newBuilder()));
+
channel.sendResponse(restResponse); + } catch (Exception e) { + channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java index 45146188..826f899b 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java @@ -260,7 +260,6 @@ private ActionListener> onGetDetector( .orElse(0L); long dataStartTime = request.getStart() - delayMillis; long dataEndTime = request.getEnd() - delayMillis; - featureManager .getCurrentFeatures( anomalyDetector, diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java index 5032a898..c8b99f97 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java @@ -55,6 +55,7 @@ public final class RestHandlerUtils { public static final String START_JOB = "_start"; public static final String STOP_JOB = "_stop"; public static final String PROFILE = "_profile"; + public static final String VALIDATE = "_validate"; public static final String TYPE = "type"; public static final ToXContent.MapParams XCONTENT_WITH_TYPE = new ToXContent.MapParams(ImmutableMap.of("with_type", "true")); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java index 48e80cc2..f85b8345 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java @@ -15,8 +15,11 @@ package com.amazon.opendistroforelasticsearch.ad.e2e; +import static org.apache.http.entity.ContentType.APPLICATION_JSON; + import java.io.File; import java.io.FileReader; +import java.io.IOException; import java.time.Instant; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; @@ -29,13 +32,21 @@ import java.util.Map.Entry; import java.util.Set; +import org.apache.http.HttpEntity; +import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import com.amazon.opendistroforelasticsearch.ad.ODFERestTestCase; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.google.common.collect.ImmutableMap; import com.google.gson.JsonArray; +import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import com.google.gson.JsonPrimitive; public class DetectionResultEvalutationIT extends ODFERestTestCase { @@ -43,6 +54,235 @@ public void testDataset() throws Exception { verifyAnomaly("synthetic", 1, 1500, 8, .9, .9, 10); } + protected HttpEntity toHttpEntity(String jsonString) throws IOException { + return new StringEntity(jsonString, APPLICATION_JSON); + } + + public void testNoHistoricalData() throws Exception { + RestClient client = client(); + List 
data = createData(10, 7776001000L); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":35,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 1); + assertTrue(failuresMap.containsKey("others")); + } + + public void testValidationIntervalRecommendation() throws Exception { + RestClient client = client(); + List data = createData(300, 1800000); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":35,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("detection_interval")); + } + + public void testValidationWindowDelayRecommendation() throws Exception { + RestClient client = client(); + List data = createData(1000, 120000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", 
\"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 10 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("window_delay")); + } + + public void testValidationFilterQuery() throws Exception { + RestClient client = client(); + List data = createData(1000, 6000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + "{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," + + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":" + + "{\"sum\":{\"field\":\"Feature2\"}}}}],\"filter_query\":{\"bool\":" + + "{\"filter\":[{\"exists\":{\"field\":\"value\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}}" + + ",\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("filter_query")); + } + + public void testValidationFeatureQuery() throws Exception { + RestClient client = client(); + List data = createData(1000, 6000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + "{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," + + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"Feature2\":{\"sum\":{\"field\":\"Feature5\"}}}}]," + + "\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"Feature1\",\"boost\":1}}]," + + 
"\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("feature_attributes")); + } + + public void testValidationWithDataSetSuccess() throws Exception { + RestClient client = client(); + List data = createData(300, 60000); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 0); + } + private void verifyAnomaly( String datasetName, int intervalMinutes, @@ -227,7 +467,7 @@ private List> getAnomalyWindows(String labalFileName) th private void indexTrainData(String datasetName, List data, int trainTestSplit, RestClient client) throws Exception { Request request = new Request("PUT", datasetName); String requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"}," - + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }"; + + " \"Feature1\": { \"type\": \"long\" }, \"Feature2\": { \"type\": \"long\" } } } }"; request.setJsonEntity(requestBody); client.performRequest(request); Thread.sleep(1_000); @@ -253,6 +493,21 @@ private List getData(String datasetFileName) throws Exception { return list; } + private List createData(int numOfDataPoints, long detectorIntervalMS) { + List list = new ArrayList<>(); + for (int i = 1; i < numOfDataPoints; i++) { + long valueFeature1 = randomLongBetween(1, 10000000); + long valueFeature2 = randomLongBetween(1, 10000000); + JsonObject obj = new JsonObject(); + JsonElement element = new JsonPrimitive(Instant.now().toEpochMilli() - 
(detectorIntervalMS * i)); + obj.add("timestamp", element); + obj.add("Feature1", new JsonPrimitive(valueFeature1)); + obj.add("Feature2", new JsonPrimitive(valueFeature2)); + list.add(obj); + } + return list; + } + private Map getDetectionResult(String detectorId, Instant begin, Instant end, RestClient client) { try { Request request = new Request("POST", String.format("/_opendistro/_anomaly_detection/detectors/%s/_run", detectorId)); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java index da3ddcd9..7c8f7e6c 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java @@ -68,6 +68,21 @@ public void testParseAnomalyDetectorWithEmptyFilterQuery() throws IOException { assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); } + public void testParseValidationAnomalyDetectorWithEmptyFilterQuery() throws IOException { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\":" + + "true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"filter_query\":{}," + + "\"detection_interval\":{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}},\"window_delay\":" + + "{\"period\":{\"interval\":973,\"unit\":\"Minutes\"}},\"shingle_size\":4,\"schema_version\":-1203962153,\"ui_metadata\":" + + "{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\",\"feature_enabled\":false," + + "\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}}," + + "\"last_update_time\":1568396089028}"; + AnomalyDetector parsedDetector = AnomalyDetector + .parse(TestHelpers.parser(detectorString), "id", 1L, null, null, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); + } + public void testParseAnomalyDetectorWithWrongFilterQuery() throws Exception { String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," @@ -95,6 +110,38 @@ public void testParseAnomalyDetectorWithoutOptionalParams() throws IOException { assertEquals((long) parsedDetector.getShingleSize(), (long) AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); } + public void testParseValidationAnomalyDetector() throws IOException { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\"" + + ":true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"detection_interval\":" + + "{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}},\"schema_version\":-1203962153,\"ui_metadata\":" + + "{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\",\"feature_enabled\":false," + + "\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}},\"last_update_time\":1568396089028}"; + AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, 
AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); + assertEquals((long) parsedDetector.getShingleSize(), (long) AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + } + + public void testParseValidationAnomalyDetectorWithWrongFilterQuery() throws Exception { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\":" + + "true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"filter_query\":" + + "{\"aa\":\"bb\"},\"detection_interval\":{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":973,\"unit\":\"Minutes\"}},\"shingle_size\":8,\"schema_version\":" + + "-1203962153,\"ui_metadata\":{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\"," + + "\"feature_enabled\":false,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}}," + + "\"last_update_time\":1568396089028}"; + TestHelpers + .assertFailWith( + ParsingException.class, + () -> AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE) + ); + } + public void testParseAnomalyDetectorWithInvalidShingleSize() throws Exception { String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," @@ -114,6 +161,23 @@ public void testParseAnomalyDetectorWithNullUiMetadata() throws IOException { assertNull(parsedDetector.getUiMetadata()); } + public void testParseValidationAnomalyDetectorWithNullUiMetadata() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(null, Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + assertNull(parsedDetector.getUiMetadata()); + } + + public void testParseValidationAnomalyDetectorWithEmptyUiMetadata() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + public void testParseAnomalyDetectorWithEmptyUiMetadata() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), Instant.now()); String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java new file mode 100644 index 00000000..921a0ada --- /dev/null +++ 
b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java @@ -0,0 +1,42 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import org.elasticsearch.test.ESTestCase; + +public class TestDateTimeRange extends ESTestCase { + public void testDateTimeRangeGetStart() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + assertEquals(10, timeRange.getStart()); + } + + public void testDateTimeRangeGetEnd() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + assertEquals(20, timeRange.getEnd()); + } + + public void testDateTimeRangeSetEnd() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + timeRange.setEnd(50); + assertEquals(50, timeRange.getEnd()); + } + + public void testDateTimeRangeSetStart() { + DateTimeRange timeRange = DateTimeRange.rangeBasedOfInterval(0, 20, 2); + timeRange.setStart(10); + assertEquals(10, timeRange.getStart()); + } +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java new file mode 100644 index 00000000..eb5f0638 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java @@ -0,0 +1,55 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.test.ESTestCase; + +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; + +public class ValidationTests extends ESTestCase { + + public void testValidationSuggestedChanges() { + assertEquals("others", ValidationSuggestedChanges.OTHERS.getName()); + } + + public void testValidationFailures() { + assertEquals("missing", ValidationFailures.MISSING.getName()); + } + + public void testValidationResponse() throws IOException { + Map> failuresMap = new HashMap<>(); + Map> suggestedChanges = new HashMap<>(); + failuresMap.put("missing", Arrays.asList("name")); + suggestedChanges.put("detection_interval", Arrays.asList("200000")); + ValidateResponse responseValidate = new ValidateResponse(); + responseValidate.setFailures(failuresMap); + responseValidate.setSuggestedChanges(suggestedChanges); + String validation = TestHelpers + .xContentBuilderToString(responseValidate.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + System.out.println(validation); + assertEquals("{\"failures\":{\"missing\":[\"name\"]}," + "\"suggestedChanges\":{\"detection_interval\":[\"200000\"]}}", validation); + assertEquals(failuresMap, responseValidate.getFailures()); + assertEquals(suggestedChanges, responseValidate.getSuggestedChanges()); + } + +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java index a51d4c8c..d117d57c 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.time.Instant; +import java.util.List; import java.util.Map; import org.apache.http.entity.ContentType; @@ -33,6 +34,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -83,6 +85,108 @@ public void testCreateAnomalyDetectorWithEmptyIndices() throws Exception { ); } + public void testValidateAnomalyDetectorWithEmptyIndices() throws Exception { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null); + TestHelpers + .makeRequest( + client(), + "PUT", + "/" + detector.getIndices().get(0), + ImmutableMap.of(), + toHttpEntity( + "{\"settings\":{\"number_of_shards\":1},\"mappings\":{\"properties\":{\"field1\":" + + "{\"type\":\"text\"},\"" + + detector.getTimeField() + + "\":{\"type\":\"date\",\"format\":\"yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis\"}}}}" + ), + null + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(detector), + null + ); + Map responseMap = entityAsMap(resp); + System.out.println(responseMap); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + 
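// Descriptive note (not part of the original patch): the _validate response groups problems into a "failures" map keyed by failure type; an index that exists but contains no documents is reported under the catch-all "others" key with an explanatory message, as asserted below.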
assertTrue(failuresMap.containsKey("others")); + System.out.println(failuresMap.get("others").get(0)); + assertEquals( + "Can't create anomaly detector as no document found in indices: [" + detector.getIndices().get(0) + "]", + failuresMap.get("others").get(0) + ); + } + + public void testValidateMissingNameAndTimeFieldFailure() throws Exception { + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity( + "{\"description\":\"Test detector\",\"indices\":[\"test-index-sparse\"]," + + "\"feature_attributes\":[{\"feature_name\":\"total_order\",\"feature_enabled\":true," + + "\"aggregation_query\":{\"total_order\":{\"max\":{\"field\":\"feature-1\"}}}}," + + "{\"feature_name\":\"second_feature\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"total\":{\"max\":{\"field\":\"feature-2\"}}}}],\"detection_interval\":{\"period\":" + + "{\"interval\":70,\"unit\":\"Minutes\"}},\"window_delay\":" + + "{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}}}" + ), + null + ); + Map responseMap = entityAsMap(resp); + System.out.println(responseMap); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + assertTrue(failuresMap.containsKey("missing")); + assertTrue(failuresMap.keySet().size() == 1); + assertTrue(failuresMap.get("missing").size() == 2); + assertEquals("name", failuresMap.get("missing").get(0)); + assertEquals("time_field", failuresMap.get("missing").get(1)); + } + + public void testValidateAnomalyDetectorWithDuplicateName() throws Exception { + AnomalyDetector detector = createRandomAnomalyDetector(true, true); + TestHelpers.createIndex(client(), "test-index", toHttpEntity("{\"timestamp\": " + Instant.now().toEpochMilli() + "}")); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity( + "{\"name\":\"" + + detector.getName() + + "\",\"description\":\"Test detector\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"test-index\"],\"feature_attributes\":[{\"feature_name\":\"totssal\",\"" + + "feature_enabled\":true,\"aggregation_query\":{\"totalquery\":{\"max\":{\"field\":\"feature-3\"}}}}," + + "{\"feature_name\":\"totaly\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"totalqusery\":" + + "{\"max\":{\"field\":\"feature-1\"}}}}],\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":" + + "{\"field\":" + + "\"feature-4\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}},\"detection_interval\":" + + "{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}" + ), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + assertTrue(failuresMap.containsKey("duplicates")); + assertTrue(failuresMap.keySet().size() == 1); + assertEquals(detector.getName(), failuresMap.get("duplicates").get(0)); + } + public void testCreateAnomalyDetectorWithDuplicateName() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true);
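// Descriptive note (not part of the original patch): this pre-existing test exercises the create path for the same duplicate-name scenario that _validate (above) reports non-destructively under the "duplicates" key rather than attempting to create the detector.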