From f9a097d80acd2aacc3e36d649a365aa3798d0aab Mon Sep 17 00:00:00 2001 From: Galitzky Date: Fri, 10 Jul 2020 11:00:35 -0700 Subject: [PATCH 01/20] starting validate API structure --- .../ad/AnomalyDetectorPlugin.java | 13 +- .../RestValidateAnomalyDetectorAction.java | 118 ++++++++++++++++++ .../ValidateAnomalyDetectorActionHandler.java | 8 ++ .../ad/util/RestHandlerUtils.java | 1 + 4 files changed, 132 insertions(+), 8 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index f01c563a..9f858d2e 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -31,6 +31,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; +import com.amazon.opendistroforelasticsearch.ad.rest.*; import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -87,14 +88,6 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetectorJob; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyResult; -import com.amazon.opendistroforelasticsearch.ad.rest.RestAnomalyDetectorJobAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestDeleteAnomalyDetectorAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestExecuteAnomalyDetectorAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestGetAnomalyDetectorAction; -import 
com.amazon.opendistroforelasticsearch.ad.rest.RestIndexAnomalyDetectorAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyDetectorAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyResultAction; -import com.amazon.opendistroforelasticsearch.ad.rest.RestStatsAnomalyDetectorAction; import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; import com.amazon.opendistroforelasticsearch.ad.stats.ADStat; @@ -206,6 +199,10 @@ public List getRestHandlers( anomalyDetectionIndices ); RestSearchAnomalyDetectorAction searchAnomalyDetectorAction = new RestSearchAnomalyDetectorAction(); + RestValidateAnomalyDetectorAction restValidateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( + settings, + anomalyDetectionIndices + ); RestSearchAnomalyResultAction searchAnomalyResultAction = new RestSearchAnomalyResultAction(); RestDeleteAnomalyDetectorAction deleteAnomalyDetectorAction = new RestDeleteAnomalyDetectorAction(clusterService); RestExecuteAnomalyDetectorAction executeAnomalyDetectorAction = new RestExecuteAnomalyDetectorAction( diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java new file mode 100644 index 00000000..fbd5fe6e --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -0,0 +1,118 @@ +package com.amazon.opendistroforelasticsearch.ad.rest; + +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; + +import static 
com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.*; +import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.VALIDATE; + +import com.amazon.opendistroforelasticsearch.ad.rest.handler.IndexAnomalyDetectorActionHandler; +import com.amazon.opendistroforelasticsearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; +import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; +import com.google.common.collect.ImmutableList; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.common.settings.Settings; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; + +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; +import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.*; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +/** + * This class consists of the REST handler to validate anomaly detector configurations. 
+ */ + +public class RestValidateAnomalyDetectorAction extends BaseRestHandler { + + private static final String VALIDATE_ANOMALY_DETECTOR_ACTION = "validate_anomaly_detector_action"; + private final AnomalyDetectionIndices anomalyDetectionIndices; + private final Logger logger = LogManager.getLogger(RestValidateAnomalyDetectorAction.class); + private final Settings settings; + + + private volatile TimeValue detectionInterval; + private volatile TimeValue detectionWindowDelay; + private volatile Integer maxAnomalyDetectors; + private volatile Integer maxAnomalyFeatures; + + public RestValidateAnomalyDetectorAction( + Settings settings, + AnomalyDetectionIndices anomalyDetectionIndices + ) { + this.settings = settings; + this.anomalyDetectionIndices = anomalyDetectionIndices; + this.detectionInterval = DETECTION_INTERVAL.get(settings); + this.detectionWindowDelay = DETECTION_WINDOW_DELAY.get(settings); + this.maxAnomalyDetectors = MAX_ANOMALY_DETECTORS.get(settings); + this.maxAnomalyFeatures = MAX_ANOMALY_FEATURES.get(settings); + } + + protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { + if (!EnabledSetting.isADPluginEnabled()) { + throw new IllegalStateException(CommonErrorMessages.DISABLED_ERR_MSG); + } + + String detectorId = request.param(DETECTOR_ID, AnomalyDetector.NO_ID); + logger.info("AnomalyDetector {} action for detectorId {}", request.method(), detectorId); + + XContentParser parser = request.contentParser(); + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + // TODO: check detection interval < modelTTL + AnomalyDetector detector = AnomalyDetector.parse(parser, detectorId, null, detectionInterval, detectionWindowDelay); + //System.out.println("detector interval amit print:" + detector.getDetectionInterval().PERIOD_FIELD); + + long seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO); + long primaryTerm = 
request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM); + WriteRequest.RefreshPolicy refreshPolicy = request.hasParam(REFRESH) + ? WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) + : WriteRequest.RefreshPolicy.IMMEDIATE; + + return channel -> new ValidateAnomalyDetectorActionHandler( + settings, + client, + channel, + anomalyDetectionIndices, + detectorId, + seqNo, + primaryTerm, + refreshPolicy, + detector, + maxAnomalyDetectors, + maxAnomalyFeatures + ).start(); + } + + + + + + + @Override + public String getName() { return VALIDATE_ANOMALY_DETECTOR_ACTION; } + + @Override + public List routes() { + return ImmutableList + .of( + // validate configs + new Route( + RestRequest.Method.POST, + String.format(Locale.ROOT, "%s/%s", AnomalyDetectorPlugin.AD_BASE_DETECTORS_URI, VALIDATE) + ) + ); + } +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java new file mode 100644 index 00000000..7d7cda40 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -0,0 +1,8 @@ +package com.amazon.opendistroforelasticsearch.ad.rest.handler; + +/** + * Anomaly detector REST action handler to process POST request. + * POST request is for validating anomaly detector. 
+ */ +public class ValidateAnomalyDetectorActionHandler { +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java index 5032a898..c8b99f97 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/RestHandlerUtils.java @@ -55,6 +55,7 @@ public final class RestHandlerUtils { public static final String START_JOB = "_start"; public static final String STOP_JOB = "_stop"; public static final String PROFILE = "_profile"; + public static final String VALIDATE = "_validate"; public static final String TYPE = "type"; public static final ToXContent.MapParams XCONTENT_WITH_TYPE = new ToXContent.MapParams(ImmutableMap.of("with_type", "true")); From ef8195b4c2986c89ac42228cec2d8256b5c49e52 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 14 Jul 2020 11:53:40 -0700 Subject: [PATCH 02/20] starting validate method --- .../ad/model/AnomalyDetector.java | 132 ++++++++++++++++++ .../ad/model/AnomalyDetectorValidation.java | 72 ++++++++++ .../RestValidateAnomalyDetectorAction.java | 22 +-- .../ValidateAnomalyDetectorActionHandler.java | 111 ++++++++++++++- 4 files changed, 317 insertions(+), 20 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index 0c3373df..83adc2a8 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -84,6 +84,7 @@ public class AnomalyDetector implements ToXContentObject { private final Map uiMetadata; private final Integer schemaVersion; private final 
Instant lastUpdateTime; + private final Boolean validation; /** * Constructor function. @@ -142,6 +143,42 @@ public AnomalyDetector( this.uiMetadata = uiMetadata; this.schemaVersion = schemaVersion; this.lastUpdateTime = lastUpdateTime; + this.validation = false; + } + + public AnomalyDetector( + String detectorId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration detectionInterval, + TimeConfiguration windowDelay, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + Boolean validation + ) { + if (indices == null || indices.isEmpty()) { + indices = null; + } + this.detectorId = detectorId; + this.version = version; + this.name = name; + this.description = description; + this.timeField = timeField; + this.indices = indices; + this.featureAttributes = features; + this.filterQuery = filterQuery; + this.detectionInterval = detectionInterval; + this.windowDelay = windowDelay; + this.uiMetadata = uiMetadata; + this.schemaVersion = schemaVersion; + this.lastUpdateTime = lastUpdateTime; + this.validation = validation; } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { @@ -310,6 +347,101 @@ public static AnomalyDetector parse( ); } + public static AnomalyDetector parseValidation( + XContentParser parser, + String detectorId, + Long version + ) throws IOException { + Boolean validation = true; + String name = null; + String description = null; + String timeField = null; + List indices = new ArrayList(); + QueryBuilder filterQuery = QueryBuilders.matchAllQuery(); + TimeConfiguration detectionInterval = null; + TimeConfiguration windowDelay = null; + List features = new ArrayList<>(); + int schemaVersion = 0; + Map uiMetadata = null; + Instant lastUpdateTime = null; + + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != 
XContentParser.Token.END_OBJECT) { + String fieldName = parser.currentName(); + parser.nextToken(); + + switch (fieldName) { + case NAME_FIELD: + name = parser.text(); + break; + case DESCRIPTION_FIELD: + description = parser.text(); + break; + case TIMEFIELD_FIELD: + timeField = parser.text(); + break; + case INDICES_FIELD: + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + indices.add(parser.text()); + } + break; + case UI_METADATA_FIELD: + uiMetadata = parser.map(); + break; + case SCHEMA_VERSION_FIELD: + schemaVersion = parser.intValue(); + break; + case FILTER_QUERY_FIELD: + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + try { + filterQuery = parseInnerQueryBuilder(parser); + } catch (IllegalArgumentException e) { + if (!e.getMessage().contains("empty clause")) { + throw e; + } + } + break; + case DETECTION_INTERVAL_FIELD: + detectionInterval = TimeConfiguration.parse(parser); + break; + case FEATURE_ATTRIBUTES_FIELD: + ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser::getTokenLocation); + while (parser.nextToken() != XContentParser.Token.END_ARRAY) { + features.add(Feature.parse(parser)); + } + break; + case WINDOW_DELAY_FIELD: + windowDelay = TimeConfiguration.parse(parser); + break; + case LAST_UPDATE_TIME_FIELD: + lastUpdateTime = ParseUtils.toInstant(parser); + break; + default: + parser.skipChildren(); + break; + } + } + return new AnomalyDetector( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + uiMetadata, + schemaVersion, + lastUpdateTime, + validation + ); + } + + + public SearchSourceBuilder generateFeatureQuery() { SearchSourceBuilder generatedFeatureQuery = new SearchSourceBuilder().query(filterQuery); if (this.getFeatureAttributes() != null) { 
diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java new file mode 100644 index 00000000..22075644 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java @@ -0,0 +1,72 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +import org.apache.logging.log4j.util.Strings; +import org.elasticsearch.index.query.QueryBuilder; + +import java.time.Instant; +import java.util.List; +import java.util.Map; + +public class AnomalyDetectorValidation extends AnomalyDetector{ + + /** + * Constructor function. + * + * @param detectorId detector identifier + * @param version detector document version + * @param name detector name + * @param description description of detector + * @param timeField time field + * @param indices indices used as detector input + * @param features detector feature attributes + * @param filterQuery detector filter query + * @param detectionInterval detecting interval + * @param windowDelay max delay window for realtime data + * @param uiMetadata metadata used by Kibana + * @param schemaVersion anomaly detector index mapping version + * @param lastUpdateTime detector's last update time + */ + public AnomalyDetectorValidation( + String detectorId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration detectionInterval, + TimeConfiguration windowDelay, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime + ) { + super(); + if (Strings.isBlank(name)) { + throw new IllegalArgumentException("Detector name should be set"); + } + if (timeField == null) { + throw new IllegalArgumentException("Time field should be set"); + } + if (indices == null || indices.isEmpty()) { + throw new IllegalArgumentException("Indices should be set"); + } + if 
(detectionInterval == null) { + throw new IllegalArgumentException("Detection interval should be set"); + } + this.detectorId = detectorId; + this.version = version; + this.name = name; + this.description = description; + this.timeField = timeField; + this.indices = indices; + this.featureAttributes = features; + this.filterQuery = filterQuery; + this.detectionInterval = detectionInterval; + this.windowDelay = windowDelay; + this.uiMetadata = uiMetadata; + this.schemaVersion = schemaVersion; + this.lastUpdateTime = lastUpdateTime; + } +} + diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java index fbd5fe6e..9dc0ca01 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -66,20 +66,11 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli throw new IllegalStateException(CommonErrorMessages.DISABLED_ERR_MSG); } - String detectorId = request.param(DETECTOR_ID, AnomalyDetector.NO_ID); - logger.info("AnomalyDetector {} action for detectorId {}", request.method(), detectorId); + String detectorId = AnomalyDetector.NO_ID; XContentParser parser = request.contentParser(); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); - // TODO: check detection interval < modelTTL - AnomalyDetector detector = AnomalyDetector.parse(parser, detectorId, null, detectionInterval, detectionWindowDelay); - //System.out.println("detector interval amit print:" + detector.getDetectionInterval().PERIOD_FIELD); - - long seqNo = request.paramAsLong(IF_SEQ_NO, SequenceNumbers.UNASSIGNED_SEQ_NO); - long primaryTerm = request.paramAsLong(IF_PRIMARY_TERM, SequenceNumbers.UNASSIGNED_PRIMARY_TERM); - 
WriteRequest.RefreshPolicy refreshPolicy = request.hasParam(REFRESH) - ? WriteRequest.RefreshPolicy.parse(request.param(REFRESH)) - : WriteRequest.RefreshPolicy.IMMEDIATE; + AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null); return channel -> new ValidateAnomalyDetectorActionHandler( settings, @@ -87,20 +78,13 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli channel, anomalyDetectionIndices, detectorId, - seqNo, - primaryTerm, - refreshPolicy, detector, maxAnomalyDetectors, maxAnomalyFeatures - ).start(); + ).validate(); } - - - - @Override public String getName() { return VALIDATE_ANOMALY_DETECTOR_ACTION; } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 7d7cda40..0140ac8f 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -1,8 +1,117 @@ package com.amazon.opendistroforelasticsearch.ad.rest.handler; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; +import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; + /** * Anomaly detector REST action handler to process POST request. * POST request is for validating anomaly detector. */ -public class ValidateAnomalyDetectorActionHandler { +public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { + private final AnomalyDetectionIndices anomalyDetectionIndices; + private final String detectorId; + private final AnomalyDetector anomalyDetector; + + private final Logger logger = LogManager.getLogger(IndexAnomalyDetectorActionHandler.class); + private final Integer maxAnomalyDetectors; + private final Integer maxAnomalyFeatures; + + private final List failures; + private final List suggestedChanges; + + + /** + * Constructor function. 
+ * + * @param settings ES settings + * @param client ES node client that executes actions on the local node + * @param channel ES channel used to construct bytes / builder based outputs, and send responses + * @param anomalyDetectionIndices anomaly detector index manager + * @param detectorId detector identifier + * @param anomalyDetector anomaly detector instance + */ + public ValidateAnomalyDetectorActionHandler( + Settings settings, + NodeClient client, + RestChannel channel, + AnomalyDetectionIndices anomalyDetectionIndices, + String detectorId, + AnomalyDetector anomalyDetector, + Integer maxAnomalyDetectors, + Integer maxAnomalyFeatures + ) { + super(client, channel); + this.anomalyDetectionIndices = anomalyDetectionIndices; + this.detectorId = detectorId; + this.anomalyDetector = anomalyDetector; + this.maxAnomalyDetectors = maxAnomalyDetectors; + this.maxAnomalyFeatures = maxAnomalyFeatures; + this.failures = new ArrayList<>(); + this.suggestedChanges = new ArrayList<>(); + } + + /** + * Start function to process validate anomaly detector request. + * Checks if anomaly detector index exist first, if not, add it as a failure case. 
+ * + * @throws IOException IOException from {@link AnomalyDetectionIndices#initAnomalyDetectorIndexIfAbsent(ActionListener)} + */ + public void startAndCheckIfIndexExists() throws IOException { + if (!anomalyDetectionIndices.doesAnomalyDetectorIndexExist()) { + anomalyDetectionIndices + .initAnomalyDetectorIndex( + ActionListener.wrap(response -> onCreateMappingsResponse(response), exception -> onFailure(exception)) + ); + } else { + preDataValidationSteps(); + } + } + + public void preDataValidationSteps() { + String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); + if (StringUtils.isNotBlank(error)) { + channel.sendResponse(new BytesRestResponse(RestStatus.BAD_REQUEST, error)); + return; + } + if (channel.request().method() == RestRequest.Method.PUT) { + handler.getDetectorJob(clusterService, client, detectorId, channel, () -> updateAnomalyDetector(client, detectorId)); + } else { + createAnomalyDetector(); + } + } + + private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { + if (response.isAcknowledged()) { + logger.info("Created {} with mappings.", ANOMALY_DETECTORS_INDEX); + preDataValidationSteps(); + } else { + logger.warn("Created {} with mappings call not acknowledged.", ANOMALY_DETECTORS_INDEX); + channel + .sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, response.toXContent(channel.newErrorBuilder(), EMPTY_PARAMS)) + ); + } + } } From faf0e735e9e12e0c7f88f55f25b2ac23c1f86b54 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 14 Jul 2020 12:08:36 -0700 Subject: [PATCH 03/20] pre validation steps beginning --- .../ad/model/AnomalyDetectorValidation.java | 72 ------------------- .../ValidateAnomalyDetectorActionHandler.java | 11 +-- 2 files changed, 7 insertions(+), 76 deletions(-) delete mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java diff --git 
a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java deleted file mode 100644 index 22075644..00000000 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorValidation.java +++ /dev/null @@ -1,72 +0,0 @@ -package com.amazon.opendistroforelasticsearch.ad.model; - -import org.apache.logging.log4j.util.Strings; -import org.elasticsearch.index.query.QueryBuilder; - -import java.time.Instant; -import java.util.List; -import java.util.Map; - -public class AnomalyDetectorValidation extends AnomalyDetector{ - - /** - * Constructor function. - * - * @param detectorId detector identifier - * @param version detector document version - * @param name detector name - * @param description description of detector - * @param timeField time field - * @param indices indices used as detector input - * @param features detector feature attributes - * @param filterQuery detector filter query - * @param detectionInterval detecting interval - * @param windowDelay max delay window for realtime data - * @param uiMetadata metadata used by Kibana - * @param schemaVersion anomaly detector index mapping version - * @param lastUpdateTime detector's last update time - */ - public AnomalyDetectorValidation( - String detectorId, - Long version, - String name, - String description, - String timeField, - List indices, - List features, - QueryBuilder filterQuery, - TimeConfiguration detectionInterval, - TimeConfiguration windowDelay, - Map uiMetadata, - Integer schemaVersion, - Instant lastUpdateTime - ) { - super(); - if (Strings.isBlank(name)) { - throw new IllegalArgumentException("Detector name should be set"); - } - if (timeField == null) { - throw new IllegalArgumentException("Time field should be set"); - } - if (indices == null || indices.isEmpty()) { - throw new IllegalArgumentException("Indices should be set"); - } - if 
(detectionInterval == null) { - throw new IllegalArgumentException("Detection interval should be set"); - } - this.detectorId = detectorId; - this.version = version; - this.name = name; - this.description = description; - this.timeField = timeField; - this.indices = indices; - this.featureAttributes = features; - this.filterQuery = filterQuery; - this.detectionInterval = detectionInterval; - this.windowDelay = windowDelay; - this.uiMetadata = uiMetadata; - this.schemaVersion = schemaVersion; - this.lastUpdateTime = lastUpdateTime; - } -} - diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 0140ac8f..c908aa8d 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -90,14 +90,17 @@ public void startAndCheckIfIndexExists() throws IOException { } public void preDataValidationSteps() { + + if (anomalyDetector.getName() == null) { + failures.add("name missing"); + + String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); if (StringUtils.isNotBlank(error)) { - channel.sendResponse(new BytesRestResponse(RestStatus.BAD_REQUEST, error)); + failures.add(error); return; } - if (channel.request().method() == RestRequest.Method.PUT) { - handler.getDetectorJob(clusterService, client, detectorId, channel, () -> updateAnomalyDetector(client, detectorId)); - } else { + if () createAnomalyDetector(); } } From f8d6e88c770bdf8305f03b23a164d7e554cc2e8c Mon Sep 17 00:00:00 2001 From: Galitzky Date: Wed, 22 Jul 2020 13:10:16 -0700 Subject: [PATCH 04/20] working on feature aggregation --- .../ad/AnomalyDetectorPlugin.java | 4 +- .../ad/model/ValidateResponse.java | 54 +++ 
.../RestValidateAnomalyDetectorAction.java | 11 +- .../ValidateAnomalyDetectorActionHandler.java | 314 +++++++++++++++++- 4 files changed, 364 insertions(+), 19 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index 9f858d2e..9d0333f7 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -201,7 +201,8 @@ public List getRestHandlers( RestSearchAnomalyDetectorAction searchAnomalyDetectorAction = new RestSearchAnomalyDetectorAction(); RestValidateAnomalyDetectorAction restValidateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( settings, - anomalyDetectionIndices + anomalyDetectionIndices, + xContentRegistry ); RestSearchAnomalyResultAction searchAnomalyResultAction = new RestSearchAnomalyResultAction(); RestDeleteAnomalyDetectorAction deleteAnomalyDetectorAction = new RestDeleteAnomalyDetectorAction(clusterService); @@ -225,6 +226,7 @@ public List getRestHandlers( .of( restGetAnomalyDetectorAction, restIndexAnomalyDetectorAction, + restValidateAnomalyDetectorAction, searchAnomalyDetectorAction, searchAnomalyResultAction, deleteAnomalyDetectorAction, diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java new file mode 100644 index 00000000..241ead2c --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java @@ -0,0 +1,54 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +import com.amazon.opendistroforelasticsearch.ad.constant.CommonName; +import org.elasticsearch.action.ActionResponse; +import 
org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.util.List; + +public class ValidateResponse implements ToXContentObject { + private List failures; + private List suggestedChanges; + + public XContentBuilder toXContent(XContentBuilder builder) throws IOException { + return toXContent(builder, ToXContent.EMPTY_PARAMS); + } + + public ValidateResponse() { + failures = null; + suggestedChanges = null; + } + + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + XContentBuilder xContentBuilder = builder.startObject(); + + System.out.println("inside x content"); + if (failures != null && failures.size() > 0) { + xContentBuilder.array("failures", failures); + + } + if (suggestedChanges != null && suggestedChanges.size() > 0) { + xContentBuilder.array("suggestedChanges", suggestedChanges); + + } + return xContentBuilder.endObject(); + } + + public List getFailures() { return failures; } + + public List getSuggestedChanges() { return suggestedChanges; } + + public void setFailures(List failures) { this.failures = failures; } + + public void setSuggestedChanges(List suggestedChanges) { this.suggestedChanges = suggestedChanges; } + + + + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java index 9dc0ca01..b7fcfb99 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -43,7 +43,7 @@ public class 
RestValidateAnomalyDetectorAction extends BaseRestHandler { private final Logger logger = LogManager.getLogger(RestValidateAnomalyDetectorAction.class); private final Settings settings; - + private volatile TimeValue requestTimeout; private volatile TimeValue detectionInterval; private volatile TimeValue detectionWindowDelay; private volatile Integer maxAnomalyDetectors; @@ -59,6 +59,8 @@ public RestValidateAnomalyDetectorAction( this.detectionWindowDelay = DETECTION_WINDOW_DELAY.get(settings); this.maxAnomalyDetectors = MAX_ANOMALY_DETECTORS.get(settings); this.maxAnomalyFeatures = MAX_ANOMALY_FEATURES.get(settings); + this.requestTimeout = REQUEST_TIMEOUT.get(settings); + } protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { @@ -67,7 +69,6 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli } String detectorId = AnomalyDetector.NO_ID; - XContentParser parser = request.contentParser(); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null); @@ -80,8 +81,10 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli detectorId, detector, maxAnomalyDetectors, - maxAnomalyFeatures - ).validate(); + maxAnomalyFeatures, + requestTimeout, + xContentRegistry + ).startValidation(); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index c908aa8d..aeb279dc 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -1,26 +1,43 @@ package 
com.amazon.opendistroforelasticsearch.ad.rest.handler; +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; -import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.*; +import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; import org.apache.commons.lang.StringUtils; +import org.apache.commons.lang.Validate; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.rest.BytesRestResponse; -import org.elasticsearch.rest.RestChannel; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.rest.*; +import org.elasticsearch.rest.action.RestResponseListener; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; 
+import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + import java.io.IOException; -import java.util.ArrayList; -import java.util.List; +import java.time.Instant; +import java.util.*; +import java.util.stream.Collectors; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; @@ -30,6 +47,12 @@ * POST request is for validating anomaly detector. */ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { + + protected static final String AGG_NAME_MAX = "max_timefield"; + + public static final String SUGGESTED_CHANGES = "suggested_changes"; + public static final String FAILURES = "failures"; + private final AnomalyDetectionIndices anomalyDetectionIndices; private final String detectorId; private final AnomalyDetector anomalyDetector; @@ -37,6 +60,10 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final Logger logger = LogManager.getLogger(IndexAnomalyDetectorActionHandler.class); private final Integer maxAnomalyDetectors; private final Integer maxAnomalyFeatures; + private final TimeValue requestTimeout; + private final NamedXContentRegistry xContent; + + private ValidateResponse responseValidate; private final List failures; private final List suggestedChanges; @@ -60,7 +87,9 @@ public ValidateAnomalyDetectorActionHandler( String detectorId, AnomalyDetector anomalyDetector, Integer maxAnomalyDetectors, - Integer maxAnomalyFeatures + Integer maxAnomalyFeatures, + TimeValue requestTimeout, + NamedXContentRegistry xContentRegistry ) { super(client, channel); this.anomalyDetectionIndices = anomalyDetectionIndices; @@ -68,8 +97,11 @@ public ValidateAnomalyDetectorActionHandler( this.anomalyDetector 
= anomalyDetector; this.maxAnomalyDetectors = maxAnomalyDetectors; this.maxAnomalyFeatures = maxAnomalyFeatures; + this.requestTimeout = requestTimeout; this.failures = new ArrayList<>(); this.suggestedChanges = new ArrayList<>(); + this.responseValidate = new ValidateResponse(); + this.xContent = xContentRegistry; } /** @@ -78,7 +110,7 @@ public ValidateAnomalyDetectorActionHandler( * * @throws IOException IOException from {@link AnomalyDetectionIndices#initAnomalyDetectorIndexIfAbsent(ActionListener)} */ - public void startAndCheckIfIndexExists() throws IOException { + public void startValidation() throws IOException { if (!anomalyDetectionIndices.doesAnomalyDetectorIndexExist()) { anomalyDetectionIndices .initAnomalyDetectorIndex( @@ -86,25 +118,279 @@ public void startAndCheckIfIndexExists() throws IOException { ); } else { preDataValidationSteps(); + if (!failures.isEmpty()) { + validateAnomalyDetectorResponse(); + return; + } + validateNumberOfDetectors(); } - } + + } public void preDataValidationSteps() { if (anomalyDetector.getName() == null) { failures.add("name missing"); + } + if (anomalyDetector.getTimeField() == null) { + failures.add("time-field missing"); + } + + if (anomalyDetector.getIndices() == null) { + failures.add("indices missing"); + } + if (anomalyDetector.getWindowDelay() == null) { + failures.add("window-delay missing"); + } + if (anomalyDetector.getDetectionInterval() == null) { + failures.add("detector-interval missing"); + } String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); if (StringUtils.isNotBlank(error)) { failures.add(error); + } + } + + public void validateNumberOfDetectors() { + try { + QueryBuilder query = QueryBuilders.matchAllQuery(); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(query).size(0).timeout(requestTimeout); + + SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); + + 
client.search(searchRequest, ActionListener.wrap(response -> onSearchAdResponse(response), exception -> onFailure(exception))); + } catch (Exception e) { + onFailure(e); + } + } + + private void onSearchAdResponse(SearchResponse response) throws IOException { + if (response.getHits().getTotalHits().value >= maxAnomalyDetectors) { + String errorMsg = "Can't create anomaly detector more than " + maxAnomalyDetectors; + logger.error(errorMsg); + failures.add(errorMsg); return; } - if () - createAnomalyDetector(); + searchAdInputIndices(null); + } + + private void searchAdInputIndices(String detectorId) { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(QueryBuilders.matchAllQuery()) + .size(0) + .timeout(requestTimeout); + + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); + + client + .search( + searchRequest, + ActionListener + .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, detectorId), exception -> onFailure(exception)) + ); + } + + private void onSearchAdInputIndicesResponse(SearchResponse response, String detectorId) throws IOException { + if (response.getHits().getTotalHits().value == 0) { + String errorMsg = "Can't create anomaly detector as no document found in indices: " + + Arrays.toString(anomalyDetector.getIndices().toArray(new String[0])); + logger.error(errorMsg); + failures.add(errorMsg); + return; } + checkADNameExists(detectorId); + } + + private void checkADNameExists(String detectorId) throws IOException { + BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); + boolQueryBuilder.must(QueryBuilders.termQuery("name.keyword", anomalyDetector.getName())); + if (StringUtils.isNotBlank(detectorId)) { + boolQueryBuilder.mustNot(QueryBuilders.termQuery(RestHandlerUtils._ID, detectorId)); + } + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).timeout(requestTimeout); + 
SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); + + client + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onSearchADNameResponse(searchResponse, detectorId, anomalyDetector.getName()), + exception -> onFailure(exception) + ) + ); } + private void onSearchADNameResponse(SearchResponse response, String detectorId, String name) throws IOException { + if (response.getHits().getTotalHits().value > 0) { + String errorMsg = String + .format( + "Cannot create anomaly detector with name [%s] as it's already used by detector %s", + name, + Arrays.stream(response.getHits().getHits()).map(hit -> hit.getId()).collect(Collectors.toList()) + ); + logger.warn(errorMsg); + failures.add(errorMsg); + return; + } + if (anomalyDetector.getFilterQuery() != null) { + queryFilterValidation(); + } else { + //featureQueryValidation(); + } + + } + + private void queryFilterValidation() { + +// long delayMillis = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataStartTime = Instant.now().toEpochMilli() - delayMillis; +// long dataEndTime = 256 * anomalyDetector.getDetectionInterval(). 
- delayMillis; +// +// +// RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) +// .from(dataStartTime) +// .to(endTime) +// .format("epoch_millis") +// .includeLower(true) +// .includeUpper(false); + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); + + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).timeout(requestTimeout); + SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); + + client + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onQueryFilterSearch(searchResponse), + exception -> onFailure(exception) + ) + ); + } + + private void onQueryFilterSearch(SearchResponse response) { + if (response.getHits().getTotalHits().value < 0) { + String errorMsg = "query filter is potentially wrong as no hits were found at all. "; + logger.warn(errorMsg); + failures.add(errorMsg); + return; + } + //checkIfMissingFeature(,anomalyDetector.getEnabledFeatureIds()) + featureQueryValidation(); + } + + + private void checkIfMissingFeature() throws IOException{ + SearchRequest request = createPreviewSearchRequest(detector, ranges); + + try { + SearchSourceBuilder searchSourceBuilder = ParseUtils.generatePreviewQuery(detector, ranges, xContent); + return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); + } catch (IOException e) { + logger.warn("Failed to create feature search request for " + detector.getDetectorId() + " for preview", e); + throw e; + } + + client.search(request, ActionListener.wrap(response -> { + Aggregations aggs = response.getAggregations(); + if (aggs == null) { + listener.onResponse(Collections.emptyList()); + return; + } + if (anomalyDetector.getFeatureAttributes() != null) { + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + AggregatorFactories.Builder internalAgg = 
ParseUtils.parseAggregators( + feature.getAggregation().toString(), + xContent, + feature.getId() + ); + } + } + + private Optional parseAggregations(Optional< Aggregations > aggregations, List featureIds) { + return aggregations + .map(aggs -> aggs.asMap()) + .map( + map -> featureIds + .stream() + .mapToDouble(id -> Optional.ofNullable(map.get(id)).map(this::parseAggregation).orElse(Double.NaN)) + .toArray() + ) + .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); + } + } + + private void featureQueryValidation() throws IOException { + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); + + if (anomalyDetector.getFeatureAttributes() != null) { + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( + feature.getAggregation().toString(), + xContent, + feature.getId() + ); + internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + } + } + + + } + + private void randomSamplingIntervalValidation() { + + } + + private void checkWindowDelay() { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(0); + SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); + client + .search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception))); + } + + private Optional getLatestDataTime(SearchResponse searchResponse) { + return Optional + .ofNullable(searchResponse) + .map(SearchResponse::getAggregations) + .map(aggs -> aggs.asMap()) + .map(map -> (Max) 
map.get(AGG_NAME_MAX)) + .map(agg -> (long) agg.getValue()); + } + + private void checkDelayResponse(Optional windowDelay) { + if (windowDelay.isPresent()) { + System.out.println(windowDelay.toString()); + } + } + + + + + + + private void validateAnomalyDetectorResponse() throws IOException { + this.responseValidate.setFailures(failures); + this.responseValidate.setSuggestedChanges(suggestedChanges); + try { + BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, responseValidate.toXContent(channel.newBuilder())); + channel.sendResponse(restResponse); + } catch (Exception e) { + channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); + } + + } + + private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { if (response.isAcknowledged()) { logger.info("Created {} with mappings.", ANOMALY_DETECTORS_INDEX); From ffa20dddbb7f35e4e9b81cf9e901e5c1f6e44486 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Mon, 27 Jul 2020 10:42:50 -0700 Subject: [PATCH 05/20] works right now but random sampling is incosistent --- .../ad/feature/SearchFeatureDao.java | 13 +- .../ad/model/ValidateResponse.java | 17 +- .../RestValidateAnomalyDetectorAction.java | 7 +- .../ValidateAnomalyDetectorActionHandler.java | 487 ++++++++++++++---- .../AnomalyResultTransportAction.java | 3 + 5 files changed, 413 insertions(+), 114 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java index 44d48443..756e21af 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java @@ -124,12 +124,17 @@ public void getLatestDataTime(AnomalyDetector detector, ActionListener getLatestDataTime(SearchResponse searchResponse) { - return Optional + + Optional x = Optional 
.ofNullable(searchResponse) .map(SearchResponse::getAggregations) .map(aggs -> aggs.asMap()) .map(map -> (Max) map.get(AGG_NAME_MAX)) .map(agg -> (long) agg.getValue()); + + System.out.println("windowDelay optional: " + x.toString()); + System.out.println("windowDelay optional without .toString(): " + x); + return x; } /** @@ -558,11 +563,14 @@ private SearchRequest createPreviewSearchRequest(AnomalyDetector detector, List< } private Optional parseBucket(InternalDateRange.Bucket bucket, List featureIds) { + return parseAggregations(Optional.ofNullable(bucket).map(b -> b.getAggregations()), featureIds); } private Optional parseAggregations(Optional aggregations, List featureIds) { - return aggregations + System.out.println("optional aggregation list: " + aggregations.get().asList()); + + Optional aggregationsResults = aggregations .map(aggs -> aggs.asMap()) .map( map -> featureIds @@ -571,5 +579,6 @@ private Optional parseAggregations(Optional aggregations .toArray() ) .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); + return aggregationsResults; } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java index 241ead2c..3a47ffa1 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java @@ -28,15 +28,14 @@ public ValidateResponse() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { XContentBuilder xContentBuilder = builder.startObject(); - System.out.println("inside x content"); - if (failures != null && failures.size() > 0) { - xContentBuilder.array("failures", failures); - - } - if (suggestedChanges != null && suggestedChanges.size() > 0) { - xContentBuilder.array("suggestedChanges", suggestedChanges); - - } + 
xContentBuilder.field("failures", failures); + xContentBuilder.field("suggestedChanges", suggestedChanges); +// if (failures != null && failures.size() > 0) { +// xContentBuilder.array("failures", failures); +// } +// if (suggestedChanges != null && suggestedChanges.size() > 0) { +// xContentBuilder.array("suggestedChanges", suggestedChanges); +// } return xContentBuilder.endObject(); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java index b7fcfb99..91f2c94a 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.rest.BaseRestHandler; @@ -42,6 +43,7 @@ public class RestValidateAnomalyDetectorAction extends BaseRestHandler { private final AnomalyDetectionIndices anomalyDetectionIndices; private final Logger logger = LogManager.getLogger(RestValidateAnomalyDetectorAction.class); private final Settings settings; + private final NamedXContentRegistry xContentRegistry; private volatile TimeValue requestTimeout; private volatile TimeValue detectionInterval; @@ -51,7 +53,8 @@ public class RestValidateAnomalyDetectorAction extends BaseRestHandler { public RestValidateAnomalyDetectorAction( Settings settings, - AnomalyDetectionIndices anomalyDetectionIndices + AnomalyDetectionIndices anomalyDetectionIndices, + NamedXContentRegistry xContentRegistry ) { this.settings = settings; 
this.anomalyDetectionIndices = anomalyDetectionIndices; @@ -60,7 +63,7 @@ public RestValidateAnomalyDetectorAction( this.maxAnomalyDetectors = MAX_ANOMALY_DETECTORS.get(settings); this.maxAnomalyFeatures = MAX_ANOMALY_FEATURES.get(settings); this.requestTimeout = REQUEST_TIMEOUT.get(settings); - + this.xContentRegistry = xContentRegistry; } protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index aeb279dc..ee4c511c 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -3,6 +3,7 @@ import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; import com.amazon.opendistroforelasticsearch.ad.model.*; +import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; import org.apache.commons.lang.StringUtils; @@ -11,9 +12,14 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.index.IndexResponse; 
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -26,21 +32,33 @@ import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.RestResponseListener; +import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.Max; +import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.sort.FieldSortBuilder; +import org.elasticsearch.search.sort.SortOrder; import java.io.IOException; +import java.io.ObjectInputStream; import java.time.Instant; +import java.time.temporal.ChronoUnit; import java.util.*; +import java.util.concurrent.ThreadLocalRandom; import java.util.stream.Collectors; +import java.util.stream.Stream; -import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.*; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; /** * Anomaly detector REST action handler to process POST request. 
@@ -49,6 +67,13 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { protected static final String AGG_NAME_MAX = "max_timefield"; + protected static final int NUM_OF_RANDOM_SAMPLES = 12; + protected static final int MAX_NUM_OF_SAMPLES_VIEWED = 128; + protected static final int NUM_OF_INTERVALS_CHECKED = 256; + protected static final double SAMPLE_SUCCESS_RATE = 0.75; + protected static final int RANDOM_SAMPLING_REPEATS = 10; + private final AdminClient adminClient; + public static final String SUGGESTED_CHANGES = "suggested_changes"; public static final String FAILURES = "failures"; @@ -102,6 +127,7 @@ public ValidateAnomalyDetectorActionHandler( this.suggestedChanges = new ArrayList<>(); this.responseValidate = new ValidateResponse(); this.xContent = xContentRegistry; + this.adminClient = client.admin(); } /** @@ -118,15 +144,9 @@ public void startValidation() throws IOException { ); } else { preDataValidationSteps(); - if (!failures.isEmpty()) { - validateAnomalyDetectorResponse(); - return; - } - validateNumberOfDetectors(); } - - } + public void preDataValidationSteps() { if (anomalyDetector.getName() == null) { @@ -135,31 +155,35 @@ public void preDataValidationSteps() { if (anomalyDetector.getTimeField() == null) { failures.add("time-field missing"); } - if (anomalyDetector.getIndices() == null) { - failures.add("indices missing"); + failures.add("data source indices missing"); } - if (anomalyDetector.getWindowDelay() == null) { failures.add("window-delay missing"); } if (anomalyDetector.getDetectionInterval() == null) { failures.add("detector-interval missing"); } + if (anomalyDetector.getFeatureAttributes().isEmpty()) { + failures.add("feature is missing"); + } String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); if (StringUtils.isNotBlank(error)) { failures.add(error); } + if (!failures.isEmpty()) { + validateAnomalyDetectorResponse(); + } else { + validateNumberOfDetectors(); + } } 
public void validateNumberOfDetectors() { try { QueryBuilder query = QueryBuilders.matchAllQuery(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(query).size(0).timeout(requestTimeout); - SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); - client.search(searchRequest, ActionListener.wrap(response -> onSearchAdResponse(response), exception -> onFailure(exception))); } catch (Exception e) { onFailure(e); @@ -171,9 +195,10 @@ private void onSearchAdResponse(SearchResponse response) throws IOException { String errorMsg = "Can't create anomaly detector more than " + maxAnomalyDetectors; logger.error(errorMsg); failures.add(errorMsg); - return; + validateAnomalyDetectorResponse(); + } else { + searchAdInputIndices(null); } - searchAdInputIndices(null); } private void searchAdInputIndices(String detectorId) { @@ -183,7 +208,6 @@ private void searchAdInputIndices(String detectorId) { .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); - client .search( searchRequest, @@ -200,7 +224,7 @@ private void onSearchAdInputIndicesResponse(SearchResponse response, String dete failures.add(errorMsg); return; } - checkADNameExists(detectorId); + checkADNameExists(detectorId); } private void checkADNameExists(String detectorId) throws IOException { @@ -233,103 +257,148 @@ private void onSearchADNameResponse(SearchResponse response, String detectorId, ); logger.warn(errorMsg); failures.add(errorMsg); - return; - } - if (anomalyDetector.getFilterQuery() != null) { + validateAnomalyDetectorResponse(); + } else if (anomalyDetector.getFilterQuery() != null) { queryFilterValidation(); } else { - //featureQueryValidation(); + featureQueryValidation(); } - } private void queryFilterValidation() { - -// long delayMillis = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -// 
.map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataStartTime = Instant.now().toEpochMilli() - delayMillis; -// long dataEndTime = 256 * anomalyDetector.getDetectionInterval(). - delayMillis; -// -// -// RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) -// .from(dataStartTime) -// .to(endTime) -// .format("epoch_millis") -// .includeLower(true) -// .includeUpper(false); + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long detectorInterval = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataStartTime = dataEndTime - ((long) (NUM_OF_INTERVALS_CHECKED) * detectorInterval - delayMillis); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis"); BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).timeout(requestTimeout); - SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); - + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client .search( searchRequest, ActionListener .wrap( searchResponse -> onQueryFilterSearch(searchResponse), - exception -> onFailure(exception) + exception -> { + System.out.println("queryfilter exception: " + exception.getMessage()); + onFailure(exception); + } ) ); } - private void onQueryFilterSearch(SearchResponse response) { - if (response.getHits().getTotalHits().value < 0) { + private void onQueryFilterSearch(SearchResponse 
response) throws IOException { + if (response.getHits().getTotalHits().value <= 0) { String errorMsg = "query filter is potentially wrong as no hits were found at all. "; logger.warn(errorMsg); failures.add(errorMsg); - return; + validateAnomalyDetectorResponse(); + } else { + featureQueryValidation(); } - //checkIfMissingFeature(,anomalyDetector.getEnabledFeatureIds()) - featureQueryValidation(); } - - private void checkIfMissingFeature() throws IOException{ - SearchRequest request = createPreviewSearchRequest(detector, ranges); - - try { - SearchSourceBuilder searchSourceBuilder = ParseUtils.generatePreviewQuery(detector, ranges, xContent); - return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); - } catch (IOException e) { - logger.warn("Failed to create feature search request for " + detector.getDetectorId() + " for preview", e); - throw e; - } - - client.search(request, ActionListener.wrap(response -> { - Aggregations aggs = response.getAggregations(); - if (aggs == null) { - listener.onResponse(Collections.emptyList()); - return; - } - if (anomalyDetector.getFeatureAttributes() != null) { + private void featureQueryValidation() throws IOException { + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(10080, ChronoUnit.MINUTES); + long searchRangeTime = Optional + .ofNullable(searchRange) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + + long dataStartTime = dataEndTime - (searchRangeTime - delayMillis); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); + if (anomalyDetector.getFeatureAttributes() != null) { for (Feature 
feature : anomalyDetector.getFeatureAttributes()) { AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( feature.getAggregation().toString(), xContent, feature.getId() ); + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); + internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); + //System.out.println("search builder for each feature query: " + internalSearchSourceBuilder.toString()); + client + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onFeatureAggregationValidation(searchResponse, feature), + exception -> { + System.out.println(exception.getMessage()); + onFailure(exception); + } + ) + ); } + if (!suggestedChanges.isEmpty()) { + validateAnomalyDetectorResponse(); + return; + } + System.out.println("went into here"); + randomSamplingIntervalValidation(); } + } - private Optional parseAggregations(Optional< Aggregations > aggregations, List featureIds) { - return aggregations - .map(aggs -> aggs.asMap()) - .map( - map -> featureIds - .stream() - .mapToDouble(id -> Optional.ofNullable(map.get(id)).map(this::parseAggregation).orElse(Double.NaN)) - .toArray() - ) - .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); + private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { + //System.out.println("FEATURE AGG VALIDATION: " + response.toString()); + //System.out.println("feature agg done!!!!"); + Optional aggValue = Optional + .ofNullable(response) + .map(SearchResponse::getAggregations) + .map(aggs -> aggs.asMap()) + .map(map -> map.get(feature.getId())) + .map(this::parseAggregation); + 
if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get()) ) { + String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); + logger.warn(errorMsg); + suggestedChanges.add(errorMsg); + System.out.println("no hits from feature query over 1 week"); } } - private void featureQueryValidation() throws IOException { - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); + private void randomSamplingIntervalValidation() throws IOException { + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long detectorInterval = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); + + long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; + for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { + timeRanges[i][0] = dataStartTime + (i * detectorInterval); + timeRanges[i][1] = timeRanges[i][0] + detectorInterval; + } + //System.out.println("timeRanges: " + Arrays.deepToString(timeRanges)); + + + if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { @@ -338,59 +407,118 @@ private void featureQueryValidation() throws IOException { xContent, feature.getId() ); - internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + Random rand = new Random(); + int[] hitCounter = new int[]{0}; + int sampleCounter = 0; + for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { + 
sampleCounter++; + int randIndex = rand.nextInt(127); + long RandomRangeStart = timeRanges[randIndex][0]; + long RandomRangeEnd = timeRanges[randIndex][1]; + RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(RandomRangeStart) + .to(RandomRangeEnd) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQueryRandom).must(anomalyDetector.getFilterQuery()); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); + internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); + //System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); + client + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onRandomSampleResponse(searchResponse, hitCounter, sampleCounter), + exception -> { + System.out.println(exception.getMessage()); + onFailure(exception); + } + ) + ); + } + System.out.println(sampleCounter); + if (sampleCounter >= NUM_OF_RANDOM_SAMPLES - 2) { + System.out.println("hitdsad counter: " + Arrays.toString(hitCounter)); + double successRate = (double) hitCounter[0] / (double) NUM_OF_RANDOM_SAMPLES; + System.out.println(successRate); + if ((double) hitCounter[0] / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { + String errorMsg = "data is too sparse with this interval for feature " + feature.getName(); + logger.warn(errorMsg); + suggestedChanges.add(errorMsg); + validateAnomalyDetectorResponse(); + return; + } + } } } - - + checkWindowDelay(); + //getFieldMapping(); } - private void randomSamplingIntervalValidation() { + + private void onRandomSampleResponse(SearchResponse response, int[] hitCounter, int sampleCounter) { + //System.out.println("response 
from random sampling: " + response.toString()); + if (response.getHits().getTotalHits().value > 0) { + hitCounter[0]++; + System.out.println("hit counter inside if check: " + hitCounter[0]); + } } private void checkWindowDelay() { - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(0); - SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); - client - .search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception))); - } + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1).sort(new FieldSortBuilder("timestamp").order(SortOrder.DESC)); + SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); + client.search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception))); + } private Optional getLatestDataTime(SearchResponse searchResponse) { - return Optional + System.out.println(searchResponse.toString()); + Optional x = Optional .ofNullable(searchResponse) .map(SearchResponse::getAggregations) .map(aggs -> aggs.asMap()) .map(map -> (Max) map.get(AGG_NAME_MAX)) .map(agg -> (long) agg.getValue()); + + System.out.println("after parsing the max timestamp to long: " + x.get()); + return x; } - private void checkDelayResponse(Optional windowDelay) { - if (windowDelay.isPresent()) { - System.out.println(windowDelay.toString()); + private void checkDelayResponse(Optional lastTimeStamp) { + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + 
System.out.println("Window delay passed in from configs: " + delayMillis); + System.out.println("Time now: " + Instant.now().toEpochMilli()); + System.out.println("last seen time stamp: " + lastTimeStamp.get()); + if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { + String errorMsg = "window-delay too short:"; + logger.warn(errorMsg); + suggestedChanges.add(errorMsg); } + validateAnomalyDetectorResponse(); } - - - - - - private void validateAnomalyDetectorResponse() throws IOException { + private void validateAnomalyDetectorResponse() { this.responseValidate.setFailures(failures); this.responseValidate.setSuggestedChanges(suggestedChanges); + System.out.println("failure list in response: " + responseValidate.getFailures()); + System.out.println("suggestion list in response: " + responseValidate.getSuggestedChanges()); + System.out.println("inside response building and sending"); try { BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, responseValidate.toXContent(channel.newBuilder())); channel.sendResponse(restResponse); } catch (Exception e) { channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } - } - private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { if (response.isAcknowledged()) { logger.info("Created {} with mappings.", ANOMALY_DETECTORS_INDEX); @@ -403,4 +531,161 @@ private void onCreateMappingsResponse(CreateIndexResponse response) throws IOExc ); } } + + private double parseAggregation(Aggregation aggregation) { + Double result = null; + if (aggregation instanceof NumericMetricsAggregation.SingleValue) { + result = ((NumericMetricsAggregation.SingleValue) aggregation).value(); + } else if (aggregation instanceof InternalTDigestPercentiles) { + Iterator percentile = ((InternalTDigestPercentiles) aggregation).iterator(); + if (percentile.hasNext()) { + result = percentile.next().getValue(); + } + } + return 
Optional.ofNullable(result).orElseThrow(() -> new IllegalStateException("Failed to parse aggregation " + aggregation)); + } } +//private void randomSamplingIntervalValidationCombined() throws IOException{ +// System.out.println("inside combined random sampling"); +// long delayMillis = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataStartTime = Instant.now().toEpochMilli() - delayMillis; +// long detectorInterval = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataEndTime = dataStartTime + ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); +// +// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; +// for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { +// timeRanges[i][0] = dataStartTime + (i * detectorInterval); +// timeRanges[i][1] = timeRanges[i][0] + detectorInterval; +// } +// //System.out.println("timeRanges: " + Arrays.deepToString(timeRanges)); +// BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(anomalyDetector.getFilterQuery()); +// +// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder(); +// +// if (anomalyDetector.getFeatureAttributes() != null) { +// for (Feature feature : anomalyDetector.getFeatureAttributes()) { +// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( +// feature.getAggregation().toString(), +// xContent, +// feature.getId() +// ); +// internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); +// } +// Random rand = new Random(); +// int hitCounter = 0; +// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { +// int randIndex = rand.nextInt(NUM_OF_RANDOM_SAMPLES); +// long RandomRangeStart = timeRanges[randIndex][0]; +// long RandomRangeEnd = timeRanges[randIndex][1]; +// 
RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) +// .from(RandomRangeStart) +// .to(RandomRangeEnd) +// .format("epoch_millis") +// .includeLower(true) +// .includeUpper(false); +// internalFilterQuery.must(rangeQueryRandom); +// internalSearchSourceBuilder.query(internalFilterQuery); +// SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(internalSearchSourceBuilder); +// System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); +// client +// .search( +// searchRequest, +// ActionListener +// .wrap( +// searchResponse -> onRandomSampleResponse(searchResponse, hitCounter), +// exception -> { +// System.out.println(exception.getMessage()); +// onFailure(exception); +// } +// ) +// ); +// } +// System.out.println("hit counter: " + hitCounter); +// if (hitCounter < 6) { +// String errorMsg = "data is too sparse with this interval for feature "; +// logger.warn(errorMsg); +// suggestedChanges.add(errorMsg); +// validateAnomalyDetectorResponse(); +// } else { +// checkWindowDelay(); +// } +// } +//} +// private Optional parseAggregations(Optional aggregations, String featureIds) { +// return aggregations +// .map(aggs -> aggs.asMap()) +// .map( +// map -> featureIds +// .stream() +// .mapToDouble(id -> Optional.ofNullable(map.get(id)).map(this::parseAggregation).orElse(Double.NaN)) +// .toArray() +// ) +// .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); +// } + +// private void getFieldMapping() { +// GetMappingsRequest request = new GetMappingsRequest().indices(anomalyDetector.getIndices().get(0)); +// adminClient +// .indices().getMappings( +// request, +// ActionListener +// .wrap( +// response -> checkFieldIndex(response), +// exception -> onFailure(exception) +// ) +// ); +// } +// +// private void checkFieldIndex(GetMappingsResponse response) { +// System.out.println(response.toString()); +// Optional x = 
Optional +// .ofNullable(response) +// .map(SearchResponse::get) +// .map(aggs -> aggs.asMap()) +// .map(map -> (Max) map.get(AGG_NAME_MAX)) +// .map(agg -> (long) agg.getValue()); +// } +//} + + +// private void checkIfMissingFeature() throws IOException{ +// if (anomalyDetector.getFeatureAttributes() != null) { +// for (Feature feature : anomalyDetector.getFeatureAttributes()) { +// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( +// feature.getAggregation().toString(), +// xContent, +// feature.getId() +// ); +// } +// } +// if (anomalyDetector.getFeatureAttributes() != null) { +// for (Feature feature : anomalyDetector.getFeatureAttributes()) { +// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( +// feature.getAggregation().toString(), +// xContent, +// feature.getId() +// ); +// internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); +// } +// } +// +// try { +// SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalAgg) +// return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); +// } catch (IOException e) { +// logger.warn("Failed to create feature search request for " + detector.getDetectorId() + " for preview", e); +// throw e; +// } +// +// if (anomalyDetector.getFeatureAttributes() != null) { +// for (Feature feature : anomalyDetector.getFeatureAttributes()) { +// parseAggregations(Optional.ofNullable(feature).map(f -> f.getAggregation()), anomalyDetector.getEnabledFeatureIds()); +// } +// } +// } \ No newline at end of file diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java index 5d6df1ca..c1a65259 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java +++ 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java @@ -258,6 +258,9 @@ private ActionListener> onGetDetector( long dataStartTime = request.getStart() - delayMillis; long dataEndTime = request.getEnd() - delayMillis; + System.out.println("dataStartTime: " + dataStartTime); + System.out.println("dataEndTime: " + dataEndTime); + featureManager .getCurrentFeatures( anomalyDetector, From 8dcf2eba894f77dbdc8b1223e0b555d5b71594f6 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 28 Jul 2020 15:12:39 -0700 Subject: [PATCH 06/20] this solved the asynch issues with search request and this works with calling randomsampling once internally --- .../ValidateAnomalyDetectorActionHandler.java | 234 ++++++++++++------ ...iSearchResponseDelegateActionListener.java | 112 +++++++++ 2 files changed, 277 insertions(+), 69 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index ee4c511c..50430a4c 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -4,6 +4,7 @@ import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; import com.amazon.opendistroforelasticsearch.ad.model.*; import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; +import com.amazon.opendistroforelasticsearch.ad.util.MultiResponsesDelegateActionListener; import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; import 
org.apache.commons.lang.StringUtils; @@ -16,9 +17,9 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.*; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.settings.Settings; @@ -53,6 +54,8 @@ import java.time.temporal.ChronoUnit; import java.util.*; import java.util.concurrent.ThreadLocalRandom; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -67,11 +70,11 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { protected static final String AGG_NAME_MAX = "max_timefield"; - protected static final int NUM_OF_RANDOM_SAMPLES = 12; + protected static final int NUM_OF_RANDOM_SAMPLES = 8; protected static final int MAX_NUM_OF_SAMPLES_VIEWED = 128; protected static final int NUM_OF_INTERVALS_CHECKED = 256; protected static final double SAMPLE_SUCCESS_RATE = 0.75; - protected static final int RANDOM_SAMPLING_REPEATS = 10; + protected static final int RANDOM_SAMPLING_REPEATS = 12; private final AdminClient adminClient; @@ -89,7 +92,11 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final NamedXContentRegistry xContent; private ValidateResponse responseValidate; + private final Map savedResponsesToFeature; + private final List savedMultiResponses; + + private final Map featureIntervalValidation; private final List failures; private final List suggestedChanges; @@ -128,6 +135,11 
@@ public ValidateAnomalyDetectorActionHandler( this.responseValidate = new ValidateResponse(); this.xContent = xContentRegistry; this.adminClient = client.admin(); + this.featureIntervalValidation = new HashMap<>(); + this.savedMultiResponses = Collections.synchronizedList(new ArrayList()); + + this.savedResponsesToFeature = new HashMap<>(); + } /** @@ -242,6 +254,7 @@ private void checkADNameExists(String detectorId) throws IOException { ActionListener .wrap( searchResponse -> onSearchADNameResponse(searchResponse, detectorId, anomalyDetector.getName()), + exception -> onFailure(exception) ) ); @@ -370,7 +383,7 @@ private void onFeatureAggregationValidation(SearchResponse response, Feature fea .map(aggs -> aggs.asMap()) .map(map -> map.get(feature.getId())) .map(this::parseAggregation); - if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get()) ) { + if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get())) { String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); logger.warn(errorMsg); suggestedChanges.add(errorMsg); @@ -396,75 +409,155 @@ private void randomSamplingIntervalValidation() throws IOException { timeRanges[i][1] = timeRanges[i][0] + detectorInterval; } //System.out.println("timeRanges: " + Arrays.deepToString(timeRanges)); +// for (Feature feature : anomalyDetector.getFeatureAttributes()) { +// AtomicInteger validCounter = new AtomicInteger(); +// for (int i = 0; i < RANDOM_SAMPLING_REPEATS; i++) { +// randomSamplingHelper(timeRanges, feature, validCounter); +// } +// System.out.println("valid counter: " + validCounter[0]); +// if (validCounter[0] / (double) ( RANDOM_SAMPLING_REPEATS) < SAMPLE_SUCCESS_RATE) { +// String errorMsg = "data is too sparse with this interval for feature " + feature.getName(); +// logger.warn(errorMsg); +// suggestedChanges.add(errorMsg); +// validateAnomalyDetectorResponse(); +// return; +// } +// } + int numOfFeatures = 
anomalyDetector.getFeatureAttributes().size(); + AtomicInteger featureCounter = new AtomicInteger(); + AtomicInteger listnerCounter = new AtomicInteger(); + System.out.println("num of Features:" + numOfFeatures); + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + featureCounter.incrementAndGet(); + randomSamplingHelper(timeRanges, feature, listnerCounter); + } +// if (featureCounter == numOfFeatures && suggestedChanges.isEmpty() && listnerCounter.get() == featureCounter) { +// System.out.println("featureCounter: " + featureCounter); +// checkWindowDelay(); +// } + } + private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listnerCounter) throws IOException { + AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( + feature.getAggregation().toString(), + xContent, + feature.getId() + ); + Random rand = new Random(); + + MultiSearchRequest sr = new MultiSearchRequest(); + + for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { + int randIndex = rand.nextInt(127); + long RandomRangeStart = timeRanges[randIndex][0]; + long RandomRangeEnd = timeRanges[randIndex][1]; + RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(RandomRangeStart) + .to(RandomRangeEnd) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQueryRandom).must(anomalyDetector.getFilterQuery()); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); + internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); + // System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); + sr.add(searchRequest); + } + client + .multiSearch( + sr, + ActionListener + .wrap( 
+ searchResponse -> { + savedResponsesToFeature.put(feature.getName(), searchResponse); + listnerCounter.incrementAndGet(); + if (listnerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { + onRandomGetMultiResponse(searchResponse, feature, listnerCounter); + } + }, + exception -> { + System.out.println(exception.getMessage()); + onFailure(exception); + } + ) + ); +// MultiResponsesDelegateActionListener delegateListener = new MultiResponsesDelegateActionListener( +// ActionListener +// .wrap( +// searchResponse -> onRandomSampleMultiResponse(searchResponse), +// exception -> { +// System.out.println(exception.getMessage()); +// onFailure(exception); +// } +// ) +// ,NUM_OF_RANDOM_SAMPLES,"Fail to sample data for detector interval validation"); + +// System.out.println("Hit counter: " + hitCounter); +// double successRate = hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES; +// System.out.println(successRate); +// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { +// return 0; +// } +// return 1; + } - if (anomalyDetector.getFeatureAttributes() != null) { - for (Feature feature : anomalyDetector.getFeatureAttributes()) { - AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( - feature.getAggregation().toString(), - xContent, - feature.getId() - ); - Random rand = new Random(); - int[] hitCounter = new int[]{0}; - int sampleCounter = 0; - for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { - sampleCounter++; - int randIndex = rand.nextInt(127); - long RandomRangeStart = timeRanges[randIndex][0]; - long RandomRangeEnd = timeRanges[randIndex][1]; - RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(RandomRangeStart) - .to(RandomRangeEnd) - .format("epoch_millis") - .includeLower(true) - .includeUpper(false); - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQueryRandom).must(anomalyDetector.getFilterQuery()); - 
SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); - internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); - SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - //System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); - client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onRandomSampleResponse(searchResponse, hitCounter, sampleCounter), - exception -> { - System.out.println(exception.getMessage()); - onFailure(exception); - } - ) - ); - } - System.out.println(sampleCounter); - if (sampleCounter >= NUM_OF_RANDOM_SAMPLES - 2) { - System.out.println("hitdsad counter: " + Arrays.toString(hitCounter)); - double successRate = (double) hitCounter[0] / (double) NUM_OF_RANDOM_SAMPLES; - System.out.println(successRate); - if ((double) hitCounter[0] / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { - String errorMsg = "data is too sparse with this interval for feature " + feature.getName(); - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); - validateAnomalyDetectorResponse(); - return; + private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter) { + //System.out.println("response from random sampling: " + multiSearchResponse.toString()); + System.out.println("listen counter: " + listnerCounter.get()); + //System.out.println("savedResponsesToFeature: " + savedResponsesToFeature.toString()); + for (String f: savedResponsesToFeature.keySet()) { + String errorMsg = ""; + final AtomicInteger hitCounter = new AtomicInteger(); + for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { + SearchResponse response = item.getResponse(); + System.out.println("each response for feature, " + f + ": " + response.toString()); + if 
(response.getHits().getTotalHits().value > 0) { + hitCounter.incrementAndGet(); } } + System.out.println("Hit counter before last validation: " + hitCounter.get()); + if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { + featureIntervalValidation.put(f, false); + errorMsg += "data is too sparse with this interval for feature/s: " + f; + logger.warn(errorMsg); + suggestedChanges.add(errorMsg); + } else { + featureIntervalValidation.put(f, true); + } + } + System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); + if (featureIntervalValidation.containsValue(false)) { + validateAnomalyDetectorResponse(); + } else { + checkWindowDelay(); } - } - checkWindowDelay(); - //getFieldMapping(); } + private void executeRandomSamplingSearch(SearchRequest searchRequest, AtomicInteger hitCounter) { + client + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onRandomSampleResponse(searchResponse, hitCounter), + exception -> { + System.out.println(exception.getMessage()); + onFailure(exception); + } + ) + ); + } - private void onRandomSampleResponse(SearchResponse response, int[] hitCounter, int sampleCounter) { - //System.out.println("response from random sampling: " + response.toString()); + private void onRandomSampleResponse(SearchResponse response, AtomicInteger hitCounter) { + // System.out.println("inside on sample response"); + System.out.println("response from random sampling: " + response.toString()); if (response.getHits().getTotalHits().value > 0) { - hitCounter[0]++; - System.out.println("hit counter inside if check: " + hitCounter[0]); + hitCounter.incrementAndGet(); + //System.out.println("hit counter inside if check: " + hitCounter[0]); } } @@ -477,7 +570,7 @@ private void checkWindowDelay() { } private Optional getLatestDataTime(SearchResponse searchResponse) { - System.out.println(searchResponse.toString()); + //System.out.println(searchResponse.toString()); Optional x = Optional 
.ofNullable(searchResponse) .map(SearchResponse::getAggregations) @@ -485,7 +578,7 @@ private Optional getLatestDataTime(SearchResponse searchResponse) { .map(map -> (Max) map.get(AGG_NAME_MAX)) .map(agg -> (long) agg.getValue()); - System.out.println("after parsing the max timestamp to long: " + x.get()); + //System.out.println("after parsing the max timestamp to long: " + x.get()); return x; } @@ -494,11 +587,14 @@ private void checkDelayResponse(Optional lastTimeStamp) { .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - System.out.println("Window delay passed in from configs: " + delayMillis); - System.out.println("Time now: " + Instant.now().toEpochMilli()); + //System.out.println("Window delay passed in from configs: " + delayMillis); + //System.out.println("Time now: " + Instant.now().toEpochMilli()); System.out.println("last seen time stamp: " + lastTimeStamp.get()); if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { - String errorMsg = "window-delay too short:"; + long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get()); + long windowDelayMins = TimeUnit.MILLISECONDS.toMinutes(delayMillis); + String errorMsg = "window-delay given is too short, and last seen timestamp is " + minutesSinceLastStamp + + " minutes ago " + " and the window-delay given is only of " + windowDelayMins + " minutes"; logger.warn(errorMsg); suggestedChanges.add(errorMsg); } @@ -508,8 +604,8 @@ private void checkDelayResponse(Optional lastTimeStamp) { private void validateAnomalyDetectorResponse() { this.responseValidate.setFailures(failures); this.responseValidate.setSuggestedChanges(suggestedChanges); - System.out.println("failure list in response: " + responseValidate.getFailures()); - System.out.println("suggestion list in response: " + responseValidate.getSuggestedChanges()); + 
//System.out.println("failure list in response: " + responseValidate.getFailures()); + //System.out.println("suggestion list in response: " + responseValidate.getSuggestedChanges()); System.out.println("inside response building and sending"); try { BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, responseValidate.toXContent(channel.newBuilder())); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java new file mode 100644 index 00000000..1f1d6d7b --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java @@ -0,0 +1,112 @@ +//package com.amazon.opendistroforelasticsearch.ad.util; +// +//import com.amazon.opendistroforelasticsearch.ad.model.Mergeable; +//import org.apache.logging.log4j.LogManager; +//import org.apache.logging.log4j.Logger; +//import org.elasticsearch.action.ActionListener; +// +//import java.util.ArrayList; +//import java.util.Collections; +//import java.util.List; +//import java.util.Locale; +//import java.util.concurrent.atomic.AtomicInteger; +// +///* +// * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. +// * +// * Licensed under the Apache License, Version 2.0 (the "License"). +// * You may not use this file except in compliance with the License. +// * A copy of the License is located at +// * +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * or in the "license" file accompanying this file. This file is distributed +// * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either +// * express or implied. See the License for the specific language governing +// * permissions and limitations under the License. 
+// */ +// +///** +// * A listener wrapper to help send multiple requests asynchronously and return one final responses together +// */ +//public class MultiSearchResponsesDelegateActionListener implements ActionListener { +// private static final Logger LOG = LogManager.getLogger(MultiResponsesDelegateActionListener.class); +// private final ActionListener delegate; +// private final AtomicInteger collectedResponseCount; +// private final int maxResponseCount; +// // save responses from multiple requests +// private final List savedResponses; +// private List exceptions; +// private String finalErrorMsg; +// +// public MultiSearchResponsesDelegateActionListener(ActionListener delegate, int maxResponseCount, String finalErrorMsg) { +// this.delegate = delegate; +// this.collectedResponseCount = new AtomicInteger(0); +// this.maxResponseCount = maxResponseCount; +// this.savedResponses = Collections.synchronizedList(new ArrayList()); +// this.exceptions = Collections.synchronizedList(new ArrayList()); +// this.finalErrorMsg = finalErrorMsg; +// } +// +// @Override +// public void onResponse(T response) { +// try { +// if (response != null) { +// this.savedResponses.add(response); +// } +// } finally { +// // If expectedResponseCount == 0 , collectedResponseCount.incrementAndGet() will be greater than expectedResponseCount +// if (collectedResponseCount.incrementAndGet() >= maxResponseCount) { +// finish(); +// } +// } +// +// } +// +// @Override +// public void onFailure(Exception e) { +// LOG.error(e); +// try { +// this.exceptions.add(e.getMessage()); +// } finally { +// // no matter the asynchronous request is a failure or success, we need to increment the count. +// // We need finally here to increment the count when there is a failure. 
+// if (collectedResponseCount.incrementAndGet() >= maxResponseCount) { +// finish(); +// } +// } +// } +// +// private void finish() { +// if (this.exceptions.size() == 0) { +// if (savedResponses.size() == 0) { +// this.delegate.onFailure(new RuntimeException("No response collected")); +// } else { +// T response0 = savedResponses.get(0); +// for (int i = 1; i < savedResponses.size(); i++) { +// response0.merge(savedResponses.get(i)); +// } +// this.delegate.onResponse(savedResponses); +// } +// } else { +// this.delegate.onFailure(new RuntimeException(String.format(Locale.ROOT, finalErrorMsg + " Exceptions: %s", exceptions))); +// } +// } +// +// public void failImmediately(Exception e) { +// this.delegate.onFailure(new RuntimeException(finalErrorMsg, e)); +// } +// +// public void failImmediately(String errMsg) { +// this.delegate.onFailure(new RuntimeException(errMsg)); +// } +// +// public void failImmediately(String errMsg, Exception e) { +// this.delegate.onFailure(new RuntimeException(errMsg, e)); +// } +// +// public void respondImmediately(T o) { +// this.delegate.onResponse(o); +// } +//} +// From 2bbcb58631aecd0896897714c24e29ca6c91eddf Mon Sep 17 00:00:00 2001 From: Galitzky Date: Fri, 31 Jul 2020 09:14:23 -0700 Subject: [PATCH 07/20] changed random sampling to nonrandom and no aggregation, filter query instead --- .../ValidateAnomalyDetectorActionHandler.java | 420 ++++++------------ 1 file changed, 130 insertions(+), 290 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 50430a4c..6b30680a 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -22,28 +22,24 @@ import 
org.elasticsearch.action.search.*; import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.*; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.rest.*; import org.elasticsearch.rest.action.RestResponseListener; -import org.elasticsearch.search.aggregations.Aggregation; -import org.elasticsearch.search.aggregations.AggregationBuilders; -import org.elasticsearch.search.aggregations.Aggregations; -import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.*; import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; import org.elasticsearch.search.aggregations.metrics.Max; import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -56,12 +52,16 @@ import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; +import java.util.regex.Matcher; import 
java.util.stream.Collectors; import java.util.stream.Stream; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.*; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; +import static org.elasticsearch.search.aggregations.AggregatorFactories.VALID_AGG_NAME; /** * Anomaly detector REST action handler to process POST request. @@ -70,11 +70,12 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { protected static final String AGG_NAME_MAX = "max_timefield"; - protected static final int NUM_OF_RANDOM_SAMPLES = 8; + protected static final int NUM_OF_RANDOM_SAMPLES = 128; protected static final int MAX_NUM_OF_SAMPLES_VIEWED = 128; protected static final int NUM_OF_INTERVALS_CHECKED = 256; protected static final double SAMPLE_SUCCESS_RATE = 0.75; protected static final int RANDOM_SAMPLING_REPEATS = 12; + protected static final int FEATURE_VALIDATION_TIME_BACK_MINUTES = 10080; private final AdminClient adminClient; @@ -94,8 +95,6 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private ValidateResponse responseValidate; private final Map savedResponsesToFeature; private final List savedMultiResponses; - - private final Map featureIntervalValidation; private final List failures; private final List suggestedChanges; @@ -136,10 +135,8 @@ public ValidateAnomalyDetectorActionHandler( this.xContent = xContentRegistry; this.adminClient = client.admin(); this.featureIntervalValidation = new HashMap<>(); - this.savedMultiResponses = Collections.synchronizedList(new ArrayList()); - + this.savedMultiResponses = Collections.synchronizedList(new ArrayList<>()); this.savedResponsesToFeature = new HashMap<>(); - } /** @@ 
-278,7 +275,7 @@ private void onSearchADNameResponse(SearchResponse response, String detectorId, } } - private void queryFilterValidation() { + private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { long delayMillis = Optional .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) .map(t -> t.toDuration().toMillis()) @@ -288,14 +285,23 @@ private void queryFilterValidation() { .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - long dataStartTime = dataEndTime - ((long) (NUM_OF_INTERVALS_CHECKED) * detectorInterval - delayMillis); + long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval - delayMillis); + + return new long[]{dataStartTime, dataEndTime}; + } + + private void queryFilterValidation() { + long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); + long dataEndTime = startEnd[1]; + long dataStartTime = startEnd[0]; RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) .from(dataStartTime) .to(dataEndTime) .format("epoch_millis"); BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).timeout(requestTimeout); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).terminateAfter(1).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); + System.out.println("Query filter request: " + searchRequest.toString()); client .search( searchRequest, @@ -314,10 +320,11 @@ private void onQueryFilterSearch(SearchResponse response) throws IOException { if (response.getHits().getTotalHits().value <= 0) { String errorMsg = "query filter is potentially wrong as no hits were found at all. 
"; logger.warn(errorMsg); - failures.add(errorMsg); + suggestedChanges.add(errorMsg); validateAnomalyDetectorResponse(); } else { featureQueryValidation(); + //getFieldMapping(); } } @@ -326,14 +333,19 @@ private void featureQueryValidation() throws IOException { .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - long dataEndTime = Instant.now().toEpochMilli() - delayMillis; - IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(10080, ChronoUnit.MINUTES); + long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); + long dataEndTime = startEnd[1]; + long dataStartTime = startEnd[0]; + IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(FEATURE_VALIDATION_TIME_BACK_MINUTES, ChronoUnit.MINUTES); long searchRangeTime = Optional .ofNullable(searchRange) .map(t -> t.toDuration().toMillis()) .orElse(0L); + long startTimeWithSetTime = startEnd[1] - (searchRangeTime - delayMillis); + if (startEnd[0] > startTimeWithSetTime) { + dataStartTime = startTimeWithSetTime; + } - long dataStartTime = dataEndTime - (searchRangeTime - delayMillis); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) .from(dataStartTime) .to(dataEndTime) @@ -342,16 +354,17 @@ private void featureQueryValidation() throws IOException { .includeUpper(false); if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { - AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( - feature.getAggregation().toString(), - xContent, - feature.getId() - ); - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); - internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + 
ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); +// BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()) +// .must(QueryBuilders.boolQuery().filter(QueryBuilders.existsQuery(fieldNames.get(0)))); + BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQuery).filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - //System.out.println("search builder for each feature query: " + internalSearchSourceBuilder.toString()); + System.out.println("search builder for each feature query: " + searchRequest.toString()); client .search( searchRequest, @@ -374,96 +387,96 @@ private void featureQueryValidation() throws IOException { } } + private List parseAggregationRequest(XContentParser parser, int level, String aggName) throws IOException { + List fieldNames = new ArrayList<>(); + XContentParser.Token token = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + final String field = parser.currentName(); + switch (field) { + case "field": + parser.nextToken(); + fieldNames.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; + } + } + } + return fieldNames; + } + + private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { - //System.out.println("FEATURE 
AGG VALIDATION: " + response.toString()); - //System.out.println("feature agg done!!!!"); - Optional aggValue = Optional - .ofNullable(response) - .map(SearchResponse::getAggregations) - .map(aggs -> aggs.asMap()) - .map(map -> map.get(feature.getId())) - .map(this::parseAggregation); - if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get())) { + System.out.println("response for each feature agg validation " + response.toString()); + if (response.getHits().getTotalHits().value <= 0) { String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); logger.warn(errorMsg); suggestedChanges.add(errorMsg); - System.out.println("no hits from feature query over 1 week"); } } + +// private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { +// //System.out.println("response for each feature agg validation " + response.toString()); +// Optional aggValue = Optional +// .ofNullable(response) +// .map(SearchResponse::getAggregations) +// .map(aggs -> aggs.asMap()) +// .map(map -> map.get(feature.getId())) +// .map(this::parseAggregation); +// if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get())) { +// String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); +// logger.warn(errorMsg); +// suggestedChanges.add(errorMsg); +// } +// } + private void randomSamplingIntervalValidation() throws IOException { - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - long dataEndTime = Instant.now().toEpochMilli() - delayMillis; long detectorInterval = Optional .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); 
- + long[] startEnd = startEndTimeRangeWithIntervals(MAX_NUM_OF_SAMPLES_VIEWED); + long dataStartTime = startEnd[0]; long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { timeRanges[i][0] = dataStartTime + (i * detectorInterval); timeRanges[i][1] = timeRanges[i][0] + detectorInterval; } - //System.out.println("timeRanges: " + Arrays.deepToString(timeRanges)); -// for (Feature feature : anomalyDetector.getFeatureAttributes()) { -// AtomicInteger validCounter = new AtomicInteger(); -// for (int i = 0; i < RANDOM_SAMPLING_REPEATS; i++) { -// randomSamplingHelper(timeRanges, feature, validCounter); -// } -// System.out.println("valid counter: " + validCounter[0]); -// if (validCounter[0] / (double) ( RANDOM_SAMPLING_REPEATS) < SAMPLE_SUCCESS_RATE) { -// String errorMsg = "data is too sparse with this interval for feature " + feature.getName(); -// logger.warn(errorMsg); -// suggestedChanges.add(errorMsg); -// validateAnomalyDetectorResponse(); -// return; -// } -// } - int numOfFeatures = anomalyDetector.getFeatureAttributes().size(); - AtomicInteger featureCounter = new AtomicInteger(); - AtomicInteger listnerCounter = new AtomicInteger(); - System.out.println("num of Features:" + numOfFeatures); + AtomicInteger listenerCounter = new AtomicInteger(); for (Feature feature : anomalyDetector.getFeatureAttributes()) { - featureCounter.incrementAndGet(); - randomSamplingHelper(timeRanges, feature, listnerCounter); + randomSamplingHelper(timeRanges, feature, listenerCounter); } -// if (featureCounter == numOfFeatures && suggestedChanges.isEmpty() && listnerCounter.get() == featureCounter) { -// System.out.println("featureCounter: " + featureCounter); -// checkWindowDelay(); -// } } - private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listnerCounter) throws IOException { - AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( - 
feature.getAggregation().toString(), - xContent, - feature.getId() - ); - Random rand = new Random(); - + private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter) throws IOException { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); + //Random rand = new Random(); MultiSearchRequest sr = new MultiSearchRequest(); - for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { - int randIndex = rand.nextInt(127); - long RandomRangeStart = timeRanges[randIndex][0]; - long RandomRangeEnd = timeRanges[randIndex][1]; +// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); +// long RandomRangeStart = timeRanges[randIndex][0]; +// long RandomRangeEnd = timeRanges[randIndex][1]; + long rangeStart = timeRanges[i][0]; + long rangeEnd = timeRanges[i][1]; RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(RandomRangeStart) - .to(RandomRangeEnd) + .from(rangeStart) + .to(rangeEnd) .format("epoch_millis") .includeLower(true) .includeUpper(false); - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQueryRandom).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery); - internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); + BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); 
SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - // System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); sr.add(searchRequest); } + // System.out.println("8 requests: " + sr.requests().toString()); client .multiSearch( sr, @@ -471,9 +484,9 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn .wrap( searchResponse -> { savedResponsesToFeature.put(feature.getName(), searchResponse); - listnerCounter.incrementAndGet(); - if (listnerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { - onRandomGetMultiResponse(searchResponse, feature, listnerCounter); + listenerCounter.incrementAndGet(); + if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { + onRandomGetMultiResponse(searchResponse, feature, listenerCounter); } }, exception -> { @@ -482,53 +495,33 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn } ) ); - - -// MultiResponsesDelegateActionListener delegateListener = new MultiResponsesDelegateActionListener( -// ActionListener -// .wrap( -// searchResponse -> onRandomSampleMultiResponse(searchResponse), -// exception -> { -// System.out.println(exception.getMessage()); -// onFailure(exception); -// } -// ) -// ,NUM_OF_RANDOM_SAMPLES,"Fail to sample data for detector interval validation"); - -// System.out.println("Hit counter: " + hitCounter); -// double successRate = hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES; -// System.out.println(successRate); -// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { -// return 0; -// } -// return 1; } private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter) { - //System.out.println("response from random sampling: " + multiSearchResponse.toString()); - System.out.println("listen counter: " + 
listnerCounter.get()); - //System.out.println("savedResponsesToFeature: " + savedResponsesToFeature.toString()); for (String f: savedResponsesToFeature.keySet()) { String errorMsg = ""; final AtomicInteger hitCounter = new AtomicInteger(); + //System.out.println("feature name out of all feature loop: " + f); for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { SearchResponse response = item.getResponse(); - System.out.println("each response for feature, " + f + ": " + response.toString()); + // System.out.println("each response for feature, " + f + ": " + response.toString()); if (response.getHits().getTotalHits().value > 0) { hitCounter.incrementAndGet(); } } System.out.println("Hit counter before last validation: " + hitCounter.get()); + System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); + if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { featureIntervalValidation.put(f, false); - errorMsg += "data is too sparse with this interval for feature/s: " + f; + errorMsg += "data is too sparse with this interval for feature: " + f; logger.warn(errorMsg); suggestedChanges.add(errorMsg); } else { featureIntervalValidation.put(f, true); } } - System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); + // System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); if (featureIntervalValidation.containsValue(false)) { validateAnomalyDetectorResponse(); } else { @@ -536,31 +529,6 @@ private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, F } } - private void executeRandomSamplingSearch(SearchRequest searchRequest, AtomicInteger hitCounter) { - client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onRandomSampleResponse(searchResponse, hitCounter), - exception -> { - System.out.println(exception.getMessage()); - onFailure(exception); - } - ) - ); - } - - - private void 
onRandomSampleResponse(SearchResponse response, AtomicInteger hitCounter) { - // System.out.println("inside on sample response"); - System.out.println("response from random sampling: " + response.toString()); - if (response.getHits().getTotalHits().value > 0) { - hitCounter.incrementAndGet(); - //System.out.println("hit counter inside if check: " + hitCounter[0]); - } - } - private void checkWindowDelay() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) @@ -577,7 +545,6 @@ private Optional getLatestDataTime(SearchResponse searchResponse) { .map(aggs -> aggs.asMap()) .map(map -> (Max) map.get(AGG_NAME_MAX)) .map(agg -> (long) agg.getValue()); - //System.out.println("after parsing the max timestamp to long: " + x.get()); return x; } @@ -587,14 +554,11 @@ private void checkDelayResponse(Optional lastTimeStamp) { .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - //System.out.println("Window delay passed in from configs: " + delayMillis); - //System.out.println("Time now: " + Instant.now().toEpochMilli()); - System.out.println("last seen time stamp: " + lastTimeStamp.get()); if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get()); long windowDelayMins = TimeUnit.MILLISECONDS.toMinutes(delayMillis); String errorMsg = "window-delay given is too short, and last seen timestamp is " + minutesSinceLastStamp + - " minutes ago " + " and the window-delay given is only of " + windowDelayMins + " minutes"; + " minutes ago " + "and the window-delay given is only of " + windowDelayMins + " minutes"; logger.warn(errorMsg); suggestedChanges.add(errorMsg); } @@ -604,9 +568,6 @@ private void checkDelayResponse(Optional lastTimeStamp) { private 
void validateAnomalyDetectorResponse() { this.responseValidate.setFailures(failures); this.responseValidate.setSuggestedChanges(suggestedChanges); - //System.out.println("failure list in response: " + responseValidate.getFailures()); - //System.out.println("suggestion list in response: " + responseValidate.getSuggestedChanges()); - System.out.println("inside response building and sending"); try { BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, responseValidate.toXContent(channel.newBuilder())); channel.sendResponse(restResponse); @@ -640,148 +601,27 @@ private double parseAggregation(Aggregation aggregation) { } return Optional.ofNullable(result).orElseThrow(() -> new IllegalStateException("Failed to parse aggregation " + aggregation)); } -} -//private void randomSamplingIntervalValidationCombined() throws IOException{ -// System.out.println("inside combined random sampling"); -// long delayMillis = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataStartTime = Instant.now().toEpochMilli() - delayMillis; -// long detectorInterval = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataEndTime = dataStartTime + ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); -// -// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; -// for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { -// timeRanges[i][0] = dataStartTime + (i * detectorInterval); -// timeRanges[i][1] = timeRanges[i][0] + detectorInterval; -// } -// //System.out.println("timeRanges: " + Arrays.deepToString(timeRanges)); -// BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(anomalyDetector.getFilterQuery()); -// -// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder(); -// -// if 
(anomalyDetector.getFeatureAttributes() != null) { -// for (Feature feature : anomalyDetector.getFeatureAttributes()) { -// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( -// feature.getAggregation().toString(), -// xContent, -// feature.getId() -// ); -// internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); -// } -// Random rand = new Random(); -// int hitCounter = 0; -// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { -// int randIndex = rand.nextInt(NUM_OF_RANDOM_SAMPLES); -// long RandomRangeStart = timeRanges[randIndex][0]; -// long RandomRangeEnd = timeRanges[randIndex][1]; -// RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) -// .from(RandomRangeStart) -// .to(RandomRangeEnd) -// .format("epoch_millis") -// .includeLower(true) -// .includeUpper(false); -// internalFilterQuery.must(rangeQueryRandom); -// internalSearchSourceBuilder.query(internalFilterQuery); -// SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(internalSearchSourceBuilder); -// System.out.println("search builder inside counter: " + internalSearchSourceBuilder.toString()); -// client -// .search( -// searchRequest, -// ActionListener -// .wrap( -// searchResponse -> onRandomSampleResponse(searchResponse, hitCounter), -// exception -> { -// System.out.println(exception.getMessage()); -// onFailure(exception); -// } -// ) -// ); -// } -// System.out.println("hit counter: " + hitCounter); -// if (hitCounter < 6) { -// String errorMsg = "data is too sparse with this interval for feature "; -// logger.warn(errorMsg); -// suggestedChanges.add(errorMsg); -// validateAnomalyDetectorResponse(); -// } else { -// checkWindowDelay(); -// } -// } -//} -// private Optional parseAggregations(Optional aggregations, String featureIds) { -// return aggregations -// .map(aggs -> aggs.asMap()) -// .map( -// map -> featureIds -// .stream() -// .mapToDouble(id -> 
Optional.ofNullable(map.get(id)).map(this::parseAggregation).orElse(Double.NaN)) -// .toArray() -// ) -// .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); -// } + private void getFieldMapping() { + GetMappingsRequest request = new GetMappingsRequest().indices(anomalyDetector.getIndices().get(0)); + adminClient + .indices().getMappings( + request, + ActionListener + .wrap( + response -> checkFieldIndex(response), + exception -> onFailure(exception) + ) + ); + } -// private void getFieldMapping() { -// GetMappingsRequest request = new GetMappingsRequest().indices(anomalyDetector.getIndices().get(0)); -// adminClient -// .indices().getMappings( -// request, -// ActionListener -// .wrap( -// response -> checkFieldIndex(response), -// exception -> onFailure(exception) -// ) -// ); -// } -// -// private void checkFieldIndex(GetMappingsResponse response) { -// System.out.println(response.toString()); + private void checkFieldIndex(GetMappingsResponse response) { + System.out.println(response.toString()); // Optional x = Optional // .ofNullable(response) // .map(SearchResponse::get) // .map(aggs -> aggs.asMap()) // .map(map -> (Max) map.get(AGG_NAME_MAX)) // .map(agg -> (long) agg.getValue()); -// } -//} - - -// private void checkIfMissingFeature() throws IOException{ -// if (anomalyDetector.getFeatureAttributes() != null) { -// for (Feature feature : anomalyDetector.getFeatureAttributes()) { -// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( -// feature.getAggregation().toString(), -// xContent, -// feature.getId() -// ); -// } -// } -// if (anomalyDetector.getFeatureAttributes() != null) { -// for (Feature feature : anomalyDetector.getFeatureAttributes()) { -// AggregatorFactories.Builder internalAgg = ParseUtils.parseAggregators( -// feature.getAggregation().toString(), -// xContent, -// feature.getId() -// ); -// 
internalSearchSourceBuilder.aggregation(internalAgg.getAggregatorFactories().iterator().next()); -// } -// } -// -// try { -// SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalAgg) -// return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); -// } catch (IOException e) { -// logger.warn("Failed to create feature search request for " + detector.getDetectorId() + " for preview", e); -// throw e; -// } -// -// if (anomalyDetector.getFeatureAttributes() != null) { -// for (Feature feature : anomalyDetector.getFeatureAttributes()) { -// parseAggregations(Optional.ofNullable(feature).map(f -> f.getAggregation()), anomalyDetector.getEnabledFeatureIds()); -// } -// } -// } \ No newline at end of file + } +} + From 7dd785155e6e5936b302dfa0ab201e1a23cebb6e Mon Sep 17 00:00:00 2001 From: Galitzky Date: Mon, 3 Aug 2020 10:49:47 -0700 Subject: [PATCH 08/20] commit before switching to updated code --- .../ValidateAnomalyDetectorActionHandler.java | 189 +++++++++++++++--- 1 file changed, 158 insertions(+), 31 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 6b30680a..7c6faff7 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -76,6 +76,7 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler protected static final double SAMPLE_SUCCESS_RATE = 0.75; protected static final int RANDOM_SAMPLING_REPEATS = 12; protected static final int FEATURE_VALIDATION_TIME_BACK_MINUTES = 10080; + protected static final long MAX_INTERVAL_LENGTH = 86400000; private final AdminClient adminClient; @@ -96,6 +97,7 @@ 
public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final Map savedResponsesToFeature; private final List savedMultiResponses; private final Map featureIntervalValidation; + private final Map featureValidTime; private final List failures; private final List suggestedChanges; @@ -137,6 +139,7 @@ public ValidateAnomalyDetectorActionHandler( this.featureIntervalValidation = new HashMap<>(); this.savedMultiResponses = Collections.synchronizedList(new ArrayList<>()); this.savedResponsesToFeature = new HashMap<>(); + this.featureValidTime = new HashMap<>(); } /** @@ -447,11 +450,30 @@ private void randomSamplingIntervalValidation() throws IOException { } AtomicInteger listenerCounter = new AtomicInteger(); for (Feature feature : anomalyDetector.getFeatureAttributes()) { - randomSamplingHelper(timeRanges, feature, listenerCounter); + randomSamplingHelper(timeRanges, feature, listenerCounter, detectorInterval); +// boolean valid = false; +// for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i*=2) { +// detectorInterval = i; +// timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; +// long delayMillis = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; +// dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); +// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { +// timeRanges[j][0] = dataStartTime + (j * detectorInterval); +// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; +// } +// randomSamplingHelper(timeRanges, feature, listenerCounter, i); +// if (featureValidTime.containsKey(feature.getName())) { +// break; +// } +// } } } - private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter) throws IOException { + private void randomSamplingHelper(long[][] timeRanges, 
Feature feature, AtomicInteger listenerCounter, long intervalTime) throws IOException { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); @@ -476,7 +498,7 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); sr.add(searchRequest); } - // System.out.println("8 requests: " + sr.requests().toString()); + // System.out.println("8 requests: " + sr.requests().toString()); client .multiSearch( sr, @@ -486,9 +508,9 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn savedResponsesToFeature.put(feature.getName(), searchResponse); listenerCounter.incrementAndGet(); if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { - onRandomGetMultiResponse(searchResponse, feature, listenerCounter); + onRandomGetMultiResponse(searchResponse, feature, listenerCounter, intervalTime); } - }, + }, exception -> { System.out.println(exception.getMessage()); onFailure(exception); @@ -497,38 +519,143 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn ); } - private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter) { - for (String f: savedResponsesToFeature.keySet()) { - String errorMsg = ""; - final AtomicInteger hitCounter = new AtomicInteger(); - //System.out.println("feature name out of all feature loop: " + f); - for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { - SearchResponse response = item.getResponse(); - // System.out.println("each response for feature, " + f + ": " + response.toString()); - if (response.getHits().getTotalHits().value > 0) { - 
hitCounter.incrementAndGet(); - } - } - System.out.println("Hit counter before last validation: " + hitCounter.get()); - System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); - - if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { - featureIntervalValidation.put(f, false); - errorMsg += "data is too sparse with this interval for feature: " + f; - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); - } else { - featureIntervalValidation.put(f, true); + private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long intervalTime) + throws IOException { + if (intervalTime >= MAX_INTERVAL_LENGTH) { + return; + } + for (String f: savedResponsesToFeature.keySet()) { + String errorMsg = ""; + final AtomicInteger hitCounter = new AtomicInteger(); + //System.out.println("feature name out of all feature loop: " + f); + for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { + SearchResponse response = item.getResponse(); + // System.out.println("each response for feature, " + f + ": " + response.toString()); + if (response.getHits().getTotalHits().value > 0) { + hitCounter.incrementAndGet(); } } - // System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); - if (featureIntervalValidation.containsValue(false)) { - validateAnomalyDetectorResponse(); +// System.out.println("Hit counter before last validation: " + hitCounter.get()); +// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); + + if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { + featureIntervalValidation.put(f, false); + errorMsg += "data is too sparse with this interval for feature: " + f; + logger.warn(errorMsg); + suggestedChanges.add(errorMsg); } else { - checkWindowDelay(); + featureValidTime.put(f, intervalTime); + 
featureIntervalValidation.put(f, true); + } + } + if (!featureIntervalValidation.containsValue(false)) { + return; + } + System.out.println("valid time" + featureValidTime); + if (featureValidTime.keySet().size() == anomalyDetector.getFeatureAttributes().size()) { + featureIntervalValidation.put(feature.getName(), false); + } + if (featureIntervalValidation.containsValue(false)) { + long detectorInterval = intervalTime * 2; + long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); + for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { + timeRanges[j][0] = dataStartTime + (j * detectorInterval); + timeRanges[j][1] = timeRanges[j][0] + detectorInterval; } + randomSamplingHelper(timeRanges, feature, listnerCounter, detectorInterval); + } + if (featureValidTime.keySet().size() != anomalyDetector.getFeatureAttributes().size()) { + validateAnomalyDetectorResponse(); + } else { + checkWindowDelay(); + } } +// private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter) throws IOException { +// ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); +// XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); +// parser.nextToken(); +// List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); +// //Random rand = new Random(); +// MultiSearchRequest sr = new MultiSearchRequest(); +// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { +//// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); +//// long RandomRangeStart = 
timeRanges[randIndex][0]; +//// long RandomRangeEnd = timeRanges[randIndex][1]; +// long rangeStart = timeRanges[i][0]; +// long rangeEnd = timeRanges[i][1]; +// RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) +// .from(rangeStart) +// .to(rangeEnd) +// .format("epoch_millis") +// .includeLower(true) +// .includeUpper(false); +// BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) +// .filter(QueryBuilders.existsQuery(fieldNames.get(0))); +// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); +// SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); +// sr.add(searchRequest); +// } +// // System.out.println("8 requests: " + sr.requests().toString()); +// client +// .multiSearch( +// sr, +// ActionListener +// .wrap( +// searchResponse -> { +// savedResponsesToFeature.put(feature.getName(), searchResponse); +// listenerCounter.incrementAndGet(); +// if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { +// onRandomGetMultiResponse(searchResponse, feature, listenerCounter); +// } +// }, +// exception -> { +// System.out.println(exception.getMessage()); +// onFailure(exception); +// } +// ) +// ); +// } +// +// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter) { +// for (String f: savedResponsesToFeature.keySet()) { +// String errorMsg = ""; +// final AtomicInteger hitCounter = new AtomicInteger(); +// //System.out.println("feature name out of all feature loop: " + f); +// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { +// SearchResponse response = item.getResponse(); +// // System.out.println("each response for feature, " + f + ": " + response.toString()); +// if 
(response.getHits().getTotalHits().value > 0) { +// hitCounter.incrementAndGet(); +// } +// } +// System.out.println("Hit counter before last validation: " + hitCounter.get()); +// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); +// +// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { +// +// featureIntervalValidation.put(f, false); +// errorMsg += "data is too sparse with this interval for feature: " + f; +// logger.warn(errorMsg); +// suggestedChanges.add(errorMsg); +// } else { +// featureIntervalValidation.put(f, true); +// } +// } +// // System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); +// if (featureIntervalValidation.containsValue(false)) { +// validateAnomalyDetectorResponse(); +// } else { +// checkWindowDelay(); +// } +// } + private void checkWindowDelay() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) From 6e4604135538c4b1ffcbe247f8a3102cd35c0c48 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 4 Aug 2020 23:37:17 -0700 Subject: [PATCH 09/20] recomendation fully works now --- .../ValidateAnomalyDetectorActionHandler.java | 421 +++++++++++++----- 1 file changed, 319 insertions(+), 102 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 7c6faff7..f597c372 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -49,8 +49,10 @@ import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.*; +import 
java.util.concurrent.CountDownLatch; import java.util.concurrent.ThreadLocalRandom; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Matcher; import java.util.stream.Collectors; @@ -97,9 +99,11 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final Map savedResponsesToFeature; private final List savedMultiResponses; private final Map featureIntervalValidation; - private final Map featureValidTime; + private final Map featureValidTimerecommendation; private final List failures; private final List suggestedChanges; + private Boolean inferringInterval; + private AtomicBoolean inferAgain; /** @@ -139,7 +143,9 @@ public ValidateAnomalyDetectorActionHandler( this.featureIntervalValidation = new HashMap<>(); this.savedMultiResponses = Collections.synchronizedList(new ArrayList<>()); this.savedResponsesToFeature = new HashMap<>(); - this.featureValidTime = new HashMap<>(); + this.featureValidTimerecommendation = new HashMap<>(); + this.inferringInterval = false; + this.inferAgain = new AtomicBoolean(true); } /** @@ -218,7 +224,6 @@ private void searchAdInputIndices(String detectorId) { .query(QueryBuilders.matchAllQuery()) .size(0) .timeout(requestTimeout); - SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); client .search( @@ -304,7 +309,7 @@ private void queryFilterValidation() { BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).terminateAfter(1).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - System.out.println("Query filter request: " + searchRequest.toString()); + // 
System.out.println("Query filter request: " + searchRequest.toString()); client .search( searchRequest, @@ -367,7 +372,7 @@ private void featureQueryValidation() throws IOException { .filter(QueryBuilders.existsQuery(fieldNames.get(0))); SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - System.out.println("search builder for each feature query: " + searchRequest.toString()); + //System.out.println("search builder for each feature query: " + searchRequest.toString()); client .search( searchRequest, @@ -412,7 +417,7 @@ private List parseAggregationRequest(XContentParser parser, int level, S private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { - System.out.println("response for each feature agg validation " + response.toString()); + // System.out.println("response for each feature agg validation " + response.toString()); if (response.getHits().getTotalHits().value <= 0) { String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); logger.warn(errorMsg); @@ -436,54 +441,56 @@ private void onFeatureAggregationValidation(SearchResponse response, Feature fea // } // } - private void randomSamplingIntervalValidation() throws IOException { + private synchronized void randomSamplingIntervalValidation() throws IOException { long detectorInterval = Optional .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) .map(t -> t.toDuration().toMillis()) .orElse(0L); - long[] startEnd = startEndTimeRangeWithIntervals(MAX_NUM_OF_SAMPLES_VIEWED); - long dataStartTime = startEnd[0]; - long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; - for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { - timeRanges[i][0] = dataStartTime + (i * detectorInterval); 
- timeRanges[i][1] = timeRanges[i][0] + detectorInterval; - } - AtomicInteger listenerCounter = new AtomicInteger(); - for (Feature feature : anomalyDetector.getFeatureAttributes()) { - randomSamplingHelper(timeRanges, feature, listenerCounter, detectorInterval); -// boolean valid = false; -// for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i*=2) { -// detectorInterval = i; -// timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; -// long delayMillis = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; -// dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); -// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { -// timeRanges[j][0] = dataStartTime + (j * detectorInterval); -// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; -// } -// randomSamplingHelper(timeRanges, feature, listenerCounter, i); -// if (featureValidTime.containsKey(feature.getName())) { -// break; -// } -// } + //CountDownLatch latch = new CountDownLatch(Math.sqrt((double) MAX_INTERVAL_LENGTH / detectorInterval)); + + for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i *= 1.5) { + if (inferringInterval) { + break; + } + detectorInterval = i; + long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); + for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { + timeRanges[j][0] = dataStartTime + (j * detectorInterval); + timeRanges[j][1] = timeRanges[j][0] + detectorInterval; + } + AtomicInteger listenerCounter = new AtomicInteger(); + try 
{ + if (inferAgain.get()) { + randomSamplingHelper(timeRanges, listenerCounter, i); + } + wait(); + } catch (Exception ex) { + + } + + System.out.println("value of i inside loop: " + i); } } - private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter, long intervalTime) throws IOException { - ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); - XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); - parser.nextToken(); - List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); - //Random rand = new Random(); + private synchronized void randomSamplingHelper(long[][] timeRanges, AtomicInteger listenerCounter, long detectorInterval) throws IOException { + inferAgain.set(false); + List featureFields = new ArrayList<>(); + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); + featureFields.add(fieldNames.get(0)); + } + System.out.println("featureFields: " + featureFields); MultiSearchRequest sr = new MultiSearchRequest(); for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { -// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); -// long RandomRangeStart = timeRanges[randIndex][0]; -// long RandomRangeEnd = timeRanges[randIndex][1]; long rangeStart = timeRanges[i][0]; long rangeEnd = timeRanges[i][1]; RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) @@ -492,25 +499,34 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, 
AtomicIn .format("epoch_millis") .includeLower(true) .includeUpper(false); - BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) - .filter(QueryBuilders.existsQuery(fieldNames.get(0))); - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); + BoolQueryBuilder qb = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()); + for (int j = 0 ; j < featureFields.size(); j++) { + qb.filter(QueryBuilders.existsQuery(featureFields.get(j) ) ) ; + } +// BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) +// .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(qb).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); + if (i == 0) { + System.out.println("search request: " + searchRequest); + } sr.add(searchRequest); } - // System.out.println("8 requests: " + sr.requests().toString()); - client + // System.out.println("8 requests: " + sr.requests().toString()); + + client .multiSearch( sr, ActionListener .wrap( searchResponse -> { - savedResponsesToFeature.put(feature.getName(), searchResponse); - listenerCounter.incrementAndGet(); - if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { - onRandomGetMultiResponse(searchResponse, feature, listenerCounter, intervalTime); +// savedMultiResponses.add(searchResponse); +// listenerCounter.incrementAndGet(); + + if (doneInferring(detectorInterval, searchResponse)) { + onRandomGetMultiResponse(); } - }, + }, exception -> { System.out.println(exception.getMessage()); onFailure(exception); @@ -518,65 +534,266 @@ private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicIn ) ); 
} - - private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long intervalTime) - throws IOException { - if (intervalTime >= MAX_INTERVAL_LENGTH) { - return; + private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse){ + System.out.println("number of responses in multiresponse: " + searchResponse.getResponses().length); + System.out.println("detector interval: " + detectorInterval); + boolean firstCase = false; + long originalInterval = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + if (detectorInterval == originalInterval) { + System.out.println("went into first case"); + firstCase = true; + } + if (detectorInterval >= MAX_INTERVAL_LENGTH) { + suggestedChanges.add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); + System.out.println("went into max check"); + inferAgain.set(false); + return true; } - for (String f: savedResponsesToFeature.keySet()) { String errorMsg = ""; final AtomicInteger hitCounter = new AtomicInteger(); - //System.out.println("feature name out of all feature loop: " + f); - for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { + for (MultiSearchResponse.Item item : searchResponse) { SearchResponse response = item.getResponse(); - // System.out.println("each response for feature, " + f + ": " + response.toString()); if (response.getHits().getTotalHits().value > 0) { hitCounter.incrementAndGet(); } } -// System.out.println("Hit counter before last validation: " + hitCounter.get()); -// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); - + inferAgain.set(true); + notify(); + System.out.println("Hit counter before last validation: " + hitCounter.get()); + System.out.println("successRate: " + hitCounter.doubleValue() / (double) 
NUM_OF_RANDOM_SAMPLES); if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { - featureIntervalValidation.put(f, false); - errorMsg += "data is too sparse with this interval for feature: " + f; - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); - } else { - featureValidTime.put(f, intervalTime); - featureIntervalValidation.put(f, true); - } - } - if (!featureIntervalValidation.containsValue(false)) { - return; - } - System.out.println("valid time" + featureValidTime); - if (featureValidTime.keySet().size() == anomalyDetector.getFeatureAttributes().size()) { - featureIntervalValidation.put(feature.getName(), false); - } - if (featureIntervalValidation.containsValue(false)) { - long detectorInterval = intervalTime * 2; - long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - long dataEndTime = Instant.now().toEpochMilli() - delayMillis; - long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); - for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { - timeRanges[j][0] = dataStartTime + (j * detectorInterval); - timeRanges[j][1] = timeRanges[j][0] + detectorInterval; + return false; + } else if (!firstCase){ + String suggestion = "detector interval: " + detectorInterval; + suggestedChanges.add(suggestion); + inferAgain.set(false); + return true; } - randomSamplingHelper(timeRanges, feature, listnerCounter, detectorInterval); - } - if (featureValidTime.keySet().size() != anomalyDetector.getFeatureAttributes().size()) { - validateAnomalyDetectorResponse(); - } else { - checkWindowDelay(); - } + return true; + } + + private void onRandomGetMultiResponse() { + checkWindowDelay(); } + +// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long 
detectorInterval) { +// boolean firstCase = false; +// long originalInterval = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// if (detectorInterval == originalInterval) { +// System.out.println("went into first case"); +// firstCase = true; +// } +// for (String f: savedResponsesToFeature.keySet()) { +// String errorMsg = ""; +// final AtomicInteger hitCounter = new AtomicInteger(); +// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { +// SearchResponse response = item.getResponse(); +// if (response.getHits().getTotalHits().value > 0) { +// hitCounter.incrementAndGet(); +// } +// } +// System.out.println("Hit counter before last validation: " + hitCounter.get()); +// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); +// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { +// featureIntervalValidation.put(f, false); +//// errorMsg += "data is too sparse with this interval for feature: " + f; +//// logger.warn(errorMsg); +//// suggestedChanges.add(errorMsg); +// +//// +// } else { +// if (!firstCase) { +// featureValidTimerecommendation.put(f, detectorInterval); +// } +// featureIntervalValidation.put(f, true); +// } +// } +// System.out.println("1: " + featureIntervalValidation); +// +// if (!featureIntervalValidation.containsValue((false)) && firstCase && !inferringInterval) { +// inferringInterval = true; +// System.out.println("BEFORE WINDOW DELAY CALL 1"); +// System.out.println(featureIntervalValidation); +// checkWindowDelay(); +// } else if (!featureIntervalValidation.containsValue((false)) && !inferringInterval) { +// for (String featureName : featureValidTimerecommendation.keySet()) { +// String suggestion = featureName + ": " + featureValidTimerecommendation.get(featureName).toString(); +// suggestedChanges.add(suggestion); +// } +// 
inferringInterval = true; +// System.out.println("BEFORE WINDOW DELAY CALL 2"); +// System.out.println(featureIntervalValidation); +// checkWindowDelay(); +// } else if (detectorInterval >= MAX_INTERVAL_LENGTH && featureIntervalValidation.containsValue(false) && !inferringInterval) { +// for (String featureName : featureIntervalValidation.keySet()) { +// if (!featureIntervalValidation.get(featureName)) { +// String doneInferring = "failed to infer max up too: " + MAX_INTERVAL_LENGTH + "for feature: " + featureName; +// suggestedChanges.add(doneInferring); +// } else { +// for (String featureNameRecc : featureValidTimerecommendation.keySet()) { +// String suggestion = featureName + ": " + featureValidTimerecommendation.get(featureNameRecc).toString(); +// suggestedChanges.add(suggestion); +// } +// } +// } +// inferringInterval = true; +// System.out.println("BEFORE WINDOW DELAY CALL 3"); +// checkWindowDelay(); +// } else if (detectorInterval >= MAX_INTERVAL_LENGTH && !inferringInterval) { +// inferringInterval = true; +// System.out.println("BEFORE WINDOW DELAY CALL 34"); +// checkWindowDelay(); +// } else { +// System.out.println("hello"); +// System.out.println(featureIntervalValidation); +// } +// } + + //long detectorInterval = intervalTime * 2; + +//// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; +//// long delayMillis = Optional +//// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +//// .map(t -> t.toDuration().toMillis()) +//// .orElse(0L); +//// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; +//// long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); +//// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { +//// timeRanges[j][0] = dataStartTime + (j * detectorInterval); +//// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; +//// } + +// boolean valid = false; +// for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i*=2) { +// 
detectorInterval = i; +// timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; +// long delayMillis = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; +// dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); +// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { +// timeRanges[j][0] = dataStartTime + (j * detectorInterval); +// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; +// } +// randomSamplingHelper(timeRanges, feature, listenerCounter, i); +// if (featureValidTime.containsKey(feature.getName())) { +// break; +// } +// } + + // private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter, long intervalTime) throws IOException { +// ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); +// XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); +// parser.nextToken(); +// List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); +// //Random rand = new Random(); +// MultiSearchRequest sr = new MultiSearchRequest(); +// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { +//// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); +//// long RandomRangeStart = timeRanges[randIndex][0]; +//// long RandomRangeEnd = timeRanges[randIndex][1]; +// long rangeStart = timeRanges[i][0]; +// long rangeEnd = timeRanges[i][1]; +// RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) +// .from(rangeStart) +// .to(rangeEnd) +// .format("epoch_millis") +// .includeLower(true) +// .includeUpper(false); +// BoolQueryBuilder boolQuery2 = 
QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) +// .filter(QueryBuilders.existsQuery(fieldNames.get(0))); +// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); +// SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); +// sr.add(searchRequest); +// } +// // System.out.println("8 requests: " + sr.requests().toString()); +// client +// .multiSearch( +// sr, +// ActionListener +// .wrap( +// searchResponse -> { +// savedResponsesToFeature.put(feature.getName(), searchResponse); +// listenerCounter.incrementAndGet(); +// if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { +// onRandomGetMultiResponse(searchResponse, feature, listenerCounter, intervalTime); +// } +// }, +// exception -> { +// System.out.println(exception.getMessage()); +// onFailure(exception); +// } +// ) +// ); +// } + +// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long intervalTime) +// throws IOException { +// if (intervalTime >= MAX_INTERVAL_LENGTH) { +// return; +// } +// for (String f: savedResponsesToFeature.keySet()) { +// String errorMsg = ""; +// final AtomicInteger hitCounter = new AtomicInteger(); +// //System.out.println("feature name out of all feature loop: " + f); +// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { +// SearchResponse response = item.getResponse(); +// // System.out.println("each response for feature, " + f + ": " + response.toString()); +// if (response.getHits().getTotalHits().value > 0) { +// hitCounter.incrementAndGet(); +// } +// } +//// System.out.println("Hit counter before last validation: " + hitCounter.get()); +//// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); +// +// if (hitCounter.doubleValue() / 
(double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { +// featureIntervalValidation.put(f, false); +// errorMsg += "data is too sparse with this interval for feature: " + f; +// logger.warn(errorMsg); +// suggestedChanges.add(errorMsg); +// } else { +// featureValidTime.put(f, intervalTime); +// featureIntervalValidation.put(f, true); +// } +// } +// if (!featureIntervalValidation.containsValue(false)) { +// return; +// } +// System.out.println("valid time" + featureValidTime); +// if (featureValidTime.keySet().size() == anomalyDetector.getFeatureAttributes().size()) { +// featureIntervalValidation.put(feature.getName(), false); +// } +// if (featureIntervalValidation.containsValue(false)) { +// long detectorInterval = intervalTime * 2; +// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; +// long delayMillis = Optional +// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) +// .map(t -> t.toDuration().toMillis()) +// .orElse(0L); +// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; +// long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); +// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { +// timeRanges[j][0] = dataStartTime + (j * detectorInterval); +// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; +// } +// randomSamplingHelper(timeRanges, feature, listnerCounter, detectorInterval); +// } +// if (featureValidTime.keySet().size() != anomalyDetector.getFeatureAttributes().size()) { +// validateAnomalyDetectorResponse(); +// } else { +// checkWindowDelay(); +// } +// } + // private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter) throws IOException { // ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); // XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); From 
890fa5b9418f30b453e0fe4c0a3e2ce24d8744a5 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Wed, 5 Aug 2020 09:22:53 -0700 Subject: [PATCH 10/20] fixed feature query validation bug --- .../ValidateAnomalyDetectorActionHandler.java | 47 ++++++++++++------- 1 file changed, 31 insertions(+), 16 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index f597c372..1204a7a4 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -239,6 +239,7 @@ private void onSearchAdInputIndicesResponse(SearchResponse response, String dete + Arrays.toString(anomalyDetector.getIndices().toArray(new String[0])); logger.error(errorMsg); failures.add(errorMsg); + validateAnomalyDetectorResponse(); return; } checkADNameExists(detectorId); @@ -353,7 +354,7 @@ private void featureQueryValidation() throws IOException { if (startEnd[0] > startTimeWithSetTime) { dataStartTime = startTimeWithSetTime; } - + AtomicInteger featureCounter = new AtomicInteger(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) .from(dataStartTime) .to(dataEndTime) @@ -378,7 +379,10 @@ private void featureQueryValidation() throws IOException { searchRequest, ActionListener .wrap( - searchResponse -> onFeatureAggregationValidation(searchResponse, feature), + searchResponse -> { + featureCounter.incrementAndGet(); + onFeatureAggregationValidation(searchResponse, feature, featureCounter); + }, exception -> { System.out.println(exception.getMessage()); onFailure(exception); @@ -386,12 +390,7 @@ private void featureQueryValidation() throws IOException { ) ); } - if (!suggestedChanges.isEmpty()) { - 
validateAnomalyDetectorResponse(); - return; - } - System.out.println("went into here"); - randomSamplingIntervalValidation(); + } } @@ -416,13 +415,25 @@ private List parseAggregationRequest(XContentParser parser, int level, S } - private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { - // System.out.println("response for each feature agg validation " + response.toString()); + private void onFeatureAggregationValidation(SearchResponse response, Feature feature, AtomicInteger counter) throws IOException { + // System.out.println("response for each feature agg validation " + response.toString()); if (response.getHits().getTotalHits().value <= 0) { String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); logger.warn(errorMsg); suggestedChanges.add(errorMsg); } + if (counter.get() == anomalyDetector.getFeatureAttributes().size()) { + // System.out.println("went into here"); + // System.out.println("feature name: " + feature.getName()); + if (!suggestedChanges.isEmpty()) { + validateAnomalyDetectorResponse(); + } else { + randomSamplingIntervalValidation(); + } + + } + + } @@ -488,7 +499,7 @@ private synchronized void randomSamplingHelper(long[][] timeRanges, AtomicIntege List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); featureFields.add(fieldNames.get(0)); } - System.out.println("featureFields: " + featureFields); + // System.out.println("featureFields: " + featureFields); MultiSearchRequest sr = new MultiSearchRequest(); for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { long rangeStart = timeRanges[i][0]; @@ -535,8 +546,8 @@ private synchronized void randomSamplingHelper(long[][] timeRanges, AtomicIntege ); } private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse){ - System.out.println("number of responses in multiresponse: " + searchResponse.getResponses().length); - 
System.out.println("detector interval: " + detectorInterval); + // System.out.println("number of responses in multiresponse: " + searchResponse.getResponses().length); + // System.out.println("detector interval: " + detectorInterval); boolean firstCase = false; long originalInterval = Optional .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) @@ -562,8 +573,8 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes } inferAgain.set(true); notify(); - System.out.println("Hit counter before last validation: " + hitCounter.get()); - System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); + //System.out.println("Hit counter before last validation: " + hitCounter.get()); + // System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { return false; } else if (!firstCase){ @@ -572,6 +583,7 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes inferAgain.set(false); return true; } + inferAgain.set(false); return true; } @@ -947,6 +959,8 @@ private double parseAggregation(Aggregation aggregation) { } private void getFieldMapping() { GetMappingsRequest request = new GetMappingsRequest().indices(anomalyDetector.getIndices().get(0)); + + adminClient .indices().getMappings( request, @@ -959,7 +973,8 @@ private void getFieldMapping() { } private void checkFieldIndex(GetMappingsResponse response) { - System.out.println(response.toString()); + System.out.println(response); + System.out.println("response mapping: " + response.mappings().get("feature-1")); // Optional x = Optional // .ofNullable(response) // .map(SearchResponse::get) From 7afabb42d8918a0dd819c81315775868b3d8a455 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Wed, 19 Aug 2020 13:50:50 -0700 Subject: [PATCH 11/20] ready for PR --- .../ad/AnomalyDetectorPlugin.java | 8 +- 
.../ad/feature/SearchFeatureDao.java | 5 +- .../ad/model/AnomalyDetector.java | 64 +- .../ad/model/ValidateResponse.java | 83 +- .../RestValidateAnomalyDetectorAction.java | 89 +- .../ValidateAnomalyDetectorActionHandler.java | 1020 +++++------------ ...iSearchResponseDelegateActionListener.java | 112 -- .../ad/e2e/DetectionResultEvalutationIT.java | 252 +++- .../ad/rest/AnomalyDetectorRestApiIT.java | 97 ++ 9 files changed, 794 insertions(+), 936 deletions(-) delete mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index 913bc595..7dab4e2c 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -29,7 +29,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; -import com.amazon.opendistroforelasticsearch.ad.rest.*; import org.elasticsearch.SpecialPermission; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; @@ -91,6 +90,7 @@ import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyDetectorAction; import com.amazon.opendistroforelasticsearch.ad.rest.RestSearchAnomalyResultAction; import com.amazon.opendistroforelasticsearch.ad.rest.RestStatsAnomalyDetectorAction; +import com.amazon.opendistroforelasticsearch.ad.rest.RestValidateAnomalyDetectorAction; import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; import com.amazon.opendistroforelasticsearch.ad.stats.ADStat; @@ -213,9 +213,9 @@ public List getRestHandlers( ); RestSearchAnomalyDetectorAction searchAnomalyDetectorAction = new RestSearchAnomalyDetectorAction(); 
RestValidateAnomalyDetectorAction restValidateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( - settings, - anomalyDetectionIndices, - xContentRegistry + settings, + anomalyDetectionIndices, + xContentRegistry ); RestSearchAnomalyResultAction searchAnomalyResultAction = new RestSearchAnomalyResultAction(); RestDeleteAnomalyDetectorAction deleteAnomalyDetectorAction = new RestDeleteAnomalyDetectorAction(clusterService); diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java index e5b60b43..fd9e8192 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java @@ -565,9 +565,7 @@ private Optional parseBucket(InternalDateRange.Bucket bucket, List parseAggregations(Optional aggregations, List featureIds) { - System.out.println("optional aggregation list: " + aggregations.get().asList()); - - Optional aggregationsResults = aggregations + return aggregations .map(aggs -> aggs.asMap()) .map( map -> featureIds @@ -576,6 +574,5 @@ private Optional parseAggregations(Optional aggregations .toArray() ) .filter(result -> Arrays.stream(result).noneMatch(d -> Double.isNaN(d) || Double.isInfinite(d))); - return aggregationsResults; } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index 83adc2a8..5057843b 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -147,20 +147,20 @@ public AnomalyDetector( } public AnomalyDetector( - String detectorId, - Long version, - String name, - String description, - String timeField, - List indices, - List features, - 
QueryBuilder filterQuery, - TimeConfiguration detectionInterval, - TimeConfiguration windowDelay, - Map uiMetadata, - Integer schemaVersion, - Instant lastUpdateTime, - Boolean validation + String detectorId, + Long version, + String name, + String description, + String timeField, + List indices, + List features, + QueryBuilder filterQuery, + TimeConfiguration detectionInterval, + TimeConfiguration windowDelay, + Map uiMetadata, + Integer schemaVersion, + Instant lastUpdateTime, + Boolean validation ) { if (indices == null || indices.isEmpty()) { indices = null; @@ -347,11 +347,7 @@ public static AnomalyDetector parse( ); } - public static AnomalyDetector parseValidation( - XContentParser parser, - String detectorId, - Long version - ) throws IOException { + public static AnomalyDetector parseValidation(XContentParser parser, String detectorId, Long version) throws IOException { Boolean validation = true; String name = null; String description = null; @@ -423,25 +419,23 @@ public static AnomalyDetector parseValidation( } } return new AnomalyDetector( - detectorId, - version, - name, - description, - timeField, - indices, - features, - filterQuery, - detectionInterval, - windowDelay, - uiMetadata, - schemaVersion, - lastUpdateTime, - validation + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + uiMetadata, + schemaVersion, + lastUpdateTime, + validation ); } - - public SearchSourceBuilder generateFeatureQuery() { SearchSourceBuilder generatedFeatureQuery = new SearchSourceBuilder().query(filterQuery); if (this.getFeatureAttributes() != null) { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java index 3a47ffa1..edbcca34 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java +++ 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidateResponse.java @@ -1,53 +1,72 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + package com.amazon.opendistroforelasticsearch.ad.model; -import com.amazon.opendistroforelasticsearch.ad.constant.CommonName; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.StreamOutput; +import java.io.IOException; +import java.util.List; +import java.util.Map; + import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.rest.RestStatus; - -import java.io.IOException; -import java.util.List; public class ValidateResponse implements ToXContentObject { - private List failures; - private List suggestedChanges; + private Map> failures; + private Map> suggestedChanges; - public XContentBuilder toXContent(XContentBuilder builder) throws IOException { - return toXContent(builder, ToXContent.EMPTY_PARAMS); - } + public XContentBuilder toXContent(XContentBuilder builder) throws IOException { + return toXContent(builder, ToXContent.EMPTY_PARAMS); + } - public ValidateResponse() { - failures = null; - suggestedChanges = null; - } + public ValidateResponse() { + failures = null; + suggestedChanges = null; + } public XContentBuilder 
toXContent(XContentBuilder builder, Params params) throws IOException { XContentBuilder xContentBuilder = builder.startObject(); - xContentBuilder.field("failures", failures); - xContentBuilder.field("suggestedChanges", suggestedChanges); -// if (failures != null && failures.size() > 0) { -// xContentBuilder.array("failures", failures); -// } -// if (suggestedChanges != null && suggestedChanges.size() > 0) { -// xContentBuilder.array("suggestedChanges", suggestedChanges); -// } + xContentBuilder.startObject("failures"); + for (String key : failures.keySet()) { + xContentBuilder.field(key, failures.get(key)); + } + xContentBuilder.endObject(); + + xContentBuilder.startObject("suggestedChanges"); + for (String key : suggestedChanges.keySet()) { + xContentBuilder.field(key, suggestedChanges.get(key)); + } + xContentBuilder.endObject(); return xContentBuilder.endObject(); } - public List getFailures() { return failures; } - - public List getSuggestedChanges() { return suggestedChanges; } - - public void setFailures(List failures) { this.failures = failures; } - - public void setSuggestedChanges(List suggestedChanges) { this.suggestedChanges = suggestedChanges; } + public Map> getFailures() { + return failures; + } + public Map> getSuggestedChanges() { + return suggestedChanges; + } + public void setFailures(Map> failures) { + this.failures = failures; + } + public void setSuggestedChanges(Map> suggestedChanges) { + this.suggestedChanges = suggestedChanges; + } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java index 91f2c94a..fe7c8790 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -1,37 +1,49 @@ -package 
com.amazon.opendistroforelasticsearch.ad.rest; +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ -import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; -import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; -import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; -import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +package com.amazon.opendistroforelasticsearch.ad.rest; -import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.*; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_INTERVAL; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_WINDOW_DELAY; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_DETECTORS; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_FEATURES; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.VALIDATE; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +import java.io.IOException; +import java.util.List; +import java.util.Locale; -import 
com.amazon.opendistroforelasticsearch.ad.rest.handler.IndexAnomalyDetectorActionHandler; -import com.amazon.opendistroforelasticsearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; -import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; -import com.google.common.collect.ImmutableList; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.common.settings.Settings; - -import java.io.IOException; -import java.util.List; -import java.util.Locale; -import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; -import static com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils.*; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; +import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; +import com.amazon.opendistroforelasticsearch.ad.constant.CommonErrorMessages; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; +import com.amazon.opendistroforelasticsearch.ad.settings.EnabledSetting; +import com.google.common.collect.ImmutableList; /** * This class consists of the REST handler to validate anomaly detector configurations. 
@@ -42,7 +54,6 @@ public class RestValidateAnomalyDetectorAction extends BaseRestHandler { private static final String VALIDATE_ANOMALY_DETECTOR_ACTION = "validate_anomaly_detector_action"; private final AnomalyDetectionIndices anomalyDetectionIndices; private final Logger logger = LogManager.getLogger(RestValidateAnomalyDetectorAction.class); - private final Settings settings; private final NamedXContentRegistry xContentRegistry; private volatile TimeValue requestTimeout; @@ -52,11 +63,10 @@ public class RestValidateAnomalyDetectorAction extends BaseRestHandler { private volatile Integer maxAnomalyFeatures; public RestValidateAnomalyDetectorAction( - Settings settings, - AnomalyDetectionIndices anomalyDetectionIndices, - NamedXContentRegistry xContentRegistry + Settings settings, + AnomalyDetectionIndices anomalyDetectionIndices, + NamedXContentRegistry xContentRegistry ) { - this.settings = settings; this.anomalyDetectionIndices = anomalyDetectionIndices; this.detectionInterval = DETECTION_INTERVAL.get(settings); this.detectionWindowDelay = DETECTION_WINDOW_DELAY.get(settings); @@ -77,22 +87,21 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null); return channel -> new ValidateAnomalyDetectorActionHandler( - settings, - client, - channel, - anomalyDetectionIndices, - detectorId, - detector, - maxAnomalyDetectors, - maxAnomalyFeatures, - requestTimeout, - xContentRegistry + client, + channel, + anomalyDetectionIndices, + detector, + maxAnomalyDetectors, + maxAnomalyFeatures, + requestTimeout, + xContentRegistry ).startValidation(); } - @Override - public String getName() { return VALIDATE_ANOMALY_DETECTOR_ACTION; } + public String getName() { + return VALIDATE_ANOMALY_DETECTOR_ACTION; + } @Override public List routes() { @@ -100,8 +109,8 @@ public List routes() { .of( // validate configs new Route( - RestRequest.Method.POST, - 
String.format(Locale.ROOT, "%s/%s", AnomalyDetectorPlugin.AD_BASE_DETECTORS_URI, VALIDATE) + RestRequest.Method.POST, + String.format(Locale.ROOT, "%s/%s", AnomalyDetectorPlugin.AD_BASE_DETECTORS_URI, VALIDATE) ) ); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 1204a7a4..d2f97b67 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -1,69 +1,71 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + package com.amazon.opendistroforelasticsearch.ad.rest.handler; -import com.amazon.opendistroforelasticsearch.ad.AnomalyDetectorPlugin; -import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; -import com.amazon.opendistroforelasticsearch.ad.model.*; -import com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings; -import com.amazon.opendistroforelasticsearch.ad.util.MultiResponsesDelegateActionListener; -import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; -import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; +import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; +import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; + +import java.io.IOException; +import java.time.Instant; +import java.time.temporal.ChronoUnit; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + import org.apache.commons.lang.StringUtils; -import org.apache.commons.lang.Validate; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; -import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; -import org.elasticsearch.action.get.GetResponse; -import org.elasticsearch.action.index.IndexResponse; -import org.elasticsearch.action.search.*; -import org.elasticsearch.client.AdminClient; +import 
org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.node.NodeClient; -import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.*; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.RangeQueryBuilder; -import org.elasticsearch.rest.*; -import org.elasticsearch.rest.action.RestResponseListener; -import org.elasticsearch.search.aggregations.*; -import org.elasticsearch.search.aggregations.bucket.range.DateRangeAggregationBuilder; -import org.elasticsearch.search.aggregations.bucket.sampler.SamplerAggregationBuilder; -import org.elasticsearch.search.aggregations.metrics.InternalTDigestPercentiles; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.metrics.Max; -import org.elasticsearch.search.aggregations.metrics.NumericMetricsAggregation; -import org.elasticsearch.search.aggregations.metrics.Percentile; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; - -import java.io.IOException; -import java.io.ObjectInputStream; -import java.time.Instant; 
-import java.time.temporal.ChronoUnit; -import java.util.*; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ThreadLocalRandom; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.regex.Matcher; -import java.util.stream.Collectors; -import java.util.stream.Stream; - -import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.*; -import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -import static org.elasticsearch.index.query.AbstractQueryBuilder.parseInnerQueryBuilder; -import static org.elasticsearch.search.aggregations.AggregationBuilders.dateRange; -import static org.elasticsearch.search.aggregations.AggregatorFactories.VALID_AGG_NAME; +import com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; +import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.Feature; +import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.model.ValidateResponse; +import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; +import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; /** * Anomaly detector REST action handler to process POST request. 
@@ -72,21 +74,18 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler { protected static final String AGG_NAME_MAX = "max_timefield"; - protected static final int NUM_OF_RANDOM_SAMPLES = 128; + protected static final int NUM_OF_INTERVAL_SAMPLES = 128; protected static final int MAX_NUM_OF_SAMPLES_VIEWED = 128; protected static final int NUM_OF_INTERVALS_CHECKED = 256; protected static final double SAMPLE_SUCCESS_RATE = 0.75; - protected static final int RANDOM_SAMPLING_REPEATS = 12; protected static final int FEATURE_VALIDATION_TIME_BACK_MINUTES = 10080; - protected static final long MAX_INTERVAL_LENGTH = 86400000; - private final AdminClient adminClient; - - - public static final String SUGGESTED_CHANGES = "suggested_changes"; - public static final String FAILURES = "failures"; + protected static final int NUM_OF_INTERVALS_CHECKED_FILTER = 384; + protected static final long MAX_INTERVAL_LENGTH = 2592000000L; + protected static final long HISTORICAL_CHECK_IN_MS = 7776000000L; + protected static final String NAME_REGEX = "[a-zA-Z0-9._-]+"; + protected static final double INTERVAL_RECOMMENDATION_MULTIPLIER = 1.2; private final AnomalyDetectionIndices anomalyDetectionIndices; - private final String detectorId; private final AnomalyDetector anomalyDetector; private final Logger logger = LogManager.getLogger(IndexAnomalyDetectorActionHandler.class); @@ -96,56 +95,41 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final NamedXContentRegistry xContent; private ValidateResponse responseValidate; - private final Map savedResponsesToFeature; - private final List savedMultiResponses; - private final Map featureIntervalValidation; - private final Map featureValidTimerecommendation; - private final List failures; - private final List suggestedChanges; + private Map> failuresMap; + private Map> suggestedChangesMap; private Boolean inferringInterval; private AtomicBoolean inferAgain; - /** * Constructor 
function. * - * @param settings ES settings * @param client ES node client that executes actions on the local node * @param channel ES channel used to construct bytes / builder based outputs, and send responses * @param anomalyDetectionIndices anomaly detector index manager - * @param detectorId detector identifier * @param anomalyDetector anomaly detector instance */ public ValidateAnomalyDetectorActionHandler( - Settings settings, - NodeClient client, - RestChannel channel, - AnomalyDetectionIndices anomalyDetectionIndices, - String detectorId, - AnomalyDetector anomalyDetector, - Integer maxAnomalyDetectors, - Integer maxAnomalyFeatures, - TimeValue requestTimeout, - NamedXContentRegistry xContentRegistry + NodeClient client, + RestChannel channel, + AnomalyDetectionIndices anomalyDetectionIndices, + AnomalyDetector anomalyDetector, + Integer maxAnomalyDetectors, + Integer maxAnomalyFeatures, + TimeValue requestTimeout, + NamedXContentRegistry xContentRegistry ) { super(client, channel); this.anomalyDetectionIndices = anomalyDetectionIndices; - this.detectorId = detectorId; this.anomalyDetector = anomalyDetector; this.maxAnomalyDetectors = maxAnomalyDetectors; this.maxAnomalyFeatures = maxAnomalyFeatures; this.requestTimeout = requestTimeout; - this.failures = new ArrayList<>(); - this.suggestedChanges = new ArrayList<>(); this.responseValidate = new ValidateResponse(); this.xContent = xContentRegistry; - this.adminClient = client.admin(); - this.featureIntervalValidation = new HashMap<>(); - this.savedMultiResponses = Collections.synchronizedList(new ArrayList<>()); - this.savedResponsesToFeature = new HashMap<>(); - this.featureValidTimerecommendation = new HashMap<>(); this.inferringInterval = false; this.inferAgain = new AtomicBoolean(true); + this.failuresMap = new HashMap<>(); + this.suggestedChangesMap = new HashMap<>(); } /** @@ -157,40 +141,48 @@ public ValidateAnomalyDetectorActionHandler( public void startValidation() throws IOException { if 
(!anomalyDetectionIndices.doesAnomalyDetectorIndexExist()) { anomalyDetectionIndices - .initAnomalyDetectorIndex( - ActionListener.wrap(response -> onCreateMappingsResponse(response), exception -> onFailure(exception)) - ); + .initAnomalyDetectorIndex( + ActionListener.wrap(response -> onCreateMappingsResponse(response), exception -> onFailure(exception)) + ); } else { preDataValidationSteps(); } } public void preDataValidationSteps() { - - if (anomalyDetector.getName() == null) { - failures.add("name missing"); + List missingFields = new ArrayList<>(); + List formatErrors = new ArrayList<>(); + if (anomalyDetector.getName() == null || anomalyDetector.getName() == "") { + missingFields.add("name"); + } else if (!anomalyDetector.getName().matches(NAME_REGEX)) { + formatErrors.add(anomalyDetector.getName()); + failuresMap.put("format", formatErrors); } if (anomalyDetector.getTimeField() == null) { - failures.add("time-field missing"); + missingFields.add("time_field"); } if (anomalyDetector.getIndices() == null) { - failures.add("data source indices missing"); + missingFields.add("indices"); } if (anomalyDetector.getWindowDelay() == null) { - failures.add("window-delay missing"); + missingFields.add("window_delay"); } if (anomalyDetector.getDetectionInterval() == null) { - failures.add("detector-interval missing"); + missingFields.add("detector_interval"); } if (anomalyDetector.getFeatureAttributes().isEmpty()) { - failures.add("feature is missing"); + missingFields.add("feature_attributes"); + } + if (!missingFields.isEmpty()) { + failuresMap.put("missing", missingFields); } - String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); if (StringUtils.isNotBlank(error)) { - failures.add(error); + List dupErrorsFeatures = new ArrayList<>(); + dupErrorsFeatures.addAll(Arrays.asList(error.split("\\r?\\n"))); + failuresMap.put("duplicates", dupErrorsFeatures); } - if (!failures.isEmpty()) { + if (!failuresMap.isEmpty()) { 
validateAnomalyDetectorResponse(); } else { validateNumberOfDetectors(); @@ -210,9 +202,9 @@ public void validateNumberOfDetectors() { private void onSearchAdResponse(SearchResponse response) throws IOException { if (response.getHits().getTotalHits().value >= maxAnomalyDetectors) { - String errorMsg = "Can't create anomaly detector more than " + maxAnomalyDetectors; - logger.error(errorMsg); - failures.add(errorMsg); + failuresMap + .computeIfAbsent("others", k -> new ArrayList<>()) + .add("Can't create anomaly detector more than " + maxAnomalyDetectors); validateAnomalyDetectorResponse(); } else { searchAdInputIndices(null); @@ -221,28 +213,27 @@ private void onSearchAdResponse(SearchResponse response) throws IOException { private void searchAdInputIndices(String detectorId) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(QueryBuilders.matchAllQuery()) - .size(0) - .timeout(requestTimeout); + .query(QueryBuilders.matchAllQuery()) + .size(0) + .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, detectorId), exception -> onFailure(exception)) - ); + .search( + searchRequest, + ActionListener + .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, detectorId), exception -> onFailure(exception)) + ); } private void onSearchAdInputIndicesResponse(SearchResponse response, String detectorId) throws IOException { if (response.getHits().getTotalHits().value == 0) { String errorMsg = "Can't create anomaly detector as no document found in indices: " - + Arrays.toString(anomalyDetector.getIndices().toArray(new String[0])); - logger.error(errorMsg); - failures.add(errorMsg); + + Arrays.toString(anomalyDetector.getIndices().toArray(new String[0])); + failuresMap.computeIfAbsent("others", k -> new 
ArrayList<>()).add(errorMsg); validateAnomalyDetectorResponse(); - return; + } else { + checkADNameExists(detectorId); } - checkADNameExists(detectorId); } private void checkADNameExists(String detectorId) throws IOException { @@ -253,677 +244,329 @@ private void checkADNameExists(String detectorId) throws IOException { } SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); - client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onSearchADNameResponse(searchResponse, detectorId, anomalyDetector.getName()), - - exception -> onFailure(exception) - ) - ); + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), + exception -> onFailure(exception) + ) + ); } - private void onSearchADNameResponse(SearchResponse response, String detectorId, String name) throws IOException { + private void onSearchADNameResponse(SearchResponse response, String name) throws IOException { if (response.getHits().getTotalHits().value > 0) { - String errorMsg = String - .format( - "Cannot create anomaly detector with name [%s] as it's already used by detector %s", - name, - Arrays.stream(response.getHits().getHits()).map(hit -> hit.getId()).collect(Collectors.toList()) - ); - logger.warn(errorMsg); - failures.add(errorMsg); + failuresMap.computeIfAbsent("duplicates", k -> new ArrayList<>()).add(name); validateAnomalyDetectorResponse(); - } else if (anomalyDetector.getFilterQuery() != null) { - queryFilterValidation(); } else { - featureQueryValidation(); + checkForHistoricalData(); + } + } + + public void checkForHistoricalData() { + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new 
FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); + SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); + client + .search( + searchRequest, + ActionListener + .wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { onFailure(exception); }) + ); + } + + private void checkIfAnyHistoricalData(Optional lastTimeStamp) { + if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - HISTORICAL_CHECK_IN_MS > lastTimeStamp.get())) { + failuresMap.computeIfAbsent("others", k -> new ArrayList<>()).add("No historical data for past 3 months"); + validateAnomalyDetectorResponse(); + } else { + queryFilterValidation(); } } private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); long dataEndTime = Instant.now().toEpochMilli() - delayMillis; long detectorInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval - delayMillis); - - return new long[]{dataStartTime, dataEndTime}; + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval); + return new long[] { dataStartTime, dataEndTime }; } private void queryFilterValidation() { - long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); + long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED_FILTER); long dataEndTime = startEnd[1]; long 
dataStartTime = startEnd[0]; RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(dataStartTime) - .to(dataEndTime) - .format("epoch_millis"); + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis"); BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); - SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(internalFilterQuery).size(1).terminateAfter(1).timeout(requestTimeout); + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() + .query(internalFilterQuery) + .size(1) + .terminateAfter(1) + .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - // System.out.println("Query filter request: " + searchRequest.toString()); client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onQueryFilterSearch(searchResponse), - exception -> { - System.out.println("queryfilter exception: " + exception.getMessage()); - onFailure(exception); - } - ) - ); + .search( + searchRequest, + ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { onFailure(exception); }) + ); } private void onQueryFilterSearch(SearchResponse response) throws IOException { if (response.getHits().getTotalHits().value <= 0) { - String errorMsg = "query filter is potentially wrong as no hits were found at all. 
"; - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); + List filterError = new ArrayList<>(); + filterError + .add( + "query filter is potentially wrong as no hits were found at all or no historical data in last " + + NUM_OF_INTERVALS_CHECKED_FILTER + + " intervals" + ); + suggestedChangesMap.put("filter_query", filterError); validateAnomalyDetectorResponse(); } else { - featureQueryValidation(); - //getFieldMapping(); + long timestamp = (long) response.getHits().getHits()[0].getSourceAsMap().get("timestamp"); + featureQueryValidation(timestamp); } } - private void featureQueryValidation() throws IOException { + private void featureQueryValidation(long startTimeStamp) throws IOException { long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); long dataEndTime = startEnd[1]; long dataStartTime = startEnd[0]; + if (startEnd[0] > startTimeStamp) { + dataStartTime = startTimeStamp; + } IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(FEATURE_VALIDATION_TIME_BACK_MINUTES, ChronoUnit.MINUTES); - long searchRangeTime = Optional - .ofNullable(searchRange) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + long searchRangeTime = Optional.ofNullable(searchRange).map(t -> t.toDuration().toMillis()).orElse(0L); long startTimeWithSetTime = startEnd[1] - (searchRangeTime - delayMillis); + // Make sure start time includes timestamp seen by filter query check if (startEnd[0] > startTimeWithSetTime) { dataStartTime = startTimeWithSetTime; } AtomicInteger featureCounter = new AtomicInteger(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(dataStartTime) - .to(dataEndTime) - .format("epoch_millis") - 
.includeLower(true) - .includeUpper(false); + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); - XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); - List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); -// BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()) -// .must(QueryBuilders.boolQuery().filter(QueryBuilders.existsQuery(fieldNames.get(0)))); - BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQuery).filter(anomalyDetector.getFilterQuery()) - .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + List fieldNames = parseAggregationRequest(parser); + BoolQueryBuilder boolQuery2 = QueryBuilders + .boolQuery() + .filter(rangeQuery) + .filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - //System.out.println("search builder for each feature query: " + searchRequest.toString()); - client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> { - featureCounter.incrementAndGet(); - onFeatureAggregationValidation(searchResponse, feature, featureCounter); - }, - exception -> { - 
System.out.println(exception.getMessage()); - onFailure(exception); - } - ) - ); + client.search(searchRequest, ActionListener.wrap(searchResponse -> { + featureCounter.incrementAndGet(); + onFeatureAggregationValidation(searchResponse, feature, featureCounter); + }, exception -> onFailure(exception))); } - } } - private List parseAggregationRequest(XContentParser parser, int level, String aggName) throws IOException { + private List parseAggregationRequest(XContentParser parser) throws IOException { List fieldNames = new ArrayList<>(); - XContentParser.Token token = null; + XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { final String field = parser.currentName(); - switch (field) { - case "field": - parser.nextToken(); - fieldNames.add(parser.textOrNull()); - break; - default: - parser.skipChildren(); - break; - } - } + switch (field) { + case "field": + parser.nextToken(); + fieldNames.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; } + } + } return fieldNames; } - private void onFeatureAggregationValidation(SearchResponse response, Feature feature, AtomicInteger counter) throws IOException { - // System.out.println("response for each feature agg validation " + response.toString()); if (response.getHits().getTotalHits().value <= 0) { - String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); + String errorMsg = feature.getName() + ": feature query is potentially wrong as no hits were found"; + suggestedChangesMap.computeIfAbsent("feature_attributes", k -> new ArrayList<>()).add(errorMsg); } if (counter.get() == anomalyDetector.getFeatureAttributes().size()) { - // System.out.println("went into here"); - // System.out.println("feature name: " + feature.getName()); - if (!suggestedChanges.isEmpty()) { + if 
(!suggestedChangesMap.isEmpty()) { validateAnomalyDetectorResponse(); } else { - randomSamplingIntervalValidation(); + intervalValidation(); } - } - - } + private long[][] createNewTimeRange(long detectorInterval) { + long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; + long delayMillis = Optional + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval); + for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { + timeRanges[j][0] = dataStartTime + (j * detectorInterval); + timeRanges[j][1] = timeRanges[j][0] + detectorInterval; + } + return timeRanges; + } -// private void onFeatureAggregationValidation(SearchResponse response, Feature feature) throws IOException { -// //System.out.println("response for each feature agg validation " + response.toString()); -// Optional aggValue = Optional -// .ofNullable(response) -// .map(SearchResponse::getAggregations) -// .map(aggs -> aggs.asMap()) -// .map(map -> map.get(feature.getId())) -// .map(this::parseAggregation); -// if (Double.isNaN(aggValue.get()) || Double.isInfinite(aggValue.get())) { -// String errorMsg = "feature query is potentially wrong as no hits were found at all for feature " + feature.getName(); -// logger.warn(errorMsg); -// suggestedChanges.add(errorMsg); -// } -// } - - private synchronized void randomSamplingIntervalValidation() throws IOException { + private synchronized void intervalValidation() throws IOException { long detectorInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - //CountDownLatch latch = new CountDownLatch(Math.sqrt((double) MAX_INTERVAL_LENGTH / detectorInterval)); - - for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i *= 1.5) { - if 
(inferringInterval) { - break; - } - detectorInterval = i; - long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - long dataEndTime = Instant.now().toEpochMilli() - delayMillis; - long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); - for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { - timeRanges[j][0] = dataStartTime + (j * detectorInterval); - timeRanges[j][1] = timeRanges[j][0] + detectorInterval; - } - AtomicInteger listenerCounter = new AtomicInteger(); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + for (long i = detectorInterval; i <= MAX_INTERVAL_LENGTH; i *= INTERVAL_RECOMMENDATION_MULTIPLIER) { + long timeRanges[][] = createNewTimeRange(i); + // Need to try and check if the infering logic is done before calling on the method again + // with a new interval since otherwise the requests get mixed up try { if (inferAgain.get()) { - randomSamplingHelper(timeRanges, listenerCounter, i); + samplingHelper(timeRanges, i); } wait(); } catch (Exception ex) { - + onFailure(ex); } + } + } - System.out.println("value of i inside loop: " + i); + private List getFeatureFieldNames() throws IOException { + List featureFields = new ArrayList<>(); + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser); + featureFields.add(fieldNames.get(0)); } + return featureFields; } - private synchronized void randomSamplingHelper(long[][] 
timeRanges, AtomicInteger listenerCounter, long detectorInterval) throws IOException { + private synchronized void samplingHelper(long[][] timeRanges, long detectorInterval) throws IOException { inferAgain.set(false); - List featureFields = new ArrayList<>(); - for (Feature feature : anomalyDetector.getFeatureAttributes()) { - ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); - XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); - parser.nextToken(); - List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); - featureFields.add(fieldNames.get(0)); - } - // System.out.println("featureFields: " + featureFields); + List featureFields = getFeatureFieldNames(); MultiSearchRequest sr = new MultiSearchRequest(); - for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { + for (int i = 0; i < NUM_OF_INTERVAL_SAMPLES; i++) { long rangeStart = timeRanges[i][0]; long rangeEnd = timeRanges[i][1]; RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(rangeStart) - .to(rangeEnd) - .format("epoch_millis") - .includeLower(true) - .includeUpper(false); + .from(rangeStart) + .to(rangeEnd) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); BoolQueryBuilder qb = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()); - for (int j = 0 ; j < featureFields.size(); j++) { - qb.filter(QueryBuilders.existsQuery(featureFields.get(j) ) ) ; + for (int j = 0; j < featureFields.size(); j++) { + qb.filter(QueryBuilders.existsQuery(featureFields.get(j))); } -// BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) -// .filter(QueryBuilders.existsQuery(fieldNames.get(0))); SearchSourceBuilder internalSearchSourceBuilder = new 
SearchSourceBuilder().query(qb).size(1).terminateAfter(1); - SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); - if (i == 0) { - System.out.println("search request: " + searchRequest); + sr.add(new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder)); + } + client.multiSearch(sr, ActionListener.wrap(searchResponse -> { + if (doneInferring(detectorInterval, searchResponse)) { + checkWindowDelay(); } - sr.add(searchRequest); - } - // System.out.println("8 requests: " + sr.requests().toString()); - - client - .multiSearch( - sr, - ActionListener - .wrap( - searchResponse -> { -// savedMultiResponses.add(searchResponse); -// listenerCounter.incrementAndGet(); - - if (doneInferring(detectorInterval, searchResponse)) { - onRandomGetMultiResponse(); - } - }, - exception -> { - System.out.println(exception.getMessage()); - onFailure(exception); - } - ) - ); + }, exception -> onFailure(exception))); } - private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse){ - // System.out.println("number of responses in multiresponse: " + searchResponse.getResponses().length); - // System.out.println("detector interval: " + detectorInterval); - boolean firstCase = false; + + private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse) { long originalInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - if (detectorInterval == originalInterval) { - System.out.println("went into first case"); - firstCase = true; - } - if (detectorInterval >= MAX_INTERVAL_LENGTH) { - suggestedChanges.add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); - System.out.println("went into max check"); - inferAgain.set(false); - return true; - } - String errorMsg = ""; - final AtomicInteger 
hitCounter = new AtomicInteger(); - for (MultiSearchResponse.Item item : searchResponse) { - SearchResponse response = item.getResponse(); - if (response.getHits().getTotalHits().value > 0) { - hitCounter.incrementAndGet(); - } + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + final AtomicInteger hitCounter = new AtomicInteger(); + for (MultiSearchResponse.Item item : searchResponse) { + SearchResponse response = item.getResponse(); + if (response.getHits().getTotalHits().value > 0) { + hitCounter.incrementAndGet(); } - inferAgain.set(true); - notify(); - //System.out.println("Hit counter before last validation: " + hitCounter.get()); - // System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); - if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { + } + inferAgain.set(true); + notify(); + if (hitCounter.doubleValue() / (double) NUM_OF_INTERVAL_SAMPLES < SAMPLE_SUCCESS_RATE) { + if ((detectorInterval * INTERVAL_RECOMMENDATION_MULTIPLIER) >= MAX_INTERVAL_LENGTH) { + suggestedChangesMap + .computeIfAbsent("detection_interval", k -> new ArrayList<>()) + .add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); + } else { return false; - } else if (!firstCase){ - String suggestion = "detector interval: " + detectorInterval; - suggestedChanges.add(suggestion); - inferAgain.set(false); - return true; } + } else if (detectorInterval != originalInterval) { + suggestedChangesMap.computeIfAbsent("detection_interval", k -> new ArrayList<>()).add(Long.toString(detectorInterval)); + inferAgain.set(false); + return true; + } inferAgain.set(false); return true; } - private void onRandomGetMultiResponse() { - checkWindowDelay(); - } - - -// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long detectorInterval) { -// boolean 
firstCase = false; -// long originalInterval = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// if (detectorInterval == originalInterval) { -// System.out.println("went into first case"); -// firstCase = true; -// } -// for (String f: savedResponsesToFeature.keySet()) { -// String errorMsg = ""; -// final AtomicInteger hitCounter = new AtomicInteger(); -// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { -// SearchResponse response = item.getResponse(); -// if (response.getHits().getTotalHits().value > 0) { -// hitCounter.incrementAndGet(); -// } -// } -// System.out.println("Hit counter before last validation: " + hitCounter.get()); -// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); -// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { -// featureIntervalValidation.put(f, false); -//// errorMsg += "data is too sparse with this interval for feature: " + f; -//// logger.warn(errorMsg); -//// suggestedChanges.add(errorMsg); -// -//// -// } else { -// if (!firstCase) { -// featureValidTimerecommendation.put(f, detectorInterval); -// } -// featureIntervalValidation.put(f, true); -// } -// } -// System.out.println("1: " + featureIntervalValidation); -// -// if (!featureIntervalValidation.containsValue((false)) && firstCase && !inferringInterval) { -// inferringInterval = true; -// System.out.println("BEFORE WINDOW DELAY CALL 1"); -// System.out.println(featureIntervalValidation); -// checkWindowDelay(); -// } else if (!featureIntervalValidation.containsValue((false)) && !inferringInterval) { -// for (String featureName : featureValidTimerecommendation.keySet()) { -// String suggestion = featureName + ": " + featureValidTimerecommendation.get(featureName).toString(); -// suggestedChanges.add(suggestion); -// } -// inferringInterval = true; -// 
System.out.println("BEFORE WINDOW DELAY CALL 2"); -// System.out.println(featureIntervalValidation); -// checkWindowDelay(); -// } else if (detectorInterval >= MAX_INTERVAL_LENGTH && featureIntervalValidation.containsValue(false) && !inferringInterval) { -// for (String featureName : featureIntervalValidation.keySet()) { -// if (!featureIntervalValidation.get(featureName)) { -// String doneInferring = "failed to infer max up too: " + MAX_INTERVAL_LENGTH + "for feature: " + featureName; -// suggestedChanges.add(doneInferring); -// } else { -// for (String featureNameRecc : featureValidTimerecommendation.keySet()) { -// String suggestion = featureName + ": " + featureValidTimerecommendation.get(featureNameRecc).toString(); -// suggestedChanges.add(suggestion); -// } -// } -// } -// inferringInterval = true; -// System.out.println("BEFORE WINDOW DELAY CALL 3"); -// checkWindowDelay(); -// } else if (detectorInterval >= MAX_INTERVAL_LENGTH && !inferringInterval) { -// inferringInterval = true; -// System.out.println("BEFORE WINDOW DELAY CALL 34"); -// checkWindowDelay(); -// } else { -// System.out.println("hello"); -// System.out.println(featureIntervalValidation); -// } -// } - - //long detectorInterval = intervalTime * 2; - -//// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; -//// long delayMillis = Optional -//// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -//// .map(t -> t.toDuration().toMillis()) -//// .orElse(0L); -//// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; -//// long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); -//// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { -//// timeRanges[j][0] = dataStartTime + (j * detectorInterval); -//// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; -//// } - -// boolean valid = false; -// for (long i = detectorInterval; i < MAX_INTERVAL_LENGTH; i*=2) { -// detectorInterval = i; -// 
timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; -// long delayMillis = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; -// dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); -// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { -// timeRanges[j][0] = dataStartTime + (j * detectorInterval); -// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; -// } -// randomSamplingHelper(timeRanges, feature, listenerCounter, i); -// if (featureValidTime.containsKey(feature.getName())) { -// break; -// } -// } - - // private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter, long intervalTime) throws IOException { -// ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); -// XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); -// parser.nextToken(); -// List fieldNames = parseAggregationRequest(parser, 0, feature.getAggregation().getName()); -// //Random rand = new Random(); -// MultiSearchRequest sr = new MultiSearchRequest(); -// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { -//// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); -//// long RandomRangeStart = timeRanges[randIndex][0]; -//// long RandomRangeEnd = timeRanges[randIndex][1]; -// long rangeStart = timeRanges[i][0]; -// long rangeEnd = timeRanges[i][1]; -// RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) -// .from(rangeStart) -// .to(rangeEnd) -// .format("epoch_millis") -// .includeLower(true) -// .includeUpper(false); -// BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) -// 
.filter(QueryBuilders.existsQuery(fieldNames.get(0))); -// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); -// SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); -// sr.add(searchRequest); -// } -// // System.out.println("8 requests: " + sr.requests().toString()); -// client -// .multiSearch( -// sr, -// ActionListener -// .wrap( -// searchResponse -> { -// savedResponsesToFeature.put(feature.getName(), searchResponse); -// listenerCounter.incrementAndGet(); -// if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { -// onRandomGetMultiResponse(searchResponse, feature, listenerCounter, intervalTime); -// } -// }, -// exception -> { -// System.out.println(exception.getMessage()); -// onFailure(exception); -// } -// ) -// ); -// } - -// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter, long intervalTime) -// throws IOException { -// if (intervalTime >= MAX_INTERVAL_LENGTH) { -// return; -// } -// for (String f: savedResponsesToFeature.keySet()) { -// String errorMsg = ""; -// final AtomicInteger hitCounter = new AtomicInteger(); -// //System.out.println("feature name out of all feature loop: " + f); -// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { -// SearchResponse response = item.getResponse(); -// // System.out.println("each response for feature, " + f + ": " + response.toString()); -// if (response.getHits().getTotalHits().value > 0) { -// hitCounter.incrementAndGet(); -// } -// } -//// System.out.println("Hit counter before last validation: " + hitCounter.get()); -//// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); -// -// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { -// featureIntervalValidation.put(f, 
false); -// errorMsg += "data is too sparse with this interval for feature: " + f; -// logger.warn(errorMsg); -// suggestedChanges.add(errorMsg); -// } else { -// featureValidTime.put(f, intervalTime); -// featureIntervalValidation.put(f, true); -// } -// } -// if (!featureIntervalValidation.containsValue(false)) { -// return; -// } -// System.out.println("valid time" + featureValidTime); -// if (featureValidTime.keySet().size() == anomalyDetector.getFeatureAttributes().size()) { -// featureIntervalValidation.put(feature.getName(), false); -// } -// if (featureIntervalValidation.containsValue(false)) { -// long detectorInterval = intervalTime * 2; -// long[][] timeRanges = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; -// long delayMillis = Optional -// .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) -// .map(t -> t.toDuration().toMillis()) -// .orElse(0L); -// long dataEndTime = Instant.now().toEpochMilli() - delayMillis; -// long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval - delayMillis); -// for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { -// timeRanges[j][0] = dataStartTime + (j * detectorInterval); -// timeRanges[j][1] = timeRanges[j][0] + detectorInterval; -// } -// randomSamplingHelper(timeRanges, feature, listnerCounter, detectorInterval); -// } -// if (featureValidTime.keySet().size() != anomalyDetector.getFeatureAttributes().size()) { -// validateAnomalyDetectorResponse(); -// } else { -// checkWindowDelay(); -// } -// } - -// private void randomSamplingHelper(long[][] timeRanges, Feature feature, AtomicInteger listenerCounter) throws IOException { -// ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); -// XContentParser parser = XContentType.JSON.xContent().createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); -// parser.nextToken(); -// List fieldNames = parseAggregationRequest(parser, 0, 
feature.getAggregation().getName()); -// //Random rand = new Random(); -// MultiSearchRequest sr = new MultiSearchRequest(); -// for (int i = 0; i < NUM_OF_RANDOM_SAMPLES; i++) { -//// int randIndex = rand.nextInt(MAX_NUM_OF_SAMPLES_VIEWED - 1); -//// long RandomRangeStart = timeRanges[randIndex][0]; -//// long RandomRangeEnd = timeRanges[randIndex][1]; -// long rangeStart = timeRanges[i][0]; -// long rangeEnd = timeRanges[i][1]; -// RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) -// .from(rangeStart) -// .to(rangeEnd) -// .format("epoch_millis") -// .includeLower(true) -// .includeUpper(false); -// BoolQueryBuilder boolQuery2 = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()) -// .filter(QueryBuilders.existsQuery(fieldNames.get(0))); -// SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); -// SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); -// sr.add(searchRequest); -// } -// // System.out.println("8 requests: " + sr.requests().toString()); -// client -// .multiSearch( -// sr, -// ActionListener -// .wrap( -// searchResponse -> { -// savedResponsesToFeature.put(feature.getName(), searchResponse); -// listenerCounter.incrementAndGet(); -// if (listenerCounter.get() >= anomalyDetector.getFeatureAttributes().size()) { -// onRandomGetMultiResponse(searchResponse, feature, listenerCounter); -// } -// }, -// exception -> { -// System.out.println(exception.getMessage()); -// onFailure(exception); -// } -// ) -// ); -// } -// -// private void onRandomGetMultiResponse(MultiSearchResponse multiSearchResponse, Feature feature, AtomicInteger listnerCounter) { -// for (String f: savedResponsesToFeature.keySet()) { -// String errorMsg = ""; -// final AtomicInteger hitCounter = new AtomicInteger(); -// //System.out.println("feature name out of all 
feature loop: " + f); -// for (MultiSearchResponse.Item item : savedResponsesToFeature.get(f)) { -// SearchResponse response = item.getResponse(); -// // System.out.println("each response for feature, " + f + ": " + response.toString()); -// if (response.getHits().getTotalHits().value > 0) { -// hitCounter.incrementAndGet(); -// } -// } -// System.out.println("Hit counter before last validation: " + hitCounter.get()); -// System.out.println("successRate: " + hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES); -// -// if (hitCounter.doubleValue() / (double) NUM_OF_RANDOM_SAMPLES < SAMPLE_SUCCESS_RATE) { -// -// featureIntervalValidation.put(f, false); -// errorMsg += "data is too sparse with this interval for feature: " + f; -// logger.warn(errorMsg); -// suggestedChanges.add(errorMsg); -// } else { -// featureIntervalValidation.put(f, true); -// } -// } -// // System.out.println("validateIntervalMap: " + featureIntervalValidation.toString()); -// if (featureIntervalValidation.containsValue(false)) { -// validateAnomalyDetectorResponse(); -// } else { -// checkWindowDelay(); -// } -// } - private void checkWindowDelay() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(1).sort(new FieldSortBuilder("timestamp").order(SortOrder.DESC)); + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder("timestamp").order(SortOrder.DESC)); SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - client.search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception))); + client + .search( + searchRequest, + ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception)) + ); } private 
Optional getLatestDataTime(SearchResponse searchResponse) { - //System.out.println(searchResponse.toString()); Optional x = Optional - .ofNullable(searchResponse) - .map(SearchResponse::getAggregations) - .map(aggs -> aggs.asMap()) - .map(map -> (Max) map.get(AGG_NAME_MAX)) - .map(agg -> (long) agg.getValue()); - //System.out.println("after parsing the max timestamp to long: " + x.get()); + .ofNullable(searchResponse) + .map(SearchResponse::getAggregations) + .map(aggs -> aggs.asMap()) + .map(map -> (Max) map.get(AGG_NAME_MAX)) + .map(agg -> (long) agg.getValue()); return x; } private void checkDelayResponse(Optional lastTimeStamp) { long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get()); - long windowDelayMins = TimeUnit.MILLISECONDS.toMinutes(delayMillis); - String errorMsg = "window-delay given is too short, and last seen timestamp is " + minutesSinceLastStamp + - " minutes ago " + "and the window-delay given is only of " + windowDelayMins + " minutes"; - logger.warn(errorMsg); - suggestedChanges.add(errorMsg); + suggestedChangesMap.computeIfAbsent("window_delay", k -> new ArrayList<>()).add(Long.toString(minutesSinceLastStamp)); } validateAnomalyDetectorResponse(); } private void validateAnomalyDetectorResponse() { - this.responseValidate.setFailures(failures); - this.responseValidate.setSuggestedChanges(suggestedChanges); + this.responseValidate.setFailures(failuresMap); + this.responseValidate.setSuggestedChanges(suggestedChangesMap); try { BytesRestResponse restResponse = new BytesRestResponse(RestStatus.OK, 
responseValidate.toXContent(channel.newBuilder())); channel.sendResponse(restResponse); @@ -939,48 +582,9 @@ private void onCreateMappingsResponse(CreateIndexResponse response) throws IOExc } else { logger.warn("Created {} with mappings call not acknowledged.", ANOMALY_DETECTORS_INDEX); channel - .sendResponse( - new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, response.toXContent(channel.newErrorBuilder(), EMPTY_PARAMS)) - ); - } - } - - private double parseAggregation(Aggregation aggregation) { - Double result = null; - if (aggregation instanceof NumericMetricsAggregation.SingleValue) { - result = ((NumericMetricsAggregation.SingleValue) aggregation).value(); - } else if (aggregation instanceof InternalTDigestPercentiles) { - Iterator percentile = ((InternalTDigestPercentiles) aggregation).iterator(); - if (percentile.hasNext()) { - result = percentile.next().getValue(); - } + .sendResponse( + new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, response.toXContent(channel.newErrorBuilder(), EMPTY_PARAMS)) + ); } - return Optional.ofNullable(result).orElseThrow(() -> new IllegalStateException("Failed to parse aggregation " + aggregation)); - } - private void getFieldMapping() { - GetMappingsRequest request = new GetMappingsRequest().indices(anomalyDetector.getIndices().get(0)); - - - adminClient - .indices().getMappings( - request, - ActionListener - .wrap( - response -> checkFieldIndex(response), - exception -> onFailure(exception) - ) - ); - } - - private void checkFieldIndex(GetMappingsResponse response) { - System.out.println(response); - System.out.println("response mapping: " + response.mappings().get("feature-1")); -// Optional x = Optional -// .ofNullable(response) -// .map(SearchResponse::get) -// .map(aggs -> aggs.asMap()) -// .map(map -> (Max) map.get(AGG_NAME_MAX)) -// .map(agg -> (long) agg.getValue()); } } - diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java deleted file mode 100644 index 1f1d6d7b..00000000 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/util/MultiSearchResponseDelegateActionListener.java +++ /dev/null @@ -1,112 +0,0 @@ -//package com.amazon.opendistroforelasticsearch.ad.util; -// -//import com.amazon.opendistroforelasticsearch.ad.model.Mergeable; -//import org.apache.logging.log4j.LogManager; -//import org.apache.logging.log4j.Logger; -//import org.elasticsearch.action.ActionListener; -// -//import java.util.ArrayList; -//import java.util.Collections; -//import java.util.List; -//import java.util.Locale; -//import java.util.concurrent.atomic.AtomicInteger; -// -///* -// * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. -// * -// * Licensed under the Apache License, Version 2.0 (the "License"). -// * You may not use this file except in compliance with the License. -// * A copy of the License is located at -// * -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * or in the "license" file accompanying this file. This file is distributed -// * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -// * express or implied. See the License for the specific language governing -// * permissions and limitations under the License. 
-// */ -// -///** -// * A listener wrapper to help send multiple requests asynchronously and return one final responses together -// */ -//public class MultiSearchResponsesDelegateActionListener implements ActionListener { -// private static final Logger LOG = LogManager.getLogger(MultiResponsesDelegateActionListener.class); -// private final ActionListener delegate; -// private final AtomicInteger collectedResponseCount; -// private final int maxResponseCount; -// // save responses from multiple requests -// private final List savedResponses; -// private List exceptions; -// private String finalErrorMsg; -// -// public MultiSearchResponsesDelegateActionListener(ActionListener delegate, int maxResponseCount, String finalErrorMsg) { -// this.delegate = delegate; -// this.collectedResponseCount = new AtomicInteger(0); -// this.maxResponseCount = maxResponseCount; -// this.savedResponses = Collections.synchronizedList(new ArrayList()); -// this.exceptions = Collections.synchronizedList(new ArrayList()); -// this.finalErrorMsg = finalErrorMsg; -// } -// -// @Override -// public void onResponse(T response) { -// try { -// if (response != null) { -// this.savedResponses.add(response); -// } -// } finally { -// // If expectedResponseCount == 0 , collectedResponseCount.incrementAndGet() will be greater than expectedResponseCount -// if (collectedResponseCount.incrementAndGet() >= maxResponseCount) { -// finish(); -// } -// } -// -// } -// -// @Override -// public void onFailure(Exception e) { -// LOG.error(e); -// try { -// this.exceptions.add(e.getMessage()); -// } finally { -// // no matter the asynchronous request is a failure or success, we need to increment the count. -// // We need finally here to increment the count when there is a failure. 
-// if (collectedResponseCount.incrementAndGet() >= maxResponseCount) { -// finish(); -// } -// } -// } -// -// private void finish() { -// if (this.exceptions.size() == 0) { -// if (savedResponses.size() == 0) { -// this.delegate.onFailure(new RuntimeException("No response collected")); -// } else { -// T response0 = savedResponses.get(0); -// for (int i = 1; i < savedResponses.size(); i++) { -// response0.merge(savedResponses.get(i)); -// } -// this.delegate.onResponse(savedResponses); -// } -// } else { -// this.delegate.onFailure(new RuntimeException(String.format(Locale.ROOT, finalErrorMsg + " Exceptions: %s", exceptions))); -// } -// } -// -// public void failImmediately(Exception e) { -// this.delegate.onFailure(new RuntimeException(finalErrorMsg, e)); -// } -// -// public void failImmediately(String errMsg) { -// this.delegate.onFailure(new RuntimeException(errMsg)); -// } -// -// public void failImmediately(String errMsg, Exception e) { -// this.delegate.onFailure(new RuntimeException(errMsg, e)); -// } -// -// public void respondImmediately(T o) { -// this.delegate.onResponse(o); -// } -//} -// diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java index 48e80cc2..dc56ab51 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java @@ -15,8 +15,11 @@ package com.amazon.opendistroforelasticsearch.ad.e2e; +import static org.apache.http.entity.ContentType.APPLICATION_JSON; + import java.io.File; import java.io.FileReader; +import java.io.IOException; import java.time.Instant; import java.time.format.DateTimeFormatter; import java.time.temporal.ChronoUnit; @@ -29,13 +32,21 @@ import java.util.Map.Entry; import java.util.Set; +import org.apache.http.HttpEntity; +import 
org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import com.amazon.opendistroforelasticsearch.ad.ODFERestTestCase; +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import com.google.common.collect.ImmutableMap; import com.google.gson.JsonArray; +import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import com.google.gson.JsonPrimitive; public class DetectionResultEvalutationIT extends ODFERestTestCase { @@ -43,6 +54,230 @@ public void testDataset() throws Exception { verifyAnomaly("synthetic", 1, 1500, 8, .9, .9, 10); } + protected HttpEntity toHttpEntity(String jsonString) throws IOException { + return new StringEntity(jsonString, APPLICATION_JSON); + } + + public void testNoHistoricalData() throws Exception { + RestClient client = client(); + List data = createData(10, 7776001000L); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":35,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + 
toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 1); + assertTrue(failuresMap.containsKey("others")); + } + + public void testValidationIntervalRecommendation() throws Exception { + RestClient client = client(); + List data = createData(300, 1800000); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":35,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + 
assertTrue(suggestionsMap.containsKey("detection_interval")); + } + + public void testValidationWindowDelayRecommendation() throws Exception { + RestClient client = client(); + List data = createData(1000, 120000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 10 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("window_delay")); + } + + public void testValidationFilterQuery() throws Exception { + RestClient client = client(); + List data = createData(1000, 6000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + 
"{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," + + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":{\"sum\":{\"field\":\"Feature2\"}}}}]," + + "\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"value\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("filter_query")); + } + + public void testValidationFeatureQuery() throws Exception { + RestClient client = client(); + List data = createData(1000, 6000); + indexTrainData("validation", data, 1000, client); + indexTestData(data, "validation", 1000, client); + String requestBody = String + .format( + Locale.ROOT, + "{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," + + "{\"feature_name\":\"feature 
2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":{\"sum\":{\"field\":\"Feature5\"}}}}]," + + "\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"Feature1\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + 1 + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 1); + assertTrue(suggestionsMap.containsKey("feature_attributes")); + } + + public void testValidationWithDataSetSuccess() throws Exception { + RestClient client = client(); + List data = createData(300, 60000); + indexTrainData("validation", data, 1500, client); + indexTestData(data, "validation", 1500, client); + String requestBody = String + .format( + Locale.ROOT, + "{ \"name\": \"test\", \"description\": \"test\", \"time_field\": \"timestamp\"" + + ", \"indices\": [\"validation\"], \"feature_attributes\": [{ \"feature_name\": \"feature 1\", \"feature_enabled\": " + + "\"true\", \"aggregation_query\": { \"Feature1\": { \"sum\": { \"field\": \"Feature1\" } } } }, { \"feature_name\"" + + ": \"feature 2\", \"feature_enabled\": \"true\", \"aggregation_query\": { \"Feature2\": { \"sum\": { \"field\": " + + "\"Feature2\" } } } }], \"detection_interval\": { \"period\": { \"interval\": %d, \"unit\": \"Minutes\" } }" + + ",\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}", + 1 + ); + 
Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(requestBody), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + @SuppressWarnings("unchecked") + Map>> suggestionsMap = (Map>>) XContentMapValues + .extractValue("suggestedChanges", responseMap); + assertTrue(failuresMap.keySet().size() == 0); + assertTrue(suggestionsMap.keySet().size() == 0); + } + private void verifyAnomaly( String datasetName, int intervalMinutes, @@ -227,7 +462,7 @@ private List> getAnomalyWindows(String labalFileName) th private void indexTrainData(String datasetName, List data, int trainTestSplit, RestClient client) throws Exception { Request request = new Request("PUT", datasetName); String requestBody = "{ \"mappings\": { \"properties\": { \"timestamp\": { \"type\": \"date\"}," - + " \"Feature1\": { \"type\": \"double\" }, \"Feature2\": { \"type\": \"double\" } } } }"; + + " \"Feature1\": { \"type\": \"long\" }, \"Feature2\": { \"type\": \"long\" } } } }"; request.setJsonEntity(requestBody); client.performRequest(request); Thread.sleep(1_000); @@ -253,6 +488,21 @@ private List getData(String datasetFileName) throws Exception { return list; } + private List createData(int numOfDataPoints, long detectorIntervalMS) { + List list = new ArrayList<>(); + for (int i = 1; i < numOfDataPoints; i++) { + long valueFeature1 = randomLongBetween(1, 10000000); + long valueFeature2 = randomLongBetween(1, 10000000); + JsonObject obj = new JsonObject(); + JsonElement element = new JsonPrimitive(Instant.now().toEpochMilli() - (detectorIntervalMS * i)); + obj.add("timestamp", element); + obj.add("Feature1", new JsonPrimitive(valueFeature1)); + obj.add("Feature2", new JsonPrimitive(valueFeature2)); + list.add(obj); + } + return list; + } + private Map getDetectionResult(String 
detectorId, Instant begin, Instant end, RestClient client) { try { Request request = new Request("POST", String.format("/_opendistro/_anomaly_detection/detectors/%s/_run", detectorId)); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java index af079ab3..e06bd8dc 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -25,6 +25,7 @@ import java.io.IOException; import java.time.Instant; +import java.util.List; import java.util.Map; import org.apache.http.entity.ContentType; @@ -33,6 +34,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -83,6 +85,101 @@ public void testCreateAnomalyDetectorWithEmptyIndices() throws Exception { ); } + public void testValidateAnomalyDetectorWithEmptyIndices() throws Exception { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null); + TestHelpers + .makeRequest( + client(), + "PUT", + "/" + detector.getIndices().get(0), + ImmutableMap.of(), + toHttpEntity( + "{\"settings\":{\"number_of_shards\":1},\"mappings\":{\"properties\":{\"field1\":" + + "{\"type\":\"text\"},\"" + + detector.getTimeField() + + "\":{\"type\":\"date\",\"format\":\"yyyy-MM-dd HH:mm:ss||yyyy-MM-dd||epoch_millis\"}}}}" + ), + null + ); + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity(detector), + null + ); + 
Map responseMap = entityAsMap(resp); + System.out.println(responseMap); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + assertTrue(failuresMap.containsKey("others")); + System.out.println(failuresMap.get("others").get(0)); + assertEquals( + "Can't create anomaly detector as no document found in indices: [" + detector.getIndices().get(0) + "]", + failuresMap.get("others").get(0) + ); + } + + public void testValidateMissingNameAndTimeFieldFailure() throws Exception { + Response resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity( + "{\"description\":\"Test detector\",\"indices\":[\"test-index-sparse\"],\"feature_attributes\":[{\"feature_name\":\"total_order\",\"feature_enabled\":true,\"aggregation_query\":{\"total_order\":{\"max\":{\"field\":\"feature-1\"}}}},{\"feature_name\":\"second_feature\",\"feature_enabled\":true,\"aggregation_query\":{\"total\":{\"max\":{\"field\":\"feature-2\"}}}}],\"detection_interval\":{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}}}" + ), + null + ); + Map responseMap = entityAsMap(resp); + System.out.println(responseMap); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + assertTrue(failuresMap.containsKey("missing")); + assertTrue(failuresMap.keySet().size() == 1); + assertTrue(failuresMap.get("missing").size() == 2); + assertEquals("name", failuresMap.get("missing").get(0)); + assertEquals("time_field", failuresMap.get("missing").get(1)); + } + + public void testValidateAnomalyDetectorWithDuplicateName() throws Exception { + AnomalyDetector detector = createRandomAnomalyDetector(true, true); + TestHelpers.createIndex(client(), "test-index", toHttpEntity("{\"timestamp\": " + Instant.now().toEpochMilli() + "}")); + Response 
resp = TestHelpers + .makeRequest( + client(), + "POST", + TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", + ImmutableMap.of(), + toHttpEntity( + "{\"name\":\"" + + detector.getName() + + "\",\"description\":\"Test detector\",\"time_field\":\"timestamp\"," + + "\"indices\":[\"test-index\"],\"feature_attributes\":[{\"feature_name\":\"totssal\",\"" + + "feature_enabled\":true,\"aggregation_query\":{\"totalquery\":{\"max\":{\"field\":\"feature-3\"}}}}," + + "{\"feature_name\":\"totaly\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"totalqusery\":" + + "{\"max\":{\"field\":\"feature-1\"}}}}],\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":" + + "{\"field\":" + + "\"feature-4\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}},\"detection_interval\":" + + "{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}" + ), + null + ); + Map responseMap = entityAsMap(resp); + @SuppressWarnings("unchecked") + Map>> failuresMap = (Map>>) XContentMapValues + .extractValue("failures", responseMap); + assertTrue(failuresMap.containsKey("duplicates")); + assertTrue(failuresMap.keySet().size() == 1); + assertEquals(detector.getName(), failuresMap.get("duplicates").get(0)); + } + public void testCreateAnomalyDetectorWithDuplicateName() throws Exception { AnomalyDetector detector = createRandomAnomalyDetector(true, true); From 8ade35d61cea821a72a8420922715edc391ebf70 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Wed, 19 Aug 2020 15:30:01 -0700 Subject: [PATCH 12/20] fixed style issue for build --- .../ad/e2e/DetectionResultEvalutationIT.java | 17 +++++++++++------ .../ad/rest/AnomalyDetectorRestApiIT.java | 11 +++++++++-- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java index dc56ab51..f85b8345 
100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/e2e/DetectionResultEvalutationIT.java @@ -179,9 +179,11 @@ public void testValidationFilterQuery() throws Exception { "{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," - + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":{\"sum\":{\"field\":\"Feature2\"}}}}]," - + "\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"value\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," - + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":" + + "{\"sum\":{\"field\":\"Feature2\"}}}}],\"filter_query\":{\"bool\":" + + "{\"filter\":[{\"exists\":{\"field\":\"value\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}}" + + ",\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", 1 ); Response resp = TestHelpers @@ -216,9 +218,12 @@ public void testValidationFeatureQuery() throws Exception { "{\"name\":\"test\",\"description\":\"Test\",\"time_field\":\"timestamp\"," + "\"indices\":[\"validation\"],\"feature_attributes\":[{\"feature_name\":\"feature 1\"" + ",\"feature_enabled\":true,\"aggregation_query\":{\"Feature1\":{\"sum\":{\"field\":\"Feature1\"}}}}," - + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":{\"Feature2\":{\"sum\":{\"field\":\"Feature5\"}}}}]," - + 
"\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"Feature1\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}}," - + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", + + "{\"feature_name\":\"feature 2\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"Feature2\":{\"sum\":{\"field\":\"Feature5\"}}}}]," + + "\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":{\"field\":\"Feature1\",\"boost\":1}}]," + + "\"adjust_pure_negative\":true,\"boost\":1}}," + + "\"detection_interval\":{\"period\":{\"interval\": %d,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}}", 1 ); Response resp = TestHelpers diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java index e06bd8dc..555ca7a1 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -131,7 +131,13 @@ public void testValidateMissingNameAndTimeFieldFailure() throws Exception { TestHelpers.AD_BASE_DETECTORS_URI + "/_validate", ImmutableMap.of(), toHttpEntity( - "{\"description\":\"Test detector\",\"indices\":[\"test-index-sparse\"],\"feature_attributes\":[{\"feature_name\":\"total_order\",\"feature_enabled\":true,\"aggregation_query\":{\"total_order\":{\"max\":{\"field\":\"feature-1\"}}}},{\"feature_name\":\"second_feature\",\"feature_enabled\":true,\"aggregation_query\":{\"total\":{\"max\":{\"field\":\"feature-2\"}}}}],\"detection_interval\":{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}}}" + "{\"description\":\"Test detector\",\"indices\":[\"test-index-sparse\"]," + + 
"\"feature_attributes\":[{\"feature_name\":\"total_order\",\"feature_enabled\":true," + + "\"aggregation_query\":{\"total_order\":{\"max\":{\"field\":\"feature-1\"}}}}," + + "{\"feature_name\":\"second_feature\",\"feature_enabled\":true,\"aggregation_query\":" + + "{\"total\":{\"max\":{\"field\":\"feature-2\"}}}}],\"detection_interval\":{\"period\":" + + "{\"interval\":70,\"unit\":\"Minutes\"}},\"window_delay\":" + + "{\"period\":{\"interval\":70,\"unit\":\"Minutes\"}}}" ), null ); @@ -167,7 +173,8 @@ public void testValidateAnomalyDetectorWithDuplicateName() throws Exception { + "{\"max\":{\"field\":\"feature-1\"}}}}],\"filter_query\":{\"bool\":{\"filter\":[{\"exists\":" + "{\"field\":" + "\"feature-4\",\"boost\":1}}],\"adjust_pure_negative\":true,\"boost\":1}},\"detection_interval\":" - + "{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}},\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}" + + "{\"period\":{\"interval\":1,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":2,\"unit\":\"Minutes\"}}}" ), null ); From 2f79363d0f3eddc759889f4979789b560af30471 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Mon, 31 Aug 2020 09:06:40 -0700 Subject: [PATCH 13/20] fixed all changes from CR and added check for field type --- .../ad/AnomalyDetectorPlugin.java | 4 +- .../ad/feature/SearchFeatureDao.java | 8 +- .../ad/model/AnomalyDetector.java | 1 + .../ad/model/ValidationFailures.java | 25 + .../ad/model/ValidationSuggestedChanges.java | 25 + .../ValidateAnomalyDetectorActionHandler.java | 545 +++++++++++------- .../AnomalyResultTransportAction.java | 4 - 7 files changed, 376 insertions(+), 236 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java index bc0bb007..6d79e451 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/AnomalyDetectorPlugin.java @@ -211,7 +211,7 @@ public List getRestHandlers( anomalyDetectionIndices ); RestSearchAnomalyDetectorAction searchAnomalyDetectorAction = new RestSearchAnomalyDetectorAction(); - RestValidateAnomalyDetectorAction restValidateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( + RestValidateAnomalyDetectorAction validateAnomalyDetectorAction = new RestValidateAnomalyDetectorAction( settings, anomalyDetectionIndices, xContentRegistry @@ -238,7 +238,7 @@ public List getRestHandlers( .of( restGetAnomalyDetectorAction, restIndexAnomalyDetectorAction, - restValidateAnomalyDetectorAction, + validateAnomalyDetectorAction, searchAnomalyDetectorAction, searchAnomalyResultAction, deleteAnomalyDetectorAction, diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java index fd9e8192..7b316bd7 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/feature/SearchFeatureDao.java @@ -121,17 +121,12 @@ public void getLatestDataTime(AnomalyDetector detector, ActionListener getLatestDataTime(SearchResponse searchResponse) { - - Optional x = Optional + return Optional .ofNullable(searchResponse) .map(SearchResponse::getAggregations) .map(aggs -> aggs.asMap()) .map(map -> (Max) map.get(AGG_NAME_MAX)) .map(agg -> (long) agg.getValue()); - - System.out.println("windowDelay optional: " + x.toString()); - System.out.println("windowDelay optional without .toString(): " + x); - return x; } /** @@ -560,7 +555,6 @@ private SearchRequest 
createPreviewSearchRequest(AnomalyDetector detector, List< } private Optional parseBucket(InternalDateRange.Bucket bucket, List featureIds) { - return parseAggregations(Optional.ofNullable(bucket).map(b -> b.getAggregations()), featureIds); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index 78bf9302..c148bc62 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -188,6 +188,7 @@ public AnomalyDetector( this.schemaVersion = schemaVersion; this.lastUpdateTime = lastUpdateTime; this.validation = validation; + this.shingleSize = 8; } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java new file mode 100644 index 00000000..1f3cff95 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java @@ -0,0 +1,25 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +public enum ValidationFailures { + MISSING("missing"), + OTHERS("others"), + FIELD_TYPE("field_type"), + FORMAT("format"), + DUPLICATES("duplicates"); + + private String name; + + /** + * Get stat name + * + * @return name + */ + public String getName() { + return name; + } + + ValidationFailures(String name) { + this.name = name; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java new file mode 100644 index 00000000..355ea2bc --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java 
@@ -0,0 +1,25 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +public enum ValidationSuggestedChanges { + OTHERS("others"), + FILTER_QUERY("filter_query"), + FEATURE_ATTRIBUTES("feature_attributes"), + DETECTION_INTERVAL("detection_interval"), + WINDOW_DELAY("window_delay"); + + private String name; + + /** + * Get stat name + * + * @return name + */ + public String getName() { + return name; + } + + ValidationSuggestedChanges(String name) { + this.name = name; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index d2f97b67..a0141c31 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -16,9 +16,9 @@ package com.amazon.opendistroforelasticsearch.ad.rest.handler; import static com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector.ANOMALY_DETECTORS_INDEX; -import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import java.io.IOException; +import java.text.ParseException; import java.time.Instant; import java.time.temporal.ChronoUnit; import java.util.ArrayList; @@ -27,19 +27,25 @@ import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.TreeMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationSuggestedChanges; +import com.amazon.opendistroforelasticsearch.ad.model.TimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationFailures; import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import 
org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.AdminClient; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; @@ -84,9 +90,11 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler protected static final long HISTORICAL_CHECK_IN_MS = 7776000000L; protected static final String NAME_REGEX = "[a-zA-Z0-9._-]+"; protected static final double INTERVAL_RECOMMENDATION_MULTIPLIER = 1.2; + protected static final String[] numericType = {"long", "integer", "short", "double", "float"}; private final AnomalyDetectionIndices anomalyDetectionIndices; private final AnomalyDetector anomalyDetector; + private final AdminClient adminClient; private final Logger logger = LogManager.getLogger(IndexAnomalyDetectorActionHandler.class); private final Integer maxAnomalyDetectors; @@ -94,11 +102,10 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler private final TimeValue requestTimeout; private final NamedXContentRegistry xContent; - private ValidateResponse responseValidate; - private Map> failuresMap; - private Map> suggestedChangesMap; - private Boolean inferringInterval; - private AtomicBoolean inferAgain; + private final ValidateResponse responseValidate; + private final Map> failuresMap; + private final Map> suggestedChangesMap; + private final AtomicBoolean inferAgain; /** * 
Constructor function. @@ -109,14 +116,14 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler * @param anomalyDetector anomaly detector instance */ public ValidateAnomalyDetectorActionHandler( - NodeClient client, - RestChannel channel, - AnomalyDetectionIndices anomalyDetectionIndices, - AnomalyDetector anomalyDetector, - Integer maxAnomalyDetectors, - Integer maxAnomalyFeatures, - TimeValue requestTimeout, - NamedXContentRegistry xContentRegistry + NodeClient client, + RestChannel channel, + AnomalyDetectionIndices anomalyDetectionIndices, + AnomalyDetector anomalyDetector, + Integer maxAnomalyDetectors, + Integer maxAnomalyFeatures, + TimeValue requestTimeout, + NamedXContentRegistry xContentRegistry ) { super(client, channel); this.anomalyDetectionIndices = anomalyDetectionIndices; @@ -126,10 +133,10 @@ public ValidateAnomalyDetectorActionHandler( this.requestTimeout = requestTimeout; this.responseValidate = new ValidateResponse(); this.xContent = xContentRegistry; - this.inferringInterval = false; this.inferAgain = new AtomicBoolean(true); this.failuresMap = new HashMap<>(); this.suggestedChangesMap = new HashMap<>(); + this.adminClient = client.admin(); } /** @@ -140,23 +147,20 @@ public ValidateAnomalyDetectorActionHandler( */ public void startValidation() throws IOException { if (!anomalyDetectionIndices.doesAnomalyDetectorIndexExist()) { - anomalyDetectionIndices - .initAnomalyDetectorIndex( - ActionListener.wrap(response -> onCreateMappingsResponse(response), exception -> onFailure(exception)) - ); + preDataValidationSteps(false); } else { - preDataValidationSteps(); + preDataValidationSteps(true); } } - public void preDataValidationSteps() { + private void preDataValidationSteps(boolean indexExists) { List missingFields = new ArrayList<>(); List formatErrors = new ArrayList<>(); - if (anomalyDetector.getName() == null || anomalyDetector.getName() == "") { + if (StringUtils.isBlank(anomalyDetector.getName())) { 
missingFields.add("name"); } else if (!anomalyDetector.getName().matches(NAME_REGEX)) { formatErrors.add(anomalyDetector.getName()); - failuresMap.put("format", formatErrors); + failuresMap.put(ValidationFailures.FORMAT.getName(), formatErrors); } if (anomalyDetector.getTimeField() == null) { missingFields.add("time_field"); @@ -174,91 +178,97 @@ public void preDataValidationSteps() { missingFields.add("feature_attributes"); } if (!missingFields.isEmpty()) { - failuresMap.put("missing", missingFields); + failuresMap.put(ValidationFailures.MISSING.getName(), missingFields); } String error = RestHandlerUtils.validateAnomalyDetector(anomalyDetector, maxAnomalyFeatures); if (StringUtils.isNotBlank(error)) { List dupErrorsFeatures = new ArrayList<>(); dupErrorsFeatures.addAll(Arrays.asList(error.split("\\r?\\n"))); - failuresMap.put("duplicates", dupErrorsFeatures); + failuresMap.put(ValidationFailures.DUPLICATES.getName(), dupErrorsFeatures); } if (!failuresMap.isEmpty()) { - validateAnomalyDetectorResponse(); - } else { + sendAnomalyDetectorValidationResponse(); + } else if (indexExists){ validateNumberOfDetectors(); + } else { + searchAdInputIndices(false); } } - public void validateNumberOfDetectors() { + private void validateNumberOfDetectors() { try { QueryBuilder query = QueryBuilders.matchAllQuery(); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(query).size(0).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); client.search(searchRequest, ActionListener.wrap(response -> onSearchAdResponse(response), exception -> onFailure(exception))); } catch (Exception e) { + logger.warn("Failed to create search request for validation", e); onFailure(e); } } - private void onSearchAdResponse(SearchResponse response) throws IOException { + private void onSearchAdResponse(SearchResponse response) { if (response.getHits().getTotalHits().value >= maxAnomalyDetectors) { - 
failuresMap - .computeIfAbsent("others", k -> new ArrayList<>()) - .add("Can't create anomaly detector more than " + maxAnomalyDetectors); - validateAnomalyDetectorResponse(); + suggestedChangesMap + .computeIfAbsent(ValidationSuggestedChanges.OTHERS.getName(), k -> new ArrayList<>()) + .add("Can't create anomaly detector more than " + maxAnomalyDetectors + " ,please delete unused detectors"); } else { - searchAdInputIndices(null); + searchAdInputIndices(true); } } - private void searchAdInputIndices(String detectorId) { + private void searchAdInputIndices(boolean indexExists) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(QueryBuilders.matchAllQuery()) - .size(0) - .timeout(requestTimeout); + .query(QueryBuilders.matchAllQuery()) + .size(0) + .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, detectorId), exception -> onFailure(exception)) - ); + .search( + searchRequest, + ActionListener + .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, indexExists), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + })); } - private void onSearchAdInputIndicesResponse(SearchResponse response, String detectorId) throws IOException { + private void onSearchAdInputIndicesResponse(SearchResponse response, boolean indexExists) throws IOException { if (response.getHits().getTotalHits().value == 0) { - String errorMsg = "Can't create anomaly detector as no document found in indices: " - + Arrays.toString(anomalyDetector.getIndices().toArray(new String[0])); - failuresMap.computeIfAbsent("others", k -> new ArrayList<>()).add(errorMsg); - validateAnomalyDetectorResponse(); + String errorMsg = String.format + ("Can't create anomaly detector as 
no document found in indices: %s", anomalyDetector.getIndices()); + failuresMap.computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()).add(errorMsg); + sendAnomalyDetectorValidationResponse(); + } else if (indexExists){ + checkADNameExists(); } else { - checkADNameExists(detectorId); + checkForHistoricalData(); } } - private void checkADNameExists(String detectorId) throws IOException { + private void checkADNameExists() { BoolQueryBuilder boolQueryBuilder = new BoolQueryBuilder(); boolQueryBuilder.must(QueryBuilders.termQuery("name.keyword", anomalyDetector.getName())); - if (StringUtils.isNotBlank(detectorId)) { - boolQueryBuilder.mustNot(QueryBuilders.termQuery(RestHandlerUtils._ID, detectorId)); - } SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), - exception -> onFailure(exception) - ) - ); + .search( + searchRequest, + ActionListener + .wrap( + searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), + exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + } + ) + ); } - private void onSearchADNameResponse(SearchResponse response, String name) throws IOException { + private void onSearchADNameResponse(SearchResponse response, String name) { if (response.getHits().getTotalHits().value > 0) { - failuresMap.computeIfAbsent("duplicates", k -> new ArrayList<>()).add(name); - validateAnomalyDetectorResponse(); + failuresMap.computeIfAbsent(ValidationFailures.DUPLICATES.getName(), k -> new ArrayList<>()).add(name); + sendAnomalyDetectorValidationResponse(); } else { checkForHistoricalData(); } @@ -266,39 +276,42 @@ private void 
onSearchADNameResponse(SearchResponse response, String name) throws public void checkForHistoricalData() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(1) - .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { onFailure(exception); }) - ); + .search( + searchRequest, + ActionListener + .wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + }) + ); } private void checkIfAnyHistoricalData(Optional lastTimeStamp) { if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - HISTORICAL_CHECK_IN_MS > lastTimeStamp.get())) { - failuresMap.computeIfAbsent("others", k -> new ArrayList<>()).add("No historical data for past 3 months"); - validateAnomalyDetectorResponse(); + failuresMap.computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()).add("No historical data for past 3 months"); + sendAnomalyDetectorValidationResponse(); } else { queryFilterValidation(); } } + private Long timeConfigToMilliSec(TimeConfiguration config) { + return Optional + .ofNullable((IntervalTimeConfiguration) config) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); + } private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) 
- .map(t -> t.toDuration().toMillis()) - .orElse(0L); + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); long dataEndTime = Instant.now().toEpochMilli() - delayMillis; - long detectorInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval); - return new long[] { dataStartTime, dataEndTime }; + return new long[]{dataStartTime, dataEndTime}; } private void queryFilterValidation() { @@ -306,128 +319,223 @@ private void queryFilterValidation() { long dataEndTime = startEnd[1]; long dataStartTime = startEnd[0]; RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(dataStartTime) - .to(dataEndTime) - .format("epoch_millis"); + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis"); BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(internalFilterQuery) - .size(1) - .terminateAfter(1) - .timeout(requestTimeout); + .query(internalFilterQuery) + .size(1) + .terminateAfter(1) + .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { onFailure(exception); }) - ); + .search( + searchRequest, + ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { + onFailure(exception); + logger.warn("Failed to create data query search request for validation", exception); + }) + ); } - private void onQueryFilterSearch(SearchResponse response) throws IOException { + 
private void onQueryFilterSearch(SearchResponse response) throws IOException, ParseException { if (response.getHits().getTotalHits().value <= 0) { List filterError = new ArrayList<>(); filterError - .add( - "query filter is potentially wrong as no hits were found at all or no historical data in last " - + NUM_OF_INTERVALS_CHECKED_FILTER - + " intervals" - ); - suggestedChangesMap.put("filter_query", filterError); - validateAnomalyDetectorResponse(); + .add( + "query filter is potentially wrong as no hits were found at all or no historical data in last " + + NUM_OF_INTERVALS_CHECKED_FILTER + + " intervals" + ); + suggestedChangesMap.put(ValidationSuggestedChanges.FILTER_QUERY.getName(), filterError); + sendAnomalyDetectorValidationResponse(); + } else { + featureQueryValidation(); + } + } + + private List parseAggregationRequest(XContentParser parser) throws IOException { + List fieldNames = new ArrayList<>(); + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + final String field = parser.currentName(); + switch (field) { + case "field": + parser.nextToken(); + fieldNames.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; + } + } + } + return fieldNames; + } + + private List ForFieldMapping(XContentParser parser) throws IOException { + List fieldNameAggType = new ArrayList<>(); + XContentParser.Token token; + String agg = ""; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + final String field = parser.currentName(); + switch (field) { + case "max": + agg = parser.currentName(); + break; + case "avg": + agg = parser.currentName(); + break; + case "sum": + agg = parser.currentName(); + break; + case "field": + parser.nextToken(); + fieldNameAggType.add(agg); + fieldNameAggType.add(parser.textOrNull()); + break; + default: + parser.skipChildren(); + break; + 
} + } + } + return fieldNameAggType; + } + + + private void checkFeatureAggregationType() throws IOException { + Map featureToField = new TreeMap<>(); + if (anomalyDetector.getFeatureAttributes() != null) { + for (Feature feature : anomalyDetector.getFeatureAttributes()) { + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List aggTypeFieldName = ForFieldMapping(parser); + featureToField.put(feature.getName(), aggTypeFieldName.get(1)); + } + } + getFieldMapping(featureToField); + } + + private void getFieldMapping(Map featureToAgg) { + GetFieldMappingsRequest request = new GetFieldMappingsRequest().indices(anomalyDetector.getIndices().get(0)); + request.fields(featureToAgg.values().toArray(new String[0])); + adminClient + .indices().getFieldMappings( + request, + ActionListener + .wrap( + response -> checkFieldIndex(response, featureToAgg.values().toArray(new String[0]), featureToAgg), + exception -> { + onFailure(exception); + logger.warn("Failed to get field mapping for validation", exception); + } + ) + ); + } + + private void checkFieldIndex(GetFieldMappingsResponse response, String[] fields, Map featuresToAgg) throws IOException { + List numericTypes = Arrays.asList(numericType); + for (int i = 0; i < fields.length; i++) { + Map>> mappings = + response.mappings(); + final Map> fieldMappings = + mappings.get(anomalyDetector.getIndices().get(0)); + final GetFieldMappingsResponse.FieldMappingMetadata metadata = + fieldMappings.get("_doc").get(fields[i]); + final Map source = metadata.sourceAsMap(); + String fieldTypeJSON = source.get(fields[i]).toString(); + String fieldType = fieldTypeJSON.substring(fieldTypeJSON.lastIndexOf('=') + 1, fieldTypeJSON.length() - 1).trim(); + if (!numericTypes.contains(fieldType)) { + failuresMap + 
.computeIfAbsent(ValidationFailures.FIELD_TYPE.getName(), k -> new ArrayList<>()) + .add("Field named: "+ fields[i] + " can't be aggregated due to it being of type " + fieldType + + " which isn't numeric, please use a different aggregation type"); + } + } + if (!failuresMap.isEmpty()) { + sendAnomalyDetectorValidationResponse(); } else { - long timestamp = (long) response.getHits().getHits()[0].getSourceAsMap().get("timestamp"); - featureQueryValidation(timestamp); + intervalValidation(); } } - private void featureQueryValidation(long startTimeStamp) throws IOException { - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + private long[] getFeatureQueryValidationDateRange() { + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); long dataEndTime = startEnd[1]; long dataStartTime = startEnd[0]; - if (startEnd[0] > startTimeStamp) { - dataStartTime = startTimeStamp; - } IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(FEATURE_VALIDATION_TIME_BACK_MINUTES, ChronoUnit.MINUTES); long searchRangeTime = Optional.ofNullable(searchRange).map(t -> t.toDuration().toMillis()).orElse(0L); long startTimeWithSetTime = startEnd[1] - (searchRangeTime - delayMillis); - // Make sure start time includes timestamp seen by filter query check if (startEnd[0] > startTimeWithSetTime) { dataStartTime = startTimeWithSetTime; } + return new long[]{dataStartTime, dataEndTime}; + } + + + + private void featureQueryValidation() throws IOException { + long[] startEnd = getFeatureQueryValidationDateRange(); AtomicInteger featureCounter = new AtomicInteger(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(dataStartTime) - .to(dataEndTime) - .format("epoch_millis") - .includeLower(true) - .includeUpper(false); + 
.from(startEnd[0]) + .to(startEnd[1]) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); List fieldNames = parseAggregationRequest(parser); - BoolQueryBuilder boolQuery2 = QueryBuilders - .boolQuery() - .filter(rangeQuery) - .filter(anomalyDetector.getFilterQuery()) - .filter(QueryBuilders.existsQuery(fieldNames.get(0))); - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery2).size(1).terminateAfter(1); + BoolQueryBuilder boolQuery = QueryBuilders + .boolQuery() + .filter(rangeQuery) + .filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); client.search(searchRequest, ActionListener.wrap(searchResponse -> { featureCounter.incrementAndGet(); onFeatureAggregationValidation(searchResponse, feature, featureCounter); - }, exception -> onFailure(exception))); - } - } - } - - private List parseAggregationRequest(XContentParser parser) throws IOException { - List fieldNames = new ArrayList<>(); - XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - final String field = parser.currentName(); - switch (field) { - case "field": - 
parser.nextToken(); - fieldNames.add(parser.textOrNull()); - break; - default: - parser.skipChildren(); - break; - } + }, exception -> { + onFailure(exception); + logger.warn("Failed to create feature search request for validation", exception); + })); } } - return fieldNames; } private void onFeatureAggregationValidation(SearchResponse response, Feature feature, AtomicInteger counter) throws IOException { if (response.getHits().getTotalHits().value <= 0) { String errorMsg = feature.getName() + ": feature query is potentially wrong as no hits were found"; - suggestedChangesMap.computeIfAbsent("feature_attributes", k -> new ArrayList<>()).add(errorMsg); + suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.FEATURE_ATTRIBUTES.getName(), k -> new ArrayList<>()).add(errorMsg); } if (counter.get() == anomalyDetector.getFeatureAttributes().size()) { if (!suggestedChangesMap.isEmpty()) { - validateAnomalyDetectorResponse(); + sendAnomalyDetectorValidationResponse(); } else { - intervalValidation(); + checkFeatureAggregationType(); } } } + //creates a new 2D array of time ranges based of a different detector interval inorder to validate + // detector interval with a new range every time. 
Creates 128 new interval time ranges private long[][] createNewTimeRange(long detectorInterval) { long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); long dataEndTime = Instant.now().toEpochMilli() - delayMillis; long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval); for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { @@ -437,72 +545,77 @@ private long[][] createNewTimeRange(long detectorInterval) { return timeRanges; } - private synchronized void intervalValidation() throws IOException { - long detectorInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); - for (long i = detectorInterval; i <= MAX_INTERVAL_LENGTH; i *= INTERVAL_RECOMMENDATION_MULTIPLIER) { - long timeRanges[][] = createNewTimeRange(i); - // Need to try and check if the infering logic is done before calling on the method again - // with a new interval since otherwise the requests get mixed up + private synchronized void intervalValidation() { + long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); + for (long inferredDetectorInterval = detectorInterval; + inferredDetectorInterval <= MAX_INTERVAL_LENGTH; inferredDetectorInterval *= INTERVAL_RECOMMENDATION_MULTIPLIER) { + long timeRanges[][] = createNewTimeRange(inferredDetectorInterval); try { if (inferAgain.get()) { - samplingHelper(timeRanges, i); + verifyWithInterval(timeRanges, inferredDetectorInterval); } wait(); } catch (Exception ex) { onFailure(ex); + logger.warn(ex); } } } + private List getFieldNamesForFeature(Feature feature) throws IOException{ + ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, 
feature.getId()); + XContentParser parser = XContentType.JSON + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + parser.nextToken(); + List fieldNames = parseAggregationRequest(parser); + return fieldNames; + } + + private List getFeatureFieldNames() throws IOException { List featureFields = new ArrayList<>(); for (Feature feature : anomalyDetector.getFeatureAttributes()) { - ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); - XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); - parser.nextToken(); - List fieldNames = parseAggregationRequest(parser); - featureFields.add(fieldNames.get(0)); + featureFields.add(getFieldNamesForFeature(feature).get(0)); } return featureFields; } - private synchronized void samplingHelper(long[][] timeRanges, long detectorInterval) throws IOException { + private void verifyWithInterval(long[][] timeRanges, long detectorInterval) throws IOException { inferAgain.set(false); List featureFields = getFeatureFieldNames(); MultiSearchRequest sr = new MultiSearchRequest(); for (int i = 0; i < NUM_OF_INTERVAL_SAMPLES; i++) { long rangeStart = timeRanges[i][0]; long rangeEnd = timeRanges[i][1]; - RangeQueryBuilder rangeQueryRandom = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(rangeStart) - .to(rangeEnd) - .format("epoch_millis") - .includeLower(true) - .includeUpper(false); - BoolQueryBuilder qb = QueryBuilders.boolQuery().filter(rangeQueryRandom).filter(anomalyDetector.getFilterQuery()); + RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) + .from(rangeStart) + .to(rangeEnd) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); + BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().filter(rangeQuery).filter(anomalyDetector.getFilterQuery()); for (int j = 0; j < 
featureFields.size(); j++) { - qb.filter(QueryBuilders.existsQuery(featureFields.get(j))); + boolQuery.filter(QueryBuilders.existsQuery(featureFields.get(j))); } - SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(qb).size(1).terminateAfter(1); + SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery).size(1).terminateAfter(1); sr.add(new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder)); } client.multiSearch(sr, ActionListener.wrap(searchResponse -> { if (doneInferring(detectorInterval, searchResponse)) { checkWindowDelay(); } - }, exception -> onFailure(exception))); + }, exception -> { + onFailure(exception); + logger.warn("Failed to create multi search request for validation", exception); + })); } private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse) { long originalInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); final AtomicInteger hitCounter = new AtomicInteger(); for (MultiSearchResponse.Item item : searchResponse) { SearchResponse response = item.getResponse(); @@ -515,13 +628,13 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes if (hitCounter.doubleValue() / (double) NUM_OF_INTERVAL_SAMPLES < SAMPLE_SUCCESS_RATE) { if ((detectorInterval * INTERVAL_RECOMMENDATION_MULTIPLIER) >= MAX_INTERVAL_LENGTH) { suggestedChangesMap - .computeIfAbsent("detection_interval", k -> new ArrayList<>()) - .add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); + .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()) + .add("detector interval: failed to infer max up too: " + 
MAX_INTERVAL_LENGTH); } else { return false; } } else if (detectorInterval != originalInterval) { - suggestedChangesMap.computeIfAbsent("detection_interval", k -> new ArrayList<>()).add(Long.toString(detectorInterval)); + suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()).add(Long.toString(detectorInterval)); inferAgain.set(false); return true; } @@ -531,40 +644,39 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes private void checkWindowDelay() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(1) - .sort(new FieldSortBuilder("timestamp").order(SortOrder.DESC)); + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> onFailure(exception)) - ); + .search( + searchRequest, + ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for last data point", exception); + }) + ); } private Optional getLatestDataTime(SearchResponse searchResponse) { - Optional x = Optional - .ofNullable(searchResponse) - .map(SearchResponse::getAggregations) - .map(aggs -> aggs.asMap()) - .map(map -> (Max) map.get(AGG_NAME_MAX)) - .map(agg -> (long) agg.getValue()); - return x; + return Optional + .ofNullable(searchResponse) + .map(SearchResponse::getAggregations) + .map(aggs -> aggs.asMap()) + .map(map -> (Max) map.get(AGG_NAME_MAX)) + .map(agg -> (long) agg.getValue()); } private void 
checkDelayResponse(Optional lastTimeStamp) { - long delayMillis = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getWindowDelay()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); if (lastTimeStamp.isPresent() && (Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get()); - suggestedChangesMap.computeIfAbsent("window_delay", k -> new ArrayList<>()).add(Long.toString(minutesSinceLastStamp)); + suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.WINDOW_DELAY.getName(), k -> new ArrayList<>()).add(Long.toString(minutesSinceLastStamp)); } - validateAnomalyDetectorResponse(); + sendAnomalyDetectorValidationResponse(); } - private void validateAnomalyDetectorResponse() { + private void sendAnomalyDetectorValidationResponse() { this.responseValidate.setFailures(failuresMap); this.responseValidate.setSuggestedChanges(suggestedChangesMap); try { @@ -574,17 +686,4 @@ private void validateAnomalyDetectorResponse() { channel.sendResponse(new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage())); } } - - private void onCreateMappingsResponse(CreateIndexResponse response) throws IOException { - if (response.isAcknowledged()) { - logger.info("Created {} with mappings.", ANOMALY_DETECTORS_INDEX); - preDataValidationSteps(); - } else { - logger.warn("Created {} with mappings call not acknowledged.", ANOMALY_DETECTORS_INDEX); - channel - .sendResponse( - new BytesRestResponse(RestStatus.INTERNAL_SERVER_ERROR, response.toXContent(channel.newErrorBuilder(), EMPTY_PARAMS)) - ); - } - } } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java index b005c9ff..826f899b 100644 --- 
a/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/transport/AnomalyResultTransportAction.java @@ -260,10 +260,6 @@ private ActionListener> onGetDetector( .orElse(0L); long dataStartTime = request.getStart() - delayMillis; long dataEndTime = request.getEnd() - delayMillis; - - System.out.println("dataStartTime: " + dataStartTime); - System.out.println("dataEndTime: " + dataEndTime); - featureManager .getCurrentFeatures( anomalyDetector, From 4235849d3918184d0df6e7265732973c0bdfaa18 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Mon, 31 Aug 2020 09:17:53 -0700 Subject: [PATCH 14/20] style fixes --- .../ad/model/ValidationFailures.java | 15 + .../ad/model/ValidationSuggestedChanges.java | 15 + .../ValidateAnomalyDetectorActionHandler.java | 284 +++++++++--------- 3 files changed, 165 insertions(+), 149 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java index 1f3cff95..aecfbc3c 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationFailures.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + package com.amazon.opendistroforelasticsearch.ad.model; public enum ValidationFailures { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java index 355ea2bc..1b9cbeeb 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationSuggestedChanges.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + package com.amazon.opendistroforelasticsearch.ad.model; public enum ValidationSuggestedChanges { diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index a0141c31..1bf7118c 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -32,9 +32,6 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import com.amazon.opendistroforelasticsearch.ad.model.ValidationSuggestedChanges; -import com.amazon.opendistroforelasticsearch.ad.model.TimeConfiguration; -import com.amazon.opendistroforelasticsearch.ad.model.ValidationFailures; import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -69,7 +66,10 @@ import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; import com.amazon.opendistroforelasticsearch.ad.model.Feature; import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; +import com.amazon.opendistroforelasticsearch.ad.model.TimeConfiguration; import com.amazon.opendistroforelasticsearch.ad.model.ValidateResponse; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationFailures; +import com.amazon.opendistroforelasticsearch.ad.model.ValidationSuggestedChanges; import com.amazon.opendistroforelasticsearch.ad.util.ParseUtils; import com.amazon.opendistroforelasticsearch.ad.util.RestHandlerUtils; @@ -90,7 +90,7 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler protected static final long HISTORICAL_CHECK_IN_MS = 7776000000L; protected static final String NAME_REGEX = "[a-zA-Z0-9._-]+"; protected static 
final double INTERVAL_RECOMMENDATION_MULTIPLIER = 1.2; - protected static final String[] numericType = {"long", "integer", "short", "double", "float"}; + protected static final String[] numericType = { "long", "integer", "short", "double", "float" }; private final AnomalyDetectionIndices anomalyDetectionIndices; private final AnomalyDetector anomalyDetector; @@ -116,14 +116,14 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler * @param anomalyDetector anomaly detector instance */ public ValidateAnomalyDetectorActionHandler( - NodeClient client, - RestChannel channel, - AnomalyDetectionIndices anomalyDetectionIndices, - AnomalyDetector anomalyDetector, - Integer maxAnomalyDetectors, - Integer maxAnomalyFeatures, - TimeValue requestTimeout, - NamedXContentRegistry xContentRegistry + NodeClient client, + RestChannel channel, + AnomalyDetectionIndices anomalyDetectionIndices, + AnomalyDetector anomalyDetector, + Integer maxAnomalyDetectors, + Integer maxAnomalyFeatures, + TimeValue requestTimeout, + NamedXContentRegistry xContentRegistry ) { super(client, channel); this.anomalyDetectionIndices = anomalyDetectionIndices; @@ -188,7 +188,7 @@ private void preDataValidationSteps(boolean indexExists) { } if (!failuresMap.isEmpty()) { sendAnomalyDetectorValidationResponse(); - } else if (indexExists){ + } else if (indexExists) { validateNumberOfDetectors(); } else { searchAdInputIndices(false); @@ -210,8 +210,8 @@ private void validateNumberOfDetectors() { private void onSearchAdResponse(SearchResponse response) { if (response.getHits().getTotalHits().value >= maxAnomalyDetectors) { suggestedChangesMap - .computeIfAbsent(ValidationSuggestedChanges.OTHERS.getName(), k -> new ArrayList<>()) - .add("Can't create anomaly detector more than " + maxAnomalyDetectors + " ,please delete unused detectors"); + .computeIfAbsent(ValidationSuggestedChanges.OTHERS.getName(), k -> new ArrayList<>()) + .add("Can't create anomaly detector more than " + 
maxAnomalyDetectors + " ,please delete unused detectors"); } else { searchAdInputIndices(true); } @@ -219,27 +219,27 @@ private void onSearchAdResponse(SearchResponse response) { private void searchAdInputIndices(boolean indexExists) { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(QueryBuilders.matchAllQuery()) - .size(0) - .timeout(requestTimeout); + .query(QueryBuilders.matchAllQuery()) + .size(0) + .timeout(requestTimeout); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().toArray(new String[0])).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, indexExists), exception -> { - onFailure(exception); - logger.warn("Failed to create search request for validation", exception); - })); + .search( + searchRequest, + ActionListener.wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, indexExists), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + }) + ); } private void onSearchAdInputIndicesResponse(SearchResponse response, boolean indexExists) throws IOException { if (response.getHits().getTotalHits().value == 0) { - String errorMsg = String.format - ("Can't create anomaly detector as no document found in indices: %s", anomalyDetector.getIndices()); + String errorMsg = String + .format("Can't create anomaly detector as no document found in indices: %s", anomalyDetector.getIndices()); failuresMap.computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()).add(errorMsg); sendAnomalyDetectorValidationResponse(); - } else if (indexExists){ + } else if (indexExists) { checkADNameExists(); } else { checkForHistoricalData(); @@ -252,17 +252,13 @@ private void checkADNameExists() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(boolQueryBuilder).timeout(requestTimeout); SearchRequest 
searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); client - .search( - searchRequest, - ActionListener - .wrap( - searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), - exception -> { - onFailure(exception); - logger.warn("Failed to create search request for validation", exception); - } - ) - ); + .search( + searchRequest, + ActionListener.wrap(searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + }) + ); } private void onSearchADNameResponse(SearchResponse response, String name) { @@ -276,34 +272,29 @@ private void onSearchADNameResponse(SearchResponse response, String name) { public void checkForHistoricalData() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(1) - .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - client - .search( - searchRequest, - ActionListener - .wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { - onFailure(exception); - logger.warn("Failed to create search request for validation", exception); - }) - ); + client.search(searchRequest, ActionListener.wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for validation", exception); + })); } private void checkIfAnyHistoricalData(Optional lastTimeStamp) { if (lastTimeStamp.isPresent() 
&& (Instant.now().toEpochMilli() - HISTORICAL_CHECK_IN_MS > lastTimeStamp.get())) { - failuresMap.computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()).add("No historical data for past 3 months"); + failuresMap + .computeIfAbsent(ValidationFailures.OTHERS.getName(), k -> new ArrayList<>()) + .add("No historical data for past 3 months"); sendAnomalyDetectorValidationResponse(); } else { queryFilterValidation(); } } + private Long timeConfigToMilliSec(TimeConfiguration config) { - return Optional - .ofNullable((IntervalTimeConfiguration) config) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + return Optional.ofNullable((IntervalTimeConfiguration) config).map(t -> t.toDuration().toMillis()).orElse(0L); } private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { @@ -311,7 +302,7 @@ private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { long dataEndTime = Instant.now().toEpochMilli() - delayMillis; long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval); - return new long[]{dataStartTime, dataEndTime}; + return new long[] { dataStartTime, dataEndTime }; } private void queryFilterValidation() { @@ -319,35 +310,31 @@ private void queryFilterValidation() { long dataEndTime = startEnd[1]; long dataStartTime = startEnd[0]; RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(dataStartTime) - .to(dataEndTime) - .format("epoch_millis"); + .from(dataStartTime) + .to(dataEndTime) + .format("epoch_millis"); BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().must(rangeQuery).must(anomalyDetector.getFilterQuery()); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .query(internalFilterQuery) - .size(1) - .terminateAfter(1) - .timeout(requestTimeout); + .query(internalFilterQuery) + .size(1) + .terminateAfter(1) + .timeout(requestTimeout); 
SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - client - .search( - searchRequest, - ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { - onFailure(exception); - logger.warn("Failed to create data query search request for validation", exception); - }) - ); + client.search(searchRequest, ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { + onFailure(exception); + logger.warn("Failed to create data query search request for validation", exception); + })); } private void onQueryFilterSearch(SearchResponse response) throws IOException, ParseException { if (response.getHits().getTotalHits().value <= 0) { List filterError = new ArrayList<>(); filterError - .add( - "query filter is potentially wrong as no hits were found at all or no historical data in last " - + NUM_OF_INTERVALS_CHECKED_FILTER - + " intervals" - ); + .add( + "query filter is potentially wrong as no hits were found at all or no historical data in last " + + NUM_OF_INTERVALS_CHECKED_FILTER + + " intervals" + ); suggestedChangesMap.put(ValidationSuggestedChanges.FILTER_QUERY.getName(), filterError); sendAnomalyDetectorValidationResponse(); } else { @@ -406,19 +393,18 @@ private List ForFieldMapping(XContentParser parser) throws IOException { return fieldNameAggType; } - private void checkFeatureAggregationType() throws IOException { Map featureToField = new TreeMap<>(); if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); 
List aggTypeFieldName = ForFieldMapping(parser); featureToField.put(feature.getName(), aggTypeFieldName.get(1)); - } + } } getFieldMapping(featureToField); } @@ -427,36 +413,37 @@ private void getFieldMapping(Map featureToAgg) { GetFieldMappingsRequest request = new GetFieldMappingsRequest().indices(anomalyDetector.getIndices().get(0)); request.fields(featureToAgg.values().toArray(new String[0])); adminClient - .indices().getFieldMappings( + .indices() + .getFieldMappings( request, ActionListener - .wrap( - response -> checkFieldIndex(response, featureToAgg.values().toArray(new String[0]), featureToAgg), - exception -> { - onFailure(exception); - logger.warn("Failed to get field mapping for validation", exception); - } - ) - ); + .wrap(response -> checkFieldIndex(response, featureToAgg.values().toArray(new String[0]), featureToAgg), exception -> { + onFailure(exception); + logger.warn("Failed to get field mapping for validation", exception); + }) + ); } private void checkFieldIndex(GetFieldMappingsResponse response, String[] fields, Map featuresToAgg) throws IOException { List numericTypes = Arrays.asList(numericType); for (int i = 0; i < fields.length; i++) { - Map>> mappings = - response.mappings(); - final Map> fieldMappings = - mappings.get(anomalyDetector.getIndices().get(0)); - final GetFieldMappingsResponse.FieldMappingMetadata metadata = - fieldMappings.get("_doc").get(fields[i]); + Map>> mappings = response.mappings(); + final Map> fieldMappings = mappings + .get(anomalyDetector.getIndices().get(0)); + final GetFieldMappingsResponse.FieldMappingMetadata metadata = fieldMappings.get("_doc").get(fields[i]); final Map source = metadata.sourceAsMap(); String fieldTypeJSON = source.get(fields[i]).toString(); String fieldType = fieldTypeJSON.substring(fieldTypeJSON.lastIndexOf('=') + 1, fieldTypeJSON.length() - 1).trim(); if (!numericTypes.contains(fieldType)) { failuresMap - .computeIfAbsent(ValidationFailures.FIELD_TYPE.getName(), k -> new ArrayList<>()) - 
.add("Field named: "+ fields[i] + " can't be aggregated due to it being of type " + fieldType + - " which isn't numeric, please use a different aggregation type"); + .computeIfAbsent(ValidationFailures.FIELD_TYPE.getName(), k -> new ArrayList<>()) + .add( + "Field named: " + + fields[i] + + " can't be aggregated due to it being of type " + + fieldType + + " which isn't numeric, please use a different aggregation type" + ); } } if (!failuresMap.isEmpty()) { @@ -477,33 +464,31 @@ private long[] getFeatureQueryValidationDateRange() { if (startEnd[0] > startTimeWithSetTime) { dataStartTime = startTimeWithSetTime; } - return new long[]{dataStartTime, dataEndTime}; + return new long[] { dataStartTime, dataEndTime }; } - - private void featureQueryValidation() throws IOException { long[] startEnd = getFeatureQueryValidationDateRange(); AtomicInteger featureCounter = new AtomicInteger(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(startEnd[0]) - .to(startEnd[1]) - .format("epoch_millis") - .includeLower(true) - .includeUpper(false); + .from(startEnd[0]) + .to(startEnd[1]) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); if (anomalyDetector.getFeatureAttributes() != null) { for (Feature feature : anomalyDetector.getFeatureAttributes()) { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); List fieldNames = parseAggregationRequest(parser); BoolQueryBuilder boolQuery = QueryBuilders - .boolQuery() - .filter(rangeQuery) - .filter(anomalyDetector.getFilterQuery()) - .filter(QueryBuilders.existsQuery(fieldNames.get(0))); + .boolQuery() + .filter(rangeQuery) + 
.filter(anomalyDetector.getFilterQuery()) + .filter(QueryBuilders.existsQuery(fieldNames.get(0))); SearchSourceBuilder internalSearchSourceBuilder = new SearchSourceBuilder().query(boolQuery).size(1).terminateAfter(1); SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(internalSearchSourceBuilder); client.search(searchRequest, ActionListener.wrap(searchResponse -> { @@ -520,7 +505,9 @@ private void featureQueryValidation() throws IOException { private void onFeatureAggregationValidation(SearchResponse response, Feature feature, AtomicInteger counter) throws IOException { if (response.getHits().getTotalHits().value <= 0) { String errorMsg = feature.getName() + ": feature query is potentially wrong as no hits were found"; - suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.FEATURE_ATTRIBUTES.getName(), k -> new ArrayList<>()).add(errorMsg); + suggestedChangesMap + .computeIfAbsent(ValidationSuggestedChanges.FEATURE_ATTRIBUTES.getName(), k -> new ArrayList<>()) + .add(errorMsg); } if (counter.get() == anomalyDetector.getFeatureAttributes().size()) { if (!suggestedChangesMap.isEmpty()) { @@ -531,7 +518,7 @@ private void onFeatureAggregationValidation(SearchResponse response, Feature fea } } - //creates a new 2D array of time ranges based of a different detector interval inorder to validate + // creates a new 2D array of time ranges based of a different detector interval inorder to validate // detector interval with a new range every time. 
Creates 128 new interval time ranges private long[][] createNewTimeRange(long detectorInterval) { long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; @@ -547,8 +534,8 @@ private long[][] createNewTimeRange(long detectorInterval) { private synchronized void intervalValidation() { long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); - for (long inferredDetectorInterval = detectorInterval; - inferredDetectorInterval <= MAX_INTERVAL_LENGTH; inferredDetectorInterval *= INTERVAL_RECOMMENDATION_MULTIPLIER) { + for (long inferredDetectorInterval = detectorInterval; inferredDetectorInterval <= MAX_INTERVAL_LENGTH; inferredDetectorInterval *= + INTERVAL_RECOMMENDATION_MULTIPLIER) { long timeRanges[][] = createNewTimeRange(inferredDetectorInterval); try { if (inferAgain.get()) { @@ -562,17 +549,16 @@ private synchronized void intervalValidation() { } } - private List getFieldNamesForFeature(Feature feature) throws IOException{ + private List getFieldNamesForFeature(Feature feature) throws IOException { ParseUtils.parseAggregators(feature.getAggregation().toString(), xContent, feature.getId()); XContentParser parser = XContentType.JSON - .xContent() - .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); + .xContent() + .createParser(xContent, LoggingDeprecationHandler.INSTANCE, feature.getAggregation().toString()); parser.nextToken(); List fieldNames = parseAggregationRequest(parser); return fieldNames; } - private List getFeatureFieldNames() throws IOException { List featureFields = new ArrayList<>(); for (Feature feature : anomalyDetector.getFeatureAttributes()) { @@ -589,11 +575,11 @@ private void verifyWithInterval(long[][] timeRanges, long detectorInterval) thro long rangeStart = timeRanges[i][0]; long rangeEnd = timeRanges[i][1]; RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(rangeStart) - .to(rangeEnd) - .format("epoch_millis") - 
.includeLower(true) - .includeUpper(false); + .from(rangeStart) + .to(rangeEnd) + .format("epoch_millis") + .includeLower(true) + .includeUpper(false); BoolQueryBuilder boolQuery = QueryBuilders.boolQuery().filter(rangeQuery).filter(anomalyDetector.getFilterQuery()); for (int j = 0; j < featureFields.size(); j++) { boolQuery.filter(QueryBuilders.existsQuery(featureFields.get(j))); @@ -613,9 +599,9 @@ private void verifyWithInterval(long[][] timeRanges, long detectorInterval) thro private synchronized boolean doneInferring(long detectorInterval, MultiSearchResponse searchResponse) { long originalInterval = Optional - .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) - .map(t -> t.toDuration().toMillis()) - .orElse(0L); + .ofNullable((IntervalTimeConfiguration) anomalyDetector.getDetectionInterval()) + .map(t -> t.toDuration().toMillis()) + .orElse(0L); final AtomicInteger hitCounter = new AtomicInteger(); for (MultiSearchResponse.Item item : searchResponse) { SearchResponse response = item.getResponse(); @@ -628,13 +614,15 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes if (hitCounter.doubleValue() / (double) NUM_OF_INTERVAL_SAMPLES < SAMPLE_SUCCESS_RATE) { if ((detectorInterval * INTERVAL_RECOMMENDATION_MULTIPLIER) >= MAX_INTERVAL_LENGTH) { suggestedChangesMap - .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()) - .add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); + .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()) + .add("detector interval: failed to infer max up too: " + MAX_INTERVAL_LENGTH); } else { return false; } } else if (detectorInterval != originalInterval) { - suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()).add(Long.toString(detectorInterval)); + suggestedChangesMap + 
.computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()) + .add(Long.toString(detectorInterval)); inferAgain.set(false); return true; } @@ -644,34 +632,32 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes private void checkWindowDelay() { SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() - .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) - .size(1) - .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); + .aggregation(AggregationBuilders.max(AGG_NAME_MAX).field(anomalyDetector.getTimeField())) + .size(1) + .sort(new FieldSortBuilder(anomalyDetector.getTimeField()).order(SortOrder.DESC)); SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); - client - .search( - searchRequest, - ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> { - onFailure(exception); - logger.warn("Failed to create search request for last data point", exception); - }) - ); + client.search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> { + onFailure(exception); + logger.warn("Failed to create search request for last data point", exception); + })); } private Optional getLatestDataTime(SearchResponse searchResponse) { return Optional - .ofNullable(searchResponse) - .map(SearchResponse::getAggregations) - .map(aggs -> aggs.asMap()) - .map(map -> (Max) map.get(AGG_NAME_MAX)) - .map(agg -> (long) agg.getValue()); + .ofNullable(searchResponse) + .map(SearchResponse::getAggregations) + .map(aggs -> aggs.asMap()) + .map(map -> (Max) map.get(AGG_NAME_MAX)) + .map(agg -> (long) agg.getValue()); } private void checkDelayResponse(Optional lastTimeStamp) { long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); if (lastTimeStamp.isPresent() && 
(Instant.now().toEpochMilli() - lastTimeStamp.get() > delayMillis)) { long minutesSinceLastStamp = TimeUnit.MILLISECONDS.toMinutes(Instant.now().toEpochMilli() - lastTimeStamp.get()); - suggestedChangesMap.computeIfAbsent(ValidationSuggestedChanges.WINDOW_DELAY.getName(), k -> new ArrayList<>()).add(Long.toString(minutesSinceLastStamp)); + suggestedChangesMap + .computeIfAbsent(ValidationSuggestedChanges.WINDOW_DELAY.getName(), k -> new ArrayList<>()) + .add(Long.toString(minutesSinceLastStamp)); } sendAnomalyDetectorValidationResponse(); } From a05c272f1bd49ae42b8e90309621dff45a82f08c Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 1 Sep 2020 16:55:42 -0700 Subject: [PATCH 15/20] added a datetimerange class plus some other fixes --- .../ad/model/AnomalyDetector.java | 84 +++++++++++-------- .../ad/model/DateTimeRange.java | 56 +++++++++++++ .../RestValidateAnomalyDetectorAction.java | 3 +- .../ValidateAnomalyDetectorActionHandler.java | 66 +++++++-------- 4 files changed, 134 insertions(+), 75 deletions(-) create mode 100644 src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java index c148bc62..76ad8b3d 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetector.java @@ -123,36 +123,23 @@ public AnomalyDetector( Integer schemaVersion, Instant lastUpdateTime ) { - if (Strings.isBlank(name)) { - throw new IllegalArgumentException("Detector name should be set"); - } - if (timeField == null) { - throw new IllegalArgumentException("Time field should be set"); - } - if (indices == null || indices.isEmpty()) { - throw new IllegalArgumentException("Indices should be set"); - } - if (detectionInterval == null) { - throw new 
IllegalArgumentException("Detection interval should be set"); - } - if (shingleSize != null && shingleSize < 1) { - throw new IllegalArgumentException("Shingle size must be a positive integer"); - } - this.detectorId = detectorId; - this.version = version; - this.name = name; - this.description = description; - this.timeField = timeField; - this.indices = indices; - this.featureAttributes = features; - this.filterQuery = filterQuery; - this.detectionInterval = detectionInterval; - this.windowDelay = windowDelay; - this.shingleSize = shingleSize; - this.uiMetadata = uiMetadata; - this.schemaVersion = schemaVersion; - this.lastUpdateTime = lastUpdateTime; - this.validation = false; + this( + detectorId, + version, + name, + description, + timeField, + indices, + features, + filterQuery, + detectionInterval, + windowDelay, + shingleSize, + uiMetadata, + schemaVersion, + lastUpdateTime, + false + ); } public AnomalyDetector( @@ -166,13 +153,32 @@ public AnomalyDetector( QueryBuilder filterQuery, TimeConfiguration detectionInterval, TimeConfiguration windowDelay, + Integer shingleSize, Map uiMetadata, Integer schemaVersion, Instant lastUpdateTime, Boolean validation ) { - if (indices == null || indices.isEmpty()) { - indices = null; + if (validation) { + if (indices == null || indices.isEmpty()) { + indices = null; + } + } else { + if (Strings.isBlank(name)) { + throw new IllegalArgumentException("Detector name should be set"); + } + if (timeField == null) { + throw new IllegalArgumentException("Time field should be set"); + } + if (indices == null || indices.isEmpty()) { + throw new IllegalArgumentException("Indices should be set"); + } + if (detectionInterval == null) { + throw new IllegalArgumentException("Detection interval should be set"); + } + if (shingleSize != null && shingleSize < 1) { + throw new IllegalArgumentException("Shingle size must be a positive integer"); + } } this.detectorId = detectorId; this.version = version; @@ -184,11 +190,11 @@ public 
AnomalyDetector( this.filterQuery = filterQuery; this.detectionInterval = detectionInterval; this.windowDelay = windowDelay; + this.shingleSize = shingleSize; this.uiMetadata = uiMetadata; this.schemaVersion = schemaVersion; this.lastUpdateTime = lastUpdateTime; this.validation = validation; - this.shingleSize = 8; } public XContentBuilder toXContent(XContentBuilder builder) throws IOException { @@ -291,7 +297,6 @@ public static AnomalyDetector parse( while (parser.nextToken() != XContentParser.Token.END_OBJECT) { String fieldName = parser.currentName(); parser.nextToken(); - switch (fieldName) { case NAME_FIELD: name = parser.text(); @@ -365,8 +370,8 @@ public static AnomalyDetector parse( ); } - public static AnomalyDetector parseValidation(XContentParser parser, String detectorId, Long version) throws IOException { - Boolean validation = true; + public static AnomalyDetector parseValidation(XContentParser parser, String detectorId, Long version, Integer defaultShingleSize) + throws IOException { String name = null; String description = null; String timeField = null; @@ -378,6 +383,7 @@ public static AnomalyDetector parseValidation(XContentParser parser, String dete int schemaVersion = 0; Map uiMetadata = null; Instant lastUpdateTime = null; + Integer shingleSize = defaultShingleSize; ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -428,6 +434,9 @@ public static AnomalyDetector parseValidation(XContentParser parser, String dete case WINDOW_DELAY_FIELD: windowDelay = TimeConfiguration.parse(parser); break; + case SHINGLE_SIZE_FIELD: + shingleSize = parser.intValue(); + break; case LAST_UPDATE_TIME_FIELD: lastUpdateTime = ParseUtils.toInstant(parser); break; @@ -447,10 +456,11 @@ public static AnomalyDetector parseValidation(XContentParser parser, String dete filterQuery, detectionInterval, windowDelay, + shingleSize, uiMetadata, 
schemaVersion, lastUpdateTime, - validation + true ); } diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java new file mode 100644 index 00000000..1138e725 --- /dev/null +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/model/DateTimeRange.java @@ -0,0 +1,56 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +package com.amazon.opendistroforelasticsearch.ad.model; + +import java.time.Instant; + +/** + * A DateTimeRange is used to represent start and end time for a timeRange + */ +public class DateTimeRange { + + private long start; + private long end; + + public DateTimeRange(long start, long end) { + this.start = start; + this.end = end; + } + + public static DateTimeRange rangeBasedOfInterval(long windowDelay, long intervalLength, int numOfIntervals) { + long dataEndTime = Instant.now().toEpochMilli() - windowDelay; + long dataStartTime = dataEndTime - ((long) (numOfIntervals) * intervalLength); + return new DateTimeRange(dataStartTime, dataEndTime); + + } + + public long getStart() { + return start; + } + + public long getEnd() { + return end; + } + + public void setStart(long start) { + this.start = start; + } + + public void setEnd(long end) { + this.end = end; + } + +} diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java 
b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java index fe7c8790..ff29b5d6 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/RestValidateAnomalyDetectorAction.java @@ -15,6 +15,7 @@ package com.amazon.opendistroforelasticsearch.ad.rest; +import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE; import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_INTERVAL; import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.DETECTION_WINDOW_DELAY; import static com.amazon.opendistroforelasticsearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_DETECTORS; @@ -84,7 +85,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli String detectorId = AnomalyDetector.NO_ID; XContentParser parser = request.contentParser(); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); - AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null); + AnomalyDetector detector = AnomalyDetector.parseValidation(parser, detectorId, null, DEFAULT_SHINGLE_SIZE); return channel -> new ValidateAnomalyDetectorActionHandler( client, diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 1bf7118c..b0c50c9f 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -64,6 +64,7 @@ import 
com.amazon.opendistroforelasticsearch.ad.indices.AnomalyDetectionIndices; import com.amazon.opendistroforelasticsearch.ad.model.AnomalyDetector; +import com.amazon.opendistroforelasticsearch.ad.model.DateTimeRange; import com.amazon.opendistroforelasticsearch.ad.model.Feature; import com.amazon.opendistroforelasticsearch.ad.model.IntervalTimeConfiguration; import com.amazon.opendistroforelasticsearch.ad.model.TimeConfiguration; @@ -86,8 +87,8 @@ public class ValidateAnomalyDetectorActionHandler extends AbstractActionHandler protected static final double SAMPLE_SUCCESS_RATE = 0.75; protected static final int FEATURE_VALIDATION_TIME_BACK_MINUTES = 10080; protected static final int NUM_OF_INTERVALS_CHECKED_FILTER = 384; - protected static final long MAX_INTERVAL_LENGTH = 2592000000L; - protected static final long HISTORICAL_CHECK_IN_MS = 7776000000L; + protected static final long MAX_INTERVAL_LENGTH = (30L * 24 * 60 * 60 * 1000); + protected static final long HISTORICAL_CHECK_IN_MS = (90L * 24 * 60 * 60 * 1000); protected static final String NAME_REGEX = "[a-zA-Z0-9._-]+"; protected static final double INTERVAL_RECOMMENDATION_MULTIPLIER = 1.2; protected static final String[] numericType = { "long", "integer", "short", "double", "float" }; @@ -146,11 +147,8 @@ public ValidateAnomalyDetectorActionHandler( * @throws IOException IOException from {@link AnomalyDetectionIndices#initAnomalyDetectorIndexIfAbsent(ActionListener)} */ public void startValidation() throws IOException { - if (!anomalyDetectionIndices.doesAnomalyDetectorIndexExist()) { - preDataValidationSteps(false); - } else { - preDataValidationSteps(true); - } + boolean indexExists = anomalyDetectionIndices.doesAnomalyDetectorIndexExist(); + preDataValidationSteps(indexExists); } private void preDataValidationSteps(boolean indexExists) { @@ -202,7 +200,7 @@ private void validateNumberOfDetectors() { SearchRequest searchRequest = new SearchRequest(ANOMALY_DETECTORS_INDEX).source(searchSourceBuilder); 
client.search(searchRequest, ActionListener.wrap(response -> onSearchAdResponse(response), exception -> onFailure(exception))); } catch (Exception e) { - logger.warn("Failed to create search request for validation", e); + logger.error("Failed to create search request for validation", e); onFailure(e); } } @@ -228,7 +226,7 @@ private void searchAdInputIndices(boolean indexExists) { searchRequest, ActionListener.wrap(searchResponse -> onSearchAdInputIndicesResponse(searchResponse, indexExists), exception -> { onFailure(exception); - logger.warn("Failed to create search request for validation", exception); + logger.error("Failed to create search request for validation", exception); }) ); } @@ -256,7 +254,7 @@ private void checkADNameExists() { searchRequest, ActionListener.wrap(searchResponse -> onSearchADNameResponse(searchResponse, anomalyDetector.getName()), exception -> { onFailure(exception); - logger.warn("Failed to create search request for validation", exception); + logger.error("Failed to create search request for validation", exception); }) ); } @@ -278,7 +276,7 @@ public void checkForHistoricalData() { SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client.search(searchRequest, ActionListener.wrap(response -> checkIfAnyHistoricalData(getLatestDataTime(response)), exception -> { onFailure(exception); - logger.warn("Failed to create search request for validation", exception); + logger.error("Failed to create search request for validation", exception); })); } @@ -297,18 +295,16 @@ private Long timeConfigToMilliSec(TimeConfiguration config) { return Optional.ofNullable((IntervalTimeConfiguration) config).map(t -> t.toDuration().toMillis()).orElse(0L); } - private long[] startEndTimeRangeWithIntervals(int numOfIntervals) { + private DateTimeRange startEndTimeRangeWithIntervals(int numOfIntervals) { long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); - long 
dataEndTime = Instant.now().toEpochMilli() - delayMillis; long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); - long dataStartTime = dataEndTime - ((long) (numOfIntervals) * detectorInterval); - return new long[] { dataStartTime, dataEndTime }; + return DateTimeRange.rangeBasedOfInterval(delayMillis, detectorInterval, numOfIntervals); } private void queryFilterValidation() { - long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED_FILTER); - long dataEndTime = startEnd[1]; - long dataStartTime = startEnd[0]; + DateTimeRange timeRange = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED_FILTER); + long dataStartTime = timeRange.getStart(); + long dataEndTime = timeRange.getEnd(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) .from(dataStartTime) .to(dataEndTime) @@ -322,7 +318,7 @@ private void queryFilterValidation() { SearchRequest searchRequest = new SearchRequest(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client.search(searchRequest, ActionListener.wrap(searchResponse -> onQueryFilterSearch(searchResponse), exception -> { onFailure(exception); - logger.warn("Failed to create data query search request for validation", exception); + logger.error("Failed to create data query search request for validation", exception); })); } @@ -419,7 +415,7 @@ private void getFieldMapping(Map featureToAgg) { ActionListener .wrap(response -> checkFieldIndex(response, featureToAgg.values().toArray(new String[0]), featureToAgg), exception -> { onFailure(exception); - logger.warn("Failed to get field mapping for validation", exception); + logger.error("Failed to get field mapping for validation", exception); }) ); } @@ -453,26 +449,24 @@ private void checkFieldIndex(GetFieldMappingsResponse response, String[] fields, } } - private long[] getFeatureQueryValidationDateRange() { + private DateTimeRange getFeatureQueryValidationDateRange() { long delayMillis = 
timeConfigToMilliSec(anomalyDetector.getWindowDelay()); - long[] startEnd = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); - long dataEndTime = startEnd[1]; - long dataStartTime = startEnd[0]; + DateTimeRange timeRange = startEndTimeRangeWithIntervals(NUM_OF_INTERVALS_CHECKED); IntervalTimeConfiguration searchRange = new IntervalTimeConfiguration(FEATURE_VALIDATION_TIME_BACK_MINUTES, ChronoUnit.MINUTES); long searchRangeTime = Optional.ofNullable(searchRange).map(t -> t.toDuration().toMillis()).orElse(0L); - long startTimeWithSetTime = startEnd[1] - (searchRangeTime - delayMillis); - if (startEnd[0] > startTimeWithSetTime) { - dataStartTime = startTimeWithSetTime; + long startTimeWithSetTime = timeRange.getEnd() - (searchRangeTime - delayMillis); + if (timeRange.getStart() > startTimeWithSetTime) { + timeRange.setStart(startTimeWithSetTime); } - return new long[] { dataStartTime, dataEndTime }; + return timeRange; } private void featureQueryValidation() throws IOException { - long[] startEnd = getFeatureQueryValidationDateRange(); + DateTimeRange timeRange = getFeatureQueryValidationDateRange(); AtomicInteger featureCounter = new AtomicInteger(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) - .from(startEnd[0]) - .to(startEnd[1]) + .from(timeRange.getStart()) + .to(timeRange.getEnd()) .format("epoch_millis") .includeLower(true) .includeUpper(false); @@ -496,7 +490,7 @@ private void featureQueryValidation() throws IOException { onFeatureAggregationValidation(searchResponse, feature, featureCounter); }, exception -> { onFailure(exception); - logger.warn("Failed to create feature search request for validation", exception); + logger.error("Failed to create feature search request for validation", exception); })); } } @@ -544,7 +538,7 @@ private synchronized void intervalValidation() { wait(); } catch (Exception ex) { onFailure(ex); - logger.warn(ex); + logger.error(ex); } } } @@ -593,7 +587,7 @@ private void 
verifyWithInterval(long[][] timeRanges, long detectorInterval) thro } }, exception -> { onFailure(exception); - logger.warn("Failed to create multi search request for validation", exception); + logger.error("Failed to create multi search request for validation", exception); })); } @@ -623,8 +617,6 @@ private synchronized boolean doneInferring(long detectorInterval, MultiSearchRes suggestedChangesMap .computeIfAbsent(ValidationSuggestedChanges.DETECTION_INTERVAL.getName(), k -> new ArrayList<>()) .add(Long.toString(detectorInterval)); - inferAgain.set(false); - return true; } inferAgain.set(false); return true; @@ -638,7 +630,7 @@ private void checkWindowDelay() { SearchRequest searchRequest = new SearchRequest().indices(anomalyDetector.getIndices().get(0)).source(searchSourceBuilder); client.search(searchRequest, ActionListener.wrap(response -> checkDelayResponse(getLatestDataTime(response)), exception -> { onFailure(exception); - logger.warn("Failed to create search request for last data point", exception); + logger.error("Failed to create search request for last data point", exception); })); } From 4a523f84cdea106af6c0aeb186c27e067d3c3f26 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 1 Sep 2020 17:40:12 -0700 Subject: [PATCH 16/20] creating new series of timerange with the DateTimeRange class --- .../ValidateAnomalyDetectorActionHandler.java | 27 +++++++++++++++---- 1 file changed, 22 insertions(+), 5 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index b0c50c9f..fc38af40 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -23,6 +23,7 @@ import java.time.temporal.ChronoUnit; 
import java.util.ArrayList; import java.util.Arrays; +import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -514,7 +515,8 @@ private void onFeatureAggregationValidation(SearchResponse response, Feature fea // creates a new 2D array of time ranges based of a different detector interval inorder to validate // detector interval with a new range every time. Creates 128 new interval time ranges - private long[][] createNewTimeRange(long detectorInterval) { + private long[][] createNewTimeRangeObject(long detectorInterval) { + createNewTimeRangeObject(detectorInterval); long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); long dataEndTime = Instant.now().toEpochMilli() - delayMillis; @@ -526,11 +528,26 @@ private long[][] createNewTimeRange(long detectorInterval) { return timeRanges; } + // creates a new 2D array of time ranges based of a different detector interval inorder to validate + // detector interval with a new range every time. 
Creates 128 new interval time ranges + private DateTimeRange[] createNewTimeRange(long detectorInterval) { + DateTimeRange timeRanges[] = new DateTimeRange[MAX_NUM_OF_SAMPLES_VIEWED]; + long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); + long dataEndTime = Instant.now().toEpochMilli() - delayMillis; + long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval); + for (int i = 0; i < MAX_NUM_OF_SAMPLES_VIEWED; i++) { + long newStartTime = dataStartTime + (i * detectorInterval); + long newEndTime = newStartTime + detectorInterval; + timeRanges[i] = new DateTimeRange(newStartTime, newEndTime); + } + return timeRanges; + } + private synchronized void intervalValidation() { long detectorInterval = timeConfigToMilliSec(anomalyDetector.getDetectionInterval()); for (long inferredDetectorInterval = detectorInterval; inferredDetectorInterval <= MAX_INTERVAL_LENGTH; inferredDetectorInterval *= INTERVAL_RECOMMENDATION_MULTIPLIER) { - long timeRanges[][] = createNewTimeRange(inferredDetectorInterval); + DateTimeRange timeRanges[] = createNewTimeRange(inferredDetectorInterval); try { if (inferAgain.get()) { verifyWithInterval(timeRanges, inferredDetectorInterval); @@ -561,13 +578,13 @@ private List getFeatureFieldNames() throws IOException { return featureFields; } - private void verifyWithInterval(long[][] timeRanges, long detectorInterval) throws IOException { + private void verifyWithInterval(DateTimeRange[] timeRanges, long detectorInterval) throws IOException { inferAgain.set(false); List featureFields = getFeatureFieldNames(); MultiSearchRequest sr = new MultiSearchRequest(); for (int i = 0; i < NUM_OF_INTERVAL_SAMPLES; i++) { - long rangeStart = timeRanges[i][0]; - long rangeEnd = timeRanges[i][1]; + long rangeStart = timeRanges[i].getStart(); + long rangeEnd = timeRanges[i].getEnd(); RangeQueryBuilder rangeQuery = new RangeQueryBuilder(anomalyDetector.getTimeField()) .from(rangeStart) .to(rangeEnd) From 
a2e56dafa9ac96c0b31855c795a8fe35a912f861 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 1 Sep 2020 17:40:29 -0700 Subject: [PATCH 17/20] creating new series of timerange with the DateTimeRange class --- .../ValidateAnomalyDetectorActionHandler.java | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index fc38af40..4ba2f08e 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -513,21 +513,6 @@ private void onFeatureAggregationValidation(SearchResponse response, Feature fea } } - // creates a new 2D array of time ranges based of a different detector interval inorder to validate - // detector interval with a new range every time. Creates 128 new interval time ranges - private long[][] createNewTimeRangeObject(long detectorInterval) { - createNewTimeRangeObject(detectorInterval); - long timeRanges[][] = new long[MAX_NUM_OF_SAMPLES_VIEWED][2]; - long delayMillis = timeConfigToMilliSec(anomalyDetector.getWindowDelay()); - long dataEndTime = Instant.now().toEpochMilli() - delayMillis; - long dataStartTime = dataEndTime - ((long) (MAX_NUM_OF_SAMPLES_VIEWED) * detectorInterval); - for (int j = 0; j < MAX_NUM_OF_SAMPLES_VIEWED; j++) { - timeRanges[j][0] = dataStartTime + (j * detectorInterval); - timeRanges[j][1] = timeRanges[j][0] + detectorInterval; - } - return timeRanges; - } - // creates a new 2D array of time ranges based of a different detector interval inorder to validate // detector interval with a new range every time. 
Creates 128 new interval time ranges private DateTimeRange[] createNewTimeRange(long detectorInterval) { From 143e5d893ffcbf66eda4b2924261af40240a890e Mon Sep 17 00:00:00 2001 From: Galitzky Date: Tue, 1 Sep 2020 17:41:11 -0700 Subject: [PATCH 18/20] style fix --- .../ad/rest/handler/ValidateAnomalyDetectorActionHandler.java | 1 - 1 file changed, 1 deletion(-) diff --git a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 4ba2f08e..644196e2 100644 --- a/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/com/amazon/opendistroforelasticsearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -23,7 +23,6 @@ import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Arrays; -import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; From ef0bddfbcb979cc49ee343d2bcddad55d813f468 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Thu, 3 Sep 2020 17:32:07 -0700 Subject: [PATCH 19/20] more unit tests added for models --- .../ad/model/AnomalyDetectorTests.java | 68 +++++++++++++++++++ .../ad/model/TestDateTimeRange.java | 27 ++++++++ .../ad/model/ValidationTests.java | 39 +++++++++++ 3 files changed, 134 insertions(+) create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java create mode 100644 src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java index da3ddcd9..072f96cc 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java +++ 
b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java @@ -18,7 +18,11 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; import java.util.Locale; +import java.util.Map; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.ToXContent; @@ -68,6 +72,21 @@ public void testParseAnomalyDetectorWithEmptyFilterQuery() throws IOException { assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); } + public void testParseValidationAnomalyDetectorWithEmptyFilterQuery() throws IOException { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\":" + + "true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"filter_query\":{}," + + "\"detection_interval\":{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}},\"window_delay\":" + + "{\"period\":{\"interval\":973,\"unit\":\"Minutes\"}},\"shingle_size\":4,\"schema_version\":-1203962153,\"ui_metadata\":" + + "{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\",\"feature_enabled\":false," + + "\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}}," + + "\"last_update_time\":1568396089028}"; + AnomalyDetector parsedDetector = AnomalyDetector + .parse(TestHelpers.parser(detectorString), "id", 1L, null, null, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); + } + public void testParseAnomalyDetectorWithWrongFilterQuery() throws Exception { String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + 
"\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," @@ -95,6 +114,38 @@ public void testParseAnomalyDetectorWithoutOptionalParams() throws IOException { assertEquals((long) parsedDetector.getShingleSize(), (long) AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); } + public void testParseValidationAnomalyDetector() throws IOException { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\"" + + ":true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"detection_interval\":" + + "{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}},\"schema_version\":-1203962153,\"ui_metadata\":" + + "{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\",\"feature_enabled\":false," + + "\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}},\"last_update_time\":1568396089028}"; + AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertTrue(parsedDetector.getFilterQuery() instanceof MatchAllQueryBuilder); + assertEquals((long) parsedDetector.getShingleSize(), (long) AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + } + + public void testParseValidationAnomalyDetectorWithWrongFilterQuery() throws Exception { + String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," + + "\"feature_attributes\":[{\"feature_id\":\"lxYRN\",\"feature_name\":\"eqSeU\",\"feature_enabled\":" + + "true,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}],\"filter_query\":" + + 
"{\"aa\":\"bb\"},\"detection_interval\":{\"period\":{\"interval\":425,\"unit\":\"Minutes\"}}," + + "\"window_delay\":{\"period\":{\"interval\":973,\"unit\":\"Minutes\"}},\"shingle_size\":8,\"schema_version\":" + + "-1203962153,\"ui_metadata\":{\"JbAaV\":{\"feature_id\":\"rIFjS\",\"feature_name\":\"QXCmS\"," + + "\"feature_enabled\":false,\"aggregation_query\":{\"aa\":{\"value_count\":{\"field\":\"ok\"}}}}}," + + "\"last_update_time\":1568396089028}"; + TestHelpers + .assertFailWith( + ParsingException.class, + () -> AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE) + ); + } + public void testParseAnomalyDetectorWithInvalidShingleSize() throws Exception { String detectorString = "{\"name\":\"todagtCMkwpcaedpyYUM\",\"description\":" + "\"ClrcaMpuLfeDSlVduRcKlqPZyqWDBf\",\"time_field\":\"dJRwh\",\"indices\":[\"eIrgWMqAED\"]," @@ -114,6 +165,23 @@ public void testParseAnomalyDetectorWithNullUiMetadata() throws IOException { assertNull(parsedDetector.getUiMetadata()); } + public void testParseValidationAnomalyDetectorWithNullUiMetadata() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(null, Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + assertNull(parsedDetector.getUiMetadata()); + } + + public void testParseValidationAnomalyDetectorWithEmptyUiMetadata() throws IOException { + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), Instant.now()); + String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + 
AnomalyDetector parsedDetector = AnomalyDetector + .parseValidation(TestHelpers.parser(detectorString), "id", 1L, AnomalyDetectorSettings.DEFAULT_SHINGLE_SIZE); + assertEquals("Parsing anomaly detector doesn't work", detector, parsedDetector); + } + public void testParseAnomalyDetectorWithEmptyUiMetadata() throws IOException { AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), Instant.now()); String detectorString = TestHelpers.xContentBuilderToString(detector.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java new file mode 100644 index 00000000..c8707c45 --- /dev/null +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java @@ -0,0 +1,27 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +import org.elasticsearch.test.ESTestCase; + +public class TestDateTimeRange extends ESTestCase { + public void testDateTimeRangeGetStart() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + assertEquals(10, timeRange.getStart()); + } + + public void testDateTimeRangeGetEnd() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + assertEquals(20, timeRange.getEnd()); + } + + public void testDateTimeRangeSetEnd() { + DateTimeRange timeRange = new DateTimeRange(10, 20); + timeRange.setEnd(50); + assertEquals(50, timeRange.getEnd()); + } + + public void testDateTimeRangeSetStart() { + DateTimeRange timeRange = DateTimeRange.rangeBasedOfInterval(0, 20, 2); + timeRange.setStart(10); + assertEquals(10, timeRange.getStart()); + } +} diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java new file mode 100644 index 00000000..5a01857c --- /dev/null +++ 
b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java @@ -0,0 +1,39 @@ +package com.amazon.opendistroforelasticsearch.ad.model; + +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ValidationTests extends ESTestCase { + + public void testValidationSuggestedChanges() { + assertEquals("others", ValidationSuggestedChanges.OTHERS.getName()); + } + + public void testValidationFailures() { + assertEquals("missing", ValidationFailures.MISSING.getName()); + } + + public void testValidationResponse() throws IOException { + Map> failuresMap = new HashMap<>(); + Map> suggestedChanges = new HashMap<>(); + failuresMap.put("missing", Arrays.asList("name")); + suggestedChanges.put("detection_interval", Arrays.asList("200000")); + ValidateResponse responseValidate = new ValidateResponse(); + responseValidate.setFailures(failuresMap); + responseValidate.setSuggestedChanges(suggestedChanges); + String validation = TestHelpers + .xContentBuilderToString(responseValidate.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + System.out.println(validation); + assertEquals("{\"failures\":{\"missing\":[\"name\"]}," + "\"suggestedChanges\":{\"detection_interval\":[\"200000\"]}}", validation); + assertEquals(failuresMap, responseValidate.getFailures()); + assertEquals(suggestedChanges, responseValidate.getSuggestedChanges()); + } + +} From e9fc63ad9fee91b27f8d52b1ee2429c67ee0ff38 Mon Sep 17 00:00:00 2001 From: Galitzky Date: Thu, 3 Sep 2020 17:40:26 -0700 Subject: [PATCH 20/20] style fix --- .../ad/model/AnomalyDetectorTests.java | 4 --- .../ad/model/TestDateTimeRange.java | 15 +++++++++++ .../ad/model/ValidationTests.java | 26 +++++++++++++++---- 3 files changed, 36 insertions(+), 9 deletions(-) diff 
--git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java index 072f96cc..7c8f7e6c 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/AnomalyDetectorTests.java @@ -18,11 +18,7 @@ import java.io.IOException; import java.time.Instant; import java.time.temporal.ChronoUnit; -import java.util.Arrays; -import java.util.HashMap; -import java.util.List; import java.util.Locale; -import java.util.Map; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.ToXContent; diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java index c8707c45..921a0ada 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/TestDateTimeRange.java @@ -1,3 +1,18 @@ +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + package com.amazon.opendistroforelasticsearch.ad.model; import org.elasticsearch.test.ESTestCase; diff --git a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java index 5a01857c..eb5f0638 100644 --- a/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java +++ b/src/test/java/com/amazon/opendistroforelasticsearch/ad/model/ValidationTests.java @@ -1,8 +1,19 @@ -package com.amazon.opendistroforelasticsearch.ad.model; +/* + * Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ -import com.amazon.opendistroforelasticsearch.ad.TestHelpers; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.test.ESTestCase; +package com.amazon.opendistroforelasticsearch.ad.model; import java.io.IOException; import java.util.Arrays; @@ -10,6 +21,11 @@ import java.util.List; import java.util.Map; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.test.ESTestCase; + +import com.amazon.opendistroforelasticsearch.ad.TestHelpers; + public class ValidationTests extends ESTestCase { public void testValidationSuggestedChanges() { @@ -29,7 +45,7 @@ public void testValidationResponse() throws IOException { responseValidate.setFailures(failuresMap); responseValidate.setSuggestedChanges(suggestedChanges); String validation = TestHelpers - .xContentBuilderToString(responseValidate.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); + .xContentBuilderToString(responseValidate.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); System.out.println(validation); assertEquals("{\"failures\":{\"missing\":[\"name\"]}," + "\"suggestedChanges\":{\"detection_interval\":[\"200000\"]}}", validation); assertEquals(failuresMap, responseValidate.getFailures());