|
@@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.ml.job.config.Detector;
|
|
|
import org.elasticsearch.xpack.core.ml.job.config.Job;
|
|
|
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
|
|
|
import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils;
|
|
|
+import org.elasticsearch.Version;
|
|
|
|
|
|
import java.io.IOException;
|
|
|
import java.util.ArrayList;
|
|
@@ -44,6 +45,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
* Result fields (all detector types)
|
|
|
*/
|
|
|
public static final ParseField PROBABILITY = new ParseField("probability");
|
|
|
+ public static final ParseField MULTI_BUCKET_IMPACT = new ParseField("multi_bucket_impact");
|
|
|
public static final ParseField BY_FIELD_NAME = new ParseField("by_field_name");
|
|
|
public static final ParseField BY_FIELD_VALUE = new ParseField("by_field_value");
|
|
|
public static final ParseField CORRELATED_BY_FIELD_VALUE = new ParseField("correlated_by_field_value");
|
|
@@ -100,6 +102,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
parser.declareLong(ConstructingObjectParser.constructorArg(), BUCKET_SPAN);
|
|
|
parser.declareString((anomalyRecord, s) -> {}, Result.RESULT_TYPE);
|
|
|
parser.declareDouble(AnomalyRecord::setProbability, PROBABILITY);
|
|
|
+ parser.declareDouble(AnomalyRecord::setMultiBucketImpact, MULTI_BUCKET_IMPACT);
|
|
|
parser.declareDouble(AnomalyRecord::setRecordScore, RECORD_SCORE);
|
|
|
parser.declareDouble(AnomalyRecord::setInitialRecordScore, INITIAL_RECORD_SCORE);
|
|
|
parser.declareInt(AnomalyRecord::setDetectorIndex, Detector.DETECTOR_INDEX);
|
|
@@ -127,6 +130,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
private final String jobId;
|
|
|
private int detectorIndex;
|
|
|
private double probability;
|
|
|
+ private Double multiBucketImpact;
|
|
|
private String byFieldName;
|
|
|
private String byFieldValue;
|
|
|
private String correlatedByFieldValue;
|
|
@@ -164,6 +168,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
jobId = in.readString();
|
|
|
detectorIndex = in.readInt();
|
|
|
probability = in.readDouble();
|
|
|
+ if (in.getVersion().onOrAfter(Version.V_6_5_0)) {
|
|
|
+ multiBucketImpact = in.readOptionalDouble();
|
|
|
+ }
|
|
|
byFieldName = in.readOptionalString();
|
|
|
byFieldValue = in.readOptionalString();
|
|
|
correlatedByFieldValue = in.readOptionalString();
|
|
@@ -198,6 +205,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
out.writeString(jobId);
|
|
|
out.writeInt(detectorIndex);
|
|
|
out.writeDouble(probability);
|
|
|
+ if (out.getVersion().onOrAfter(Version.V_6_5_0)) {
|
|
|
+ out.writeOptionalDouble(multiBucketImpact);
|
|
|
+ }
|
|
|
out.writeOptionalString(byFieldName);
|
|
|
out.writeOptionalString(byFieldValue);
|
|
|
out.writeOptionalString(correlatedByFieldValue);
|
|
@@ -247,6 +257,9 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
builder.field(Job.ID.getPreferredName(), jobId);
|
|
|
builder.field(Result.RESULT_TYPE.getPreferredName(), RESULT_TYPE_VALUE);
|
|
|
builder.field(PROBABILITY.getPreferredName(), probability);
|
|
|
+ if (multiBucketImpact != null) {
|
|
|
+ builder.field(MULTI_BUCKET_IMPACT.getPreferredName(), multiBucketImpact);
|
|
|
+ }
|
|
|
builder.field(RECORD_SCORE.getPreferredName(), recordScore);
|
|
|
builder.field(INITIAL_RECORD_SCORE.getPreferredName(), initialRecordScore);
|
|
|
builder.field(BUCKET_SPAN.getPreferredName(), bucketSpan);
|
|
@@ -389,6 +402,14 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
probability = value;
|
|
|
}
|
|
|
|
|
|
+    /**
+     * Multi-bucket impact of this anomaly, or {@code null} when not available
+     * (optional in parsed results; absent on streams from pre-6.5.0 nodes).
+     * Declared as boxed {@code Double} to match the nullable field — a
+     * primitive {@code double} return would NPE on auto-unboxing when unset.
+     */
+    public Double getMultiBucketImpact() {
+        return multiBucketImpact;
+    }
+
+    public void setMultiBucketImpact(double value) {
+        multiBucketImpact = value;
+    }
+
|
|
|
public String getByFieldName() {
|
|
|
return byFieldName;
|
|
|
}
|
|
@@ -519,7 +540,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
|
|
|
@Override
|
|
|
public int hashCode() {
|
|
|
- return Objects.hash(jobId, detectorIndex, bucketSpan, probability, recordScore,
|
|
|
+ return Objects.hash(jobId, detectorIndex, bucketSpan, probability, multiBucketImpact, recordScore,
|
|
|
initialRecordScore, typical, actual,function, functionDescription, fieldName,
|
|
|
byFieldName, byFieldValue, correlatedByFieldValue, partitionFieldName,
|
|
|
partitionFieldValue, overFieldName, overFieldValue, timestamp, isInterim,
|
|
@@ -543,6 +564,7 @@ public class AnomalyRecord implements ToXContentObject, Writeable {
|
|
|
&& this.detectorIndex == that.detectorIndex
|
|
|
&& this.bucketSpan == that.bucketSpan
|
|
|
&& this.probability == that.probability
|
|
|
+ && Objects.equals(this.multiBucketImpact, that.multiBucketImpact)
|
|
|
&& this.recordScore == that.recordScore
|
|
|
&& this.initialRecordScore == that.initialRecordScore
|
|
|
&& Objects.deepEquals(this.typical, that.typical)
|