@@ -15,9 +15,14 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.set.Sets;
+import org.elasticsearch.core.Tuple;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingCapacity;
+import org.elasticsearch.xpack.ml.autoscaling.NativeMemoryCapacity;

+import java.util.Arrays;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.OptionalLong;
import java.util.function.BiConsumer;
@@ -27,6 +32,7 @@ import static org.elasticsearch.xpack.ml.MachineLearning.MAX_JVM_SIZE_NODE_ATTR;
import static org.elasticsearch.xpack.ml.MachineLearning.MAX_MACHINE_MEMORY_PERCENT;
import static org.elasticsearch.xpack.ml.MachineLearning.USE_AUTO_MACHINE_MEMORY_PERCENT;
import static org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator.MINIMUM_AUTOMATIC_NODE_SIZE;
+import static org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator.dynamicallyCalculateJvmSizeFromNodeSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;

@@ -49,6 +55,57 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
        }
    }

+    public void testConsistencyInAutoCalculation() {
+        for (Tuple<Long, Long> nodeAndJvmSize : Arrays.asList(
+            Tuple.tuple(1073741824L, 432013312L), // 1GB and true JVM size
+            Tuple.tuple(2147483648L, 536870912L), // 2GB ...
+            Tuple.tuple(4294967296L, 1073741824L), // 4GB ...
+            Tuple.tuple(8589934592L, 2147483648L), // 8GB ...
+            Tuple.tuple(17179869184L, 2147483648L), // 16GB ...
+            Tuple.tuple(34359738368L, 2147483648L), // 32GB ...
+            Tuple.tuple(68719476736L, 2147483648L), // 64GB ...
+            Tuple.tuple(16106127360L, 2147483648L), // 15GB ...
+            Tuple.tuple(32212254720L, 2147483648L), // 30GB ...
+            Tuple.tuple(64424509440L, 2147483648L) // 60GB ...
+        )) {
+            final long trueJvmSize = nodeAndJvmSize.v2();
+            final long trueNodeSize = nodeAndJvmSize.v1();
+            List<Long> nodeSizes = Arrays.asList(
+                trueNodeSize + ByteSizeValue.ofMb(10).getBytes(),
+                trueNodeSize - ByteSizeValue.ofMb(10).getBytes(),
+                trueNodeSize
+            );
+            for (long nodeSize : nodeSizes) {
+                // Simulate the JVM size either being supplied by the node itself or being dynamically calculated by us
+                long jvmSize = randomBoolean() ? dynamicallyCalculateJvmSizeFromNodeSize(nodeSize) : trueJvmSize;
+                DiscoveryNode node = newNode(jvmSize, nodeSize);
+                Settings settings = newSettings(30, true);
+                ClusterSettings clusterSettings = newClusterSettings(30, true);
+
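+                // The Settings and ClusterSettings overloads should agree, so it should not matter which one we consult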
+                long bytesForML = randomBoolean() ?
+                    NativeMemoryCalculator.allowedBytesForMl(node, settings).getAsLong() :
+                    NativeMemoryCalculator.allowedBytesForMl(node, clusterSettings).getAsLong();
+
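+                // Tier capacity and node capacity are deliberately the same bytes here, modelling a single-node tier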
+                NativeMemoryCapacity nativeMemoryCapacity = new NativeMemoryCapacity(
+                    bytesForML,
+                    bytesForML,
+                    jvmSize
+                );
+
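+                // Same 30% max memory and auto flag as the settings built above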
+                AutoscalingCapacity capacity = nativeMemoryCapacity.autoscalingCapacity(30, true);
+                // We don't allow node sizes below 1GB, so we will always be at least that large
+                // Also, allow 1 byte off for weird rounding issues
+                assertThat(capacity.node().memory().getBytes(), greaterThanOrEqualTo(
+                    Math.max(nodeSize, ByteSizeValue.ofGb(1).getBytes()) - 1L));
+                assertThat(capacity.total().memory().getBytes(), greaterThanOrEqualTo(
+                    Math.max(nodeSize, ByteSizeValue.ofGb(1).getBytes()) - 1L));
+            }
+        }
+    }
+
    public void testAllowedBytesForMlWhenAutoIsTrue() {
        for (int i = 0; i < NUM_TEST_RUNS; i++) {
            long nodeSize = randomLongBetween(ByteSizeValue.ofMb(500).getBytes(), ByteSizeValue.ofGb(64).getBytes());
@@ -58,10 +115,11 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
            Settings settings = newSettings(percent, true);
            ClusterSettings clusterSettings = newClusterSettings(percent, true);

-            int truePercent = Math.min(
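+            // truePercent stays a double so rounding happens only once, via Math.round on the final byte count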
+            double truePercent = Math.min(
                90,
-                (int)Math.ceil(((nodeSize - jvmSize - ByteSizeValue.ofMb(200).getBytes()) / (double)nodeSize) * 100.0D));
-            long expected = (long)(nodeSize * (truePercent / 100.0));
+                ((nodeSize - jvmSize - ByteSizeValue.ofMb(200).getBytes()) / (double)nodeSize) * 100.0D);
+            long expected = Math.round(nodeSize * (truePercent / 100.0));

            assertThat(NativeMemoryCalculator.allowedBytesForMl(node, settings).getAsLong(), equalTo(expected));
            assertThat(NativeMemoryCalculator.allowedBytesForMl(node, clusterSettings).getAsLong(), equalTo(expected));
@@ -69,20 +127,6 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
        }
    }

-    public void testAllowedBytesForMlWhenAutoIsTrueButJVMSizeIsUnknown() {
-        long nodeSize = randomLongBetween(ByteSizeValue.ofMb(500).getBytes(), ByteSizeValue.ofGb(64).getBytes());
-        int percent = randomIntBetween(5, 200);
-        DiscoveryNode node = newNode(null, nodeSize);
-        Settings settings = newSettings(percent, true);
-        ClusterSettings clusterSettings = newClusterSettings(percent, true);
-
-        long expected = (long)(nodeSize * (percent / 100.0));
-
-        assertThat(NativeMemoryCalculator.allowedBytesForMl(node, settings).getAsLong(), equalTo(expected));
-        assertThat(NativeMemoryCalculator.allowedBytesForMl(node, clusterSettings).getAsLong(), equalTo(expected));
-        assertThat(NativeMemoryCalculator.allowedBytesForMl(node, percent, false).getAsLong(), equalTo(expected));
-    }
-
    public void testAllowedBytesForMlWhenBothJVMAndNodeSizeAreUnknown() {
        int percent = randomIntBetween(5, 200);
        DiscoveryNode node = newNode(null, null);
@@ -110,7 +154,6 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
    }

    public void testActualNodeSizeCalculationConsistency() {
-
        final TriConsumer<Long, Integer, Long> consistentAutoAssertions = (nativeMemory, memoryPercentage, delta) -> {
            long autoNodeSize = NativeMemoryCalculator.calculateApproxNecessaryNodeSize(nativeMemory, null, memoryPercentage, true);
            // It should always be greater than the minimum supported node size
@@ -119,12 +162,13 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
                greaterThanOrEqualTo(MINIMUM_AUTOMATIC_NODE_SIZE));
            // Our approximate real node size should always return a usable native memory size that is at least the original native memory
            // size. Rounding errors may cause it to be non-exact.
+            long allowedBytesForMl = NativeMemoryCalculator.allowedBytesForMl(autoNodeSize, memoryPercentage, true);
            assertThat("native memory ["
-                + NativeMemoryCalculator.allowedBytesForMl(autoNodeSize, memoryPercentage, true)
+                + allowedBytesForMl
                + "] smaller than original native memory ["
                + nativeMemory
                + "]",
-                NativeMemoryCalculator.allowedBytesForMl(autoNodeSize, memoryPercentage, true),
+                allowedBytesForMl,
                greaterThanOrEqualTo(nativeMemory - delta));
        };

@@ -155,18 +199,18 @@ public class NativeMemoryCalculatorTests extends ESTestCase{
        int memoryPercentage = randomIntBetween(5, 200);
        { // tiny memory
            long nodeMemory = randomLongBetween(ByteSizeValue.ofKb(100).getBytes(), ByteSizeValue.ofMb(500).getBytes());
-            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, 0L);
+            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, 1L);
            consistentManualAssertions.accept(nodeMemory, memoryPercentage);
        }
        { // normal-ish memory
            long nodeMemory = randomLongBetween(ByteSizeValue.ofMb(500).getBytes(), ByteSizeValue.ofGb(4).getBytes());
-            // periodically, the calculated assertions end up being about 6% off, allowing this small delta to account for flakiness
-            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, (long) (0.06 * nodeMemory));
+            // allow 1 byte of slack to account for rounding
+            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, 1L);
            consistentManualAssertions.accept(nodeMemory, memoryPercentage);
        }
        { // huge memory
            long nodeMemory = randomLongBetween(ByteSizeValue.ofGb(30).getBytes(), ByteSizeValue.ofGb(60).getBytes());
-            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, 0L);
+            consistentAutoAssertions.apply(nodeMemory, memoryPercentage, 1L);
            consistentManualAssertions.accept(nodeMemory, memoryPercentage);
        }
    }