@@ -12,6 +12,7 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.Metadata;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
+import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.collect.MapBuilder;
 import org.elasticsearch.core.Tuple;
 import org.elasticsearch.common.transport.TransportAddress;
@@ -34,9 +35,12 @@ import org.elasticsearch.xpack.ml.support.BaseMlIntegTestCase;
 import org.junit.Before;
 
 import java.net.InetAddress;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
@@ -107,7 +111,11 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", JOB_MEMORY_REQUIREMENT).build(new Date());
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
             2,
@@ -133,7 +141,10 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         String dataFrameAnalyticsId = "data_frame_analytics_id1000";
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), dataFrameAnalyticsId,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            dataFrameAnalyticsId,
             MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, memoryTracker, 0,
             node -> TransportStartDataFrameAnalyticsAction.TaskExecutor.nodeFilter(node, createTaskParams(dataFrameAnalyticsId)));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
@@ -166,7 +177,11 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", JOB_MEMORY_REQUIREMENT).build(new Date());
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
             2,
@@ -195,7 +210,10 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         String dataFrameAnalyticsId = "data_frame_analytics_id_new";
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), dataFrameAnalyticsId,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            dataFrameAnalyticsId,
             MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, memoryTracker, 0,
             node -> TransportStartDataFrameAnalyticsAction.TaskExecutor.nodeFilter(node, createTaskParams(dataFrameAnalyticsId)));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
@@ -221,7 +239,11 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", JOB_MEMORY_REQUIREMENT).build(new Date());
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
             2,
@@ -255,7 +277,10 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         String dataFrameAnalyticsId = "data_frame_analytics_id1000";
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), dataFrameAnalyticsId,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            dataFrameAnalyticsId,
             MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, memoryTracker, 0,
             node -> TransportStartDataFrameAnalyticsAction.TaskExecutor.nodeFilter(node, createTaskParams(dataFrameAnalyticsId)));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(
@@ -287,7 +312,10 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         String dataFrameAnalyticsId = "data_frame_analytics_id1000";
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), dataFrameAnalyticsId,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            dataFrameAnalyticsId,
             MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME, memoryTracker, 0,
             node -> TransportStartDataFrameAnalyticsAction.TaskExecutor.nodeFilter(node, createTaskParams(dataFrameAnalyticsId)));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(
@@ -323,7 +351,11 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id2", JOB_MEMORY_REQUIREMENT).build(new Date());
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(
             20,
@@ -365,7 +397,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         Job job6 = BaseMlIntegTestCase.createFareQuoteJob("job_id6", JOB_MEMORY_REQUIREMENT).build(new Date());
 
         ClusterState cs = csBuilder.build();
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job6.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job6.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job6));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(
             10,
@@ -384,7 +420,13 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs = csBuilder.build();
 
         Job job7 = BaseMlIntegTestCase.createFareQuoteJob("job_id7", JOB_MEMORY_REQUIREMENT).build(new Date());
-        jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job7.getId(), MlTasks.JOB_TASK_NAME, memoryTracker, 0,
+        jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job7.getId(),
+            MlTasks.JOB_TASK_NAME,
+            memoryTracker,
+            0,
             node -> nodeFilter(node, job7));
         result = jobNodeSelector.selectNode(10,
             2,
@@ -402,7 +444,13 @@ public class JobNodeSelectorTests extends ESTestCase {
         csBuilder = ClusterState.builder(cs);
         csBuilder.metadata(Metadata.builder(cs.metadata()).putCustom(PersistentTasksCustomMetadata.TYPE, tasks));
         cs = csBuilder.build();
-        jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job7.getId(), MlTasks.JOB_TASK_NAME, memoryTracker, 0,
+        jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job7.getId(),
+            MlTasks.JOB_TASK_NAME,
+            memoryTracker,
+            0,
             node -> nodeFilter(node, job7));
         result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false);
         assertNull("no node selected, because stale task", result.getExecutorNode());
@@ -415,7 +463,13 @@ public class JobNodeSelectorTests extends ESTestCase {
         csBuilder = ClusterState.builder(cs);
         csBuilder.metadata(Metadata.builder(cs.metadata()).putCustom(PersistentTasksCustomMetadata.TYPE, tasks));
         cs = csBuilder.build();
-        jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job7.getId(), MlTasks.JOB_TASK_NAME, memoryTracker, 0,
+        jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job7.getId(),
+            MlTasks.JOB_TASK_NAME,
+            memoryTracker,
+            0,
             node -> nodeFilter(node, job7));
         result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false);
         assertNull("no node selected, because null state", result.getExecutorNode());
@@ -457,7 +511,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         Job job7 = BaseMlIntegTestCase.createFareQuoteJob("job_id7", JOB_MEMORY_REQUIREMENT).build(new Date());
 
         // Allocation won't be possible if the stale failed job is treated as opening
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job7.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job7.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job7));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10,
             2,
@@ -474,7 +532,13 @@ public class JobNodeSelectorTests extends ESTestCase {
         csBuilder.metadata(Metadata.builder(cs.metadata()).putCustom(PersistentTasksCustomMetadata.TYPE, tasks));
         cs = csBuilder.build();
         Job job8 = BaseMlIntegTestCase.createFareQuoteJob("job_id8", JOB_MEMORY_REQUIREMENT).build(new Date());
-        jobNodeSelector = new JobNodeSelector(cs, cs.nodes().getAllNodes(), job8.getId(), MlTasks.JOB_TASK_NAME, memoryTracker, 0,
+        jobNodeSelector = new JobNodeSelector(
+            cs,
+            shuffled(cs.nodes().getAllNodes()),
+            job8.getId(),
+            MlTasks.JOB_TASK_NAME,
+            memoryTracker,
+            0,
             node -> nodeFilter(node, job8));
         result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false);
         assertNull("no node selected, because OPENING state", result.getExecutorNode());
@@ -508,7 +572,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs.nodes(nodes);
         metadata.putCustom(PersistentTasksCustomMetadata.TYPE, tasks);
         cs.metadata(metadata);
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10,
             2,
@@ -519,6 +587,75 @@ public class JobNodeSelectorTests extends ESTestCase {
         assertNull(result.getExecutorNode());
     }
 
+    public void testSelectLeastLoadedMlNode_reasonsAreInDeterministicOrder() {
+        Map<String, String> nodeAttr = new HashMap<>();
+        nodeAttr.put(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, "10");
+        nodeAttr.put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, "1000000000");
+        DiscoveryNodes nodes = DiscoveryNodes.builder()
+            .add(
+                new DiscoveryNode(
+                    "_node_name1",
+                    "_node_id1",
+                    new TransportAddress(InetAddress.getLoopbackAddress(), 9300),
+                    nodeAttr,
+                    Collections.emptySet(),
+                    Version.CURRENT
+                )
+            )
+            .add(
+                new DiscoveryNode(
+                    "_node_name2",
+                    "_node_id2",
+                    new TransportAddress(InetAddress.getLoopbackAddress(), 9301),
+                    nodeAttr,
+                    Collections.emptySet(),
+                    Version.CURRENT
+                )
+            )
+            .build();
+
+        PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder();
+        OpenJobPersistentTasksExecutorTests.addJobTask("incompatible_type_job", "_node_id1", null, tasksBuilder);
+        PersistentTasksCustomMetadata tasks = tasksBuilder.build();
+
+        ClusterState.Builder cs = ClusterState.builder(new ClusterName("_name"));
+        Metadata.Builder metadata = Metadata.builder();
+
+        Job job = mock(Job.class);
+        when(job.getId()).thenReturn("incompatible_type_job");
+        when(job.getJobVersion()).thenReturn(Version.CURRENT);
+        when(job.getJobType()).thenReturn("incompatible_type");
+        when(job.getInitialResultsIndexName()).thenReturn("shared");
+
+        cs.nodes(nodes);
+        metadata.putCustom(PersistentTasksCustomMetadata.TYPE, tasks);
+        cs.metadata(metadata);
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
+            memoryTracker,
+            0,
+            node -> nodeFilter(node, job)
+        );
+        PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10, 2, 30, MAX_JOB_BYTES, false);
+        assertThat(
+            result.getExplanation(),
+            equalTo(
+                "Not opening job [incompatible_type_job] on node [{_node_name1}{version="
+                    + Version.CURRENT
+                    + "}], "
+                    + "because this node does not support jobs of type [incompatible_type]|"
+                    + "Not opening job [incompatible_type_job] on node [{_node_name2}{version="
+                    + Version.CURRENT
+                    + "}], "
+                    + "because this node does not support jobs of type [incompatible_type]"
+            )
+        );
+        assertNull(result.getExecutorNode());
+    }
+
     public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() {
         Map<String, String> nodeAttr = new HashMap<>();
         nodeAttr.put(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, "10");
@@ -544,7 +681,10 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs.nodes(nodes);
         metadata.putCustom(PersistentTasksCustomMetadata.TYPE, tasks);
         cs.metadata(metadata);
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(),
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
             MlTasks.JOB_TASK_NAME, memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10,
             2,
@@ -578,7 +718,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs.metadata(metadata);
 
         Job job = jobWithRules("job_with_rules");
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(10,
             2,
@@ -637,7 +781,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs.nodes(nodes);
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", JOB_MEMORY_REQUIREMENT).build(new Date());
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result =
             jobNodeSelector.considerLazyAssignment(new PersistentTasksCustomMetadata.Assignment(null, "foo"));
@@ -657,7 +805,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         cs.nodes(nodes);
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", JOB_MEMORY_REQUIREMENT).build(new Date());
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, randomIntBetween(1, 3), node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result =
             jobNodeSelector.considerLazyAssignment(new PersistentTasksCustomMetadata.Assignment(null, "foo"));
@@ -680,7 +832,11 @@ public class JobNodeSelectorTests extends ESTestCase {
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id1000", ByteSizeValue.ofMb(10)).build(new Date());
         when(memoryTracker.getJobMemoryRequirement(anyString(), eq("job_id1000"))).thenReturn(1000L);
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, randomIntBetween(1, 3), node -> nodeFilter(node, job));
         PersistentTasksCustomMetadata.Assignment result = jobNodeSelector.selectNode(maxRunningJobsPerNode,
             2,
@@ -741,7 +897,11 @@ public class JobNodeSelectorTests extends ESTestCase {
 
         Job job = BaseMlIntegTestCase.createFareQuoteJob("job_id2", JOB_MEMORY_REQUIREMENT).build(new Date());
 
-        JobNodeSelector jobNodeSelector = new JobNodeSelector(cs.build(), cs.nodes().getAllNodes(), job.getId(), MlTasks.JOB_TASK_NAME,
+        JobNodeSelector jobNodeSelector = new JobNodeSelector(
+            cs.build(),
+            shuffled(cs.nodes().getAllNodes()),
+            job.getId(),
+            MlTasks.JOB_TASK_NAME,
             memoryTracker, 0, node -> nodeFilter(node, job));
         Tuple<NativeMemoryCapacity, Long> capacityAndFreeMemory = jobNodeSelector.perceivedCapacityAndMaxFreeMemory(
             10,
@@ -790,6 +950,12 @@ public class JobNodeSelectorTests extends ESTestCase {
         return cs;
     }
 
+    static Collection<DiscoveryNode> shuffled(Collection<DiscoveryNode> nodes) {
+        List<DiscoveryNode> toShuffle = new ArrayList<>(nodes);
+        Randomness.shuffle(toShuffle);
+        return toShuffle;
+    }
+
     static void addDataFrameAnalyticsJobTask(String id, String nodeId, DataFrameAnalyticsState state,
                                              PersistentTasksCustomMetadata.Builder builder) {
         addDataFrameAnalyticsJobTask(id, nodeId, state, builder, false, false);