diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index e705a9c..58831ed 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -15,4 +15,4 @@ jobs: distribution: 'zulu' java-version: '11' - name: Build - run: ./gradlew clean build \ No newline at end of file + run: ./run_tests.sh \ No newline at end of file diff --git a/build.gradle b/build.gradle index da0220e..c893223 100644 --- a/build.gradle +++ b/build.gradle @@ -18,7 +18,7 @@ ext { revConductor : '3.13.8', revTestContainer : '1.17.2', revGuava : '32.0.0-jre', - log4j : '2.17.1', + revLog4j : '2.17.1', revJedis : '3.8.0', revMockServerClient : '5.12.0', revCommonsLang : '3.12.0', @@ -31,7 +31,8 @@ ext { revProtoBuf : '3.16.3', revRarefiedRedis : '0.0.17', revOrkesProtos : '0.9.2', - revOrkesQueues : '1.0.6' + revOrkesQueues : '1.0.6', + ioGRPC : '1.53.0' ] } @@ -85,12 +86,12 @@ subprojects { implementation 'com.amazonaws:aws-java-sdk-s3:1.12.548' implementation "redis.clients:jedis:${versions.revJedis}" - implementation "org.apache.logging.log4j:log4j-core:${versions.log4j}!!" - implementation "org.apache.logging.log4j:log4j-api:${versions.log4j}!!" - implementation "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}!!" - implementation "org.apache.logging.log4j:log4j-jul:${versions.log4j}!!" - implementation "org.apache.logging.log4j:log4j-web:${versions.log4j}!!" - implementation "org.apache.logging.log4j:log4j-to-slf4j:${versions.log4j}!!" + implementation "org.apache.logging.log4j:log4j-core:${versions.revLog4j}!!" + implementation "org.apache.logging.log4j:log4j-api:${versions.revLog4j}!!" + implementation "org.apache.logging.log4j:log4j-slf4j-impl:${versions.revLog4j}!!" + implementation "org.apache.logging.log4j:log4j-jul:${versions.revLog4j}!!" + implementation "org.apache.logging.log4j:log4j-web:${versions.revLog4j}!!" + implementation "org.apache.logging.log4j:log4j-to-slf4j:${versions.revLog4j}!!" 
compileOnly "org.projectlombok:lombok:${versions.revLombok}" annotationProcessor "org.projectlombok:lombok:${versions.revLombok}" diff --git a/persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java b/persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java index 243940b..0e5007d 100644 --- a/persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java +++ b/persistence/src/main/java/com/netflix/conductor/redis/config/InMemoryRedisConfiguration.java @@ -64,4 +64,4 @@ public OrkesJedisProxy OrkesJedisProxy() { System.out.println("OrkesJedisProxy created"); return new OrkesJedisProxy(jedisCommands()); } -} \ No newline at end of file +} diff --git a/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java b/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java index af84e2a..daac27a 100644 --- a/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java +++ b/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisCluster.java @@ -22,25 +22,10 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; -import redis.clients.jedis.BitPosParams; -import redis.clients.jedis.GeoCoordinate; -import redis.clients.jedis.GeoRadiusResponse; -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.StreamConsumersInfo; -import redis.clients.jedis.StreamEntry; -import redis.clients.jedis.StreamEntryID; -import redis.clients.jedis.StreamGroupInfo; -import redis.clients.jedis.StreamInfo; -import redis.clients.jedis.StreamPendingEntry; -import redis.clients.jedis.Tuple; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; 
-import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; +import redis.clients.jedis.*; +import redis.clients.jedis.params.*; +import redis.clients.jedis.resps.KeyedListElement; +import redis.clients.jedis.resps.LCSMatchResult; @Component @ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_cluster") @@ -964,4 +949,206 @@ public String set(byte[] key, byte[] value) { public byte[] getBytes(byte[] key) { return jedisCluster.get(key); } -} \ No newline at end of file + + // + + @Override + public String getDel(String key) { + return jedisCluster.getDel(key); + } + + @Override + public String getEx(String key, GetExParams params) { + return jedisCluster.getEx(key, params); + } + + @Override + public String restore(String key, long ttl, byte[] serializedValue) { + return jedisCluster.restore(key, ttl, serializedValue); + } + + @Override + public String restoreReplace(String key, long ttl, byte[] serializedValue) { + throw new UnsupportedOperationException(); + } + + @Override + public String restore(String key, long ttl, byte[] serializedValue, RestoreParams params) { + throw new UnsupportedOperationException(); + } + + @Override + public Long expire(String key, long seconds) { + return jedisCluster.expire(key, seconds); + } + + @Override + public String setex(String key, long seconds, String value) { + return jedisCluster.setex(key, seconds, value); + } + + @Override + public String hrandfield(String key) { + return jedisCluster.hrandfield(key); + } + + @Override + public List hrandfield(String key, long count) { + return jedisCluster.hrandfield(key, count); + } + + @Override + public Map hrandfieldWithValues(String key, long count) { + return null; + } + + @Override + public List lpop(String key, int count) { + return null; + } + + @Override + public Long lpos(String key, String element) { + return null; + } + + @Override + public Long lpos(String key, String element, LPosParams params) { + return null; + } + 
+ @Override + public List lpos(String key, String element, LPosParams params, long count) { + return null; + } + + @Override + public List rpop(String key, int count) { + return null; + } + + @Override + public List smismember(String key, String... members) { + return null; + } + + @Override + public Double zaddIncr(String key, double score, String member, ZAddParams params) { + return null; + } + + @Override + public String zrandmember(String key) { + return null; + } + + @Override + public Set zrandmember(String key, long count) { + return null; + } + + @Override + public Set zrandmemberWithScores(String key, long count) { + return null; + } + + @Override + public List zmscore(String key, String... members) { + return null; + } + + @Override + public KeyedListElement blpop(double timeout, String key) { + return null; + } + + @Override + public KeyedListElement brpop(double timeout, String key) { + return null; + } + + @Override + public Long geoadd( + String key, GeoAddParams params, Map memberCoordinateMap) { + return null; + } + + @Override + public StreamEntryID xadd(String key, Map hash, XAddParams params) { + return null; + } + + @Override + public List xrange(String key, StreamEntryID start, StreamEntryID end) { + return null; + } + + @Override + public List xrevrange(String key, StreamEntryID end, StreamEntryID start) { + return null; + } + + @Override + public StreamPendingSummary xpending(String key, String groupname) { + return null; + } + + @Override + public List xpending(String key, String groupname, XPendingParams params) { + return null; + } + + @Override + public long xtrim(String key, XTrimParams params) { + return 0; + } + + @Override + public List xclaim( + String key, + String group, + String consumername, + long minIdleTime, + XClaimParams params, + StreamEntryID... 
ids) { + return null; + } + + @Override + public List xclaimJustId( + String key, + String group, + String consumername, + long minIdleTime, + XClaimParams params, + StreamEntryID... ids) { + return null; + } + + @Override + public Entry> xautoclaim( + String key, + String group, + String consumerName, + long minIdleTime, + StreamEntryID start, + XAutoClaimParams params) { + return null; + } + + @Override + public Entry> xautoclaimJustId( + String key, + String group, + String consumerName, + long minIdleTime, + StreamEntryID start, + XAutoClaimParams params) { + return null; + } + + @Override + public LCSMatchResult strAlgoLCSStrings(String strA, String strB, StrAlgoLCSParams params) { + return null; + } +} diff --git a/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java b/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java index 607ff82..0548a26 100644 --- a/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java +++ b/persistence/src/main/java/com/netflix/conductor/redis/jedis/JedisSentinel.java @@ -20,27 +20,10 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.stereotype.Component; -import redis.clients.jedis.BitPosParams; -import redis.clients.jedis.GeoCoordinate; -import redis.clients.jedis.GeoRadiusResponse; -import redis.clients.jedis.GeoUnit; -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisSentinelPool; -import redis.clients.jedis.ListPosition; -import redis.clients.jedis.ScanParams; -import redis.clients.jedis.ScanResult; -import redis.clients.jedis.SortingParams; -import redis.clients.jedis.StreamConsumersInfo; -import redis.clients.jedis.StreamEntry; -import redis.clients.jedis.StreamEntryID; -import redis.clients.jedis.StreamGroupInfo; -import redis.clients.jedis.StreamInfo; -import redis.clients.jedis.StreamPendingEntry; -import redis.clients.jedis.Tuple; -import 
redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.ZAddParams; -import redis.clients.jedis.params.ZIncrByParams; +import redis.clients.jedis.*; +import redis.clients.jedis.params.*; +import redis.clients.jedis.resps.KeyedListElement; +import redis.clients.jedis.resps.LCSMatchResult; @Component @ConditionalOnProperty(name = "conductor.db.type", havingValue = "redis_sentinel") @@ -1291,4 +1274,207 @@ public byte[] getBytes(byte[] key) { return jedis.get(key); } } -} \ No newline at end of file + + // The following methods are not implemented and not used but required for the interface + // contract + + @Override + public String getDel(String key) { + return null; + } + + @Override + public String getEx(String key, GetExParams params) { + return null; + } + + @Override + public String restore(String key, long ttl, byte[] serializedValue) { + return null; + } + + @Override + public String restoreReplace(String key, long ttl, byte[] serializedValue) { + return null; + } + + @Override + public String restore(String key, long ttl, byte[] serializedValue, RestoreParams params) { + return null; + } + + @Override + public Long expire(String key, long seconds) { + return null; + } + + @Override + public String setex(String key, long seconds, String value) { + return null; + } + + @Override + public String hrandfield(String key) { + return null; + } + + @Override + public List hrandfield(String key, long count) { + return null; + } + + @Override + public Map hrandfieldWithValues(String key, long count) { + return null; + } + + @Override + public List lpop(String key, int count) { + return null; + } + + @Override + public Long lpos(String key, String element) { + return null; + } + + @Override + public Long lpos(String key, String element, LPosParams params) { + return null; + } + + @Override + public List lpos(String key, String element, LPosParams params, long count) { + return null; + } + + @Override + 
public List rpop(String key, int count) { + return null; + } + + @Override + public List smismember(String key, String... members) { + return null; + } + + @Override + public Double zaddIncr(String key, double score, String member, ZAddParams params) { + return null; + } + + @Override + public String zrandmember(String key) { + return null; + } + + @Override + public Set zrandmember(String key, long count) { + return null; + } + + @Override + public Set zrandmemberWithScores(String key, long count) { + return null; + } + + @Override + public List zmscore(String key, String... members) { + return null; + } + + @Override + public KeyedListElement blpop(double timeout, String key) { + return null; + } + + @Override + public KeyedListElement brpop(double timeout, String key) { + return null; + } + + @Override + public Long geoadd( + String key, GeoAddParams params, Map memberCoordinateMap) { + return null; + } + + @Override + public StreamEntryID xadd(String key, Map hash, XAddParams params) { + return null; + } + + @Override + public List xrange(String key, StreamEntryID start, StreamEntryID end) { + return null; + } + + @Override + public List xrevrange(String key, StreamEntryID end, StreamEntryID start) { + return null; + } + + @Override + public StreamPendingSummary xpending(String key, String groupname) { + return null; + } + + @Override + public List xpending(String key, String groupname, XPendingParams params) { + return null; + } + + @Override + public long xtrim(String key, XTrimParams params) { + return 0; + } + + @Override + public List xclaim( + String key, + String group, + String consumername, + long minIdleTime, + XClaimParams params, + StreamEntryID... ids) { + return null; + } + + @Override + public List xclaimJustId( + String key, + String group, + String consumername, + long minIdleTime, + XClaimParams params, + StreamEntryID... 
ids) { + return null; + } + + @Override + public Entry> xautoclaim( + String key, + String group, + String consumerName, + long minIdleTime, + StreamEntryID start, + XAutoClaimParams params) { + return null; + } + + @Override + public Entry> xautoclaimJustId( + String key, + String group, + String consumerName, + long minIdleTime, + StreamEntryID start, + XAutoClaimParams params) { + return null; + } + + @Override + public LCSMatchResult strAlgoLCSStrings(String strA, String strB, StrAlgoLCSParams params) { + return null; + } +} diff --git a/persistence/src/main/java/com/netflix/conductor/redis/jedis/OrkesJedisCommands.java b/persistence/src/main/java/com/netflix/conductor/redis/jedis/OrkesJedisCommands.java index 3061969..8817afd 100644 --- a/persistence/src/main/java/com/netflix/conductor/redis/jedis/OrkesJedisCommands.java +++ b/persistence/src/main/java/com/netflix/conductor/redis/jedis/OrkesJedisCommands.java @@ -1,3 +1,15 @@ +/* + * Copyright 2023 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ package com.netflix.conductor.redis.jedis; import redis.clients.jedis.commands.JedisCommands; diff --git a/run_tests.sh b/run_tests.sh new file mode 100755 index 0000000..7fdddf1 --- /dev/null +++ b/run_tests.sh @@ -0,0 +1,13 @@ +docker rm -f conductor_test_container +docker build -f docker/DockerfileStandalone . -t conductor_test_container +docker run -dit --name conductor_test_container -p 8899:8080 -t conductor_test_container +while ! curl -s http://localhost:8899/api/metadata/workflow -o /dev/null +do + echo "$(date) - still trying" + sleep 1 +done +sleep 5 +echo "All set - starting tests now" +./gradlew -PIntegrationTests orkes-conductor-test-harness:test +docker rm -f conductor_test_container + diff --git a/settings.gradle b/settings.gradle index 66e5072..6d4ada5 100644 --- a/settings.gradle +++ b/settings.gradle @@ -2,5 +2,6 @@ rootProject.name = 'orkes-conductor' include 'archive' include 'persistence' include 'server' +include 'test-harness' rootProject.children.each { it.name = "${rootProject.name}-${it.name}" } \ No newline at end of file diff --git a/test-harness/build.gradle b/test-harness/build.gradle new file mode 100644 index 0000000..0911fcc --- /dev/null +++ b/test-harness/build.gradle @@ -0,0 +1,42 @@ + +dependencies { + + + + implementation "org.awaitility:awaitility:3.1.6" + implementation "org.testcontainers:postgresql:${versions.revTestContainer}" + implementation "org.testcontainers:testcontainers:${versions.revTestContainer}" + implementation 'ch.qos.logback:logback-classic:1.4.5' + + + + implementation 'io.orkes.conductor:orkes-conductor-client:1.1.19' + implementation 'javax.annotation:javax.annotation-api:1.3.2' + + 
implementation "com.google.protobuf:protobuf-java:${versions.revProtoBuf}" + implementation "io.netty:netty-tcnative-boringssl-static:2.0.51.Final" + implementation "com.google.guava:guava:${versions.revGuava}" + + implementation "org.apache.commons:commons-lang3:${versions.revCommonsLang}" + + testImplementation 'org.testcontainers:testcontainers:1.17.1' + testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.0' + testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.7.0' +} + +test { + useJUnitPlatform() + testLogging { + events = ["SKIPPED", "FAILED"] + exceptionFormat = "short" + showStandardStreams = true + } + minHeapSize = "2g" // initial heap size + maxHeapSize = "4g" // maximum heap size +} + +tasks.withType(Test) { + maxParallelForks = 10 +} + +tasks.forEach(task -> task.onlyIf { project.hasProperty('IntegrationTests') }) \ No newline at end of file diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/AbstractConductorTest.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/AbstractConductorTest.java new file mode 100644 index 0000000..15fd02f --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/AbstractConductorTest.java @@ -0,0 +1,64 @@ +package io.orkes.conductor.client.e2e; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.netflix.conductor.common.config.ObjectMapperProvider; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor; +import io.orkes.conductor.client.ApiClient; +import io.orkes.conductor.client.http.OrkesMetadataClient; +import io.orkes.conductor.client.http.OrkesTaskClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.testcontainers.containers.GenericContainer; + +import java.io.InputStream; +import 
java.io.InputStreamReader; +import java.util.List; + +@Slf4j +public abstract class AbstractConductorTest { + + protected static final ObjectMapper objectMapper = new ObjectMapperProvider().getObjectMapper(); + + protected static GenericContainer conductor; + + protected static ApiClient apiClient; + protected static io.orkes.conductor.client.WorkflowClient workflowClient; + protected static io.orkes.conductor.client.TaskClient taskClient; + protected static io.orkes.conductor.client.MetadataClient metadataClient; + protected static WorkflowExecutor executor; + + protected static final String[] workflows = new String[]{"/metadata/rerun.json", "/metadata/popminmax.json", "/metadata/fail.json"}; + + @SneakyThrows + @BeforeAll + public static final void setup() { + String url = "http://localhost:8899/api"; + + apiClient = new ApiClient(url); + workflowClient = new OrkesWorkflowClient(apiClient); + metadataClient = new OrkesMetadataClient(apiClient); + taskClient = new OrkesTaskClient(apiClient); + executor = new WorkflowExecutor(taskClient, workflowClient, metadataClient, 1000); + + for (String workflow : workflows) { + InputStream resource = AbstractConductorTest.class.getResourceAsStream(workflow); + WorkflowDef workflowDef = objectMapper.readValue(new InputStreamReader(resource), WorkflowDef.class); + metadataClient.updateWorkflowDefs(List.of(workflowDef), true); + } + } + + @AfterAll + public static void cleanup() { + executor.shutdown(); + } + + + + + +} + diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java new file mode 100644 index 0000000..e0f1847 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/BackoffTests.java @@ -0,0 +1,213 @@ +/* + * Copyright 2023 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.*; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.client.worker.Worker; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow; +import com.netflix.conductor.sdk.workflow.def.tasks.SimpleTask; + +import io.orkes.conductor.client.automator.TaskRunnerConfigurer; +import io.orkes.conductor.client.http.OrkesTaskClient; + +import lombok.SneakyThrows; +import lombok.extern.slf4j.Slf4j; + +import static org.junit.jupiter.api.Assertions.*; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +@Slf4j +public class BackoffTests extends AbstractConductorTest { + + private static final String WORKFLOW_NAME = "retry_logic_test"; + + private static TaskRunnerConfigurer configurer; + + @SneakyThrows + @BeforeAll + public static void beforeAll() { + + ConductorWorkflow workflow = new ConductorWorkflow(null); + workflow.setName(WORKFLOW_NAME); + workflow.setVersion(1); + + List taskDefs = new ArrayList<>(); + int i = 0; + for (TaskDef.RetryLogic value : TaskDef.RetryLogic.values()) { + TaskDef taskDef = new TaskDef(); + taskDef.setName("retry_" + i++); + taskDef.setRetryLogic(value); + taskDef.setBackoffScaleFactor(2); + 
taskDef.setRetryDelaySeconds(2); + taskDef.setRetryCount(3); + taskDefs.add(taskDef); + + workflow.add(new SimpleTask(taskDef.getName(), taskDef.getName())); + } + + metadataClient.registerTaskDefs(taskDefs); + metadataClient.updateWorkflowDefs(Arrays.asList(workflow.toWorkflowDef())); + startWorkers(taskDefs); + } + + @AfterAll + public static void cleanup() { + if (configurer != null) { + try { + configurer.shutdown(); + } catch (Exception e) { + } + } + } + + @Test + public void testRetryLogic() { + StartWorkflowRequest request = new StartWorkflowRequest(); + request.setName(WORKFLOW_NAME); + request.setVersion(1); + request.setInput(Map.of()); + String id = workflowClient.startWorkflow(request); + log.info("Started Retry logic workflow {} ", id); + + await().pollInterval(3, TimeUnit.SECONDS) + .atMost(1, TimeUnit.MINUTES) + .untilAsserted( + () -> { + Workflow workflow = workflowClient.getWorkflow(id, true); + assertNotNull(workflow); + assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); + }); + + Workflow workflow = workflowClient.getWorkflow(id, true); + assertNotNull(workflow); + assertEquals(9, workflow.getTasks().size()); + List tasks = workflow.getTasks(); + assertTaskRetryLogic(tasks); + } + + private void assertTaskRetryLogic(List runs) { + for (int i = 1; i < runs.size(); i++) { + Task task = runs.get(i); + TaskDef.RetryLogic retryLogic = task.getTaskDefinition().get().getRetryLogic(); + long delay = task.getTaskDefinition().get().getRetryDelaySeconds() * 1000; + long backoffRate = task.getTaskDefinition().get().getBackoffScaleFactor(); + switch (retryLogic) { + case FIXED: + long diff = task.getStartTime() - task.getScheduledTime(); + long expectedDelay = delay; + assertTrue( + diff >= (expectedDelay), + "delay " + + diff + + " not within the range of expected " + + expectedDelay + + ", taskId = " + + task.getReferenceTaskName() + + ":" + + task.getTaskId()); + break; + case LINEAR_BACKOFF: + diff = task.getStartTime() - 
task.getScheduledTime(); + expectedDelay = task.getRetryCount() * delay * backoffRate; + assertTrue( + diff >= (expectedDelay), + "delay " + + diff + + " not within the range of expected " + + expectedDelay + + ", taskId = " + + task.getReferenceTaskName() + + ":" + + task.getTaskId()); + break; + case EXPONENTIAL_BACKOFF: + diff = task.getStartTime() - task.getScheduledTime(); + if (task.getRetryCount() == 0) { + expectedDelay = 0; + } else { + expectedDelay = (long) (Math.pow(2, task.getRetryCount() - 1) * (delay)); + } + assertTrue( + diff >= (expectedDelay), + "delay " + + diff + + " not within the range of expected " + + expectedDelay + + ", taskId = " + + task.getReferenceTaskName() + + ":" + + task.getTaskId()); + break; + default: + break; + } + } + } + + private static void startWorkers(List tasks) { + List workers = new ArrayList<>(); + for (TaskDef task : tasks) { + workers.add(new TestWorker(task.getName())); + } + + configurer = + new TaskRunnerConfigurer.Builder((OrkesTaskClient) taskClient, workers) + .withThreadCount(1) + .withTaskPollTimeout(10) + .build(); + configurer.init(); + } + + private static class TestWorker implements Worker { + + private String name; + + public TestWorker(String name) { + this.name = name; + } + + @Override + public String getTaskDefName() { + return name; + } + + @Override + public TaskResult execute(Task task) { + TaskResult result = new TaskResult(task); + result.getOutputData().put("number", 42); + if (task.getRetryCount() < 2) { + result.setStatus(TaskResult.Status.FAILED); + } else { + result.setStatus(TaskResult.Status.COMPLETED); + } + + return result; + } + + @Override + public int getPollingInterval() { + return 100; + } + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/ConductorStartupTest.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/ConductorStartupTest.java new file mode 100644 index 0000000..5cc34cb --- /dev/null +++ 
b/test-harness/src/test/java/io/orkes/conductor/client/e2e/ConductorStartupTest.java @@ -0,0 +1,12 @@ +package io.orkes.conductor.client.e2e; + +import org.junit.jupiter.api.Test; + +public class ConductorStartupTest extends AbstractConductorTest { + + + @Test + public void test() { + + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java new file mode 100644 index 0000000..e353b01 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/DynamicForkOptionalTests.java @@ -0,0 +1,363 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.tasks.TaskType; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; + +import io.orkes.conductor.client.*; +import io.orkes.conductor.client.http.OrkesMetadataClient; +import io.orkes.conductor.client.http.OrkesTaskClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; +import io.orkes.conductor.client.model.*; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +public class DynamicForkOptionalTests extends AbstractConductorTest { + + @Test + public void testTaskDynamicForkOptional() { + + String workflowName1 = "DynamicFanInOutTest"; + + // Register workflow + registerWorkflowDef(workflowName1, metadataClient); + + // Trigger workflow + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName1); + startWorkflowRequest.setVersion(1); + + String workflowId = 
workflowClient.startWorkflow(startWorkflowRequest); + + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + TaskResult taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(0).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + + WorkflowTask workflowTask2 = new WorkflowTask(); + workflowTask2.setName("integration_task_2"); + workflowTask2.setTaskReferenceName("xdt1"); + + WorkflowTask workflowTask3 = new WorkflowTask(); + workflowTask3.setName("integration_task_3"); + workflowTask3.setTaskReferenceName("xdt2"); + workflowTask3.setOptional(true); + + Map output = new HashMap<>(); + Map> input = new HashMap<>(); + input.put("xdt1", Map.of("k1", "v1")); + input.put("xdt2", Map.of("k2", "v2")); + output.put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); + output.put("dynamicTasksInput", input); + taskResult.setOutputData(output); + taskClient.updateTask(taskResult); + + await().atMost(30, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + assertTrue(workflow1.getTasks().size() == 5); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.IN_PROGRESS.name()); + }); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(3).getTaskId()); + taskResult.setStatus(TaskResult.Status.FAILED); + taskClient.updateTask(taskResult); + + // Since the tasks are marked as optional. The workflow should be in running state. 
+ await().atMost(2, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().size() == 6); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.FAILED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.IN_PROGRESS.name()); + assertEquals( + workflow1.getTasks().get(5).getStatus().name(), + Task.Status.SCHEDULED.name()); + }); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(2).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(5).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + // Workflow should be completed + await().atMost(100, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().size() == 6); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.COMPLETED.name()); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.COMPLETED.name()); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.FAILED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.COMPLETED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.COMPLETED.name()); + }); + + 
metadataClient.unregisterWorkflowDef(workflowName1, 1); + } + + @Test + public void testTaskDynamicForkRetryCount() { + + String workflowName1 = "DynamicFanInOutTest1"; + + // Register workflow + registerWorkflowDef(workflowName1, metadataClient); + + // Trigger workflow + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName1); + startWorkflowRequest.setVersion(1); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + + workflow = workflowClient.getWorkflow(workflowId, true); + TaskResult taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(0).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + + WorkflowTask workflowTask2 = new WorkflowTask(); + workflowTask2.setName("integration_task_2"); + workflowTask2.setTaskReferenceName("xdt1"); + workflowTask2.setOptional(true); + workflowTask2.setSink("kitchen_sink"); + + WorkflowTask workflowTask3 = new WorkflowTask(); + workflowTask3.setName("integration_task_3"); + workflowTask3.setTaskReferenceName("xdt2"); + workflowTask3.setRetryCount(2); + + Map output = new HashMap<>(); + Map> input = new HashMap<>(); + input.put("xdt1", Map.of("k1", "v1")); + input.put("xdt2", Map.of("k2", "v2")); + output.put("dynamicTasks", Arrays.asList(workflowTask2, workflowTask3)); + output.put("dynamicTasksInput", input); + taskResult.setOutputData(output); + taskClient.updateTask(taskResult); + + await().atMost(10, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + assertTrue(workflow1.getTasks().size() == 5); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + 
workflow1.getTasks().get(2).getWorkflowTask().getSink(), + "kitchen_sink"); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.IN_PROGRESS.name()); + }); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(3).getTaskId()); + taskResult.setStatus(TaskResult.Status.FAILED); + taskClient.updateTask(taskResult); + + // Since the retry count is 2 task will be retried. + await().atMost(10, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + assertTrue(workflow1.getTasks().size() == 6); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.SCHEDULED.name()); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.FAILED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.IN_PROGRESS.name()); + assertEquals( + workflow1.getTasks().get(5).getStatus().name(), + Task.Status.SCHEDULED.name()); + }); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(2).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + workflow = workflowClient.getWorkflow(workflowId, true); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(workflow.getTasks().get(5).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + // Workflow should be completed + await().atMost(10, TimeUnit.SECONDS) + .untilAsserted( + () 
-> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.COMPLETED.name()); + assertTrue(workflow1.getTasks().size() >= 6); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.COMPLETED.name()); + assertEquals( + workflow1.getTasks().get(3).getStatus().name(), + Task.Status.FAILED.name()); + assertEquals( + workflow1.getTasks().get(4).getStatus().name(), + Task.Status.COMPLETED.name()); + assertEquals( + workflow1.getTasks().get(5).getStatus().name(), + Task.Status.COMPLETED.name()); + }); + + metadataClient.unregisterWorkflowDef(workflowName1, 1); + } + + private void registerWorkflowDef(String workflowName, MetadataClient metadataClient1) { + TaskDef taskDef = new TaskDef("dt1"); + taskDef.setOwnerEmail("test@orkes.io"); + + TaskDef taskDef4 = new TaskDef("integration_task_2"); + taskDef4.setOwnerEmail("test@orkes.io"); + + TaskDef taskDef3 = new TaskDef("integration_task_3"); + taskDef3.setOwnerEmail("test@orkes.io"); + + TaskDef taskDef2 = new TaskDef("dt2"); + taskDef2.setOwnerEmail("test@orkes.io"); + + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setTaskReferenceName("dt2"); + workflowTask.setName("dt2"); + workflowTask.setTaskDefinition(taskDef2); + workflowTask.setWorkflowTaskType(TaskType.SIMPLE); + + WorkflowTask inline = new WorkflowTask(); + inline.setTaskReferenceName("dt1"); + inline.setName("dt1"); + inline.setTaskDefinition(taskDef); + inline.setWorkflowTaskType(TaskType.SIMPLE); + + WorkflowTask join = new WorkflowTask(); + join.setTaskReferenceName("join_dynamic"); + join.setName("join_dynamic"); + join.setWorkflowTaskType(TaskType.JOIN); + + WorkflowTask dynamicFork = new WorkflowTask(); + dynamicFork.setTaskReferenceName("dynamicFork"); + dynamicFork.setName("dynamicFork"); + dynamicFork.setTaskDefinition(taskDef); + dynamicFork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC); + 
dynamicFork.setInputParameters( + Map.of( + "dynamicTasks", + "${dt1.output.dynamicTasks}", + "dynamicTasksInput", + "${dt1.output.dynamicTasksInput}")); + dynamicFork.setDynamicForkTasksParam("dynamicTasks"); + dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput"); + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(workflowName); + workflowDef.setOwnerEmail("test@orkes.io"); + workflowDef.setTimeoutSeconds(600); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); + workflowDef.setDescription("Workflow to test retry"); + workflowDef.setTasks(Arrays.asList(inline, dynamicFork, join)); + try { + metadataClient1.registerWorkflowDef(workflowDef); + metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2, taskDef3, taskDef4)); + } catch (Exception e) { + } + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/FailureWorkflowTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/FailureWorkflowTests.java new file mode 100644 index 0000000..b811b03 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/FailureWorkflowTests.java @@ -0,0 +1,164 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.*; +import java.util.concurrent.TimeUnit; + +import com.netflix.conductor.common.metadata.tasks.Task; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.tasks.TaskType; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; + +import io.orkes.conductor.client.ApiClient; +import io.orkes.conductor.client.MetadataClient; +import io.orkes.conductor.client.TaskClient; +import io.orkes.conductor.client.WorkflowClient; +import io.orkes.conductor.client.http.OrkesMetadataClient; +import io.orkes.conductor.client.http.OrkesTaskClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; + +import lombok.extern.slf4j.Slf4j; + +import static org.junit.jupiter.api.Assertions.*; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +@Slf4j +public class FailureWorkflowTests extends AbstractConductorTest { + + @Test + @DisplayName("Check failure workflow input as passed properly") + public void testFailureWorkflowInputs() { + String workflowName = "failure-workflow-test"; + String taskDefName = "simple-task1"; + String taskDefName2 = "simple-task2"; + + // Register workflow + registerWorkflowDefWithFailureWorkflow( + workflowName, taskDefName, taskDefName2, metadataClient); + + 
StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName); + startWorkflowRequest.setVersion(1); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + // Fail the simple task + Map output = new HashMap<>(); + output.put("status", "completed"); + output.put("reason", "inserted"); + var task = workflow.getTasks().get(0); + TaskResult taskResult = new TaskResult(task); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskResult.setOutputData(output); + taskClient.updateTask(taskResult); + workflow = workflowClient.getWorkflow(workflowId, true); + + String reason = "Employee not found"; + String taskId = workflow.getTasks().get(1).getTaskId(); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.FAILED_WITH_TERMINAL_ERROR); + taskResult.setReasonForIncompletion(reason); + taskClient.updateTask(taskResult); + + // Wait for workflow to get failed + await().atMost(5, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); + assertEquals( + Workflow.WorkflowStatus.FAILED.name(), + workflow1.getStatus().name()); + assertNotNull(workflow1.getOutput().get("conductor.failure_workflow")); + }); + + // Check failure workflow has complete parent workflow information + workflow = workflowClient.getWorkflow(workflowId, false); + String failureWorkflowId = + workflow.getOutput().get("conductor.failure_workflow").toString(); + + workflow = workflowClient.getWorkflow(failureWorkflowId, false); + // Assert on input attributes + assertNotNull(workflow.getInput().get("failedWorkflow")); + assertNotNull(workflow.getInput().get("failureTaskId")); + assertNotNull(workflow.getInput().get("workflowId")); + 
assertEquals("FAILED", workflow.getInput().get("failureStatus").toString()); + assertTrue(workflow.getInput().get("reason").toString().contains("Employee not found")); + Map input = (Map) workflow.getInput().get("failedWorkflow"); + + assertNotNull(input.get("tasks")); + List> tasks = (List>) input.get("tasks"); + assertNotNull(tasks.get(0).get("outputData")); + Map task1Output = (Map) tasks.get(0).get("outputData"); + assertEquals("inserted", task1Output.get("reason")); + assertEquals("completed", task1Output.get("status")); + Map failedWorkflowOutput = (Map) input.get("output"); + assertEquals("completed", failedWorkflowOutput.get("status")); + } + + private void registerWorkflowDefWithFailureWorkflow( + String workflowName, + String taskName1, + String taskName2, + MetadataClient metadataClient) { + + WorkflowTask inline = new WorkflowTask(); + inline.setTaskReferenceName(taskName1); + inline.setName(taskName1); + inline.setWorkflowTaskType(TaskType.SIMPLE); + inline.setInputParameters(Map.of("evaluatorType", "graaljs", "expression", "true;")); + + WorkflowTask simpleTask = new WorkflowTask(); + simpleTask.setTaskReferenceName(taskName2); + simpleTask.setName(taskName2); + simpleTask.setWorkflowTaskType(TaskType.SIMPLE); + + WorkflowTask simpleTask2 = new WorkflowTask(); + simpleTask2.setTaskReferenceName(taskName2); + simpleTask2.setName(taskName2); + simpleTask2.setWorkflowTaskType(TaskType.SIMPLE); + + WorkflowDef failureWorkflow = new WorkflowDef(); + failureWorkflow.setName("failure_workflow"); + failureWorkflow.setOwnerEmail("test@orkes.io"); + failureWorkflow.setInputParameters(Arrays.asList("value", "inlineValue")); + failureWorkflow.setDescription("Workflow to monitor order state"); + failureWorkflow.setTimeoutSeconds(600); + failureWorkflow.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + failureWorkflow.setTasks(Arrays.asList(simpleTask2)); + metadataClient.registerWorkflowDef(failureWorkflow); + + WorkflowDef workflowDef = new 
WorkflowDef(); + workflowDef.setName(workflowName); + workflowDef.setOwnerEmail("test@orkes.io"); + workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); + workflowDef.setDescription("Workflow to monitor order state"); + workflowDef.setTimeoutSeconds(600); + workflowDef.setFailureWorkflow("failure_workflow"); + workflowDef.getOutputParameters().put("status", "${" + taskName1 + ".output.status}"); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + workflowDef.setTasks(Arrays.asList(inline, simpleTask)); + metadataClient.registerWorkflowDef(workflowDef); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java new file mode 100644 index 0000000..ae237c0 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/GraaljsTests.java @@ -0,0 +1,96 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.TimeoutException; + +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.After; +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.run.Workflow; + +import io.orkes.conductor.client.model.WorkflowStatus; + +import static io.orkes.conductor.client.e2e.util.RegistrationUtil.registerWorkflowDef; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +public class GraaljsTests extends AbstractConductorTest { + + List workflowNames = new ArrayList<>(); + List taskNames = new ArrayList<>(); + + @Test + public void testInfiniteExecution() + throws ExecutionException, InterruptedException, TimeoutException { + String workflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); + String taskName1 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); + String taskName2 = RandomStringUtils.randomAlphanumeric(5).toUpperCase(); + // Register workflow + registerWorkflowDef(workflowName, taskName1, taskName2, metadataClient); + WorkflowDef workflowDef = metadataClient.getWorkflowDef(workflowName, 1); + workflowDef + .getTasks() + .get(0) + .setInputParameters( + Map.of( + "evaluatorType", + "graaljs", + "expression", + "function e() { while(true){} }; 
e();")); + metadataClient.updateWorkflowDefs(List.of(workflowDef), true); + workflowNames.add(workflowName); + taskNames.add(taskName1); + taskNames.add(taskName2); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName); + startWorkflowRequest.setVersion(1); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + + // Wait for workflow to get failed since inline task will failed + await().atMost(30, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow.getStatus().name(), + WorkflowStatus.StatusEnum.FAILED.name()); + }); + } + + @After + public void cleanUp() { + for (String workflowName : workflowNames) { + try { + metadataClient.unregisterWorkflowDef(workflowName, 1); + } catch (Exception e) { + } + } + for (String taskName : taskNames) { + try { + metadataClient.unregisterTaskDef(taskName); + } catch (Exception e) { + } + } + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/HttpWorkerTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/HttpWorkerTests.java new file mode 100644 index 0000000..6b59ccd --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/HttpWorkerTests.java @@ -0,0 +1,236 @@ +/* + * Copyright 2023 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.*; +import java.util.concurrent.TimeUnit; + +import org.junit.After; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskType; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; + +import io.orkes.conductor.client.*; +import io.orkes.conductor.client.http.OrkesMetadataClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; + +import static org.junit.jupiter.api.Assertions.*; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +public class HttpWorkerTests extends AbstractConductorTest { + @After + public void cleanupWorkflows() { + try { + metadataClient.unregisterWorkflowDef("http_workflow", 1); + metadataClient.unregisterTaskDef("http_task"); + } catch (Exception e) { + } + } + + // @Test + public void testHttpWorkerWithFailureConditionFailure() { + registerWorkflowWithSingleHttpTask(); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName("http_workflow"); + startWorkflowRequest.setVersion(1); + startWorkflowRequest.setInput( + Map.of( + "value", + 33, + "failureCondition", + "function e() {return $.response.statusCode / 100 === 2} e();")); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + + await().atMost(3, 
TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().get(0).getStatus().isTerminal()); + }); + + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + Task task = workflow1.getTasks().get(0); + assertEquals(Task.Status.FAILED.name(), task.getStatus().name()); + } + + @Test + public void testHttpWorkerWithFailureConditionSuccess() { + registerWorkflowWithSingleHttpTask(); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName("http_workflow"); + startWorkflowRequest.setVersion(1); + startWorkflowRequest.setInput( + Map.of( + "value", + 33, + "failureCondition", + "function e() {return $.statusCode / 100 !== 2} e();")); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + + await().atMost(3, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().get(0).getStatus().isTerminal()); + }); + + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + Task task = workflow1.getTasks().get(0); + assertEquals(Task.Status.COMPLETED.name(), task.getStatus().name()); + } + + @Test + public void testHttpWorkerWithFailureConditionUsingWorkflowInput() { + registerWorkflowWithSingleHttpTask(); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName("http_workflow"); + startWorkflowRequest.setVersion(1); + startWorkflowRequest.setInput( + Map.of( + "value", + 33, + "failureCondition", + "function e() {return !($.value === 33)} e();")); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + + await().atMost(3, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().get(0).getStatus().isTerminal()); + }); + + Workflow workflow1 = 
workflowClient.getWorkflow(workflowId, true); + Task task = workflow1.getTasks().get(0); + assertEquals(Task.Status.COMPLETED.name(), task.getStatus().name()); + } + + @Test + public void testHttpWorkerWithFailureConditionUsingAnotherTaskInput() { + registerWorkflowWithMultipleHttpTasks(); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName("http_workflow"); + startWorkflowRequest.setVersion(1); + startWorkflowRequest.setInput( + Map.of("value", 33, "failureCondition", "function e() {return false} e();")); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + + await().atMost(2, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow1.getTasks().get(0).getStatus().isTerminal()); + }); + + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + Task task = workflow1.getTasks().get(0); + assertEquals(Task.Status.COMPLETED.name(), task.getStatus().name()); + + await().atMost(3, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow2 = workflowClient.getWorkflow(workflowId, true); + assertTrue(workflow2.getTasks().get(1).getStatus().isTerminal()); + }); + + Workflow workflow2 = workflowClient.getWorkflow(workflowId, true); + Task task2 = workflow2.getTasks().get(1); + assertEquals(Task.Status.COMPLETED.name(), task2.getStatus().name()); + } + + private void registerWorkflowWithMultipleHttpTasks() { + // fill the values correctly + Map inputParams = + Map.of( + "http_request", + Map.of( + "uri", "https://jsonplaceholder.typicode.com/posts", + "method", "GET", + "failureCondition", "${workflow.input.failureCondition}"), + "value", + "${workflow.input.value}"); + + WorkflowTask httpTask = new WorkflowTask(); + httpTask.setName("http_task"); + httpTask.setTaskReferenceName("http_task"); + httpTask.setWorkflowTaskType(TaskType.HTTP); + httpTask.setInputParameters(inputParams); + + // fill the 
values correctly + Map inputParams2 = + Map.of( + "http_request", + Map.of( + "uri", "https://jsonplaceholder.typicode.com/posts/1", + "method", "GET", + "failureCondition", + "function e() {return !($.custom === 100)} e();"), + "custom", + "${http_task.output.response.body.length()}"); + + WorkflowTask httpTask2 = new WorkflowTask(); + httpTask2.setName("http_task2"); + httpTask2.setTaskReferenceName("http_task2"); + httpTask2.setWorkflowTaskType(TaskType.HTTP); + httpTask2.setInputParameters(inputParams2); + + registerWorkflowWithTasks(Arrays.asList(httpTask, httpTask2)); + } + + private void registerWorkflowWithSingleHttpTask() { + // fill the values correctly + Map inputParams = + Map.of( + "http_request", + Map.of( + "uri", "https://jsonplaceholder.typicode.com/posts/1", + "method", "GET", + "failureCondition", "${workflow.input.failureCondition}"), + "value", + "${workflow.input.value}"); + + WorkflowTask httpTask = new WorkflowTask(); + httpTask.setName("http_task"); + httpTask.setTaskReferenceName("http_task"); + httpTask.setWorkflowTaskType(TaskType.HTTP); + httpTask.setInputParameters(inputParams); + + registerWorkflowWithTasks(Arrays.asList(httpTask)); + } + + private void registerWorkflowWithTasks(List tasks) { + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName("http_workflow"); + workflowDef.setOwnerEmail("test@orkes.io"); + workflowDef.setInputParameters(Arrays.asList("value")); + workflowDef.setDescription("Workflow to test http worker"); + workflowDef.setTimeoutSeconds(600); + workflowDef.setTasks(tasks); + + metadataClient.updateWorkflowDefs(List.of(workflowDef), true); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/JSONJQTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/JSONJQTests.java new file mode 100644 index 0000000..ed89585 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/JSONJQTests.java @@ -0,0 +1,85 @@ +/* + * Copyright 2023 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.lang3.RandomStringUtils; +import org.junit.jupiter.api.Test; + +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; +import com.netflix.conductor.common.run.Workflow; + +import io.orkes.conductor.client.ApiClient; +import io.orkes.conductor.client.WorkflowClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; +import io.orkes.conductor.client.e2e.util.Commons; + +import com.google.common.util.concurrent.Uninterruptibles; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +public class JSONJQTests extends AbstractConductorTest{ + @Test + public void testJQOutputIsReachableWhenSyncSystemTaskIsNext() { + + String workflowName = RandomStringUtils.randomAlphanumeric(10).toUpperCase(); + + var request = new StartWorkflowRequest(); + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(workflowName); + workflowDef.setVersion(1); + workflowDef.setOwnerEmail(Commons.OWNER_EMAIL); + workflowDef.setTimeoutSeconds(60); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + List tasks = new ArrayList<>(); + + WorkflowTask jqTask = new WorkflowTask(); + jqTask.setName("jqTaskName"); + jqTask.setTaskReferenceName("generate_operators_ref"); + jqTask.setInputParameters(Map.of("queryExpression", "{\"as\": \"+\", \"md\": \"/\"}")); + 
jqTask.setType("JSON_JQ_TRANSFORM"); + + WorkflowTask setVariableTask = new WorkflowTask(); + setVariableTask.setName("setvartaskname"); + setVariableTask.setTaskReferenceName("setvartaskname_ref"); + setVariableTask.setInputParameters( + Map.of("name", "${generate_operators_ref.output.result.md}")); + setVariableTask.setType("SET_VARIABLE"); + + tasks.add(jqTask); + tasks.add(setVariableTask); + workflowDef.setTasks(tasks); + request.setName(workflowName); + request.setVersion(1); + request.setWorkflowDef(workflowDef); + + List workflowIds = new ArrayList<>(); + for (var i = 0; i < 40; ++i) { + Uninterruptibles.sleepUninterruptibly(5, TimeUnit.MILLISECONDS); + workflowIds.add(workflowClient.startWorkflow(request)); + } + assertEquals(40, workflowIds.size()); + workflowIds.forEach( + id -> { + var workflow = workflowClient.getWorkflow(id, true); + assertEquals(Workflow.WorkflowStatus.COMPLETED, workflow.getStatus()); + assertEquals("/", workflow.getTasks().get(1).getInputData().get("name")); + }); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java new file mode 100644 index 0000000..a89ed9d --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/JavaSDKTests.java @@ -0,0 +1,84 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

/*
 * Copyright 2022 Orkes, Inc.
 *
 * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package io.orkes.conductor.client.e2e;

import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import com.netflix.conductor.client.http.MetadataClient;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.http.WorkflowClient;
import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow;
import com.netflix.conductor.sdk.workflow.def.tasks.SimpleTask;
import com.netflix.conductor.sdk.workflow.def.tasks.Switch;
import com.netflix.conductor.sdk.workflow.executor.WorkflowExecutor;
import com.netflix.conductor.sdk.workflow.task.WorkerTask;

import io.orkes.conductor.client.ApiClient;
import io.orkes.conductor.client.OrkesClients;

import static org.junit.jupiter.api.Assertions.*;

/**
 * End-to-end test of the Java SDK workflow builder: starts annotation-scanned
 * workers from this package, executes a dynamically created workflow and
 * verifies the worker's output reaches the workflow result.
 */
public class JavaSDKTests extends AbstractConductorTest {

    @BeforeAll
    public static void init() {
        // Start polling workers for every @WorkerTask method found in this package
        // (includes SDKWorkers.task1, which the test below depends on).
        executor.initWorkers("io.orkes.conductor.client.e2e");
    }

    @Test
    public void testSDK() throws ExecutionException, InterruptedException, TimeoutException {
        // NOTE(review): generic type parameters restored here — the previous
        // raw "ConductorWorkflow>" form does not compile.
        ConductorWorkflow<Map<String, Object>> workflow = new ConductorWorkflow<>(executor);
        workflow.setName("sdk_integration_test");
        workflow.setVersion(1);
        workflow.setOwnerEmail("test@orkes.io");
        workflow.setVariables(new HashMap<>());
        workflow.add(new SimpleTask("task1", "task1").input("name", "orkes"));

        // Built but intentionally NOT added to the workflow: only the single
        // SimpleTask above runs, which is why the assertions expect one task.
        Switch decision = new Switch("decide_ref", "${workflow.input.caseValue}");
        decision.switchCase(
                "caseA", new SimpleTask("task1", "task1"), new SimpleTask("task1", "task11"));
        decision.switchCase("caseB", new SimpleTask("task2", "task2"));
        decision.defaultCase(new SimpleTask("task1", "default_task"));

        CompletableFuture<Workflow> future = workflow.executeDynamic(new HashMap<>());
        assertNotNull(future);
        Workflow run = future.get(20, TimeUnit.SECONDS);
        assertNotNull(run);
        assertEquals(Workflow.WorkflowStatus.COMPLETED, run.getStatus());
        assertEquals(1, run.getTasks().size());
        // "greetings" is the @OutputParam key published by SDKWorkers.task1.
        assertEquals("Hello, orkes", run.getTasks().get(0).getOutputData().get("greetings"));
    }

    @AfterAll
    public static void cleanup() {
        if (executor != null) {
            executor.shutdown();
        }
    }

    /** Worker that adds two numbers; exercises typed (BigDecimal) worker I/O. */
    @WorkerTask("sum_numbers")
    public BigDecimal sum(BigDecimal num1, BigDecimal num2) {
        return num1.add(num2);
    }
}

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import com.netflix.conductor.sdk.workflow.task.InputParam; +import com.netflix.conductor.sdk.workflow.task.OutputParam; +import com.netflix.conductor.sdk.workflow.task.WorkerTask; + +public class SDKWorkers { + + @WorkerTask(value = "task1", pollingInterval = 1000) + public @OutputParam("greetings") String task1(@InputParam("name") String name) { + return "Hello, " + name; + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java new file mode 100644 index 0000000..7ce596f --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/SubWorkflowInlineTests.java @@ -0,0 +1,157 @@ +/* + * Copyright 2022 Orkes, Inc. + *

/*
 * Copyright 2022 Orkes, Inc.
 *
 * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package io.orkes.conductor.client.e2e;

import java.util.Arrays;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.Test;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;

import io.orkes.conductor.client.ApiClient;
import io.orkes.conductor.client.MetadataClient;
import io.orkes.conductor.client.TaskClient;
import io.orkes.conductor.client.WorkflowClient;
import io.orkes.conductor.client.http.OrkesMetadataClient;
import io.orkes.conductor.client.http.OrkesTaskClient;
import io.orkes.conductor.client.http.OrkesWorkflowClient;
import io.orkes.conductor.client.model.WorkflowStatus;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.testcontainers.shaded.org.awaitility.Awaitility.await;

/**
 * Verifies that a SUB_WORKFLOW task whose definition is supplied inline
 * (embedded WorkflowDef inside SubWorkflowParams) executes end-to-end.
 */
public class SubWorkflowInlineTests extends AbstractConductorTest {

    @Test
    public void testSubWorkflow0version() {

        String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();

        // Register workflow
        registerInlineWorkflowDef(parentWorkflowName, metadataClient);

        // Trigger workflow
        StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest();
        startWorkflowRequest.setName(parentWorkflowName);
        startWorkflowRequest.setVersion(1);

        String workflowId = workflowClient.startWorkflow(startWorkflowRequest);

        // Complete the first (simple) task so the sub-workflow task is scheduled.
        String taskId = workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getTaskId();
        TaskResult taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(workflowId);
        taskResult.setStatus(TaskResult.Status.COMPLETED);
        taskResult.setTaskId(taskId);
        taskClient.updateTask(taskResult);

        // The parent stays RUNNING while the inline sub-workflow is in progress.
        await().atMost(10, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, true);
                            assertEquals(
                                    workflow1.getStatus().name(),
                                    WorkflowStatus.StatusEnum.RUNNING.name());
                            assertEquals(
                                    workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED);
                            assertEquals(
                                    workflow1.getTasks().get(1).getStatus(),
                                    Task.Status.IN_PROGRESS);
                        });

        Workflow workflow = workflowClient.getWorkflow(workflowId, true);
        String subWorkflowId = workflow.getTasks().get(1).getSubWorkflowId();
        taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(subWorkflowId);
        taskResult.setStatus(TaskResult.Status.COMPLETED);
        taskResult.setTaskId(workflow.getTasks().get(1).getTaskId());
        taskClient.updateTask(taskResult);

        // Wait for the parent workflow to finish both tasks.
        await().atMost(3, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, true);
                            assertEquals(
                                    workflow1.getTasks().get(0).getStatus(), Task.Status.COMPLETED);
                            assertEquals(
                                    workflow1.getTasks().get(1).getStatus(), Task.Status.COMPLETED);
                        });

        // Cleanup
        metadataClient.unregisterWorkflowDef(parentWorkflowName, 1);
    }

    /**
     * Registers a parent workflow of two tasks: a SIMPLE task followed by a
     * SUB_WORKFLOW task whose definition is embedded inline.
     */
    private void registerInlineWorkflowDef(String workflowName, MetadataClient metadataClient1) {
        TaskDef taskDef = new TaskDef("dt1");
        taskDef.setOwnerEmail("test@orkes.io");

        TaskDef taskDef2 = new TaskDef("dt2");
        taskDef2.setOwnerEmail("test@orkes.io");

        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("dt2");
        workflowTask.setName("dt2");
        workflowTask.setTaskDefinition(taskDef2);
        workflowTask.setWorkflowTaskType(TaskType.SIMPLE);

        WorkflowTask inline = new WorkflowTask();
        inline.setTaskReferenceName("dt1");
        inline.setName("dt1");
        inline.setTaskDefinition(taskDef);
        inline.setWorkflowTaskType(TaskType.SIMPLE);

        WorkflowTask inlineSubworkflow = new WorkflowTask();
        inlineSubworkflow.setTaskReferenceName("dynamicFork");
        inlineSubworkflow.setName("dynamicFork");
        inlineSubworkflow.setTaskDefinition(taskDef);
        inlineSubworkflow.setWorkflowTaskType(TaskType.SUB_WORKFLOW);

        WorkflowDef inlineWorkflowDef = new WorkflowDef();
        inlineWorkflowDef.setName("inline_test_sub_workflow");
        inlineWorkflowDef.setVersion(1);
        inlineWorkflowDef.setTasks(Arrays.asList(inline));
        inlineWorkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF);
        inlineWorkflowDef.setTimeoutSeconds(600);
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName("inline_test_sub_workflow");
        subWorkflowParams.setVersion(1);
        subWorkflowParams.setWorkflowDef(inlineWorkflowDef);
        inlineSubworkflow.setSubWorkflowParam(subWorkflowParams);

        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName(workflowName);
        workflowDef.setTimeoutSeconds(600);
        workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF);
        workflowDef.setOwnerEmail("test@orkes.io");
        workflowDef.setInputParameters(Arrays.asList("value", "inlineValue"));
        workflowDef.setDescription("Workflow to test inline sub_workflow definition");
        workflowDef.setTasks(Arrays.asList(workflowTask, inlineSubworkflow));
        try {
            metadataClient1.registerWorkflowDef(workflowDef);
            metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2));
        } catch (Exception ignored) {
            // Best-effort registration: definitions may already exist from a
            // previous run; the test proceeds with whatever is registered.
        }
    }
}

/*
 * Copyright 2023 Orkes, Inc.
 *
 * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package io.orkes.conductor.client.e2e;

import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;

import com.netflix.conductor.client.http.MetadataClient;
import com.netflix.conductor.client.http.TaskClient;
import com.netflix.conductor.client.http.WorkflowClient;
import com.netflix.conductor.common.config.ObjectMapperProvider;
import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;

import io.orkes.conductor.client.AuthorizationClient;
import io.orkes.conductor.client.OrkesClients;
import io.orkes.conductor.client.automator.TaskRunnerConfigurer;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.util.concurrent.Uninterruptibles;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;

import static org.junit.jupiter.api.Assertions.*;
import static org.testcontainers.shaded.org.awaitility.Awaitility.await;

/**
 * Exercises workflow-level timeout of a parent containing a SUB_WORKFLOW
 * task, verifies the sub-workflow is terminated with the parent, and that
 * retryLastFailedTask revives the sub-workflow.
 */
@Slf4j
public class SubWorkflowTimeoutRetryTests extends AbstractConductorTest {

    // NOTE(review): generic parameters restored — raw "TypeReference>" does
    // not compile; the JSON resource holds a list of workflow definitions.
    private static final TypeReference<List<WorkflowDef>> WORKFLOW_DEF_LIST =
            new TypeReference<List<WorkflowDef>>() {};

    private static final String WORKFLOW_NAME = "integration_test_wf_with_sub_wf";

    private static Map<String, String> taskToDomainMap = new HashMap<>();

    private static TaskRunnerConfigurer configurer;

    private static TaskRunnerConfigurer configurerNoDomain;

    /** Loads and registers the workflow definitions used by this test. */
    @SneakyThrows
    @BeforeAll
    public static void beforeAll() {
        InputStream resource =
                SubWorkflowTimeoutRetryTests.class.getResourceAsStream(
                        "/metadata/sub_workflow_tests.json");
        List<WorkflowDef> workflowDefs =
                objectMapper.readValue(new InputStreamReader(resource), WORKFLOW_DEF_LIST);
        metadataClient.updateWorkflowDefs(workflowDefs);
        // Collect system task types and simple task names referenced by the
        // definitions. NOTE(review): this set is built but not used further
        // here — presumably retained for worker startup; confirm before removal.
        Set<String> tasks = new HashSet<>();
        for (WorkflowDef workflowDef : workflowDefs) {
            List<WorkflowTask> allTasks = workflowDef.collectTasks();
            tasks.addAll(
                    allTasks.stream()
                            .filter(tt -> !tt.getType().equals("SIMPLE"))
                            .map(t -> t.getType())
                            .collect(Collectors.toSet()));

            tasks.addAll(
                    allTasks.stream()
                            .filter(tt -> tt.getType().equals("SIMPLE"))
                            .map(t -> t.getName())
                            .collect(Collectors.toSet()));
        }
        log.info(
                "Updated workflow definitions: {}",
                workflowDefs.stream().map(def -> def.getName()).collect(Collectors.toList()));
    }

    @AfterAll
    public static void cleanup() {
        if (configurer != null) {
            configurer.shutdown();
            configurerNoDomain.shutdown();
        }
    }

    @Test
    public void test() {

        String correlationId = "wf_with_subwf_test_1";
        Map<String, Object> input = Map.of("param1", "p1 value", "subwf", "sub_workflow");

        StartWorkflowRequest request = new StartWorkflowRequest();
        request.setName(WORKFLOW_NAME);
        request.setVersion(1);
        request.setCorrelationId(correlationId);
        request.setInput(input);
        String workflowInstanceId = workflowClient.startWorkflow(request);

        log.info("Started {} ", workflowInstanceId);
        pollAndCompleteTask(workflowInstanceId, "integration_task_1", Map.of());
        Workflow workflow = workflowClient.getWorkflow(workflowInstanceId, true);
        await().atMost(3, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 =
                                    workflowClient.getWorkflow(workflowInstanceId, true);
                            assertNotNull(workflow1);
                            assertEquals(2, workflow1.getTasks().size());
                            assertEquals(
                                    Task.Status.COMPLETED, workflow1.getTasks().get(0).getStatus());
                            assertEquals(
                                    TaskType.SUB_WORKFLOW.name(),
                                    workflow1.getTasks().get(1).getTaskType());
                            assertEquals(
                                    Task.Status.IN_PROGRESS,
                                    workflow1.getTasks().get(1).getStatus());
                        });
        workflow = workflowClient.getWorkflow(workflowInstanceId, true);
        String subWorkflowId = workflow.getTasks().get(1).getSubWorkflowId();
        log.info("Sub workflow Id {} ", subWorkflowId);

        assertNotNull(subWorkflowId);
        Workflow subWorkflow = workflowClient.getWorkflow(subWorkflowId, true);
        assertEquals(Workflow.WorkflowStatus.RUNNING, subWorkflow.getStatus());

        // Wait for 7 seconds which is > 5 sec timeout for the workflow
        Uninterruptibles.sleepUninterruptibly(7, TimeUnit.SECONDS);
        workflowClient.runDecider(workflowInstanceId);

        workflow = workflowClient.getWorkflow(workflowInstanceId, true);
        assertNotNull(workflow);
        assertEquals(2, workflow.getTasks().size());
        assertEquals(Workflow.WorkflowStatus.TIMED_OUT, workflow.getStatus());
        assertEquals(Task.Status.COMPLETED, workflow.getTasks().get(0).getStatus());
        assertEquals(Task.Status.CANCELED, workflow.getTasks().get(1).getStatus());

        // Verify that the sub-workflow is terminated
        subWorkflow = workflowClient.getWorkflow(subWorkflowId, true);
        assertEquals(Workflow.WorkflowStatus.TERMINATED, subWorkflow.getStatus());

        // Retry sub-workflow
        workflowClient.retryLastFailedTask(subWorkflowId);

        // Sub workflow should be in the running state now
        subWorkflow = workflowClient.getWorkflow(subWorkflowId, true);
        assertEquals(Workflow.WorkflowStatus.RUNNING, subWorkflow.getStatus());
        assertEquals(Task.Status.CANCELED, subWorkflow.getTasks().get(0).getStatus());
        assertEquals(Task.Status.SCHEDULED, subWorkflow.getTasks().get(1).getStatus());
    }

    /**
     * Finds the first task named {@code taskName} in the workflow, marks it
     * COMPLETED with the given output, and pushes the update to the server.
     *
     * @return the completed task, or null if the workflow or task is absent
     */
    private Task pollAndCompleteTask(
            String workflowInstanceId, String taskName, Map<String, Object> output) {
        Workflow workflow = workflowClient.getWorkflow(workflowInstanceId, true);
        if (workflow == null) {
            return null;
        }
        Optional<Task> optional =
                workflow.getTasks().stream()
                        .filter(task -> task.getTaskDefName().equals(taskName))
                        .findFirst();
        if (optional.isEmpty()) {
            return null;
        }
        Task task = optional.get();
        task.setStatus(Task.Status.COMPLETED);
        task.getOutputData().putAll(output);
        taskClient.updateTask(new TaskResult(task));

        return task;
    }
}

/*
 * Copyright 2022 Orkes, Inc.
 *
 * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package io.orkes.conductor.client.e2e;

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.RandomStringUtils;
import org.junit.jupiter.api.Test;

import com.netflix.conductor.common.metadata.tasks.Task;
import com.netflix.conductor.common.metadata.tasks.TaskDef;
import com.netflix.conductor.common.metadata.tasks.TaskResult;
import com.netflix.conductor.common.metadata.tasks.TaskType;
import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest;
import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams;
import com.netflix.conductor.common.metadata.workflow.WorkflowDef;
import com.netflix.conductor.common.metadata.workflow.WorkflowTask;
import com.netflix.conductor.common.run.Workflow;

import io.orkes.conductor.client.*;
import io.orkes.conductor.client.model.*;
import io.orkes.conductor.client.e2e.util.RegistrationUtil;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.testcontainers.shaded.org.awaitility.Awaitility.await;

/**
 * Verifies sub-workflow version resolution: a null or absent version must
 * resolve to the latest registered sub-workflow definition, including when
 * the sub-workflow is spawned from a dynamic fork.
 */
public class SubWorkflowVersionTests extends AbstractConductorTest {

    @Test
    public void testSubWorkflowNullVersion() {

        String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();
        String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();
        String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();

        // Register workflow
        RegistrationUtil.registerWorkflowWithSubWorkflowDef(
                parentWorkflowName, subWorkflowName, taskName, metadataClient);
        WorkflowDef workflowDef = metadataClient.getWorkflowDef(parentWorkflowName, 1);
        // Set sub workflow version to null
        workflowDef.getTasks().get(0).getSubWorkflowParam().setVersion(null);
        metadataClient.updateWorkflowDefs(List.of(workflowDef), true);

        // Trigger workflow
        StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest();
        startWorkflowRequest.setName(parentWorkflowName);
        startWorkflowRequest.setVersion(1);

        String workflowId = workflowClient.startWorkflow(startWorkflowRequest);

        // Complete the sub-workflow's first task so the parent can finish.
        String subWorkflowId =
                workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId();
        TaskResult taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(subWorkflowId);
        taskResult.setStatus(TaskResult.Status.COMPLETED);
        taskResult.setTaskId(
                workflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId());
        taskClient.updateTask(taskResult);

        // Wait for workflow to get completed
        await().atMost(42, TimeUnit.SECONDS)
                .pollInterval(1, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, false);
                            assertEquals(
                                    workflow1.getStatus().name(),
                                    WorkflowStatus.StatusEnum.COMPLETED.name());
                        });

        // Cleanup
        metadataClient.unregisterWorkflowDef(parentWorkflowName, 1);
        metadataClient.unregisterWorkflowDef(subWorkflowName, 1);
        metadataClient.unregisterTaskDef(taskName);
    }

    @Test
    public void testSubWorkflowEmptyVersion() {

        String taskName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();
        String parentWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();
        String subWorkflowName = RandomStringUtils.randomAlphanumeric(5).toUpperCase();

        // Register workflow
        RegistrationUtil.registerWorkflowWithSubWorkflowDef(
                parentWorkflowName, subWorkflowName, taskName, metadataClient);
        WorkflowDef workflowDef = metadataClient.getWorkflowDef(parentWorkflowName, 1);
        WorkflowDef subWorkflowDef = metadataClient.getWorkflowDef(subWorkflowName, null);
        // Register two versions of the sub-workflow; version 2 is the latest.
        subWorkflowDef.setVersion(1);
        metadataClient.updateWorkflowDefs(List.of(subWorkflowDef), true);
        subWorkflowDef.setVersion(2);
        metadataClient.updateWorkflowDefs(List.of(subWorkflowDef), true);
        // Set sub workflow version to empty in parent workflow definition
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName(subWorkflowName);
        workflowDef.getTasks().get(0).setSubWorkflowParam(subWorkflowParams);
        metadataClient.updateWorkflowDefs(List.of(workflowDef), true);

        // Trigger workflow
        StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest();
        startWorkflowRequest.setName(parentWorkflowName);
        startWorkflowRequest.setVersion(1);

        String workflowId = workflowClient.startWorkflow(startWorkflowRequest);

        // Complete the sub-workflow's first task so the parent can finish.
        String subWorkflowId =
                workflowClient.getWorkflow(workflowId, true).getTasks().get(0).getSubWorkflowId();
        TaskResult taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(subWorkflowId);
        taskResult.setStatus(TaskResult.Status.COMPLETED);
        taskResult.setTaskId(
                workflowClient.getWorkflow(subWorkflowId, true).getTasks().get(0).getTaskId());
        taskClient.updateTask(taskResult);

        // Wait for workflow to get completed
        // Check sub-workflow is executed with the latest version.
        await().atMost(10, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, true);
                            assertEquals(
                                    workflow1.getStatus().name(),
                                    WorkflowStatus.StatusEnum.COMPLETED.name());
                            assertEquals(
                                    workflow1
                                            .getTasks()
                                            .get(0)
                                            .getWorkflowTask()
                                            .getSubWorkflowParam()
                                            .getVersion(),
                                    2);
                        });

        // Cleanup
        metadataClient.unregisterWorkflowDef(parentWorkflowName, 1);
        metadataClient.unregisterWorkflowDef(subWorkflowName, 1);
        metadataClient.unregisterTaskDef(taskName);
    }

    @Test
    public void testDynamicSubWorkflow() {
        String workflowName1 = "DynamicFanInOutTest_Version";
        String subWorkflowName = "test_subworkflow";

        // Register workflow
        registerWorkflowDef(workflowName1, metadataClient);
        registerSubWorkflow(subWorkflowName, "test_task", metadataClient);

        // Trigger workflow
        StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest();
        startWorkflowRequest.setName(workflowName1);
        startWorkflowRequest.setVersion(1);

        String workflowId = workflowClient.startWorkflow(startWorkflowRequest);
        Workflow workflow = workflowClient.getWorkflow(workflowId, true);
        TaskResult taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(workflowId);
        taskResult.setTaskId(workflow.getTasks().get(0).getTaskId());
        taskResult.setStatus(TaskResult.Status.COMPLETED);

        // A SUB_WORKFLOW task with no version: the fork must spawn the latest.
        WorkflowTask workflowTask2 = new WorkflowTask();
        workflowTask2.setName("integration_task_2");
        workflowTask2.setTaskReferenceName("xdt1");
        workflowTask2.setType(TaskType.SUB_WORKFLOW.name());
        SubWorkflowParams subWorkflowParams = new SubWorkflowParams();
        subWorkflowParams.setName(subWorkflowName);
        workflowTask2.setSubWorkflowParam(subWorkflowParams);

        // NOTE(review): generic parameters restored — raw "Map output" /
        // "Map> input" do not compile.
        Map<String, Object> output = new HashMap<>();
        Map<String, Map<String, Object>> input = new HashMap<>();
        input.put("xdt1", Map.of("k1", "v1"));
        output.put("dynamicTasks", Arrays.asList(workflowTask2));
        output.put("dynamicTasksInput", input);
        taskResult.setOutputData(output);
        taskClient.updateTask(taskResult);

        await().atMost(10, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, true);
                            assertEquals(
                                    workflow1.getStatus().name(),
                                    WorkflowStatus.StatusEnum.RUNNING.name());
                            assertTrue(workflow1.getTasks().size() == 4);
                            assertEquals(
                                    workflow1.getTasks().get(0).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                            assertEquals(
                                    workflow1.getTasks().get(1).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                            assertEquals(
                                    workflow1.getTasks().get(2).getStatus().name(),
                                    Task.Status.IN_PROGRESS.name());
                            assertEquals(
                                    workflow1.getTasks().get(3).getStatus().name(),
                                    Task.Status.IN_PROGRESS.name());
                        });

        workflow = workflowClient.getWorkflow(workflowId, true);
        taskResult = new TaskResult();
        taskResult.setWorkflowInstanceId(workflowId);
        taskResult.setTaskId(workflow.getTasks().get(2).getTaskId());
        taskResult.setStatus(TaskResult.Status.COMPLETED);
        taskClient.updateTask(taskResult);

        // Workflow should be completed
        await().atMost(10, TimeUnit.SECONDS)
                .untilAsserted(
                        () -> {
                            Workflow workflow1 = workflowClient.getWorkflow(workflowId, true);
                            assertTrue(workflow1.getTasks().size() == 4);
                            assertEquals(
                                    workflow1.getStatus().name(),
                                    WorkflowStatus.StatusEnum.COMPLETED.name());
                            assertEquals(
                                    workflow1.getTasks().get(0).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                            assertEquals(
                                    workflow1.getTasks().get(1).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                            assertEquals(
                                    workflow1.getTasks().get(2).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                            assertEquals(
                                    workflow1
                                            .getTasks()
                                            .get(2)
                                            .getInputData()
                                            .get("subWorkflowVersion"),
                                    1L);
                            assertEquals(
                                    workflow1.getTasks().get(3).getStatus().name(),
                                    Task.Status.COMPLETED.name());
                        });

        metadataClient.unregisterWorkflowDef(workflowName1, 1);
    }

    /**
     * Registers a workflow with a FORK_JOIN_DYNAMIC fed by a simple task
     * ("dt1") whose output supplies the dynamic fork's tasks and inputs.
     */
    private void registerWorkflowDef(String workflowName, MetadataClient metadataClient1) {
        TaskDef taskDef = new TaskDef("dt1");
        taskDef.setOwnerEmail("test@orkes.io");

        TaskDef taskDef4 = new TaskDef("integration_task_2");
        taskDef4.setOwnerEmail("test@orkes.io");

        TaskDef taskDef3 = new TaskDef("integration_task_3");
        taskDef3.setOwnerEmail("test@orkes.io");

        TaskDef taskDef2 = new TaskDef("dt2");
        taskDef2.setOwnerEmail("test@orkes.io");

        WorkflowTask workflowTask = new WorkflowTask();
        workflowTask.setTaskReferenceName("dt2");
        workflowTask.setName("dt2");
        workflowTask.setTaskDefinition(taskDef2);
        workflowTask.setWorkflowTaskType(TaskType.SIMPLE);

        WorkflowTask inline = new WorkflowTask();
        inline.setTaskReferenceName("dt1");
        inline.setName("dt1");
        inline.setTaskDefinition(taskDef);
        inline.setWorkflowTaskType(TaskType.SIMPLE);

        WorkflowTask join = new WorkflowTask();
        join.setTaskReferenceName("join_dynamic");
        join.setName("join_dynamic");
        join.setWorkflowTaskType(TaskType.JOIN);

        WorkflowTask dynamicFork = new WorkflowTask();
        dynamicFork.setTaskReferenceName("dynamicFork");
        dynamicFork.setName("dynamicFork");
        dynamicFork.setTaskDefinition(taskDef);
        dynamicFork.setWorkflowTaskType(TaskType.FORK_JOIN_DYNAMIC);
        dynamicFork.setInputParameters(
                Map.of(
                        "dynamicTasks",
                        "${dt1.output.dynamicTasks}",
                        "dynamicTasksInput",
                        "${dt1.output.dynamicTasksInput}"));
        dynamicFork.setDynamicForkTasksParam("dynamicTasks");
        dynamicFork.setDynamicForkTasksInputParamName("dynamicTasksInput");

        WorkflowDef workflowDef = new WorkflowDef();
        workflowDef.setName(workflowName);
        workflowDef.setOwnerEmail("test@orkes.io");
        workflowDef.setInputParameters(Arrays.asList("value", "inlineValue"));
        workflowDef.setDescription("Workflow to test retry");
        workflowDef.setTasks(Arrays.asList(inline, dynamicFork, join));
        try {
            metadataClient1.registerWorkflowDef(workflowDef);
            metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2, taskDef3, taskDef4));
        } catch (Exception ignored) {
            // Best-effort registration: definitions may already exist from a
            // previous run; the test proceeds with whatever is registered.
        }
    }

    /** Registers a single-task sub-workflow used as the dynamic fork target. */
    public static void registerSubWorkflow(
            String subWorkflowName, String taskName, MetadataClient metadataClient) {
        TaskDef taskDef = new TaskDef(taskName);
        taskDef.setOwnerEmail("test@orkes.io");
        taskDef.setRetryCount(0);

        WorkflowTask inline = new WorkflowTask();
        inline.setTaskReferenceName(taskName);
        inline.setName(taskName);
        inline.setTaskDefinition(taskDef);
        inline.setWorkflowTaskType(TaskType.SIMPLE);
        inline.setInputParameters(Map.of("evaluatorType", "graaljs", "expression", "true;"));

        WorkflowDef subworkflowDef = new WorkflowDef();
        subworkflowDef.setName(subWorkflowName);
        subworkflowDef.setOwnerEmail("test@orkes.io");
        subworkflowDef.setInputParameters(Arrays.asList("value", "inlineValue"));
        subworkflowDef.setDescription("Sub Workflow to test retry");
        subworkflowDef.setTasks(Arrays.asList(inline));
        subworkflowDef.setTimeoutSeconds(600);
        subworkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF);

        metadataClient.registerWorkflowDef(subworkflowDef);
    }
}

/*
 * Copyright 2023 Orkes, Inc.
 *
 * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */
package io.orkes.conductor.client.e2e;

import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import org.junit.jupiter.api.Test;

import com.netflix.conductor.common.run.Workflow;
import com.netflix.conductor.sdk.workflow.def.ConductorWorkflow;
import com.netflix.conductor.sdk.workflow.def.tasks.Wait;

import static java.time.temporal.ChronoUnit.SECONDS;
import static org.junit.jupiter.api.Assertions.*;

/** Verifies a WAIT task with a fixed duration completes on its own. */
public class WaitTaskTest extends AbstractConductorTest {

    @Test
    public void testWaitTimeout() throws ExecutionException, InterruptedException, TimeoutException {
        // NOTE(review): generic parameters restored — the previous raw
        // "ConductorWorkflow>" form does not compile.
        ConductorWorkflow<Map<String, Object>> workflow = new ConductorWorkflow<>(executor);
        workflow.setName("wait_task_test");
        workflow.setVersion(1);
        workflow.setVariables(new HashMap<>());
        workflow.add(new Wait("wait_for_2_second", Duration.of(2, SECONDS)));
        CompletableFuture<Workflow> future = workflow.executeDynamic(new HashMap<>());
        assertNotNull(future);
        Workflow run = future.get(60, TimeUnit.SECONDS);
        assertNotNull(run);
        assertEquals(Workflow.WorkflowStatus.COMPLETED, run.getStatus());
        assertEquals(1, run.getTasks().size());
        long timeToExecute = run.getTasks().get(0).getEndTime() - run.getTasks().get(0).getScheduledTime();

        // Ensure the 2s wait completes within the 31s ceiling. NOTE(review):
        // the original comment claimed a "1sec buffer" but the bound is
        // 31000 ms — confirm whether 3100 ms was intended before tightening.
        assertTrue(timeToExecute < 31000, "Wait task did not complete in time, took " + timeToExecute + " millis");
    }
}
a/test-harness/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java new file mode 100644 index 0000000..c653b00 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/WorkflowRetryTests.java @@ -0,0 +1,277 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e; + +import java.util.concurrent.TimeUnit; + +import org.junit.jupiter.api.DisplayName; +import org.junit.jupiter.api.Test; +import org.testcontainers.shaded.com.google.common.util.concurrent.Uninterruptibles; + +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskResult; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; +import com.netflix.conductor.common.run.SearchResult; +import com.netflix.conductor.common.run.Workflow; +import com.netflix.conductor.common.run.WorkflowSummary; + +import io.orkes.conductor.client.http.OrkesMetadataClient; +import io.orkes.conductor.client.http.OrkesTaskClient; +import io.orkes.conductor.client.http.OrkesWorkflowClient; +import io.orkes.conductor.client.model.*; + +import lombok.extern.slf4j.Slf4j; + +import static io.orkes.conductor.client.e2e.util.RegistrationUtil.registerWorkflowDef; +import static io.orkes.conductor.client.e2e.util.RegistrationUtil.registerWorkflowWithSubWorkflowDef; +import static org.junit.jupiter.api.Assertions.*; +import static org.testcontainers.shaded.org.awaitility.Awaitility.await; + +@Slf4j +public class WorkflowRetryTests extends AbstractConductorTest { + + @Test + @DisplayName("Check workflow with simple task and retry functionality") + public void testRetrySimpleWorkflow() { + String workflowName = "retry-simple-workflow"; + String taskDefName = "retry-simple-task1"; + + terminateExistingRunningWorkflows(workflowName); + + // Register workflow + registerWorkflowDef(workflowName, taskDefName, taskDefName, metadataClient); + + StartWorkflowRequest 
startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName); + startWorkflowRequest.setVersion(1); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + // Fail the simple task + String taskId = workflow.getTasks().get(1).getTaskId(); + TaskResult taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.FAILED); + taskResult.setReasonForIncompletion("failed"); + taskClient.updateTask(taskResult); + + // Wait for workflow to get failed + await().atMost(30, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.FAILED.name()); + }); + + // Retry the workflow + workflowClient.retryLastFailedTask(workflowId); + // Check the workflow status and few other parameters + await().atMost(5, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, true); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + assertTrue(workflow1.getLastRetriedTime() != 0L); + assertEquals( + workflow1.getTasks().get(2).getStatus().name(), + Task.Status.SCHEDULED.name()); + }); + + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(workflowId); + taskResult.setTaskId( + workflowClient.getWorkflow(workflowId, true).getTasks().get(2).getTaskId()); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + // Wait for workflow to get completed + await().atMost(30, TimeUnit.SECONDS) + .pollInterval(10, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); + assertEquals( + 
workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.COMPLETED.name()); + }); + } + + @Test + @DisplayName("Check workflow with sub_workflow task and retry functionality") + public void testRetryWithSubWorkflow() { + + String workflowName = "retry-parent-with-sub-workflow"; + String subWorkflowName = "retry-sub-workflow"; + String taskName = "simple-no-retry2"; + + terminateExistingRunningWorkflows(workflowName); + + // Register workflow + registerWorkflowWithSubWorkflowDef(workflowName, subWorkflowName, taskName, metadataClient); + + StartWorkflowRequest startWorkflowRequest = new StartWorkflowRequest(); + startWorkflowRequest.setName(workflowName); + startWorkflowRequest.setVersion(1); + + String workflowId = workflowClient.startWorkflow(startWorkflowRequest); + System.out.print("Workflow id is " + workflowId); + Workflow workflow = workflowClient.getWorkflow(workflowId, true); + // Fail the simple task + String subworkflowId = workflow.getTasks().get(0).getSubWorkflowId(); + Workflow subWorkflow = workflowClient.getWorkflow(subworkflowId, true); + String taskId = subWorkflow.getTasks().get(0).getTaskId(); + TaskResult taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(subworkflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.FAILED); + taskClient.updateTask(taskResult); + + // Wait for parent workflow to get failed + await().atMost(3, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.FAILED.name()); + }); + + // Retry the sub workflow. 
+ workflowClient.retryLastFailedTask(subworkflowId); + // Check the workflow status and few other parameters + await().atMost(3, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(subworkflowId, true); + assertEquals( + WorkflowStatus.StatusEnum.RUNNING.name(), + workflow1.getStatus().name()); + assertTrue(workflow1.getLastRetriedTime() != 0L); + assertEquals( + workflow1.getTasks().get(0).getStatus().name(), + Task.Status.FAILED.name()); + assertEquals( + workflow1.getTasks().get(1).getStatus().name(), + Task.Status.SCHEDULED.name()); + }); + taskId = workflowClient.getWorkflow(subworkflowId, true).getTasks().get(1).getTaskId(); + + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(subworkflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + await().atMost(33, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(workflowId, false); + assertEquals( + WorkflowStatus.StatusEnum.COMPLETED.name(), + workflow1.getStatus().name(), + "workflow " + workflowId + " did not complete"); + }); + + // Check retry at parent workflow level. 
+ String newWorkflowId = workflowClient.startWorkflow(startWorkflowRequest); + System.out.print("Workflow id is " + newWorkflowId); + Workflow newWorkflow = workflowClient.getWorkflow(newWorkflowId, true); + // Fail the simple task + String newSubworkflowId = newWorkflow.getTasks().get(0).getSubWorkflowId(); + Workflow newSubWorkflow = workflowClient.getWorkflow(newSubworkflowId, true); + taskId = newSubWorkflow.getTasks().get(0).getTaskId(); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(newSubworkflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.FAILED); + taskClient.updateTask(taskResult); + + // Wait for parent workflow to get failed + await().atMost(3, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(newWorkflowId, false); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.FAILED.name()); + }); + + // Retry parent workflow. 
+ workflowClient.retryLastFailedTask(newWorkflowId); + + // Wait for parent workflow to transition back to RUNNING after the retry + await().atMost(3, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(newWorkflowId, false); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.RUNNING.name()); + }); + + newWorkflow = workflowClient.getWorkflow(newWorkflowId, true); + newSubworkflowId = newWorkflow.getTasks().get(0).getSubWorkflowId(); + newSubWorkflow = workflowClient.getWorkflow(newSubworkflowId, true); + taskId = newSubWorkflow.getTasks().get(1).getTaskId(); + taskResult = new TaskResult(); + taskResult.setWorkflowInstanceId(newSubworkflowId); + taskResult.setTaskId(taskId); + taskResult.setStatus(TaskResult.Status.COMPLETED); + taskClient.updateTask(taskResult); + + await().atMost(3, TimeUnit.SECONDS) + .pollInterval(1, TimeUnit.SECONDS) + .untilAsserted( + () -> { + Workflow workflow1 = workflowClient.getWorkflow(newWorkflowId, false); + assertEquals( + workflow1.getStatus().name(), + WorkflowStatus.StatusEnum.COMPLETED.name()); + }); + } + + private void terminateExistingRunningWorkflows(String workflowName) { + // clean up first + SearchResult<WorkflowSummary> found = + workflowClient.search( + "workflowType IN (" + workflowName + ") AND status IN (RUNNING)"); + System.out.println( + "Found " + found.getResults().size() + " running workflows to be cleaned up"); + found.getResults() + .forEach( + workflowSummary -> { + try { + System.out.println( + "Going to terminate " + + workflowSummary.getWorkflowId() + + " with status " + + workflowSummary.getStatus()); + workflowClient.terminateWorkflow( + workflowSummary.getWorkflowId(), "terminate"); + } catch (Exception e) { + } + }); + } + + +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/Commons.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/Commons.java new file mode 100644 index 0000000..91e4841 ---
/dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/Commons.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e.util; + +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.workflow.StartWorkflowRequest; + +public class Commons { + public static String WORKFLOW_NAME = "test-sdk-java-workflow"; + public static String TASK_NAME = "test-sdk-java-task"; + public static String OWNER_EMAIL = "example@orkes.io"; + public static int WORKFLOW_VERSION = 1; + + + public static TaskDef getTaskDef() { + TaskDef taskDef = new TaskDef(); + taskDef.setName(Commons.TASK_NAME); + return taskDef; + } + + public static StartWorkflowRequest getStartWorkflowRequest() { + return new StartWorkflowRequest().withName(WORKFLOW_NAME).withVersion(WORKFLOW_VERSION); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/RegistrationUtil.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/RegistrationUtil.java new file mode 100644 index 0000000..366fde5 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/RegistrationUtil.java @@ -0,0 +1,119 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e.util; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import com.netflix.conductor.common.metadata.tasks.TaskDef; +import com.netflix.conductor.common.metadata.tasks.TaskType; +import com.netflix.conductor.common.metadata.workflow.SubWorkflowParams; +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; + +import io.orkes.conductor.client.MetadataClient; + +public class RegistrationUtil { + + public static void registerWorkflowDef( + String workflowName, + String taskName1, + String taskName2, + MetadataClient metadataClient1) { + TaskDef taskDef = new TaskDef(taskName1); + taskDef.setRetryCount(0); + taskDef.setOwnerEmail("test@orkes.io"); + TaskDef taskDef2 = new TaskDef(taskName2); + taskDef2.setRetryCount(0); + taskDef2.setOwnerEmail("test@orkes.io"); + + WorkflowTask inline = new WorkflowTask(); + inline.setTaskReferenceName("inline_" + taskName1); + inline.setName(taskName1); + inline.setTaskDefinition(taskDef); + inline.setWorkflowTaskType(TaskType.INLINE); + inline.setInputParameters(Map.of("evaluatorType", "javascript", "expression", "true;")); + + WorkflowTask simpleTask = new WorkflowTask(); + simpleTask.setTaskReferenceName(taskName2); + simpleTask.setName(taskName2); + simpleTask.setTaskDefinition(taskDef); + simpleTask.setWorkflowTaskType(TaskType.SIMPLE); + simpleTask.setInputParameters(Map.of("value", "${workflow.input.value}", "order", "123")); + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(workflowName); + workflowDef.setOwnerEmail("test@orkes.io"); + 
workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); + workflowDef.setDescription("Workflow to monitor order state"); + workflowDef.setTimeoutSeconds(600); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + workflowDef.setTasks(Arrays.asList(inline, simpleTask)); + metadataClient1.updateWorkflowDefs(Arrays.asList(workflowDef)); + metadataClient1.registerTaskDefs(Arrays.asList(taskDef, taskDef2)); + } + + public static void registerWorkflowWithSubWorkflowDef( + String workflowName, + String subWorkflowName, + String taskName, + MetadataClient metadataClient) { + TaskDef taskDef = new TaskDef(taskName); + taskDef.setRetryCount(0); + taskDef.setOwnerEmail("test@orkes.io"); + TaskDef taskDef2 = new TaskDef(subWorkflowName); + taskDef2.setRetryCount(0); + taskDef2.setOwnerEmail("test@orkes.io"); + + WorkflowTask inline = new WorkflowTask(); + inline.setTaskReferenceName(taskName); + inline.setName(taskName); + inline.setTaskDefinition(taskDef); + inline.setWorkflowTaskType(TaskType.SIMPLE); + inline.setInputParameters(Map.of("evaluatorType", "graaljs", "expression", "true;")); + + WorkflowTask subworkflowTask = new WorkflowTask(); + subworkflowTask.setTaskReferenceName(subWorkflowName); + subworkflowTask.setName(subWorkflowName); + subworkflowTask.setTaskDefinition(taskDef2); + subworkflowTask.setWorkflowTaskType(TaskType.SUB_WORKFLOW); + SubWorkflowParams subWorkflowParams = new SubWorkflowParams(); + subWorkflowParams.setName(subWorkflowName); + subWorkflowParams.setVersion(1); + subworkflowTask.setSubWorkflowParam(subWorkflowParams); + subworkflowTask.setInputParameters( + Map.of("subWorkflowName", subWorkflowName, "subWorkflowVersion", "1")); + + WorkflowDef subworkflowDef = new WorkflowDef(); + subworkflowDef.setName(subWorkflowName); + subworkflowDef.setOwnerEmail("test@orkes.io"); + subworkflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); + subworkflowDef.setDescription("Sub Workflow to test retry"); + 
subworkflowDef.setTimeoutSeconds(600); + subworkflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + subworkflowDef.setTasks(Arrays.asList(inline)); + + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(workflowName); + workflowDef.setInputParameters(Arrays.asList("value", "inlineValue")); + workflowDef.setDescription("Workflow to test retry"); + workflowDef.setTimeoutSeconds(600); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + workflowDef.setTasks(Arrays.asList(subworkflowTask)); + workflowDef.setOwnerEmail("test@orkes.io"); + metadataClient.updateWorkflowDefs(List.of(workflowDef)); + metadataClient.updateWorkflowDefs(List.of(subworkflowDef)); + metadataClient.registerTaskDefs(Arrays.asList(taskDef, taskDef2)); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/SimpleWorker.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/SimpleWorker.java new file mode 100644 index 0000000..1f827a4 --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/SimpleWorker.java @@ -0,0 +1,32 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e.util; + +import com.netflix.conductor.client.worker.Worker; +import com.netflix.conductor.common.metadata.tasks.Task; +import com.netflix.conductor.common.metadata.tasks.TaskResult; + +public class SimpleWorker implements Worker { + @Override + public String getTaskDefName() { + return Commons.TASK_NAME; + } + + @Override + public TaskResult execute(Task task) { + task.setStatus(Task.Status.COMPLETED); + task.getOutputData().put("key", "value"); + task.getOutputData().put("key2", 42); + return new TaskResult(task); + } +} diff --git a/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/WorkflowUtil.java b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/WorkflowUtil.java new file mode 100644 index 0000000..bba17fb --- /dev/null +++ b/test-harness/src/test/java/io/orkes/conductor/client/e2e/util/WorkflowUtil.java @@ -0,0 +1,34 @@ +/* + * Copyright 2022 Orkes, Inc. + *

+ * Licensed under the Orkes Community License (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + *

+ * https://github.com/orkes-io/licenses/blob/main/community/LICENSE.txt + *

+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + */ +package io.orkes.conductor.client.e2e.util; + +import java.util.List; + +import com.netflix.conductor.common.metadata.workflow.WorkflowDef; +import com.netflix.conductor.common.metadata.workflow.WorkflowTask; + +public class WorkflowUtil { + public static WorkflowDef getWorkflowDef() { + WorkflowDef workflowDef = new WorkflowDef(); + workflowDef.setName(Commons.WORKFLOW_NAME); + workflowDef.setVersion(Commons.WORKFLOW_VERSION); + workflowDef.setOwnerEmail(Commons.OWNER_EMAIL); + workflowDef.setTimeoutSeconds(600); + workflowDef.setTimeoutPolicy(WorkflowDef.TimeoutPolicy.TIME_OUT_WF); + WorkflowTask workflowTask = new WorkflowTask(); + workflowTask.setName(Commons.TASK_NAME); + workflowTask.setTaskReferenceName(Commons.TASK_NAME); + workflowDef.setTasks(List.of(workflowTask)); + return workflowDef; + } +} diff --git a/test-harness/src/test/resources/logback-test.xml b/test-harness/src/test/resources/logback-test.xml new file mode 100644 index 0000000..649d04c --- /dev/null +++ b/test-harness/src/test/resources/logback-test.xml @@ -0,0 +1,24 @@ + + + + + + + %black(%d{ISO8601}) %highlight(%-5level): %msg%n%throwable + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/fail.json b/test-harness/src/test/resources/metadata/fail.json new file mode 100644 index 0000000..0c8279a --- /dev/null +++ b/test-harness/src/test/resources/metadata/fail.json @@ -0,0 +1,123 @@ +{ + "createTime": 1685744302411, + "updateTime": 1685743597519, + "name": "this_will_fail", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "http_task_lvome", + "taskReferenceName": "http_task_lvome_ref", + "inputParameters": { + "http_request": { + "uri": "https://orkes-api-tester.orkesconductor.com/api", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": "3000", + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "set_variable_task_lxzgc", + "taskReferenceName": "set_variable_task_lxzgc_ref", + "inputParameters": { + "name": "Orkes" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "json_transform_task_518l3h", + "taskReferenceName": "json_transform_task_518l3h_ref", + "inputParameters": { + "persons": [ + { + "name": "some", + "last": "name", + "email": "mail@mail.com", + "id": 1 + }, + { + "name": "some2", + "last": "name2", + "email": "mail2@mail.com", + "id": 2 + } + ], + "queryExpression": ".persons | map({user:{email,id}})" + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "get_random_fact2", + "taskReferenceName": "get_random_fact", + "inputParameters": { + "http_request": { + "uri": "https://orkes-api-tester.orkesconductor.com/dddd", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000, + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": 
"HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "cat_facts", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} +} \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/popminmax.json b/test-harness/src/test/resources/metadata/popminmax.json new file mode 100644 index 0000000..92b4a26 --- /dev/null +++ b/test-harness/src/test/resources/metadata/popminmax.json @@ -0,0 +1,38 @@ +{ + "createTime": 1670136356629, + "updateTime": 1694687962674, + "name": "PopulationMinMax", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "set_variable_task_jqc56h_ref", + "taskReferenceName": "set_variable_task_jqc56h_ref", + "inputParameters": { + "name": "Orkes" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "29219ff2-5bb4-4e52-8004-3570829d6970@apps.orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} +} \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/rerun.json b/test-harness/src/test/resources/metadata/rerun.json new file mode 100644 index 0000000..d1b5a4d --- /dev/null +++ b/test-harness/src/test/resources/metadata/rerun.json @@ -0,0 +1,651 @@ +{ + "createTime": 1684691483463, + "updateTime": 1684692300926, + "name": "re_run_test_workflow", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_0", + "taskReferenceName": "simple_task_00", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "jq", + "taskReferenceName": "jq", + "inputParameters": { + "key1": { + "value1": [ + "a", + "b" + ] + }, + "queryExpression": "{ key3: (.key1.value1 + .key2.value2) }", + "value2": [ + "d", + "e" + ] + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], 
+ "onStateChange": {} + }, + { + "name": "http_task_5saz2", + "taskReferenceName": "http_task_5saz2_ref", + "inputParameters": { + "http_request": { + "uri": "https://orkes-api-tester.orkesconductor.com/api", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": "3000", + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "wait", + "taskReferenceName": "wait", + "inputParameters": { + "duration": "1 seconds" + }, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "set_state", + "taskReferenceName": "set_state", + "inputParameters": { + "call_made": true + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_task_t7nhng", + "taskReferenceName": "fork_task_t7nhng_ref", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "x_test_workers_1", + "taskReferenceName": "x_test_worker_1_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_workers_0", + "taskReferenceName": "x_test_workers_0_ref", + "inputParameters": {}, + "type": "SIMPLE", + 
"decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "wait_task_guk0c", + "taskReferenceName": "wait_task_guk0c_ref", + "inputParameters": { + "duration": "1 seconds" + }, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_workers_2", + "taskReferenceName": "x_test_workers_2_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "join_task_y6nux", + "taskReferenceName": "join_task_y6nux_ref", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "wait_task_guk0c_ref", + "x_test_worker_1_ref", + "x_test_workers_2_ref" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "sub_flow", + "taskReferenceName": "sub_flow", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "PopulationMinMax" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "dynamic_fork", + "taskReferenceName": 
"dynamic_fork", + "inputParameters": { + "forkTaskName": "x_test_worker_0", + "forkTaskInputs": [ + 1, + 2, + 3 + ] + }, + "type": "FORK_JOIN_DYNAMIC", + "decisionCases": {}, + "dynamicForkTasksParam": "forkedTasks", + "dynamicForkTasksInputParamName": "forkedTasksInputs", + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "dynamic_fork_join", + "taskReferenceName": "dynamic_fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork", + "taskReferenceName": "fork", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "loop_until_success", + "taskReferenceName": "loop_until_success", + "inputParameters": { + "loop_count": 2 + }, + "type": "DO_WHILE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopCondition": "if ( $.loop_count['iteration'] < $.loop_until_success ) { true; } else { false; }", + "loopOver": [ + { + "name": "fact_length", + "taskReferenceName": "fact_length", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + 
"onStateChange": {} + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'", + "onStateChange": {} + } + ], + "onStateChange": {} + }, + { + "name": "sub_flow_inline", + "taskReferenceName": "sub_flow_inline", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fact_length2", + "taskReferenceName": "fact_length2", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": 
false, + "loopOver": [], + "onStateChange": {} + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'", + "onStateChange": {} + }, + { + "name": "sub_flow_inline_lvl2", + "taskReferenceName": "sub_flow_inline_lvl2", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + } + }, + "joinOn": [], + "optional": false, + "taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + 
"retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + } + }, + "joinOn": [], + "optional": false, + "taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_5", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [ + "sub_flow_inline", + "simple_task_5" + ], + "optional": false, + 
"defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_join", + "taskReferenceName": "fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "simple_task_5", + "sub_flow_inline" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "jq": "${jq.output}", + "inner_task": "${x_test_worker_1.output}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} +} \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/sub_workflow_tests.json b/test-harness/src/test/resources/metadata/sub_workflow_tests.json new file mode 100644 index 0000000..938cad3 --- /dev/null +++ b/test-harness/src/test/resources/metadata/sub_workflow_tests.json @@ -0,0 +1,131 @@ +[{ + "name": "sub_workflow", + "description": "sub_workflow", + "version": 1, + "tasks": [ + { + "name": "simple_task_in_sub_wf", + "taskReferenceName": "t1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" +},{ + "name": "integration_test_wf", + "description": "integration_test_wf", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { 
+ "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "p3": "${CPEWF_TASK_ID}", + "someNullKey": null + }, + "type": "SIMPLE" + }, + { + "name": "integration_task_2", + "taskReferenceName": "t2", + "inputParameters": { + "tp1": "${workflow.input.param1}", + "tp2": "${t1.output.op}", + "tp3": "${CPEWF_TASK_ID}" + }, + "type": "SIMPLE" + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "outputParameters": { + "o1": "${workflow.input.param1}", + "o2": "${t2.output.uuid}", + "o3": "${t1.output.op}" + }, + "failureWorkflow": "$workflow.input.failureWfName", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" +},{ + "name": "integration_test_wf_with_sub_wf", + "description": "integration_test_wf_with_sub_wf", + "version": 1, + "tasks": [ + { + "name": "integration_task_1", + "taskReferenceName": "t1", + "inputParameters": { + "p1": "${workflow.input.param1}", + "p2": "${workflow.input.param2}", + "someNullKey": null + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "sub_workflow_task", + "taskReferenceName": "t2", + "inputParameters": { + "param1": "${workflow.input.param1}", + "param2": "${workflow.input.param2}", + "subwf": "${workflow.input.nextSubwf}" + }, + "type": "SUB_WORKFLOW", + "subWorkflowParam": { + "name": "${workflow.input.subwf}", + "version": 1 + }, + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "retryCount": 0 + } + ], + "inputParameters": [ + "param1", + "param2" + ], + "failureWorkflow": "$workflow.input.failureWfName", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": 
"TIME_OUT_WF", + "timeoutSeconds": 5, + "ownerEmail": "test@harness.com" +} +] \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/sync_workflows.json b/test-harness/src/test/resources/metadata/sync_workflows.json new file mode 100644 index 0000000..ef24f37 --- /dev/null +++ b/test-harness/src/test/resources/metadata/sync_workflows.json @@ -0,0 +1,1078 @@ +[ + { + "createTime": 1683107983049, + "updateTime": 1682358589819, + "name": "sync_workflow_no_poller", + "description": "Edit or extend this sample workflow. Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "get_random_fact", + "taskReferenceName": "get_random_fact", + "inputParameters": { + "http_request": { + "uri": "https://catfact.ninja/fact", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000, + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "simple_task_pia0h_ref", + "taskReferenceName": "simple_task_pia0h_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683107970948, + "updateTime": 1684439813332, + "name": 
"sync_workflow_failed_case", + "description": "Edit or extend this sample workflow. Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "http_fail", + "taskReferenceName": "http_fail", + "inputParameters": { + "http_request": { + "uri": "https://cdatfact.ninja/fact", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000, + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683108777738, + "updateTime": 1683108838153, + "name": "load_test_perf_sync_workflow", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_0", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "jq", + "taskReferenceName": "jq", + "inputParameters": { + "key1": { + "value1": [ + "a", + "b" + ] + }, + "queryExpression": "{ key3: (.key1.value1 + .key2.value2) }", + "value2": [ + "d", + "e" + ] + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + 
"loopOver": [], + "onStateChange": {} + }, + { + "name": "http_task_5saz2", + "taskReferenceName": "http_task_5saz2_ref", + "inputParameters": { + "http_request": { + "uri": "https://orkes-api-tester.orkesconductor.com/api", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": "3000", + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "wait", + "taskReferenceName": "wait", + "inputParameters": { + "duration": "1 seconds" + }, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "set_state", + "taskReferenceName": "set_state", + "inputParameters": { + "call_made": true, + "number": "${simple_task_0.output.number}" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_task_t7nhng", + "taskReferenceName": "fork_task_t7nhng_ref", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_hap09_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_worker_0", + "taskReferenceName": 
"simple_task_2nwrl_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_jgi39g_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "join_task_y6nux", + "taskReferenceName": "join_task_y6nux_ref", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "simple_task_hap09_ref", + "simple_task_jgi39g_ref", + "simple_task_2nwrl_ref" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "sub_flow", + "taskReferenceName": "sub_flow", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "PopulationMinMax" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "dynamic_fork", + "taskReferenceName": "dynamic_fork", + "inputParameters": { + "forkTaskName": "x_test_worker_0", + "forkTaskInputs": [ + 1, + 2, + 3 + ] + }, + "type": "FORK_JOIN_DYNAMIC", + "decisionCases": {}, + "dynamicForkTasksParam": "forkedTasks", + "dynamicForkTasksInputParamName": "forkedTasksInputs", + "defaultCase": [], + 
"forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "dynamic_fork_join", + "taskReferenceName": "dynamic_fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork", + "taskReferenceName": "fork", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "loop_until_success", + "taskReferenceName": "loop_until_success", + "inputParameters": { + "loop_count": 2 + }, + "type": "DO_WHILE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopCondition": "if ( $.loop_count['iteration'] < $.loop_until_success ) { true; } else { false; }", + "loopOver": [ + { + "name": "fact_length", + "taskReferenceName": "fact_length", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + 
"startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'", + "onStateChange": {} + } + ], + "onStateChange": {} + }, + { + "name": "sub_flow_inline", + "taskReferenceName": "sub_flow_inline", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fact_length2", + "taskReferenceName": "fact_length2", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + 
"defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'", + "onStateChange": {} + }, + { + "name": "sub_flow_inline_lvl2", + "taskReferenceName": "sub_flow_inline_lvl2", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + } + }, + "joinOn": [], + "optional": false, + "taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, 
+ "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + } + }, + "joinOn": [], + "optional": false, + "taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + }, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_5", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ] + ], + "startDelay": 0, + "joinOn": [ + "sub_flow_inline", + "simple_task_5" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "fork_join", + "taskReferenceName": "fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": 
[ + "simple_task_5", + "sub_flow_inline" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "task1": "${simple_task_0.output}", + "jq": "${jq.output}", + "inner_task": "${x_test_worker_1.output}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683107857859, + "updateTime": 1685241924046, + "name": "sync_workflow_end_with_simple_task", + "description": "Edit or extend this sample workflow. Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "get_random_fact", + "taskReferenceName": "get_random_fact", + "inputParameters": { + "http_request": { + "uri": "https://catfact.ninja/fact", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000, + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "x_test_worker_00", + "taskReferenceName": "simple_task_rka0w_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + 
"timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683107925149, + "updateTime": 1684744921838, + "name": "sync_workflow_end_with_set_variable_task", + "description": "Edit or extend this sample workflow. Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "set_variable_task_1fi09_ref", + "taskReferenceName": "set_variable_task_1fi09_ref", + "inputParameters": { + "name": "Orkes" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683107945966, + "updateTime": 1685309635473, + "name": "sync_workflow_end_with_jq_task", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "json_transform_task_10i8a", + "taskReferenceName": "json_transform_task_10i8a_ref", + "inputParameters": { + "persons": [ + { + "name": "some", + "last": "name", + "email": "mail@mail.com", + "id": 1 + }, + { + "name": "some2", + "last": "name2", + "email": "mail2@mail.com", + "id": 2 + } + ], + "queryExpression": ".persons | map({user:{email,id}})" + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1683107958888, + "updateTime": 1682357144047, + "name": "sync_workflow_end_with_subworkflow_task", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "get_random_fact", + "taskReferenceName": "get_random_fact", + "inputParameters": { + "http_request": { + "uri": "https://catfact.ninja/fact", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000, + "accept": "application/json", + "contentType": "application/json" + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "http", + "taskReferenceName": "http_sync", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "http" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "failureWorkflow": "", + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1675358077891, + "updateTime": 1683650500645, + "name": "http", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "get_random_fact", + "taskReferenceName": "get_random_fact", + "inputParameters": { + "http_request": { + "uri": "https://catfact.ninja/fact", + "method": "GET", + "connectionTimeOut": 3000, + "readTimeOut": 3000 + } + }, + "type": "HTTP", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + }, + { + "name": "simple_task_in8x5", + "taskReferenceName": "simple_task_in8x5_ref", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "manan.bhatt@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + }, + { + "createTime": 1670136356629, + "updateTime": 1676101816481, + "name": "PopulationMinMax", + "description": "Edit or extend this sample workflow. 
Set the workflow name to get started", + "version": 1, + "tasks": [ + { + "name": "set_variable_task_jqc56h_ref", + "taskReferenceName": "set_variable_task_jqc56h_ref", + "inputParameters": { + "name": "Orkes" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "onStateChange": {} + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "orkes-workers@apps.orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {}, + "onStateChange": {} + } + +] \ No newline at end of file diff --git a/test-harness/src/test/resources/metadata/workflows.json b/test-harness/src/test/resources/metadata/workflows.json new file mode 100644 index 0000000..43f6554 --- /dev/null +++ b/test-harness/src/test/resources/metadata/workflows.json @@ -0,0 +1,594 @@ +[{ + "createTime": 1670136330055, + "updateTime": 1670176591044, + "name": "sub_workflow_test", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_0", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "jq", + "taskReferenceName": "jq", + "inputParameters": { + "key1": { + "value1": [ + "a", + "b" + ] + }, + "queryExpression": "{ key3: (.key1.value1 + .key2.value2) }", + "value2": [ + "d", + "e" + ] + }, + "type": "JSON_JQ_TRANSFORM", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + 
"asyncComplete": false, + "loopOver": [] + }, + { + "name": "wait", + "taskReferenceName": "wait", + "inputParameters": { + "duration": "1 s" + }, + "type": "WAIT", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "set_state", + "taskReferenceName": "set_state", + "inputParameters": { + "call_made": true, + "number": "${simple_task_0.output.number}" + }, + "type": "SET_VARIABLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "sub_flow", + "taskReferenceName": "sub_flow", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "PopulationMinMax2a27fdfb-295d-4c70-b813-7e3a44e2cb58" + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "sub_flow_v1", + "taskReferenceName": "sub_flow_v1", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "PopulationMinMax2a27fdfb-295d-4c70-b813-7e3a44e2cb58", + "version": 1 + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "dynamic_fork", + "taskReferenceName": "dynamic_fork", + "inputParameters": { + "forkTaskName": "x_test_worker_0", + "forkTaskInputs": [ + 1, + 2, + 3 + ] + }, + "type": "FORK_JOIN_DYNAMIC", + "decisionCases": {}, + "dynamicForkTasksParam": "forkedTasks", + "dynamicForkTasksInputParamName": "forkedTasksInputs", + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + 
"optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "dynamic_fork_join", + "taskReferenceName": "dynamic_fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "fork", + "taskReferenceName": "fork", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "loop_until_success", + "taskReferenceName": "loop_until_success", + "inputParameters": { + "loop_count": 2 + }, + "type": "DO_WHILE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": true, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopCondition": "if ( $.loop_count['iteration'] < $.loop_until_success ) { true; } else { false; }", + "loopOver": [ + { + "name": "fact_length", + "taskReferenceName": "fact_length", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } 
+ ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'" + } + ] + }, + { + "name": "sub_flow_inline", + "taskReferenceName": "sub_flow_inline", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "fact_length2", + "taskReferenceName": "fact_length2", + "description": "Fail if the fact is too short", + "inputParameters": { + "number": "${get_data.output.number}" + }, + "type": "SWITCH", + "decisionCases": { + "LONG": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "SHORT": [ + { + "name": "too_short", + "taskReferenceName": "too_short", + "inputParameters": { + "terminationReason": "value too short", + "terminationStatus": "FAILED" + }, + "type": "TERMINATE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + }, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + 
"optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "evaluatorType": "javascript", + "expression": "$.number < 15 ? 'LONG':'LONG'" + }, + { + "name": "sub_flow_inline_lvl2", + "taskReferenceName": "sub_flow_inline_lvl2", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "inline_sub", + "version": 1, + "workflowDefinition": { + "name": "inline_sub", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {} + } + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + "taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + } + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {} + } + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [], + 
"taskDefinition": { + "name": "sub_flow_inline", + "description": "sub_flow_inline", + "retryCount": 0, + "timeoutSeconds": 3000, + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 60, + "responseTimeoutSeconds": 20, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "pollTimeoutSeconds": 3600, + "backoffScaleFactor": 1 + } + } + ], + [ + { + "name": "x_test_worker_2", + "taskReferenceName": "simple_task_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "x_test_worker_1", + "taskReferenceName": "simple_task_5", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": ["sub_flow_inline","simple_task_5"], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "fork_join", + "taskReferenceName": "fork_join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": ["simple_task_5","sub_flow_inline"], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "sub_flow_v0", + "taskReferenceName": "sub_flow_v0", + "inputParameters": {}, + "type": "SUB_WORKFLOW", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "subWorkflowParam": { + "name": "PopulationMinMax2a27fdfb-295d-4c70-b813-7e3a44e2cb58", + "version": 0 + }, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], 
+ "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {} +}, + { + "createTime": 1670136356629, + "updateTime": 1670136356636, + "name": "PopulationMinMax2a27fdfb-295d-4c70-b813-7e3a44e2cb58", + "description": "PopulationMinMax v3", + "version": 3, + "tasks": [ + { + "name": "x_test_worker_4", + "taskReferenceName": "x_test_worker_4", + "inputParameters": { + "name": "Orkes" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "variables": {}, + "inputTemplate": {} + }, + { + "createTime": 1670136356629, + "updateTime": 1670136356636, + "name": "PopulationMinMax2a27fdfb-295d-4c70-b813-7e3a44e2cb58", + "description": "PopulationMinMax v1", + "version": 1, + "tasks": [ + { + "name": "x_test_worker_1", + "taskReferenceName": "x_test_worker_1", + "inputParameters": { + "name": "Orkes" + }, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ], + "inputParameters": [], + "outputParameters": { + "data": "${get_random_fact.output.response.body.fact}" + }, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "ownerEmail": "viren@orkes.io", + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + 
"variables": {}, + "inputTemplate": {} + }] \ No newline at end of file diff --git a/test-harness/src/test/resources/sample_tasks.json b/test-harness/src/test/resources/sample_tasks.json new file mode 100644 index 0000000..46c54bb --- /dev/null +++ b/test-harness/src/test/resources/sample_tasks.json @@ -0,0 +1,59 @@ +[ + { + "createTime": 1651856131126, + "createdBy": "", + "name": "image_compression", + "description": "Edit or extend this sample task. Set the task name to get started", + "retryCount": 3, + "timeoutSeconds": 600, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "ALERT_ONLY", + "retryLogic": "FIXED", + "retryDelaySeconds": 6, + "responseTimeoutSeconds": 10, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "ownerEmail": "test@orkes.io", + "backoffScaleFactor": 1 + }, + { + "createTime": 1651853134126, + "createdBy": "", + "name": "download_file_from_ec2", + "description": "Edit or extend this sample task. Set the task name to get started", + "retryCount": 1, + "timeoutSeconds": 300, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 3, + "responseTimeoutSeconds": 50, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "ownerEmail": "test@orkes.io", + "backoffScaleFactor": 1 + }, + { + "createTime": 1652856134126, + "createdBy": "", + "name": "update_database", + "description": "Edit or extend this sample task. 
Set the task name to get started", + "retryCount": 3, + "timeoutSeconds": 900, + "inputKeys": [], + "outputKeys": [], + "timeoutPolicy": "TIME_OUT_WF", + "retryLogic": "FIXED", + "retryDelaySeconds": 10, + "responseTimeoutSeconds": 60, + "inputTemplate": {}, + "rateLimitPerFrequency": 0, + "rateLimitFrequencyInSeconds": 1, + "ownerEmail": "test@orkes.io", + "backoffScaleFactor": 1 + } +] \ No newline at end of file diff --git a/test-harness/src/test/resources/sample_workflow.json b/test-harness/src/test/resources/sample_workflow.json new file mode 100644 index 0000000..e6723a3 --- /dev/null +++ b/test-harness/src/test/resources/sample_workflow.json @@ -0,0 +1,117 @@ +{ + "name": "Do_While_Workflow", + "description": "Do_While_Workflow", + "version": 1, + "tasks": [ + { + "name": "loopTask", + "taskReferenceName": "loopTask", + "inputParameters": { + "value": "${workflow.input.loop}" + }, + "type": "DO_WHILE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopCondition": "if ($.loopTask['iteration'] < $.value) { true; } else { false;} ", + "loopOver": [ + { + "name": "integration_task_0", + "taskReferenceName": "integration_task_0", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "fork", + "taskReferenceName": "fork", + "inputParameters": {}, + "type": "FORK_JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [ + [ + { + "name": "integration_task_1", + "taskReferenceName": "integration_task_1", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": 
false, + "loopOver": [] + } + ], + [ + { + "name": "integration_task_2", + "taskReferenceName": "integration_task_2", + "inputParameters": {}, + "type": "SIMPLE", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + ], + "startDelay": 0, + "joinOn": [], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + }, + { + "name": "join", + "taskReferenceName": "join", + "inputParameters": {}, + "type": "JOIN", + "decisionCases": {}, + "defaultCase": [], + "forkTasks": [], + "startDelay": 0, + "joinOn": [ + "integration_task_1", + "integration_task_2" + ], + "optional": false, + "defaultExclusiveJoinTask": [], + "asyncComplete": false, + "loopOver": [] + } + ] + } + ], + "inputParameters": [], + "outputParameters": {}, + "schemaVersion": 2, + "restartable": true, + "workflowStatusListenerEnabled": false, + "timeoutPolicy": "ALERT_ONLY", + "timeoutSeconds": 0, + "ownerEmail": "test@harness.com" +} \ No newline at end of file