diff --git a/test/test_dag.py b/test/test_dag.py
index 4a15afe3c..321cb7244 100644
--- a/test/test_dag.py
+++ b/test/test_dag.py
@@ -851,7 +851,6 @@ def test_chained_depends(temp_factory):
         targets=['a.vcf'])
 
 
-@pytest.mark.skipif(True, reason='This test is failing')
 def test_output_of_dag(clear_now_and_after):
     '''Test output of dag'''
     #
diff --git a/test/test_docker_actions.py b/test/test_docker_actions.py
index a00c3d530..36d2dda0b 100644
--- a/test/test_docker_actions.py
+++ b/test/test_docker_actions.py
@@ -90,16 +90,6 @@ def test_bash_in_docker():
     ''')
 
 
-# @pytest.mark.skipif(not has_docker or sys.platform != 'win32', reason='Skip test because docker is not installed.')
-# def testBatchScriptInDocker():
-#     '''Test action powershell in docker environment'''
-#     script = execute_workflow(r'''
-#     [0]
-#     run: container='docker://microsoft/windowsservercore'
-#     dir c:\
-#     ''')
-
-
 @pytest.mark.skipif(
     not has_docker or sys.platform == 'win32',
     reason='Skip test because docker is not installed.')
diff --git a/test/test_execute.py b/test/test_execute.py
index 93efced1e..783890ed9 100644
--- a/test/test_execute.py
+++ b/test/test_execute.py
@@ -1775,7 +1775,6 @@ def test_step_id_vars():
     """)
 
 
-
 def test_reexecution_of_dynamic_depends(clear_now_and_after):
     """Testing the rerun of steps to verify dependency"""
     clear_now_and_after("a.bam", "a.bam.bai")
@@ -1804,7 +1803,6 @@ def test_reexecution_of_dynamic_depends(clear_now_and_after):
     assert res["__completed__"]["__step_skipped__"] == 1
 
 
-
 def test_traced_function(clear_now_and_after):
     clear_now_and_after("a.bam", "a.bam.bai")
     script = """
@@ -2314,7 +2312,6 @@ def test_remove_empty_groups_empty_named(clear_now_and_after):
     """)
 
 
-
 def test_multi_depends(clear_now_and_after, temp_factory):
     """Test a step with multiple depdendend steps"""
 
@@ -2378,10 +2375,7 @@ def test_execute_ipynb(sample_workflow):
     Base_Executor(wf).run()
 
 
-@pytest.mark.skipif(
-    True,
-    reason="Skip test because travis fails on this test for unknown reason, also due to a bug in psutil under windows",
-)
+@pytest.mark.skip(reason="Skip test because travis fails on this test for unknown reason, also due to a bug in psutil under windows")
 def test_kill_worker(script_factory):
     """Test if the workflow can error out after a worker is killed"""
     import time
@@ -2402,17 +2396,7 @@ def test_kill_worker(script_factory):
     while True:
         children = proc.children(recursive=True)
         if children:
-            children[0].terminate()
-            break
-        time.sleep(0.1)
-    ret.wait()
-
-    ret = subprocess.Popen(["sos", "run", script_file])
-    proc = psutil.Process(ret.pid)
-    while True:
-        children = proc.children(recursive=True)
-        if children:
-            children[0].kill()
+            children[-1].terminate()
             break
         time.sleep(0.1)
     ret.wait()
@@ -2457,7 +2441,7 @@ def test_kill_substep_worker(script_factory):
     ret.wait()
 
 
-@pytest.mark.skipif(True, reason="This test needs to be improved to make it consistent")
+@pytest.mark.skip(reason="This test needs to be improved to make it consistent")
 def test_kill_task(script_factory):
     """Test if the workflow can error out after a worker is killed"""
     subprocess.call(["sos", "purge", "--all"])
@@ -2501,7 +2485,7 @@ def test_kill_task(script_factory):
     assert ret.returncode != 0
 
 
-@pytest.mark.skipif(True, reason="This test needs to be improved to make it consistent")
+@pytest.mark.skip(reason="This test needs to be improved to make it consistent")
 def test_restart_orphaned_tasks(script_factory):
     """Test restarting orphaned tasks which displays as running at first."""
     import time
@@ -2596,7 +2580,6 @@ def test_concurrent_running_tasks(script_factory):
     assert ret2.returncode == 0
 
 
-
 def test_reexecute_task_with_missing_output(clear_now_and_after):
     '''Issue #1493'''
     clear_now_and_after([f'a_{i}.txt' for i in range(10)])
diff --git a/test/test_task.py b/test/test_task.py
index 0a69396ca..66b9034ce 100644
--- a/test/test_task.py
+++ b/test/test_task.py
@@ -509,7 +509,7 @@ def test_max_cores():
     )
 
 
-@pytest.mark.skipIf(not has_docker, reason="Docker container not usable")
+@pytest.mark.skipif(not has_docker, reason="Docker container not usable")
 def test_override_max_cores():
     """Test use queue_args to override server restriction max_cores"""
     execute_workflow(