Skip to content

Commit

Permalink
Add workaround for snakemake#1550 in tests
Browse files Browse the repository at this point in the history
The new automatic `--default-resources` behavior was causing test failures, so we
now explicitly set the default `mem_mb` to 0 in the affected tests.

Also ran code formatting with `black`.
  • Loading branch information
pvandyken committed Jun 22, 2022
1 parent 3d2f34f commit 6ae41a3
Show file tree
Hide file tree
Showing 3 changed files with 21 additions and 8 deletions.
12 changes: 6 additions & 6 deletions snakemake/dag.py
Expand Up @@ -1476,12 +1476,12 @@ def handle_pipes_and_services(self):
# All pipe groups should be contained within one user-defined group
if len(user_groups) > 1:
raise WorkflowError(
"An output file is marked as "
"pipe or service, but consuming jobs "
"are part of conflicting "
"groups.",
rule=job.rule,
)
"An output file is marked as "
"pipe or service, but consuming jobs "
"are part of conflicting "
"groups.",
rule=job.rule,
)

if len(candidate_groups) > 1:
# Merge multiple pipe groups together
Expand Down
1 change: 0 additions & 1 deletion snakemake/executors/__init__.py
Expand Up @@ -767,7 +767,6 @@ def get_resource_declarations(self, job):
]
return format_cli_arg("--resources", resources)


def get_python_executable(self):
return sys.executable if self.assume_shared_fs else "python"

Expand Down
16 changes: 15 additions & 1 deletion tests/tests.py
Expand Up @@ -9,6 +9,8 @@
import subprocess as sp
from pathlib import Path

from snakemake.resources import DefaultResources

sys.path.insert(0, os.path.dirname(__file__))

from .common import *
Expand Down Expand Up @@ -947,6 +949,7 @@ def test_group_jobs_resources(mocker):
cores=6,
resources={"typo": 23, "mem_mb": 60000},
group_components={0: 5},
default_resources=DefaultResources(["mem_mb=0"]),
)
assert set(spy.spy_return.items()) == {
("_nodes", 1),
Expand All @@ -955,6 +958,7 @@ def test_group_jobs_resources(mocker):
("tmpdir", "/tmp"),
("mem_mb", 60000),
("fake_res", 400),
("disk_mb", 2000),
}


Expand All @@ -968,6 +972,7 @@ def test_group_jobs_resources_with_max_threads(mocker):
resources={"mem_mb": 60000},
max_threads=1,
group_components={0: 5},
default_resources=DefaultResources(["mem_mb=0"]),
)
assert set(spy.spy_return.items()) == {
("_nodes", 1),
Expand All @@ -976,6 +981,7 @@ def test_group_jobs_resources_with_max_threads(mocker):
("tmpdir", "/tmp"),
("mem_mb", 60000),
("fake_res", 1200),
("disk_mb", 3000),
}


Expand All @@ -989,6 +995,7 @@ def test_group_jobs_resources_with_limited_resources(mocker):
resources={"mem_mb": 10000},
max_threads=1,
group_components={0: 5},
default_resources=DefaultResources(["mem_mb=0"]),
)
assert set(spy.spy_return.items()) == {
("_nodes", 1),
Expand All @@ -997,8 +1004,10 @@ def test_group_jobs_resources_with_limited_resources(mocker):
("tmpdir", "/tmp"),
("mem_mb", 10000),
("fake_res", 400),
("disk_mb", 1000),
}


@skip_on_windows
def test_multiple_group_jobs_submit_ignoring_resource_constraints():
tmp = run(
Expand All @@ -1011,13 +1020,15 @@ def test_multiple_group_jobs_submit_ignoring_resource_constraints():
resources={"typo": 23, "mem_mb": 50000},
group_components={0: 5, 1: 5},
overwrite_groups={"a": 0, "a_1": 1, "b": 2, "c": 2},
default_resources=DefaultResources(["mem_mb=0"]),
shouldfail=True,
)
with (Path(tmp)/'qsub.log').open('r') as f:
with (Path(tmp) / "qsub.log").open("r") as f:
lines = [l for l in f.readlines() if not l == "\n"]
assert len(lines) == 2
shutil.rmtree(tmp)


@skip_on_windows
def test_group_job_resources_with_pipe(mocker):
spy = mocker.spy(GroupJob, "_calculate_resources")
Expand All @@ -1029,13 +1040,15 @@ def test_group_job_resources_with_pipe(mocker):
"mem_mb": 60000,
},
group_components={0: 5},
default_resources=DefaultResources(["mem_mb=0"]),
)
assert set(spy.spy_return.items()) == {
("_nodes", 1),
("_cores", 4),
("runtime", 280),
("tmpdir", "/tmp"),
("mem_mb", 50000),
("disk_mb", 1000),
}


Expand All @@ -1050,6 +1063,7 @@ def test_group_job_resources_with_pipe_with_too_much_constraint():
},
group_components={0: 5},
shouldfail=True,
default_resources=DefaultResources(["mem_mb=0"]),
)


Expand Down

0 comments on commit 6ae41a3

Please sign in to comment.