fix: always deploy conda envs in main process when assuming a shared file system (fixes issue #1463) (#1472)

* fix: always deploy conda envs in main process when assuming a shared file system (fixes issue #1463)

* fix lint
johanneskoester committed Mar 11, 2022
1 parent 9520e98 commit 79788eb
Showing 3 changed files with 12 additions and 10 deletions.
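
In short, the fix stores the shared-filesystem assumption on the Workflow object (self.assume_shared_fs) instead of passing it only to execute(), so that DAG.create_conda_envs can deploy conda environments in the main process whenever a shared file system is assumed, not just when running locally. Below is a minimal sketch of that pattern using simplified stand-in classes, not the actual Snakemake implementation:

# Sketch of the gating pattern introduced by this commit, with hypothetical
# stand-in classes (the real Workflow and DAG carry many more attributes).

class Workflow:
    def __init__(self, run_local=True, assume_shared_fs=True):
        self.run_local = run_local
        # Kept as a workflow attribute so the DAG can read it later.
        self.assume_shared_fs = assume_shared_fs


class DAG:
    def __init__(self, workflow, jobs):
        self.workflow = workflow
        self.jobs = jobs

    def conda_envs_to_create(self):
        # Deploy an env in the main process whenever a shared file system is
        # assumed (e.g. a classic cluster setup) or the job runs locally anyway;
        # before this fix, the condition checked workflow.run_local instead.
        return {
            job["conda_env_spec"]
            for job in self.jobs
            if job.get("conda_env_spec")
            and (self.workflow.assume_shared_fs or job.get("is_local"))
        }


if __name__ == "__main__":
    # Cluster-style run: jobs are not local, but the file system is shared.
    wf = Workflow(run_local=False, assume_shared_fs=True)
    jobs = [
        {"conda_env_spec": "envs/mapping.yaml", "is_local": False},  # hypothetical env spec
        {"conda_env_spec": None, "is_local": True},
    ]
    # Under the old run_local check this set would be empty; with the fix the
    # env is created once, up front, in the main process.
    print(DAG(wf, jobs).conda_envs_to_create())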
snakemake/__init__.py (3 additions, 1 deletion)
@@ -395,6 +395,8 @@ def snakemake(
         assume_shared_fs = False
         default_remote_provider = "GS"
         default_remote_prefix = default_remote_prefix.rstrip("/")
+    if kubernetes:
+        assume_shared_fs = False
 
     # Currently preemptible instances only supported for Google LifeSciences Executor
     if preemption_default or preemptible_rules and not google_lifesciences:
@@ -581,6 +583,7 @@ def snakemake(
         default_remote_provider=_default_remote_provider,
         default_remote_prefix=default_remote_prefix,
         run_local=run_local,
+        assume_shared_fs=assume_shared_fs,
         default_resources=default_resources,
         cache=cache,
         cores=cores,
@@ -788,7 +791,6 @@ def snakemake(
             no_hooks=no_hooks,
             force_use_threads=use_threads,
             conda_create_envs_only=conda_create_envs_only,
-            assume_shared_fs=assume_shared_fs,
             cluster_status=cluster_status,
             cluster_cancel=cluster_cancel,
             cluster_cancel_nargs=cluster_cancel_nargs,
snakemake/dag.py (1 addition, 1 deletion)
@@ -280,7 +280,7 @@ def create_conda_envs(
         env_set = {
             (job.conda_env_spec, job.container_img_url)
             for job in jobs
-            if job.conda_env_spec and (self.workflow.run_local or job.is_local)
+            if job.conda_env_spec and (self.workflow.assume_shared_fs or job.is_local)
         }
 
         # Then based on md5sum values
snakemake/workflow.py (8 additions, 8 deletions)
@@ -131,6 +131,7 @@ def __init__(
         default_remote_provider=None,
         default_remote_prefix="",
         run_local=True,
+        assume_shared_fs=True,
         default_resources=None,
         cache=None,
         nodes=1,
@@ -211,6 +212,7 @@ def __init__(
             [] if overwrite_configfiles is None else list(overwrite_configfiles)
         )
         self.run_local = run_local
+        self.assume_shared_fs = assume_shared_fs
         self.report_text = None
         self.conda_cleanup_pkgs = conda_cleanup_pkgs
         self.edit_notebook = edit_notebook
@@ -619,7 +621,6 @@ def execute(
         no_hooks=False,
         force_use_threads=False,
         conda_create_envs_only=False,
-        assume_shared_fs=True,
         cluster_status=None,
         cluster_cancel=None,
         cluster_cancel_nargs=None,
@@ -959,13 +960,12 @@ def files(items):
             dag.list_untracked()
             return True
 
-        if self.use_singularity:
-            if assume_shared_fs:
-                dag.pull_container_imgs(
-                    dryrun=dryrun or list_conda_envs, quiet=list_conda_envs
-                )
+        if self.use_singularity and self.assume_shared_fs:
+            dag.pull_container_imgs(
+                dryrun=dryrun or list_conda_envs, quiet=list_conda_envs
+            )
         if self.use_conda:
-            if assume_shared_fs:
+            if self.assume_shared_fs:
                 dag.create_conda_envs(
                     dryrun=dryrun or list_conda_envs or conda_cleanup_envs,
                     quiet=list_conda_envs,
@@ -1027,7 +1027,7 @@ def files(items):
                 latency_wait=latency_wait,
                 greediness=greediness,
                 force_use_threads=force_use_threads,
-                assume_shared_fs=assume_shared_fs,
+                assume_shared_fs=self.assume_shared_fs,
                 keepincomplete=keepincomplete,
                 keepmetadata=keepmetadata,
                 scheduler_type=scheduler_type,
