diff --git a/snakemake/__init__.py b/snakemake/__init__.py
index dd1ebc6af..c2b990c70 100644
--- a/snakemake/__init__.py
+++ b/snakemake/__init__.py
@@ -395,6 +395,8 @@ def snakemake(
         assume_shared_fs = False
         default_remote_provider = "GS"
         default_remote_prefix = default_remote_prefix.rstrip("/")
+        if kubernetes:
+            assume_shared_fs = False
 
     # Currently preemptible instances only supported for Google LifeSciences Executor
     if preemption_default or preemptible_rules and not google_lifesciences:
@@ -581,6 +583,7 @@ def snakemake(
         default_remote_provider=_default_remote_provider,
         default_remote_prefix=default_remote_prefix,
         run_local=run_local,
+        assume_shared_fs=assume_shared_fs,
         default_resources=default_resources,
         cache=cache,
         cores=cores,
@@ -788,7 +791,6 @@ def snakemake(
             no_hooks=no_hooks,
             force_use_threads=use_threads,
             conda_create_envs_only=conda_create_envs_only,
-            assume_shared_fs=assume_shared_fs,
             cluster_status=cluster_status,
             cluster_cancel=cluster_cancel,
             cluster_cancel_nargs=cluster_cancel_nargs,
diff --git a/snakemake/dag.py b/snakemake/dag.py
index d768728bb..4e82a18e1 100755
--- a/snakemake/dag.py
+++ b/snakemake/dag.py
@@ -280,7 +280,7 @@ def create_conda_envs(
         env_set = {
             (job.conda_env_spec, job.container_img_url)
             for job in jobs
-            if job.conda_env_spec and (self.workflow.run_local or job.is_local)
+            if job.conda_env_spec and (self.workflow.assume_shared_fs or job.is_local)
         }
 
         # Then based on md5sum values
diff --git a/snakemake/workflow.py b/snakemake/workflow.py
index 12de6fd2d..a4ddc278f 100644
--- a/snakemake/workflow.py
+++ b/snakemake/workflow.py
@@ -131,6 +131,7 @@ def __init__(
         default_remote_provider=None,
         default_remote_prefix="",
         run_local=True,
+        assume_shared_fs=True,
         default_resources=None,
         cache=None,
         nodes=1,
@@ -211,6 +212,7 @@ def __init__(
             [] if overwrite_configfiles is None else list(overwrite_configfiles)
         )
         self.run_local = run_local
+        self.assume_shared_fs = assume_shared_fs
         self.report_text = None
         self.conda_cleanup_pkgs = conda_cleanup_pkgs
         self.edit_notebook = edit_notebook
@@ -619,7 +621,6 @@ def execute(
         no_hooks=False,
         force_use_threads=False,
         conda_create_envs_only=False,
-        assume_shared_fs=True,
         cluster_status=None,
         cluster_cancel=None,
         cluster_cancel_nargs=None,
@@ -959,13 +960,12 @@ def files(items):
             dag.list_untracked()
             return True
 
-        if self.use_singularity:
-            if assume_shared_fs:
-                dag.pull_container_imgs(
-                    dryrun=dryrun or list_conda_envs, quiet=list_conda_envs
-                )
+        if self.use_singularity and self.assume_shared_fs:
+            dag.pull_container_imgs(
+                dryrun=dryrun or list_conda_envs, quiet=list_conda_envs
+            )
         if self.use_conda:
-            if assume_shared_fs:
+            if self.assume_shared_fs:
                 dag.create_conda_envs(
                     dryrun=dryrun or list_conda_envs or conda_cleanup_envs,
                     quiet=list_conda_envs,
@@ -1027,7 +1027,7 @@ def files(items):
             latency_wait=latency_wait,
             greediness=greediness,
             force_use_threads=force_use_threads,
-            assume_shared_fs=assume_shared_fs,
+            assume_shared_fs=self.assume_shared_fs,
             keepincomplete=keepincomplete,
             keepmetadata=keepmetadata,
             scheduler_type=scheduler_type,