Skip to content

Commit

Permalink
Fixed misspellings and typos (#1321)
Browse files Browse the repository at this point in the history
Co-authored-by: Johannes Köster <johannes.koester@uni-due.de>
  • Loading branch information
kazanzhy and johanneskoester committed Jan 11, 2022
1 parent 60010e4 commit 803a696
Show file tree
Hide file tree
Showing 19 changed files with 70 additions and 70 deletions.
26 changes: 13 additions & 13 deletions snakemake/__init__.py
Expand Up @@ -198,7 +198,7 @@ def snakemake(
resources (dict): provided resources, a dictionary assigning integers to resource names, e.g. {gpu=1, io=5} (default {})
default_resources (DefaultResources): default values for resources not defined in rules (default None)
config (dict): override values for workflow config
workdir (str): path to working directory (default None)
workdir (str): path to the working directory (default None)
targets (list): list of targets, e.g. rule or file names (default None)
dryrun (bool): only dry-run the workflow (default False)
touch (bool): only touch all output files if present (default False)
Expand All @@ -216,7 +216,7 @@ def snakemake(
printd3dag (bool): print a D3.js compatible JSON representation of the DAG (default False)
nocolor (bool): do not print colored output (default False)
quiet (bool): do not print any default job information (default False)
keepgoing (bool): keep goind upon errors (default False)
keepgoing (bool): keep going upon errors (default False)
cluster (str): submission command of a cluster or batch system to use, e.g. qsub (default None)
cluster_config (str,list): configuration file for cluster options, or list thereof (default None)
cluster_sync (str): blocking cluster submission command (like SGE 'qsub -sync y') (default None)
Expand Down Expand Up @@ -248,11 +248,11 @@ def snakemake(
latency_wait (int): how many seconds to wait for an output file to appear after the execution of a job, e.g. to handle filesystem latency (default 3)
wait_for_files (list): wait for given files to be present before executing the workflow
list_resources (bool): list resources used in the workflow (default False)
summary (bool): list summary of all output files and their status (default False). If no option is specified a basic summary will be ouput. If 'detailed' is added as an option e.g --summary detailed, extra info about the input and shell commands will be included
summary (bool): list summary of all output files and their status (default False). If no option is specified a basic summary will be output. If 'detailed' is added as an option e.g --summary detailed, extra info about the input and shell commands will be included
detailed_summary (bool): list summary of all input and output files and their status (default False)
print_compilation (bool): print the compilation of the snakefile (default False)
debug (bool): allow to use the debugger within rules
notemp (bool): ignore temp file flags, e.g. do not delete output files marked as temp after use (default False)
notemp (bool): ignore temp file flags, e.g. do not delete output files marked as temp after use (default False)
keep_remote_local (bool): keep local copies of remote files (default False)
nodeps (bool): ignore dependencies (default False)
keep_target_files (bool): do not adjust the paths of given target files relative to the working directory.
Expand All @@ -265,11 +265,11 @@ def snakemake(
max_jobs_per_second (int): maximal number of cluster/drmaa jobs per second, None to impose no limit (default None)
restart_times (int): number of times to restart failing jobs (default 0)
attempt (int): initial value of Job.attempt. This is intended for internal use only (default 1).
force_use_threads: whether to force use of threads over processes. helpful if shared memory is full or unavailable (default False)
force_use_threads: whether to force the use of threads over processes. helpful if shared memory is full or unavailable (default False)
use_conda (bool): use conda environments for each job (defined with conda directive of rules)
use_singularity (bool): run jobs in singularity containers (if defined with singularity directive)
use_env_modules (bool): load environment modules if defined in rules
singularity_args (str): additional arguments to pass to singularity
singularity_args (str): additional arguments to pass to singularity
conda_prefix (str): the directory in which conda environments will be created (default None)
conda_cleanup_pkgs (snakemake.deployment.conda.CondaCleanupMode):
whether to clean up conda tarballs after env creation (default None), valid values: "tarballs", "cache"
Expand All @@ -279,8 +279,8 @@ def snakemake(
list_conda_envs (bool): list conda environments and their location on disk.
mode (snakemake.common.Mode): execution mode
wrapper_prefix (str): prefix for wrapper script URLs (default None)
kubernetes (str): submit jobs to kubernetes, using the given namespace.
container_image (str): Docker image to use, e.g., for kubernetes.
kubernetes (str): submit jobs to Kubernetes, using the given namespace.
container_image (str): Docker image to use, e.g., for Kubernetes.
default_remote_provider (str): default remote provider to use instead of local files (e.g. S3, GS)
default_remote_prefix (str): prefix for default remote provider (e.g. name of the bucket).
tibanna (bool): submit jobs to AWS cloud using Tibanna.
Expand All @@ -289,25 +289,25 @@ def snakemake(
google_lifesciences_regions (list): a list of regions (e.g., us-east1)
google_lifesciences_location (str): Life Sciences API location (e.g., us-central1)
google_lifesciences_cache (bool): save a cache of the compressed working directories in Google Cloud Storage for later usage.
tes (str): Execute workflow tasks on GA4GH TES server given by url.
tes (str): Execute workflow tasks on GA4GH TES server given by URL.
precommand (str): commands to run on AWS cloud before the snakemake command (e.g. wget, git clone, unzip, etc). Use with --tibanna.
preemption_default (int): set a default number of preemptible instance retries (for Google Life Sciences executor only)
preemptible_rules (list): define custom preemptible instance retries for specific rules (for Google Life Sciences executor only)
tibanna_config (list): Additional tibanna config e.g. --tibanna-config spot_instance=true subnet=<subnet_id> security group=<security_group_id>
assume_shared_fs (bool): assume that cluster nodes share a common filesystem (default true).
cluster_status (str): status command for cluster execution. If None, Snakemake will rely on flag files. Otherwise, it expects the command to return "success", "failure" or "running" when executing with a cluster jobid as single argument.
cluster_status (str): status command for cluster execution. If None, Snakemake will rely on flag files. Otherwise, it expects the command to return "success", "failure" or "running" when executing with a cluster jobid as a single argument.
export_cwl (str): Compile workflow to CWL and save to given file
log_handler (function): redirect snakemake output to this custom log handler, a function that takes a log message dictionary (see below) as its only argument (default None). The log message dictionary for the log handler has the following entries:
keep_incomplete (bool): keep incomplete output files of failed jobs
edit_notebook (object): "notebook.EditMode" object to configuring notebook server for interactive editing of a rule notebook. If None, do not edit.
edit_notebook (object): "notebook.EditMode" object to configure notebook server for interactive editing of a rule notebook. If None, do not edit.
scheduler (str): Select scheduling algorithm (default ilp)
scheduler_ilp_solver (str): Set solver for ilp scheduler.
overwrite_groups (dict): Rule to group assignments (default None)
group_components (dict): Number of connected components given groups shall span before being split up (1 by default if empty)
conda_not_block_search_path_envvars (bool): Do not block search path envvars (R_LIBS, PYTHONPATH, ...) when using conda environments.
scheduler_solver_path (str): Path to Snakemake environment (this can be used to e.g. overwrite the search path for the ILP solver used during scheduling).
conda_base_path (str): Path to conda base environment (this can be used to overwrite the search path for conda, mamba and activate).
log_handler (list): redirect snakemake output to this list of custom log handler, each a function that takes a log message dictionary (see below) as its only argument (default []). The log message dictionary for the log handler has to following entries:
conda_base_path (str): Path to conda base environment (this can be used to overwrite the search path for conda, mamba, and activate).
log_handler (list): redirect snakemake output to this list of custom log handlers, each a function that takes a log message dictionary (see below) as its only argument (default []). The log message dictionary for the log handler has the following entries:
:level:
the log level ("info", "error", "debug", "progress", "job_info")
Expand Down
6 changes: 3 additions & 3 deletions snakemake/_version.py
Expand Up @@ -48,7 +48,7 @@ def get_config():


class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
    """Exception raised when a method is not valid for the current scenario."""


LONG_VERSION_PY = {}
Expand Down Expand Up @@ -235,7 +235,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
version string, meaning we're inside a checked-out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
Expand Down Expand Up @@ -331,7 +331,7 @@ def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):


def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
"""Return a + if we don't already have one, else return a '.'"""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
Expand Down
4 changes: 2 additions & 2 deletions snakemake/benchmark.py
Expand Up @@ -121,7 +121,7 @@ def plural(n):


class DaemonTimer(threading.Thread):
"""Variant of threading.Timer that is deaemonized"""
    """A variant of threading.Timer that is daemonized"""

def __init__(self, interval, function, args=None, kwargs=None):
threading.Thread.__init__(self, daemon=True)
Expand Down Expand Up @@ -317,7 +317,7 @@ def benchmarked(pid=None, benchmark_record=None, interval=BENCHMARK_INTERVAL):


def print_benchmark_records(records, file_):
"""Write benchmark records to file-like object"""
    """Write benchmark records to a file-like object"""
print(BenchmarkRecord.get_header(), file=file_)
for r in records:
print(r.to_tsv(), file=file_)
Expand Down
2 changes: 1 addition & 1 deletion snakemake/cwl.py
Expand Up @@ -193,7 +193,7 @@ def outer_entry(f, entry):


def dag_to_cwl(dag):
"""Convert a given DAG to a CWL workflow, which is returned as JSON object."""
"""Convert a given DAG to a CWL workflow, which is returned as a JSON object."""
snakemake_cwl = {
"class": "CommandLineTool",
"id": "#snakemake-job",
Expand Down
2 changes: 1 addition & 1 deletion snakemake/exceptions.py
Expand Up @@ -175,7 +175,7 @@ class WildcardError(WorkflowError):

class RuleException(Exception):
"""
Base class for exception occuring within the
Base class for exception occurring within the
execution or definition of rules.
"""

Expand Down
6 changes: 3 additions & 3 deletions snakemake/executors/__init__.py
Expand Up @@ -2305,11 +2305,11 @@ def run_wrapper(
Arguments
job_rule -- the ``job.rule`` member
input -- list of input files
output -- list of output files
input -- a list of input files
output -- a list of output files
wildcards -- so far processed wildcards
threads -- usable threads
log -- list of log files
log -- a list of log files
shadow_dir -- optional shadow directory root
"""
# get shortcuts to job_rule members
Expand Down
24 changes: 12 additions & 12 deletions snakemake/io.py
Expand Up @@ -792,7 +792,7 @@ def __hash__(self):
def wait_for_files(
files, latency_wait=3, force_stay_on_remote=False, ignore_pipe=False
):
"""Wait for given files to be present in filesystem."""
"""Wait for given files to be present in the filesystem."""
files = list(files)

def get_missing():
Expand Down Expand Up @@ -986,7 +986,7 @@ def ancient(value):

def directory(value):
"""
A flag to specify that an output is a directory, rather than a file or named pipe.
A flag to specify that output is a directory, rather than a file or named pipe.
"""
if is_flagged(value, "pipe"):
raise SyntaxError("Pipe and directory flags are mutually exclusive.")
Expand Down Expand Up @@ -1024,7 +1024,7 @@ def temporary(value):


def protected(value):
"""A flag for a file that shall be write protected after creation."""
"""A flag for a file that shall be write-protected after creation."""
if is_flagged(value, "temp"):
raise SyntaxError("Protected and temporary flags are mutually exclusive.")
if is_flagged(value, "remote"):
Expand Down Expand Up @@ -1078,7 +1078,7 @@ def report(
):
"""Flag output file or directory as to be included into reports.
In case of directory, files to include can be specified via a glob pattern (default: *).
In the case of a directory, files to include can be specified via a glob pattern (default: *).
Arguments
value -- File or directory.
Expand All @@ -1096,7 +1096,7 @@ def report(


def local(value):
"""Mark a file as local file. This disables application of a default remote
"""Mark a file as a local file. This disables the application of a default remote
provider.
"""
if is_flagged(value, "remote"):
Expand Down Expand Up @@ -1299,7 +1299,7 @@ def get_git_root(path):
Args:
path: (str) Path a to a directory/file that is located inside the repo
Returns:
path to root folder for git repo
path to the root folder for git repo
"""
import git

Expand All @@ -1315,14 +1315,14 @@ def get_git_root_parent_directory(path, input_path):
"""
This function will recursively go through parent directories until a git
repository is found or until no parent directories are left, in which case
a error will be raised. This is needed when providing a path to a
file/folder that is located on a branch/tag no currently checked out.
an error will be raised. This is needed when providing a path to a
file/folder that is located on a branch/tag not currently checked out.
Args:
path: (str) Path a to a directory that is located inside the repo
input_path: (str) origin path, used when raising WorkflowError
Returns:
path to root folder for git repo
path to the root folder for git repo
"""
import git

Expand All @@ -1344,10 +1344,10 @@ def git_content(git_file):
"""
This function will extract a file from a git repository, one located on
the filesystem.
Expected format is git+file:///path/to/your/repo/path_to_file@version
The expected format is git+file:///path/to/your/repo/path_to_file@version
Args:
env_file (str): consist of path to repo, @, version and file information
        env_file (str): consists of path to repo, @, version, and file information
Ex: git+file:///home/smeds/snakemake-wrappers/bio/fastqc/wrapper.py@0.19.3
Returns:
        file content or None if the expected format isn't met
Expand Down Expand Up @@ -1377,7 +1377,7 @@ def strip_constraint(match):
class Namedlist(list):
"""
A list that additionally provides functions to name items. Further,
it is hashable, however the hash does not consider the item names.
it is hashable, however, the hash does not consider the item names.
"""

def __init__(
Expand Down
2 changes: 1 addition & 1 deletion snakemake/remote/GS.py
Expand Up @@ -88,7 +88,7 @@ class Crc32cCalculator:
"""The Google Python client doesn't provide a way to stream a file being
written, so we can wrap the file object in an additional class to
do custom handling. This is so we don't need to download the file
and then stream read it again to calculate the hash.
and then stream-read it again to calculate the hash.
"""

def __init__(self, fileobj):
Expand Down
2 changes: 1 addition & 1 deletion snakemake/remote/S3Mocked.py
Expand Up @@ -39,7 +39,7 @@ def pickled_moto_wrapper(func):
"""
This is a class decorator that in turn decorates all methods within
a class to mock out boto calls with moto-simulated ones.
Since the moto backends are not presistent across calls by default,
Since the moto backends are not persistent across calls by default,
the wrapper also pickles the bucket state after each function call,
and restores it before execution. This way uploaded files are available
for follow-on tasks. Since snakemake may execute with multiple threads
Expand Down
2 changes: 1 addition & 1 deletion snakemake/remote/__init__.py
Expand Up @@ -309,7 +309,7 @@ def remote_path(self):


class PooledDomainObject(DomainObject):
"""This adds conection pooling to DomainObjects
"""This adds connection pooling to DomainObjects
out of a location path specified as
(host|IP):port/remote/location
"""
Expand Down
8 changes: 4 additions & 4 deletions snakemake/script.py
Expand Up @@ -78,7 +78,7 @@ def log_fmt_shell(self, stdout=True, stderr=True, append=False):
"""
Return a shell redirection string to be used in `shell()` calls
This function allows scripts and wrappers support optional `log` files
This function allows scripts and wrappers to support optional `log` files
specified in the calling rule. If no `log` was specified, then an
empty string "" is returned, regardless of the values of `stdout`,
`stderr`, and `append`.
Expand All @@ -93,7 +93,7 @@ def log_fmt_shell(self, stdout=True, stderr=True, append=False):
Send stderr to log
append : bool
Do not overwrite the log file. Useful for sending output of
            Do not overwrite the log file. Useful for sending output of
multiple commands to the same log. Note however that the log will
not be truncated at the start.
Expand Down Expand Up @@ -170,7 +170,7 @@ def _log_shell_redirect(


class REncoder:
"""Encoding Pyton data structures into R."""
"""Encoding Python data structures into R."""

@classmethod
def encode_numeric(cls, value):
Expand Down Expand Up @@ -237,7 +237,7 @@ def encode_namedlist(cls, namedlist):


class JuliaEncoder:
"""Encoding Pyton data structures into Julia."""
"""Encoding Python data structures into Julia."""

@classmethod
def encode_value(cls, value):
Expand Down
2 changes: 1 addition & 1 deletion snakemake/unit_tests/__init__.py
Expand Up @@ -28,7 +28,7 @@ def expected_path(self):


def generate(dag, path, deploy=["conda", "singularity"], configfiles=None):
"""Generate unit tests from given dag at given path."""
"""Generate unit tests from given dag at a given path."""
logger.info("Generating unit tests for each rule...")

try:
Expand Down

0 comments on commit 803a696

Please sign in to comment.