Commit f612788

fix: rename maxmemory to max_memory
xianml committed Dec 7, 2023
1 parent 8985a30 · commit f612788
Showing 4 changed files with 15 additions and 15 deletions.
src/bentoml/_internal/cloud/base.py (2 changes: 1 addition & 1 deletion)
@@ -44,7 +44,7 @@ def io_wrapper(
             memory * 1024**3, read_cb=read_cb, write_cb=write_cb
         )
     else:
-        raise BentoMLException(f"Option maxmemory must be -1 or > 0, got {memory}")
+        raise BentoMLException(f"Option max_memory must be -1 or > 0, got {memory}")
 
 
 class CallbackSpooledTemporaryFileIO(SpooledTemporaryFile):
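
This hunk is the validation branch of io_wrapper: per the CLI help later in this diff, -1 means no memory limit, a positive value is a spill-to-disk threshold in GB, and anything else is rejected with the message shown. A minimal, self-contained sketch of those semantics (the io_wrapper_sketch name and the plain BytesIO/SpooledTemporaryFile stand-ins are illustrative assumptions, not BentoML's callback wrapper classes):

import io
import tempfile


def io_wrapper_sketch(max_memory: int):
    if max_memory == -1:
        # no limit: keep the whole buffer in memory
        return io.BytesIO()
    elif max_memory > 0:
        # stay in memory up to max_memory GB, then spill to a temp file on disk
        return tempfile.SpooledTemporaryFile(max_size=max_memory * 1024**3)
    else:
        raise ValueError(f"Option max_memory must be -1 or > 0, got {max_memory}")


io_wrapper_sketch(4)   # spooled buffer capped at 4 GB of RAM
io_wrapper_sketch(-1)  # unlimited in-memory buffer
# io_wrapper_sketch(0) would raise ValueError: Option max_memory must be -1 or > 0, got 0
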
src/bentoml/_internal/cloud/bentocloud.py (16 changes: 8 additions & 8 deletions)
@@ -71,7 +71,7 @@ def push_bento(
         force: bool = False,
         threads: int = 10,
         context: str | None = None,
-        maxmemory: int = -1,
+        max_memory: int = -1,
     ):
         with Live(self.progress_group):
             upload_task_id = self.transmission_progress.add_task(
@@ -83,7 +83,7 @@ def push_bento(
                 force=force,
                 threads=threads,
                 context=context,
-                maxmemory=maxmemory,
+                max_memory=max_memory,
             )
 
     @inject
@@ -95,7 +95,7 @@ def _do_push_bento(
         force: bool = False,
         threads: int = 10,
         context: str | None = None,
-        maxmemory: int = -1,
+        max_memory: int = -1,
         model_store: ModelStore = Provide[BentoMLContainer.model_store],
     ):
         yatai_rest_client = get_rest_api_client(context)
@@ -121,7 +121,7 @@ def push_model(model: Model) -> None:
                 force=force,
                 threads=threads,
                 context=context,
-                maxmemory=maxmemory,
+                max_memory=max_memory,
             )
 
         futures: t.Iterator[None] = executor.map(push_model, models)
@@ -579,7 +579,7 @@ def push_model(
         force: bool = False,
         threads: int = 10,
         context: str | None = None,
-        maxmemory: int = -1,
+        max_memory: int = -1,
     ):
         with Live(self.progress_group):
             upload_task_id = self.transmission_progress.add_task(
@@ -591,7 +591,7 @@ def push_model(
                 force=force,
                 threads=threads,
                 context=context,
-                maxmemory=maxmemory,
+                max_memory=max_memory,
             )
 
     def _do_push_model(
@@ -602,7 +602,7 @@ def _do_push_model(
         force: bool = False,
         threads: int = 10,
         context: str | None = None,
-        maxmemory: int = -1,
+        max_memory: int = -1,
     ):
         yatai_rest_client = get_rest_api_client(context)
         name = model.tag.name
@@ -684,7 +684,7 @@ def io_cb(x: int):
             self.transmission_progress.update(upload_task_id, advance=x)
 
         # limit the max memory usage when uploading model
-        with io_wrapper(maxmemory, read_cb=io_cb) as tar_io:
+        with io_wrapper(max_memory, read_cb=io_cb) as tar_io:
             with self.spin(text=f'Creating tar archive for model "{model.tag}"..'):
                 with tarfile.open(fileobj=tar_io, mode="w:") as tar:
                     tar.add(model.path, arcname="./")
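
The last hunk is where the renamed argument takes effect: the model directory is tarred into the wrapper returned by io_wrapper, so the archive spills to disk once it outgrows max_memory GB. A self-contained sketch of that pattern (archive_directory and the 2 GB cap are illustrative, and the progress callbacks are omitted):

import tarfile
import tempfile


def archive_directory(path: str, max_memory: int = 2):
    # stay in memory up to max_memory GB, then spill to a temp file on disk
    buf = tempfile.SpooledTemporaryFile(max_size=max_memory * 1024**3)
    with tarfile.open(fileobj=buf, mode="w:") as tar:
        tar.add(path, arcname="./")
    buf.seek(0)  # rewind so the archive can be streamed to the upload endpoint
    return buf


archive = archive_directory(".", max_memory=2)
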
src/bentoml_cli/bentos.py (6 changes: 3 additions & 3 deletions)
@@ -283,12 +283,12 @@ def pull(shared_options: SharedOptions, bento_tag: str, force: bool) -> None: #
     )
     @click.option(
         "-m",
-        "--maxmemory",
+        "--max_memory",
         default=-1,
         help="max memory usage in GB when pushing, default -1 means no limit",
     )
     @click.pass_obj
-    def push(shared_options: SharedOptions, bento_tag: str, force: bool, threads: int, maxmemory: int) -> None: # type: ignore (not accessed)
+    def push(shared_options: SharedOptions, bento_tag: str, force: bool, threads: int, max_memory: int) -> None: # type: ignore (not accessed)
         """Push Bento to a remote Bento store server."""
         bento_obj = bento_store.get(bento_tag)
         if not bento_obj:
@@ -298,7 +298,7 @@ def push(shared_options: SharedOptions, bento_tag: str, force: bool, threads: in
             force=force,
             threads=threads,
             context=shared_options.cloud_context,
-            maxmemory=maxmemory,
+            max_memory=max_memory,
         )
 
     @cli.command()
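
With the option renamed, the bento push command now takes the underscored flag; the short -m flag and the -1 default ("no limit") are unchanged. No alias for the old spelling appears in this diff, so --maxmemory presumably no longer parses. An illustrative invocation capping the upload buffer at 4 GB (the bento tag is made up):

bentoml push iris_classifier:latest --max_memory 4
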
src/bentoml_cli/models.py (6 changes: 3 additions & 3 deletions)
@@ -310,12 +310,12 @@ def pull(ctx: click.Context, model_tag: str | None, force: bool, bentofile: str)
     )
     @click.option(
         "-m",
-        "--maxmemory",
+        "--max_memory",
         default=-1,
         help="max memory usage in GB when pushing, default -1 means no limit",
     )
     @click.pass_obj
-    def push(shared_options: SharedOptions, model_tag: str, force: bool, threads: int, maxmemory: int): # type: ignore (not accessed)
+    def push(shared_options: SharedOptions, model_tag: str, force: bool, threads: int, max_memory: int): # type: ignore (not accessed)
         """Push Model to a remote model store."""
         model_obj = model_store.get(model_tag)
         if not model_obj:
@@ -325,5 +325,5 @@ def push(shared_options: SharedOptions, model_tag: str, force: bool, threads: in
             force=force,
             threads=threads,
             context=shared_options.cloud_context,
-            maxmemory=maxmemory,
+            max_memory=max_memory,
        )
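
The model push command gets the same rename. An illustrative invocation capping the upload buffer at 4 GB (the model tag is made up):

bentoml models push my_model:latest --max_memory 4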
