chore: fix model get issue (#4406)
* chore: fix model get

* ci: auto fixes from pre-commit.ci

For more information, see https://pre-commit.ci

* fix: change validators import

Signed-off-by: Frost Ming <me@frostming.com>

---------

Signed-off-by: Frost Ming <me@frostming.com>
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Frost Ming <me@frostming.com>
3 people committed Jan 17, 2024
1 parent ba23ceb commit 81751f4
Showing 9 changed files with 26 additions and 20 deletions.
3 changes: 2 additions & 1 deletion examples/quickstart/service.py
@@ -2,6 +2,7 @@
from typing_extensions import Annotated

import bentoml
+ from bentoml.validators import Shape


@bentoml.service(resources={"cpu": "200m", "memory": "512Mi"})
@@ -23,7 +24,7 @@ def __init__(self):

@bentoml.api
def classify(
- self, input_series: Annotated[np.ndarray, bentoml.Shape((1, 4))]
+ self, input_series: Annotated[np.ndarray, Shape((1, 4))]
) -> np.ndarray:
input_series = self.preprocessing.preprocess(input_series)
return self.model.predict(input_series)
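
For reference, the shape validator is now imported from bentoml.validators instead of being referenced as bentoml.Shape. Below is a minimal, self-contained sketch of that pattern under the same decorator arguments as the quickstart; the echo body and the ShapeEcho name are illustrative, not the quickstart's classifier.

import numpy as np
from typing_extensions import Annotated

import bentoml
from bentoml.validators import Shape


@bentoml.service(resources={"cpu": "200m", "memory": "512Mi"})
class ShapeEcho:
    @bentoml.api
    def classify(
        self, input_series: Annotated[np.ndarray, Shape((1, 4))]
    ) -> np.ndarray:
        # Shape((1, 4)) constrains the dimensions of the incoming array before
        # the handler body runs; this sketch simply returns the validated input.
        return input_series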
13 changes: 11 additions & 2 deletions src/bentoml/_internal/bento/bento.py
@@ -368,7 +368,7 @@ def export(
models_dir = self._fs.makedir("models", recreate=True)
model_store = ModelStore(models_dir)
global_model_store = BentoMLContainer.model_store.get()
- for model in self.info.models:
+ for model in self.info.all_models:
copy_model(
model.tag,
src_model_store=global_model_store,
@@ -393,7 +393,7 @@ def total_size(
) -> int:
total_size = self.file_size
local_model_store = self._model_store
- for model in self.info.models:
+ for model in self.info.all_models:
if local_model_store is not None:
try:
local_model_store.get(model.tag)
@@ -588,6 +588,14 @@ class BentoInfo:
conda: CondaOptions = attr.field(factory=lambda: CondaOptions().with_defaults())
envs: t.List[t.Dict[str, str]] = attr.field(factory=list)

+ @property
+ def all_models(self) -> t.List[BentoModelInfo]:
+     model_map = {model.tag: model for model in self.models}
+     for service in self.services:
+         for model in service.models:
+             model_map[model.tag] = model
+     return list(model_map.values())

def __attrs_post_init__(self):
# Direct set is not available when frozen=True
object.__setattr__(self, "name", self.tag.name)
@@ -602,6 +610,7 @@ def to_dict(self) -> t.Dict[str, t.Any]:
return bentoml_cattr.unstructure(self)

def dump(self, stream: t.IO[t.Any]):
+ # _models is an alias for models, replace it with models
return yaml.dump(self, stream, sort_keys=False)

@classmethod
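The core of this fix is the new all_models property, which merges the bento-level model list with the models declared by each service and de-duplicates by tag. A standalone sketch of that merge follows, using simplified stand-ins for the real attrs classes (ModelRecord and ServiceRecord are hypothetical names, not BentoML types).

from dataclasses import dataclass, field
from typing import List


@dataclass(frozen=True)
class ModelRecord:
    tag: str


@dataclass
class ServiceRecord:
    models: List[ModelRecord] = field(default_factory=list)


def all_models(models: List[ModelRecord], services: List[ServiceRecord]) -> List[ModelRecord]:
    # Keyed by tag, so a model listed both at the bento level and by a service
    # appears only once; a service-level entry overwrites the bento-level one.
    model_map = {m.tag: m for m in models}
    for svc in services:
        for m in svc.models:
            model_map[m.tag] = m
    return list(model_map.values())


print(all_models(
    [ModelRecord("iris_clf:v1")],
    [ServiceRecord([ModelRecord("iris_clf:v1"), ModelRecord("summarizer:v2")])],
))
# Two unique records: iris_clf:v1 and summarizer:v2.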
10 changes: 2 additions & 8 deletions src/bentoml/_internal/cloud/bentocloud.py
@@ -92,13 +92,7 @@ def _do_push_bento(
if version is None:
raise BentoMLException(f"Bento {bento.tag} version cannot be None")
info = bento.info
- model_tags: set[Tag] = set()
- for m in info.models:
-     model_tags.add(m.tag)
- for svc in info.services:
-     for m in svc.models:
-         model_tags.add(m.tag)
+ model_tags = [m.tag for m in info.all_models]
local_model_store = bento._model_store # type: ignore # using internal BentoML API
if local_model_store is not None and len(local_model_store.list()) > 0:
model_store = local_model_store
@@ -147,7 +141,7 @@ def push_model(model: Model) -> None:
LabelItemSchema(key=key, value=value) for key, value in info.labels.items()
]
apis: dict[str, BentoApiSchema] = {}
- models = [str(m.tag) for m in info.models]
+ models = [str(m.tag) for m in info.all_models]
runners = [
BentoRunnerSchema(
name=r.name,
4 changes: 2 additions & 2 deletions src/bentoml/_internal/cloud/yatai.py
@@ -88,7 +88,7 @@ def _do_push_bento(
if version is None:
raise BentoMLException(f"Bento {bento.tag} version cannot be None")
info = bento.info
- model_tags = [m.tag for m in info.models]
+ model_tags = [m.tag for m in info.all_models]
local_model_store = bento._model_store # type: ignore # Using internal BentoML APIs
if local_model_store is not None and len(local_model_store.list()) > 0:
model_store = local_model_store
@@ -135,7 +135,7 @@ def push_model(model: Model) -> None:
LabelItemSchema(key=key, value=value) for key, value in info.labels.items()
]
apis: dict[str, BentoApiSchema] = {}
- models = [str(m.tag) for m in info.models]
+ models = [str(m.tag) for m in info.all_models]
runners = [
BentoRunnerSchema(
name=r.name,
2 changes: 1 addition & 1 deletion src/bentoml/_internal/container/__init__.py
@@ -163,7 +163,7 @@ def construct_containerfile(
# copy models from model store
model_store = BentoMLContainer.model_store.get()
bento_model_store = ModelStore(temp_fs.makedir("models", recreate=True))
- for model in options.models:
+ for model in options.all_models:
copy_model(
model.tag,
src_model_store=model_store,
4 changes: 3 additions & 1 deletion src/bentoml/_internal/service/loader.py
@@ -268,7 +268,9 @@ def _load_bento(bento: Bento, standalone_load: bool) -> Service | NewService[t.A
model_store = local_model_store

# Read the model aliases
- resolved_model_aliases = {m.alias: str(m.tag) for m in bento.info.models if m.alias}
+ resolved_model_aliases = {
+     m.alias: str(m.tag) for m in bento.info.all_models if m.alias
+ }
BentoMLContainer.model_aliases.set(resolved_model_aliases)

svc = import_service(
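A tiny sketch of the alias resolution above, with a simplified stand-in for the model info objects (ModelRef is a hypothetical name, not the real class); only entries that declare an alias end up in the mapping.

from typing import NamedTuple, Optional


class ModelRef(NamedTuple):
    tag: str
    alias: Optional[str] = None


all_models = [ModelRef("iris_clf:2uo5f4", alias="classifier"), ModelRef("summarizer:v2")]
resolved_model_aliases = {m.alias: m.tag for m in all_models if m.alias}
# {"classifier": "iris_clf:2uo5f4"}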
4 changes: 2 additions & 2 deletions src/bentoml/_internal/utils/analytics/cli_events.py
@@ -21,9 +21,9 @@ def _cli_bentoml_build_event(
bento_creation_timestamp=bento.info.creation_time,
bento_size_in_kb=calc_dir_size(bento.path_of("/")) / 1024,
model_size_in_kb=calc_dir_size(bento.path_of("/models")) / 1024,
- num_of_models=len(bento.info.models),
+ num_of_models=len(bento.info.all_models),
num_of_runners=len(bento.info.runners),
- model_types=[m.module for m in bento.info.models],
+ model_types=[m.module for m in bento.info.all_models],
runnable_types=[r.runnable_type for r in bento.info.runners],
)
else:
4 changes: 2 additions & 2 deletions src/bentoml/_internal/utils/analytics/usage_stats.py
@@ -147,10 +147,10 @@ def _track_serve_init(
production=production,
serve_kind=serve_kind,
bento_creation_timestamp=bento.info.creation_time,
- num_of_models=len(bento.info.models),
+ num_of_models=len(bento.info.all_models),
num_of_runners=len(svc.runners) if is_legacy else len(svc.dependencies),
num_of_apis=len(bento.info.apis),
- model_types=[m.module for m in bento.info.models],
+ model_types=[m.module for m in bento.info.all_models],
runnable_types=[r.runnable_type for r in bento.info.runners],
api_input_types=[api.input_type for api in bento.info.apis],
api_output_types=[api.output_type for api in bento.info.apis],
2 changes: 1 addition & 1 deletion src/bentoml_cli/models.py
@@ -175,7 +175,7 @@ def check_model_is_used(tag: Tag) -> None:
for bento in bento_store.list():
if bento._model_store is not None:
continue
- if any(model.tag == tag for model in bento.info.models):
+ if any(model.tag == tag for model in bento.info.all_models):
in_use.append(bento.tag)
if in_use:
raise BentoMLException(
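A rough sketch of the deletion guard above, with plain strings standing in for Tag objects and a dict standing in for the bento store; the helper name is hypothetical, not the real CLI function.

from typing import Dict, List


def bentos_using_model(tag: str, bentos: Dict[str, List[str]]) -> List[str]:
    # Collect every bento whose recorded models (info.all_models in the real
    # code) include the tag that is about to be deleted.
    return [bento_tag for bento_tag, model_tags in bentos.items() if tag in model_tags]


in_use = bentos_using_model(
    "iris_clf:v1",
    {"iris_service:latest": ["iris_clf:v1"], "summarizer:latest": ["summarizer:v2"]},
)
# ["iris_service:latest"] -> the CLI would refuse to delete iris_clf:v1.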
