Quick start guide updates (#297)
* quick start guide update

* Update quick start guide link in readme.md

* fix operator DeploymentSpec import

* add alias 'deployments' for deployment command

* deployment store list should return pb object

* fix list deployments

* hide yatai debug message

* Update bentoml-quick-start-guide.ipynb

* auto formatting
parano committed Sep 17, 2019
1 parent becf4a2 commit 84afa46
Showing 6 changed files with 36 additions and 31 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -8,7 +8,7 @@
> From a model in jupyter notebook to production API service in 5 minutes

[![BentoML](https://raw.githubusercontent.com/bentoml/BentoML/master/docs/_static/img/bentoml.png)](https://colab.research.google.com/github/bentoml/BentoML/blob/master/examples/quick-start/bentoml-quick-start-guide.ipynb)
[![BentoML](https://raw.githubusercontent.com/bentoml/BentoML/master/docs/_static/img/bentoml.png)](https://colab.research.google.com/github/bentoml/BentoML/blob/master/guides/quick-start/bentoml-quick-start-guide.ipynb)

[Getting Started](https://github.com/bentoml/BentoML#getting-started) | [Documentation](http://bentoml.readthedocs.io) | [Examples](https://github.com/bentoml/BentoML#examples) | [Contributing](https://github.com/bentoml/BentoML#contributing) | [Releases](https://github.com/bentoml/BentoML#releases) | [License](https://github.com/bentoml/BentoML/blob/master/LICENSE) | [Blog](https://medium.com/bentoml)

@@ -29,7 +29,7 @@ BentoML framework provides:
cloud platforms such as AWS, Azure and GCP.


Check out the 5-mins quick start notebook using BentoML to productionize a scikit-learn model and deploy it to AWS Lambda: [![Google Colab Badge](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/bentoml/BentoML/blob/master/examples/quick-start/bentoml-quick-start-guide.ipynb)
Check out the 5-mins quick start notebook using BentoML to productionize a scikit-learn model and deploy it to AWS Lambda: [![Google Colab Badge](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/bentoml/BentoML/blob/master/guides/quick-start/bentoml-quick-start-guide.ipynb)

---

@@ -132,7 +132,7 @@ docker build -t my_api_server {saved_path}
```

Try out the full getting started notebook
[here on Google Colab](https://colab.research.google.com/github/bentoml/BentoML/blob/master/examples/quick-start/bentoml-quick-start-guide.ipynb).
[here on Google Colab](https://colab.research.google.com/github/bentoml/BentoML/blob/master/guides/quick-start/bentoml-quick-start-guide.ipynb).


## Examples
2 changes: 1 addition & 1 deletion bentoml/cli/click_utils.py
@@ -48,7 +48,7 @@
CLI_COLOR_WARNING = "yellow"


COMMAND_ALIASES = {'deploy': 'deployment'}
COMMAND_ALIASES = {'deploy': 'deployment', 'deployments': 'deployment'}


def _echo(message, color="reset"):
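The hunk above only adds the alias entry itself. As a rough sketch of how a Click-based CLI can consume such a table (hypothetical wiring, not necessarily how BentoML's CLI does it), a custom group can rewrite an alias to its canonical command name before lookup:

```python
import click

# Illustrative alias table, mirroring the one added in click_utils.py
COMMAND_ALIASES = {'deploy': 'deployment', 'deployments': 'deployment'}


class AliasedGroup(click.Group):
    """Click group that resolves aliases such as 'deployments' -> 'deployment'."""

    def get_command(self, ctx, cmd_name):
        # Map the alias to its canonical name, then fall back to normal lookup
        canonical_name = COMMAND_ALIASES.get(cmd_name, cmd_name)
        return super(AliasedGroup, self).get_command(ctx, canonical_name)


@click.group(cls=AliasedGroup)
def cli():
    pass


@cli.command()
def deployment():
    click.echo("deployment command invoked")
```

With a group like this, `cli deploy`, `cli deployments`, and `cli deployment` all dispatch to the same command.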
12 changes: 6 additions & 6 deletions bentoml/deployment/operator.py
@@ -19,30 +19,30 @@
from six import add_metaclass
from abc import abstractmethod, ABCMeta

from bentoml.proto import deployment_pb2
from bentoml.proto.deployment_pb2 import DeploymentSpec
from bentoml.exceptions import BentoMLDeploymentException


def get_deployment_operator(deployment_pb):
operator = deployment_pb.spec.operator

if operator == deployment_pb2.AWS_SAGEMAKER:
if operator == DeploymentSpec.AWS_SAGEMAKER:
from bentoml.deployment.sagemaker import SageMakerDeploymentOperator

return SageMakerDeploymentOperator()
elif operator == deployment_pb2.AWS_LAMBDA:
elif operator == DeploymentSpec.AWS_LAMBDA:
from bentoml.deployment.serverless.aws_lambda import AwsLambdaDeploymentOperator

return AwsLambdaDeploymentOperator()
elif operator == deployment_pb2.GCP_FUNCTION:
elif operator == DeploymentSpec.GCP_FUNCTION:
from bentoml.deployment.serverless.gcp_function import (
GcpFunctionDeploymentOperator,
)

return GcpFunctionDeploymentOperator()
elif operator == deployment_pb2.KUBERNETES:
elif operator == DeploymentSpec.KUBERNETES:
raise NotImplementedError("Kubernetes deployment operator is not implemented")
elif operator == deployment_pb2.CUSTOM:
elif operator == DeploymentSpec.CUSTOM:
raise NotImplementedError("Custom deployment operator is not implemented")
else:
raise BentoMLDeploymentException("DeployOperator must be set")
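This hunk matters because the operator enum is defined inside the `DeploymentSpec` message of the generated protobuf module, so its values are reached through the message class rather than at the top level of `deployment_pb2`. A minimal usage sketch (assuming the generated `Deployment` message exposes `name` and a `spec` of type `DeploymentSpec`, as the function above relies on):

```python
# Sketch only -- message/field names beyond spec.operator are assumptions.
from bentoml.proto.deployment_pb2 import Deployment, DeploymentSpec
from bentoml.deployment.operator import get_deployment_operator

deployment_pb = Deployment(name="my-lambda-deployment")
deployment_pb.spec.operator = DeploymentSpec.AWS_LAMBDA  # nested enum value

operator = get_deployment_operator(deployment_pb)
# operator is an AwsLambdaDeploymentOperator for this spec
```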
2 changes: 1 addition & 1 deletion bentoml/deployment/store.py
@@ -151,4 +151,4 @@ def list(self, namespace, filter_str=None, labels=None, offset=None, limit=None)
query.filter(Deployment.name.contains(filter_str))
if labels:
raise NotImplementedError("Listing by labels is not yet implemented")
return query.all()
return list(map(_deployment_orm_obj_to_pb, query.all()))
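With this change, `list()` returns protobuf `Deployment` messages (converted by the module's `_deployment_orm_obj_to_pb` helper) instead of raw SQLAlchemy rows. A hedged usage sketch; the `store` instance and its construction are assumed rather than shown in this diff:

```python
# Assumed setup: `store` is a DeploymentStore bound to the BentoML database.
deployments = store.list(namespace="default", filter_str="IrisClassifier")

for deployment_pb in deployments:
    # Callers can use protobuf accessors directly -- no ORM-to-proto
    # conversion is needed on their side anymore.
    print(deployment_pb.name, deployment_pb.spec.operator)
```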
4 changes: 2 additions & 2 deletions bentoml/yatai/__init__.py
@@ -52,12 +52,12 @@ def get_yatai_service(
default_namespace,
channel_address,
)
logger.info("Using BentoML with remote Yatai server: %s", channel_address)
logger.debug("Using BentoML with remote Yatai server: %s", channel_address)

channel = grpc.insecure_channel(channel_address)
return YataiStub(channel)
else:
logger.info("Using BentoML with local Yatai server")
logger.debug("Using BentoML with local Yatai server")

default_namespace = default_namespace or config().get(
'deployment', 'default_namespace'
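Because the Yatai connection messages are now logged at DEBUG, they no longer appear in the default output. A small sketch for turning them back on, assuming BentoML uses the standard `logging` module with a logger named after its package:

```python
import logging

# Raise the root configuration and the bentoml logger to DEBUG so the
# "Using BentoML with ... Yatai server" messages show up again.
logging.basicConfig(level=logging.DEBUG)
logging.getLogger("bentoml").setLevel(logging.DEBUG)
```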
41 changes: 23 additions & 18 deletions guides/quick-start/bentoml-quick-start-guide.ipynb
@@ -292,7 +292,7 @@
"\n",
"# CLI access\n",
"\n",
"`pip install {saved_path}` also installs a CLI tool for accessing the BentoML service:"
"`pip install {saved_path}` also installs a CLI tool for accessing the BentoML service, print CLI help document with `--help`:\n"
]
},
{
@@ -301,7 +301,14 @@
"metadata": {},
"outputs": [],
"source": [
"!IrisClassifier info"
"!IrisClassifier --help"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Printing more information about this ML service with `info` command:"
]
},
{
@@ -314,16 +321,14 @@
},
"outputs": [],
"source": [
"!IrisClassifier open-api-spec"
"!IrisClassifier info"
]
},
{
"cell_type": "code",
"execution_count": null,
"cell_type": "markdown",
"metadata": {},
"outputs": [],
"source": [
"!IrisClassifier --help"
"You can also print help and docs on individual commands:"
]
},
{
@@ -335,6 +340,13 @@
"!IrisClassifier predict --help"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Each service API you defined in the BentoService will be exposed as a CLI command with the same name as the API function:"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -360,15 +372,8 @@
"outputs": [],
"source": [
"# Writing test data to a csv file\n",
"pd.DataFrame(iris.data).to_csv('iris_data.csv', index=False)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"pd.DataFrame(iris.data).to_csv('iris_data.csv', index=False)\n",
"\n",
"# Invoke predict from command lien\n",
"!IrisClassifier predict --input='./iris_data.csv'"
]
@@ -452,11 +457,11 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"scrolled": true
"scrolled": false
},
"outputs": [],
"source": [
"!bentoml deployment create quick-starrt-guide-deployment IrisClassifier:{svc.version} --platform aws-lambda --region us-west-2"
"!bentoml deployment create quick-start-guide-deployment --bento=IrisClassifier:{svc.version} --platform=aws-lambda --region=us-west-2"
]
},
{
