How to use bentoml - 10 common examples

To help you get started, we’ve selected a few bentoml examples based on popular ways the library is used in public projects. All of the snippets below come from the BentoML test suite on GitHub.

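Most of the snippets below follow the same pattern: subclass bentoml.BentoService, mark prediction methods with @bentoml.api and a handler class, save the service to a directory, and load it back with bentoml.load. The following is a minimal, self-contained sketch of that round trip, assuming the legacy 0.x API used in these excerpts; the service name, bundle directory, and predict method are illustrative rather than taken from the repo.

import tempfile

import bentoml
from bentoml.handlers import DataframeHandler


class EchoService(bentoml.BentoService):
    # Expose a prediction API that accepts a pandas DataFrame payload.
    @bentoml.api(DataframeHandler)
    def predict(self, df):
        return df


if __name__ == "__main__":
    bundle_dir = tempfile.mkdtemp()        # hypothetical bundle location
    EchoService().save_to_dir(bundle_dir)  # write the service bundle to disk
    loaded = bentoml.load(bundle_dir)      # load the saved bundle back
    print(loaded.predict([[1, 2, 3]]))     # the loaded service exposes the same APIs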

From bentoml/BentoML: tests/test_service.py
@bentoml.api(ImageHandler)
def test(self, image):
    return image
From bentoml/BentoML: tests/test_save_and_load.py
@bentoml.api(DataframeHandler)
def test(self, df):
    return df
From bentoml/BentoML: tests/test_service.py
def test_invalid_api_handler():
    with pytest.raises(InvalidArgument) as e:

        class TestBentoService(bentoml.BentoService):  # pylint: disable=unused-variable
            @bentoml.api("Not A BentoHandler")
            def test(self):
                pass

    assert "must be class derived from bentoml.handlers.BentoHandler" in str(e.value)
From bentoml/BentoML: tests/test_save_and_load.py
    assert os.path.exists(saved_path)

    model_service = bentoml.load(saved_path)

    assert len(model_service.get_service_apis()) == 1
    api = model_service.get_service_apis()[0]
    assert api.name == "predict"
    assert isinstance(api.handler, DataframeHandler)
    assert api.func(1) == 2

    # Check api methods are available
    assert model_service.predict(1) == 2
    assert model_service.version == expected_version


class TestBentoWithOutArtifact(bentoml.BentoService):
    @bentoml.api(DataframeHandler)
    def test(self, df):
        return df


def test_bento_without_artifact(tmpdir):
    TestBentoWithOutArtifact().save_to_dir(str(tmpdir))
    model_service = bentoml.load(str(tmpdir))
    assert model_service.test(1) == 1
    assert len(model_service.get_service_apis()) == 1


def test_save_duplicated_bento_exception_raised(tmpdir, test_bento_service_class):
    test_model = TestModel()
    svc = test_bento_service_class()
    svc.pack("model", test_model)
From bentoml/BentoML: tests/test_service.py
def test_image_handler_pip_dependencies():
    class TestImageService(bentoml.BentoService):
        @bentoml.api(ImageHandler)
        def test(self, image):
            return image

    service = TestImageService()
    assert 'imageio' in service._env._pip_dependencies
From bentoml/BentoML: tests/test_service_env.py
def test_service_env_pip_dependencies(tmpdir):
    @bentoml.env(pip_dependencies=['numpy', 'pandas', 'torch'])
    class ServiceWithList(bentoml.BentoService):
        @bentoml.api(DataframeHandler)
        def predict(self, df):
            return df

    service_with_list = ServiceWithList()
    saved_path = service_with_list.save(str(tmpdir))

    requirements_txt_path = os.path.join(saved_path, 'requirements.txt')
    with open(requirements_txt_path, 'rb') as f:
        saved_requirements = f.read()
        module_list = saved_requirements.decode('utf-8').split('\n')
        assert 'numpy' in module_list
        assert 'pandas' in module_list
        assert 'torch' in module_list
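As a plain usage sketch (not taken from the repo, and assuming the same 0.x @bentoml.env API exercised by the test above), dependencies declared on the service class end up in a requirements.txt inside the saved bundle:

import os
import tempfile

import bentoml
from bentoml.handlers import DataframeHandler


@bentoml.env(pip_dependencies=["numpy", "pandas"])
class PinnedDepsService(bentoml.BentoService):
    @bentoml.api(DataframeHandler)
    def predict(self, df):
        return df


saved_path = PinnedDepsService().save(tempfile.mkdtemp())  # hypothetical target directory
with open(os.path.join(saved_path, "requirements.txt")) as f:
    print(f.read())  # expected to list numpy and pandas, as asserted in the test above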
From bentoml/BentoML: tests/deployment/aws_lambda/test_aws_lambda_deployment_operator.py
def test_aws_lambda_apply_under_bundle_size_limit_success():
    # create_yatai_service_mock() and generate_lambda_deployment_pb() are test
    # helpers defined elsewhere in the BentoML test suite.
    yatai_service_mock = create_yatai_service_mock()
    test_deployment_pb = generate_lambda_deployment_pb()
    deployment_operator = AwsLambdaDeploymentOperator(yatai_service_mock)

    result_pb = deployment_operator.add(test_deployment_pb)

    assert result_pb.status.status_code == status_pb2.Status.OK
    assert result_pb.deployment.state.state == DeploymentState.PENDING
From bentoml/BentoML: tests/deployment/serverless/test_serverless_lambda.py
# The mock_* parameters are presumably injected by mock.patch decorators
# that are not shown in this excerpt.
def test_aws_lambda_apply_failed_only_local_repo(
    mock_popen,
    mock_init_serverless,
    mock_copy,
    mock_copytree,
    mock_check_nodejs,
    mock_checkcall,
    mock_checkoutput,
):
    test_deployment_pb = generate_lambda_deployment_pb()
    yatai_service_mock = create_yatai_service_mock(BentoUri.UNSET)
    deployment_operator = AwsLambdaDeploymentOperator()
    result_pb = deployment_operator.apply(test_deployment_pb, yatai_service_mock)
    assert result_pb.status.status_code == Status.INTERNAL
    assert result_pb.status.error_message.startswith('BentoML currently not support')
From bentoml/BentoML: tests/deployment/sagemaker/test_sagemaker.py
def test_sagemaker_apply_fail_not_local_repo():
    yatai_service = create_yatai_service_mock(repo_storage_type=BentoUri.UNSET)
    sagemaker_deployment_pb = generate_sagemaker_deployment_pb()
    deployment_operator = SageMakerDeploymentOperator(yatai_service)
    result_pb = deployment_operator.add(sagemaker_deployment_pb)
    assert result_pb.status.status_code == Status.INTERNAL
    assert result_pb.status.error_message.startswith('BentoML currently not support')
From bentoml/BentoML: tests/deployment/serverless/test_serverless_lambda.py
def test_aws_lambda_describe_failed_no_formation():
    yatai_service_mock = create_yatai_service_mock()
    test_deployment_pb = generate_lambda_deployment_pb()
    deployment_operator = AwsLambdaDeploymentOperator()
    result_pb = deployment_operator.describe(test_deployment_pb, yatai_service_mock)
    assert result_pb.status.status_code == Status.INTERNAL
    assert result_pb.status.error_message.startswith(
        'An error occurred (ValidationError)'
    )