How to use the bentoml.yatai.status.Status function in bentoml

To help you get started, we’ve selected a few BentoML examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github bentoml / BentoML / bentoml / yatai / yatai_service_impl.py View on Github external
def HealthCheck(self, request, context=None):
    """Liveness probe for the Yatai gRPC service; always replies with OK."""
    # The request payload carries no information for a health check, so it
    # is intentionally ignored.
    ok_status = Status.OK()
    return HealthCheckResponse(status=ok_status)
github bentoml / BentoML / bentoml / deployment / aws_lambda / __init__.py View on Github external
lambda_deployment_config.region,
                    lambda_s3_bucket,
                    deployment_path_prefix,
                )
                logger.info('Deploying lambda project')
                stack_name = generate_aws_compatible_string(
                    deployment_pb.namespace + '-' + deployment_pb.name
                )
                lambda_deploy(
                    lambda_project_dir,
                    lambda_deployment_config.region,
                    stack_name=stack_name,
                )

            deployment_pb.state.state = DeploymentState.PENDING
            return ApplyDeploymentResponse(status=Status.OK(), deployment=deployment_pb)
        except BentoMLException as error:
            if lambda_s3_bucket and lambda_deployment_config:
                _cleanup_s3_bucket_if_exist(
                    lambda_s3_bucket, lambda_deployment_config.region
                )
            raise error
github bentoml / BentoML / bentoml / deployment / aws_lambda / __init__.py View on Github external
else:
                        return DescribeDeploymentResponse(
                            status=Status.ABORTED('"Outputs" field is not present'),
                            state=DeploymentState(
                                state=DeploymentState.ERROR,
                                error_message='"Outputs" field is not present',
                            ),
                        )
                elif stack_result['StackStatus'] in FAILED_CLOUDFORMATION_STACK_STATUS:
                    state = DeploymentState(state=DeploymentState.FAILED)
                    state.timestamp.GetCurrentTime()
                    return DescribeDeploymentResponse(status=Status.OK(), state=state)
                else:
                    state = DeploymentState(state=DeploymentState.PENDING)
                    state.timestamp.GetCurrentTime()
                    return DescribeDeploymentResponse(status=Status.OK(), state=state)
            except Exception as error:  # pylint: disable=broad-except
                state = DeploymentState(
                    state=DeploymentState.ERROR, error_message=str(error)
                )
                state.timestamp.GetCurrentTime()
                return DescribeDeploymentResponse(
                    status=Status.INTERNAL(str(error)), state=state
                )
            outputs = {o['OutputKey']: o['OutputValue'] for o in outputs}
            info_json = {}

            if 'EndpointUrl' in outputs:
                info_json['endpoints'] = [
                    outputs['EndpointUrl'] + '/' + api_name for api_name in api_names
                ]
            if 'S3Bucket' in outputs:
github bentoml / BentoML / bentoml / yatai / python_api.py View on Github external
fileobj.seek(0, 0)

        files = {'file': ('dummy', fileobj)}  # dummy file name because file name
        # has been generated when getting the pre-signed signature.
        data = json.loads(response.uri.additional_fields)
        uri = data.pop('url')
        http_response = requests.post(uri, data=data, files=files)

        if http_response.status_code != 204:
            _update_bento_upload_progress(
                yatai, bento_service_metadata, UploadStatus.ERROR
            )

            raise BentoMLException(
                "Error saving BentoService bundle to S3. {}: {} ".format(
                    Status.Name(http_response.status_code), http_response.text
                )
            )

        _update_bento_upload_progress(yatai, bento_service_metadata)

        logger.info(
            "Successfully saved BentoService bundle '%s:%s' to S3: %s",
            bento_service_metadata.name,
            bento_service_metadata.version,
            response.uri.uri,
        )

        return response.uri.uri

    else:
        raise BentoMLException(
github bentoml / BentoML / bentoml / deployment / serverless / aws_lambda.py View on Github external
stage=deployment_pb.namespace,
            )
            logger.info(
                'Installing additional packages: serverless-python-requirements'
            )
            install_serverless_plugin(
                "serverless-python-requirements", serverless_project_dir
            )
            logger.info('Deploying to AWS Lambda')
            call_serverless_command(["deploy"], serverless_project_dir)

        res_deployment_pb = Deployment(state=DeploymentState())
        res_deployment_pb.CopyFrom(deployment_pb)
        state = self.describe(res_deployment_pb, yatai_service).state
        res_deployment_pb.state.CopyFrom(state)
        return ApplyDeploymentResponse(status=Status.OK(), deployment=res_deployment_pb)
github bentoml / BentoML / bentoml / yatai / python_api.py View on Github external
bento_version=bento_service_metadata.version,
        )
    )
    if get_bento_response.status.status_code == status_pb2.Status.OK:
        raise BentoMLException(
            "BentoService bundle {}:{} already registered in repository. Reset "
            "BentoService version with BentoService#set_version or bypass BentoML's "
            "model registry feature with BentoService#save_to_dir".format(
                bento_service_metadata.name, bento_service_metadata.version
            )
        )
    elif get_bento_response.status.status_code != status_pb2.Status.NOT_FOUND:
        raise BentoMLException(
            'Failed accessing YataiService. {error_code}:'
            '{error_message}'.format(
                error_code=Status.Name(get_bento_response.status.status_code),
                error_message=get_bento_response.status.error_message,
            )
        )
    request = AddBentoRequest(
        bento_name=bento_service_metadata.name,
        bento_version=bento_service_metadata.version,
    )
    response = yatai.AddBento(request)

    if response.status.status_code != status_pb2.Status.OK:
        raise BentoMLException(
            "Error adding BentoService bundle to repository: {}:{}".format(
                Status.Name(response.status.status_code), response.status.error_message
            )
        )
github bentoml / BentoML / bentoml / deployment / serverless / aws_lambda.py View on Github external
try:
                cf_client = boto3.client('cloudformation', aws_config.region)
                cloud_formation_stack_result = cf_client.describe_stacks(
                    StackName='{name}-{ns}'.format(
                        ns=deployment_pb.namespace, name=deployment_pb.name
                    )
                )
                outputs = cloud_formation_stack_result.get('Stacks')[0]['Outputs']
            except Exception as error:
                state = DeploymentState(
                    state=DeploymentState.ERROR, error_message=str(error)
                )
                state.timestamp.GetCurrentTime()
                return DescribeDeploymentResponse(
                    status=Status.INTERNAL(str(error)), state=state
                )

            base_url = ''
            for output in outputs:
                if output['OutputKey'] == 'ServiceEndpoint':
                    base_url = output['OutputValue']
                    break
            if base_url:
                info_json['endpoints'] = [
                    base_url + '/' + api_name for api_name in api_names
                ]
            state = DeploymentState(
                state=DeploymentState.RUNNING, info_json=json.dumps(info_json)
            )
            state.timestamp.GetCurrentTime()
            return DescribeDeploymentResponse(status=Status.OK(), state=state)
github bentoml / BentoML / bentoml / deployment / sagemaker / __init__.py View on Github external
_create_sagemaker_endpoint_config(
                sagemaker_client,
                sagemaker_model_name,
                sagemaker_endpoint_config_name,
                sagemaker_config,
            )
            _create_sagemaker_endpoint(
                sagemaker_client,
                sagemaker_endpoint_name,
                sagemaker_endpoint_config_name,
            )
        except AWSServiceError as e:
            _try_clean_up_sagemaker_deployment_resource(deployment_pb)
            raise e

        return ApplyDeploymentResponse(status=Status.OK(), deployment=deployment_pb)
github bentoml / BentoML / bentoml / yatai / yatai_service_impl.py View on Github external
def ListBento(self, request, context=None):
    """List stored BentoService metadata matching the request's filters.

    Queries the bento metadata store with the request's name, offset,
    limit, and filter fields. Returns a ListBentoResponse carrying an OK
    status and the matching bentos, or an error-status response when the
    store raises a BentoMLException.
    """
    # TODO: validate request
    try:
        matching_bentos = self.bento_metadata_store.list(
            request.bento_name,
            request.offset,
            request.limit,
            request.filter,
        )
        return ListBentoResponse(status=Status.OK(), bentos=matching_bentos)
    except BentoMLException as e:
        # Surface the failure to the caller as a status proto rather than
        # propagating the exception across the RPC boundary.
        logger.error("RPC ERROR ListBento: %s", e)
        return ListBentoResponse(status=e.status_proto)
github bentoml / BentoML / bentoml / deployment / serverless / gcp_function.py View on Github external
deployment_pb.name,
                    api_names,
                    serverless_project_dir,
                    gcp_config.region,
                    # BentoML namespace is mapping to serverless stage.
                    stage=deployment_pb.namespace,
                )
                call_serverless_command(["deploy"], serverless_project_dir)

            res_deployment_pb = Deployment(state=DeploymentState())
            res_deployment_pb.CopyFrom(deployment_pb)
            state = self.describe(res_deployment_pb, yatai_service).state
            res_deployment_pb.state.CopyFrom(state)

            return ApplyDeploymentResponse(
                status=Status.OK(), deployment=res_deployment_pb
            )
        except BentoMLException as error:
            return ApplyDeploymentResponse(status=error.status_proto)