How to use the bentoml.proto.deployment_pb2.DescribeDeploymentResponse class in bentoml

To help you get started, we’ve selected a few bentoml examples based on popular ways this class is used in public projects.

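DescribeDeploymentResponse is a generated protobuf message with two fields that recur throughout the snippets below: status (a Status message) and state (a DeploymentState). Before the real-world code, here is a minimal sketch of constructing one by hand. The import path for status_pb2 and the Status(status_code=...) construction are assumptions inferred from the snippets, not an authoritative API reference.

    # Minimal sketch of building a DescribeDeploymentResponse by hand.
    # Import paths are assumed; field names mirror the GitHub snippets below.
    import json

    from bentoml.proto import status_pb2
    from bentoml.proto.deployment_pb2 import DeploymentState, DescribeDeploymentResponse

    # A healthy deployment: RUNNING state plus an OK status code.
    state = DeploymentState(
        state=DeploymentState.RUNNING,
        info_json=json.dumps({'endpoints': ['https://example.com/predict']}),  # illustrative value
    )
    state.timestamp.GetCurrentTime()  # stamp the state with the current time

    response = DescribeDeploymentResponse(
        status=status_pb2.Status(status_code=status_pb2.Status.OK),
        state=state,
    )

    # A failed describe call instead carries DeploymentState.ERROR together with
    # an error_message, as the aws_lambda.py example below shows.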

github bentoml / BentoML / bentoml / deployment / serverless / aws_lambda.py (view on GitHub)
)

            base_url = ''
            for output in outputs:
                if output['OutputKey'] == 'ServiceEndpoint':
                    base_url = output['OutputValue']
                    break
            if base_url:
                info_json['endpoints'] = [
                    base_url + '/' + api_name for api_name in api_names
                ]
            state = DeploymentState(
                state=DeploymentState.RUNNING, info_json=json.dumps(info_json)
            )
            state.timestamp.GetCurrentTime()
            return DescribeDeploymentResponse(status=Status.OK(), state=state)
        except BentoMLException as error:
            return DescribeDeploymentResponse(status=error.status_proto)

github bentoml / BentoML / bentoml / deployment / serverless / aws_lambda.py (view on GitHub)
)

            try:
                cf_client = boto3.client('cloudformation', aws_config.region)
                cloud_formation_stack_result = cf_client.describe_stacks(
                    StackName='{name}-{ns}'.format(
                        ns=deployment_pb.namespace, name=deployment_pb.name
                    )
                )
                outputs = cloud_formation_stack_result.get('Stacks')[0]['Outputs']
            except Exception as error:
                state = DeploymentState(
                    state=DeploymentState.ERROR, error_message=str(error)
                )
                state.timestamp.GetCurrentTime()
                return DescribeDeploymentResponse(
                    status=Status.INTERNAL(str(error)), state=state
                )

            base_url = ''
            for output in outputs:
                if output['OutputKey'] == 'ServiceEndpoint':
                    base_url = output['OutputValue']
                    break
            if base_url:
                info_json['endpoints'] = [
                    base_url + '/' + api_name for api_name in api_names
                ]
            state = DeploymentState(
                state=DeploymentState.RUNNING, info_json=json.dumps(info_json)
            )
            state.timestamp.GetCurrentTime()

github bentoml / BentoML / bentoml / proto / yatai_service_pb2_grpc.py (view on GitHub)
          response_serializer=deployment__pb2.ApplyDeploymentResponse.SerializeToString,
      ),
      'DeleteDeployment': grpc.unary_unary_rpc_method_handler(
          servicer.DeleteDeployment,
          request_deserializer=deployment__pb2.DeleteDeploymentRequest.FromString,
          response_serializer=deployment__pb2.DeleteDeploymentResponse.SerializeToString,
      ),
      'GetDeployment': grpc.unary_unary_rpc_method_handler(
          servicer.GetDeployment,
          request_deserializer=deployment__pb2.GetDeploymentRequest.FromString,
          response_serializer=deployment__pb2.GetDeploymentResponse.SerializeToString,
      ),
      'DescribeDeployment': grpc.unary_unary_rpc_method_handler(
          servicer.DescribeDeployment,
          request_deserializer=deployment__pb2.DescribeDeploymentRequest.FromString,
          response_serializer=deployment__pb2.DescribeDeploymentResponse.SerializeToString,
      ),
      'ListDeployments': grpc.unary_unary_rpc_method_handler(
          servicer.ListDeployments,
          request_deserializer=deployment__pb2.ListDeploymentsRequest.FromString,
          response_serializer=deployment__pb2.ListDeploymentsResponse.SerializeToString,
      ),
      'AddBento': grpc.unary_unary_rpc_method_handler(
          servicer.AddBento,
          request_deserializer=repository__pb2.AddBentoRequest.FromString,
          response_serializer=repository__pb2.AddBentoResponse.SerializeToString,
      ),
      'UpdateBento': grpc.unary_unary_rpc_method_handler(
          servicer.UpdateBento,
          request_deserializer=repository__pb2.UpdateBentoRequest.FromString,
          response_serializer=repository__pb2.UpdateBentoResponse.SerializeToString,
      ),

github bentoml / BentoML / bentoml / deployment / serverless / gcp_function.py (view on GitHub)
                try:
                    response = call_serverless_command(["info"], serverless_project_dir)
                    info_json = parse_serverless_info_response_to_json_string(response)
                    state = DeploymentState(
                        state=DeploymentState.RUNNING, info_json=info_json
                    )
                    state.timestamp.GetCurrentTime()
                except BentoMLException as e:
                    state = DeploymentState(
                        state=DeploymentState.ERROR, error_message=str(e)
                    )
                    state.timestamp.GetCurrentTime()

            return DescribeDeploymentResponse(status=Status.OK(), state=state)
        except BentoMLException as error:
            return DescribeDeploymentResponse(status=error.status_proto)

github bentoml / BentoML / bentoml / proto / yatai_service_pb2_grpc.py (view on GitHub)
        response_deserializer=deployment__pb2.ApplyDeploymentResponse.FromString,
        )
    self.DeleteDeployment = channel.unary_unary(
        '/bentoml.Yatai/DeleteDeployment',
        request_serializer=deployment__pb2.DeleteDeploymentRequest.SerializeToString,
        response_deserializer=deployment__pb2.DeleteDeploymentResponse.FromString,
        )
    self.GetDeployment = channel.unary_unary(
        '/bentoml.Yatai/GetDeployment',
        request_serializer=deployment__pb2.GetDeploymentRequest.SerializeToString,
        response_deserializer=deployment__pb2.GetDeploymentResponse.FromString,
        )
    self.DescribeDeployment = channel.unary_unary(
        '/bentoml.Yatai/DescribeDeployment',
        request_serializer=deployment__pb2.DescribeDeploymentRequest.SerializeToString,
        response_deserializer=deployment__pb2.DescribeDeploymentResponse.FromString,
        )
    self.ListDeployments = channel.unary_unary(
        '/bentoml.Yatai/ListDeployments',
        request_serializer=deployment__pb2.ListDeploymentsRequest.SerializeToString,
        response_deserializer=deployment__pb2.ListDeploymentsResponse.FromString,
        )
    self.AddBento = channel.unary_unary(
        '/bentoml.Yatai/AddBento',
        request_serializer=repository__pb2.AddBentoRequest.SerializeToString,
        response_deserializer=repository__pb2.AddBentoResponse.FromString,
        )
    self.UpdateBento = channel.unary_unary(
        '/bentoml.Yatai/UpdateBento',
        request_serializer=repository__pb2.UpdateBentoRequest.SerializeToString,
        response_deserializer=repository__pb2.UpdateBentoResponse.FromString,
        )
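These channel.unary_unary registrations are the client half of the same service: each pairs a request serializer with a response deserializer, so calling DescribeDeployment on the stub returns an already-parsed DescribeDeploymentResponse. A hedged usage sketch, assuming the generated YataiStub class and a Yatai server listening on the example address; the deployment name and namespace are placeholders.

    # Hedged sketch: call DescribeDeployment through the generated stub.
    import grpc

    from bentoml.proto import status_pb2, yatai_service_pb2_grpc
    from bentoml.proto.deployment_pb2 import DescribeDeploymentRequest

    channel = grpc.insecure_channel('127.0.0.1:50051')  # placeholder address
    stub = yatai_service_pb2_grpc.YataiStub(channel)

    response = stub.DescribeDeployment(
        DescribeDeploymentRequest(deployment_name='my-deployment', namespace='dev')
    )
    if response.status.status_code == status_pb2.Status.OK:
        print(response.state.info_json)
    else:
        print(response.status.error_message)  # error_message field assumed from the Status helpers above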

github bentoml / BentoML / bentoml / yatai / yatai_service_impl.py (view on GitHub)
)

            if deployment_pb:
                operator = get_deployment_operator(self, deployment_pb)

                response = operator.describe(deployment_pb)

                if response.status.status_code == status_pb2.Status.OK:
                    with self.deployment_store.update_deployment(
                        request.deployment_name, request.namespace
                    ) as deployment:
                        deployment.state = ProtoMessageToDict(response.state)

                return response
            else:
                return DescribeDeploymentResponse(
                    status=Status.NOT_FOUND(
                        'Deployment "{}" in namespace "{}" not found'.format(
                            request.deployment_name, request.namespace
                        )
                    )
                )
        except BentoMLException as e:
            logger.error("RPC ERROR DescribeDeployment: %s", e)
            return DescribeDeploymentResponse(status=e.status_proto)
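Whichever deployment operator produced it, the DescribeDeploymentResponse is consumed the same way as in this handler: check status.status_code, then read the DeploymentState. Below is a small hedged helper sketch for unpacking one; it is not part of BentoML's API, and the assumption that info_json holds JSON follows from the json.dumps calls in the examples above.

    # Illustrative helper (not BentoML API): flatten a DescribeDeploymentResponse.
    import json

    from bentoml.proto import status_pb2
    from bentoml.proto.deployment_pb2 import DeploymentState

    def summarize_describe_response(response):
        if response.status.status_code != status_pb2.Status.OK:
            return {'error': response.status.error_message}
        state = response.state
        summary = {'running': state.state == DeploymentState.RUNNING}
        if state.error_message:
            summary['error'] = state.error_message
        if state.info_json:
            summary.update(json.loads(state.info_json))  # the examples store a JSON blob here
        return summary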