metadata["loss"] = args.loss
if args.steps:
metadata["steps"] = args.steps
signature = sdk.Signature('predict')\
.with_input('imgs', 'float32', [-1, 28, 28, 1], 'image')\
.with_output('probabilities', 'float32', [-1, args.classes])\
.with_output('class_ids', 'int64', [-1, 1])\
.with_output('logits', 'float32', [-1, args.classes])\
.with_output('classes', 'string', [-1, 1])
monitoring = [
sdk.Monitoring('Requests').with_spec('CounterMetricSpec', interval=15),
sdk.Monitoring('Latency').with_spec('LatencyMetricSpec', interval=15),
sdk.Monitoring('Accuracy').with_spec('AccuracyMetricSpec'),
sdk.Monitoring('Autoencoder') \
.with_health(True) \
.with_spec(
kind='ImageAEMetricSpec',
threshold=0.15,
application=args.autoencoder_app
)
]
model = sdk.Model() \
.with_name(args.model_name) \
.with_runtime('hydrosphere/serving-runtime-tensorflow-1.13.1:latest') \
.with_metadata(metadata) \
.with_payload(payload) \
.with_signature(signature) \
.with_monitoring(monitoring)
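
# A minimal sketch of uploading the assembled model, mirroring the
# `model.apply(<cluster address>)` usage shown further down this page;
# `args.hydrosphere_uri` is an assumed attribute name, not defined in this snippet.
result = model.apply(args.hydrosphere_uri)
print(result)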
def main(drift_detector_app, model_name, runtime, payload, metadata, hydrosphere_uri, *args, **kwargs):
    monitoring = [
        sdk.Monitoring('Drift Detector').with_health(True)
            .with_spec(
                kind='CustomModelMetricSpec',
                threshold=0.15,
                operator="<=",
                application=drift_detector_app
            )
    ]

    logger.info("Creating a Model object")
    model = sdk.Model()
    logger.info(f"Adding payload\n{payload}")
    model = model.with_payload(payload)
    logger.info(f"Adding runtime\n{runtime}")
    model = model.with_runtime(runtime)
    logger.info(f"Adding metadata\n{metadata}")
    model = model.with_metadata(metadata)
metadata = {
    'learning_rate': arguments['learning_rate'],
    'epochs': arguments['epochs'],
    'batch_size': arguments['batch_size'],
    'accuracy': str(arguments['accuracy']),
    'data': arguments['data_path']
}

signature = sdk.Signature('predict') \
    .with_input('imgs', 'float32', [-1, 28, 28, 1], 'image') \
    .with_output('probabilities', 'float32', [-1, 10]) \
    .with_output('class_ids', 'int64', [-1, 1]) \
    .with_output('logits', 'float32', [-1, 10]) \
    .with_output('classes', 'string', [-1, 1])

monitoring = [
    sdk.Monitoring('Requests').with_spec('CounterMetricSpec', interval=15),
    sdk.Monitoring('Latency').with_spec('LatencyMetricSpec', interval=15),
    sdk.Monitoring('Accuracy').with_spec('AccuracyMetricSpec'),
    sdk.Monitoring('Autoencoder').with_health(True).with_spec('ImageAEMetricSpec', threshold=0.15, application='mnist-concept-app')
]

model = sdk.Model() \
    .with_name(arguments['model_name']) \
    .with_runtime('hydrosphere/serving-runtime-tensorflow-1.13.1:latest') \
    .with_metadata(metadata) \
    .with_payload(payload) \
    .with_signature(signature)

result = model.apply(arguments['hydrosphere_address'])
print(result)

# i. Upload the model to Hydrosphere Serving.
# ii. Parse the status of the model upload, retrieve the built model version
#     and write it to the `/model_version.txt` file.
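
# Step i is the `model.apply` call above. A hedged sketch of step ii follows:
# the exact shape of `result` depends on the SDK version, so the "modelVersion"
# key used here is an assumption, not a documented field.
version = result.get("modelVersion") if isinstance(result, dict) else result
with open("/model_version.txt", "w") as file:
    file.write(str(version))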