# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# CLI entry flow: finish parsing args, load configs, then either dispatch the
# run to the deployed 'zazu' Dataloop service (--remote) or build the local
# hyper-parameter optimization model.
parser.add_argument("--zazu_timer", action='store_true', default=False)
args = parser.parse_args()

with open('configs.json', 'r') as fp:
    configs = json.load(fp)

# Login is deliberately best-effort: the flow can proceed unauthenticated.
# Narrowed from a bare `except:` so Ctrl-C / SystemExit still propagate.
try:
    maybe_login(configs['dataloop']['setenv'])
except Exception:
    pass

maybe_do_deployment_stuff()

if args.remote:
    # Package the configs as the single JSON input for remote executions.
    configs_input = dl.FunctionIO(type='Json', name='configs', value=configs)
    inputs = [configs_input]
    zazu_service = dl.services.get('zazu')
    # Resolve the project id so executions are billed to the right project.
    dataset_obj = get_dataset_obj(configs['dataloop'])
    project_id = dataset_obj.project.id  # renamed from `id` — don't shadow the builtin
    if args.search:
        zazu_service.execute(function_name='search', execution_input=inputs, project_id=project_id)
    if args.predict:
        zazu_service.execute(function_name='predict', execution_input=inputs, project_id=project_id)
else:
    # Local run: wire configs.json into the optimization model.
    logger = init_logging(__name__)
    # NOTE(review): `path` alias looks unused in this chunk — confirm nothing
    # below the visible region reads it before removing.
    this_path = path = os.getcwd()
    configs_path = os.path.join(this_path, 'configs.json')
    configs = ConfigSpec(configs_path)
    opt_model = OptModel()
    opt_model.add_child_spec(configs, 'configs')
def _update_predict_service(self, new_best_checkpoint_obj):
    """Repoint the deployed 'predict' service at the current model and new best checkpoint.

    Args:
        new_best_checkpoint_obj: checkpoint entity whose ``id`` becomes the
            service's active checkpoint.

    Side effects: mutates and updates the remote 'predict' service in place.
    """
    logger.info('update predict service')
    predict_service = dl.services.get('predict')
    # Lazy %-style args: formatting is deferred until the record is emitted
    # (emitted message is identical to the old concatenated form).
    logger.info('service: %s', predict_service)
    predict_service.input_params = {'model_id': self.model_obj.id,
                                    'checkpoint_id': new_best_checkpoint_obj.id}
    predict_service.update()
    logger.info('service: %s', predict_service)
def __init__(self, configs, time, test_dataset_id, query):
    """Set up the timer/eval context: Dataloop entities, test data, and model.

    Args:
        configs: JSON string with the run configuration (parsed below).
        time: interval value, coerced to int — presumably arrives as a string
            from a FunctionIO input; TODO confirm against caller.
        test_dataset_id: id of the Dataloop dataset holding the test items.
        query: JSON string with a custom filter for selecting test items.

    NOTE(review): this method appears to continue past the visible chunk;
    only the lines shown here are documented.
    """
    logger.info('dtlpy version: ' + str(dl.__version__))
    logger.info('dtlpy info: ' + str(dl.info()))
    time = int(time)
    # Forces the production environment regardless of configs['dataloop']['setenv'].
    dl.setenv('prod')
    configs = json.loads(configs)
    query = json.loads(query)
    self.configs_input = dl.FunctionIO(type='Json', name='configs', value=configs)
    self.service = dl.services.get('zazu')
    project_name = configs['dataloop']['project']
    self.project = dl.projects.get(project_name)
    test_dataset = self.project.datasets.get(dataset_id=test_dataset_id)
    maybe_download_pred_data(dataset_obj=test_dataset, val_query=query)
    # Build filters from the raw query so ground-truth annotations match the same items.
    filters = dl.Filters()
    filters.custom_filter = query
    dataset_name = test_dataset.name
    path_to_dataset = os.path.join(os.getcwd(), dataset_name)
    # Only download if the local copy doesn't already exist.
    if not os.path.exists(path_to_dataset):
        download_and_organize(path_to_dataset=path_to_dataset, dataset_obj=test_dataset, filters=filters)
    json_file_path = os.path.join(path_to_dataset, 'json')
    self.model_obj = self.project.models.get(model_name='retinanet')