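# Imports assumed by the test below (module paths follow the a2ml package
# layout and may differ in the actual test file):
import json
import os

from a2ml.api.utils import fsclient
from a2ml.api.utils.dataframe import DataFrame
from a2ml.api.model_review.model_helper import ModelHelper
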
def test_save_prediction(self):
    model_path = 'tests/fixtures/test_predict_by_model/iris'
    options = fsclient.read_json_file(os.path.join(model_path, "options.json"))

    prediction_id = "123"
    prediction_date = "today"
    results_file_path = os.path.join(model_path, "predictions",
        prediction_date + '_' + prediction_id + "_results.feather.zstd")
    predicted_file_path = os.path.join(model_path, "predictions",
        "iris_test_" + prediction_id + "_" + options.get('uid') + "_predicted.csv")

    # Case 1: review support enabled, file result -> returns the predicted CSV path.
    ds = DataFrame.create_dataframe(os.path.join(model_path, "iris_test.csv"))
    fsclient.remove_file(results_file_path)
    self.assertFalse(fsclient.is_file_exists(results_file_path))
    fsclient.remove_file(predicted_file_path)
    self.assertFalse(fsclient.is_file_exists(predicted_file_path))

    res = ModelHelper.save_prediction(ds, prediction_id,
        support_review_model=True, json_result=False, count_in_result=False, prediction_date=prediction_date,
        model_path=model_path, model_id=options.get('uid'))

    self.assertEqual(res, predicted_file_path)
    self.assertTrue(fsclient.is_file_exists(predicted_file_path))
    self.assertTrue(fsclient.is_file_exists(results_file_path))

    # Case 2: json_result=True -> returns a JSON string with 'columns' and 'data'.
    ds = DataFrame.create_dataframe(os.path.join(model_path, "iris_test.csv"))
    fsclient.remove_file(results_file_path)
    self.assertFalse(fsclient.is_file_exists(results_file_path))
    fsclient.remove_file(predicted_file_path)
    self.assertFalse(fsclient.is_file_exists(predicted_file_path))

    res = ModelHelper.save_prediction(ds, prediction_id,
        support_review_model=True, json_result=True, count_in_result=False, prediction_date=prediction_date,
        model_path=model_path, model_id=options.get('uid'))

    res = json.loads(res)
    self.assertEqual(res['columns'], ds.columns)
    self.assertEqual(len(res['data']), 6)

    # Case 3: no data_path -> returns the prediction rows as a list of dicts.
    ds = DataFrame.create_dataframe(os.path.join(model_path, "iris_test.csv"))
    fsclient.remove_file(results_file_path)
    self.assertFalse(fsclient.is_file_exists(results_file_path))
    fsclient.remove_file(predicted_file_path)
    self.assertFalse(fsclient.is_file_exists(predicted_file_path))

    ds.options['data_path'] = None
    res = ModelHelper.save_prediction(ds, prediction_id,
        support_review_model=False, json_result=False, count_in_result=False, prediction_date=prediction_date,
        model_path=model_path, model_id=options.get('uid'))

    self.assertEqual(type(res[0]), dict)
    self.assertEqual(res[0][options['targetFeature']], 'setosa')

    # Case 4: no data_path and loaded_columns set -> returns a dict with
    # 'columns' and 'data' (rows as lists).
    ds = DataFrame.create_dataframe(os.path.join(model_path, "iris_test.csv"))
    fsclient.remove_file(results_file_path)
    self.assertFalse(fsclient.is_file_exists(results_file_path))
    fsclient.remove_file(predicted_file_path)
    self.assertFalse(fsclient.is_file_exists(predicted_file_path))

    ds.options['data_path'] = None
    ds.loaded_columns = ds.columns
    res = ModelHelper.save_prediction(ds, prediction_id,
        support_review_model=False, json_result=False, count_in_result=False, prediction_date=prediction_date,
        model_path=model_path, model_id=options.get('uid'))

    self.assertEqual(res['columns'], ds.columns)
    self.assertEqual(len(res['data']), 6)
    self.assertEqual(type(res['data'][0]), list)

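# Illustrative sketch, not part of the original tests: a direct call to
# ModelHelper.save_prediction with the same flags as the first case above.
# 'predicted_ds', 'model_folder' and 'model_uid' are hypothetical names for a
# DataFrame of predictions, the model directory and the model id.
def store_prediction(predicted_ds, prediction_id, prediction_date, model_folder, model_uid):
    # With these flags the tests above show the return value is the path of the
    # "<dataset>_<prediction_id>_<model_uid>_predicted.csv" file written under
    # <model_folder>/predictions.
    return ModelHelper.save_prediction(
        predicted_ds, prediction_id,
        support_review_model=True, json_result=False, count_in_result=False,
        prediction_date=prediction_date,
        model_path=model_folder, model_id=model_uid)
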
def _path_to_credentials(self):
    # Resolution order: explicit 'path_to_credentials' config value, then the
    # <PROVIDER>_CREDENTIALS_PATH environment variable, then a <provider>.json
    # next to the config, and finally ~/.a2ml.
    if self.ctx.config.get('path_to_credentials'):
        creds_path = os.path.abspath(self.ctx.config.get('path_to_credentials'))
    elif os.environ.get('%s_CREDENTIALS_PATH' % self.provider.upper()):
        creds_path = os.environ.get('%s_CREDENTIALS_PATH' % self.provider.upper())
    else:
        cur_path = os.getcwd()
        if self.ctx.config.path:
            cur_path = self.ctx.config.path

        if fsclient.is_file_exists(os.path.join(cur_path, "%s.json" % self.provider)):
            creds_path = cur_path
        else:
            creds_path = os.path.abspath('%s/.a2ml' % os.environ.get('HOME', os.getcwd()))

    return creds_path

def _load(self, name):
    part = SerializableConfigYaml()
    if fsclient.is_file_exists(name):
        part.load_from_file(name)
    return part

def verify_local_model(self, model_id):
    model_path = os.path.join(self.ctx.config.get_model_path(model_id),
        'model.pkl.gz')
    return fsclient.is_file_exists(model_path), model_path

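# Illustrative caller sketch, not part of the original source: 'deployment' is a
# hypothetical name for an object exposing verify_local_model() as defined above.
def ensure_local_model(deployment, model_id):
    is_exists, model_path = deployment.verify_local_model(model_id)
    if not is_exists:
        raise RuntimeError("Local model file not found: %s" % model_path)
    return model_path
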
if not fsclient.is_s3_path(data_source_file):
    if config_path is None:
        config_path = os.getcwd()
    data_source_file = os.path.join(config_path, data_source_file)

if not fsclient.is_s3_path(data_source_file):
    data_source_file = os.path.abspath(data_source_file)

filename, file_extension = os.path.splitext(data_source_file)
if file_extension not in SUPPORTED_FORMATS:
    raise AugerException(
        'Source file has to be one of the supported formats: %s' %
        ', '.join(SUPPORTED_FORMATS))

if not fsclient.is_file_exists(data_source_file):
    raise AugerException(
        'Can\'t find file to import: %s' % data_source_file)

return data_source_file, True
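
# Note: SUPPORTED_FORMATS and AugerException in the fragment above are assumed
# to be module-level names (a collection of allowed file extensions and the
# package's error type) defined elsewhere in the package. The fragment resolves
# a local data-source path, validates its extension and existence, and returns
# the resolved path together with a boolean flag.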