Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def download_file(self, path, local_path):
    """Fetch *path* into local storage.

    For ``http(s)`` URLs the file is first downloaded to a temporary file
    and then uploaded to S3 under the key derived from *local_path*; for
    plain paths the S3 object is downloaded directly to *local_path*.

    NOTE(review): the http(s) branch mirrors the remote file into S3
    rather than writing *local_path* itself — confirm callers expect that.
    """
    from .local_fsclient import LocalFSClient
    import boto3  # kept: ensures the boto3 dependency is loaded with this code path

    # Python 2/3 compatible import of urlparse.
    try:
        from urllib.parse import urlparse
    except ImportError:
        from urlparse import urlparse

    if path.startswith("http:") or path.startswith("https:"):
        s3_path = self._get_relative_path(local_path)
        uri = urlparse(path)
        # save_atomic yields a temp path that is finalized only when the
        # with-block completes successfully.
        with LocalFSClient().save_atomic(uri.path) as temp_file:
            LocalFSClient().downloadFile(path, temp_file)
            self._s3_upload_file(temp_file, s3_path)
    else:
        path = self._get_relative_path(path)
        LocalFSClient().create_parent_folder(local_path)
        self.client.download_file(
            Bucket=self.s3BucketName, Key=path, Filename=local_path)
def save_atomic(path, move_file=True):
    """Context-manager generator: yield a local temp path for writing *path*.

    The caller writes into the yielded local path; on normal exit (when
    *move_file* is true) the temp file is moved to *path* via the
    filesystem client appropriate for that path (local, S3, ...).

    NOTE(review): original indentation was lost; the move-on-success step
    is placed inside the ``with`` after the ``yield`` so the temp file
    still exists when it is moved — confirm against the original file.
    Presumably this function is wrapped with ``contextlib.contextmanager``
    at its real definition site; the decorator is not visible here.
    """
    localClient = LocalFSClient()
    with localClient.save_atomic(path) as local_path:
        yield local_path
        if move_file:
            client = _get_fsclient_bypath(path)
            client.move_file(local_path, path)
def read_text_file(self, path):
    """Return the text contents of *path* by staging it in a local temp file.

    The remote file is downloaded into an atomic temp location and read
    back with the local filesystem client.
    """
    from .local_fsclient import LocalFSClient

    with LocalFSClient().save_atomic(path) as local_tmp_path:
        self.download_file(path, local_tmp_path)
        return LocalFSClient().read_text_file(local_tmp_path)
def _get_remote_model_features(self, remote_run):
    """Extract the model's input feature names from the remote scoring script.

    Downloads ``scoring_file_v_1_0_0.py`` from the run's outputs, locates
    the generated ``input_sample =`` assignment and evaluates its
    right-hand side to recover the input DataFrame's column list.

    Returns the list of feature names, or ``None`` on any failure
    (best-effort: the error is logged and the caller falls back to the
    original columns of the predicted data).
    """
    from a2ml.api.utils import fsclient, local_fsclient
    import os
    import pandas as pd  # needed in scope for the eval'd input_sample expression

    model_features = None
    target_categories = None  # NOTE(review): assigned but never used in this block
    temp_dir = local_fsclient.LocalFSClient().get_temp_folder()
    try:
        file_name = 'scoring_file_v_1_0_0.py'
        remote_run.download_file('outputs/%s' % file_name, os.path.join(temp_dir, file_name))
        text = fsclient.read_text_file(os.path.join(temp_dir, file_name))
        to_find = "input_sample ="
        start = text.find(to_find)
        if start > 0:
            end = text.find("\n", start)
            if end > start:
                code_to_run = text[start + len(to_find):end]
                # SECURITY: eval() executes code taken from a downloaded
                # file. Acceptable only because the file comes from our own
                # run outputs — do not reuse this pattern on untrusted data.
                input_sample = eval(code_to_run)
                model_features = input_sample.columns.tolist()
    except Exception as e:
        self.ctx.log('Cannot get columns from remote model.Use original columns from predicted data: %s' % e)

    # Fix: the flattened original never returned the computed value, which
    # would discard model_features; return it explicitly.
    return model_features
def read_text_file(self, path):
    """Read *path* as text, going through a temporary local staging file."""
    from .local_fsclient import LocalFSClient

    # Stage the remote file locally, then hand the local copy to the
    # local filesystem client's text reader.
    with LocalFSClient().save_atomic(path) as staging_path:
        self.download_file(path, staging_path)
        return LocalFSClient().read_text_file(staging_path)
# NOTE(review): this span is a byte-for-byte duplicate of the body of
# download_file (L2-22 region) sitting OUTSIDE any def — it references
# self/path/local_path, which are undefined at module level, so it cannot
# run as written.  Looks like an accidental paste during extraction;
# confirm against the original file and remove.
if path.startswith("http:") or path.startswith("https:"):
s3_path = self._get_relative_path(local_path)
uri = urlparse(path)
with LocalFSClient().save_atomic(uri.path) as temp_file:
LocalFSClient().downloadFile(path, temp_file)
self._s3_upload_file(temp_file, s3_path)
#FSClient().waitForFile(local_path, if_wait_for_file=True, num_tries=3000, interval_sec=20)
# with FSClient().open(path, "rb", encoding=None) as fd:
#     self.client.upload_fileobj(fd, Bucket=self.s3BucketName, Key=s3_path)
else:
path = self._get_relative_path(path)
LocalFSClient().create_parent_folder(local_path)
self.client.download_file(
Bucket=self.s3BucketName, Key=path, Filename=local_path)