# Test Model pickling: the velocity model should survive the round trip
assert np.isclose(np.linalg.norm(model.vp - new_model.vp), 0)

f0 = 0.010  # source peak frequency
dt = model.critical_dt
t0 = 0.0
tn = 350.0
time_range = TimeAxis(start=t0, stop=tn, step=dt)
# Test TimeAxis pickling
pkl_time_range = pickle.dumps(time_range)
new_time_range = pickle.loads(pkl_time_range)
assert np.isclose(np.linalg.norm(time_range.time_values),
                  np.linalg.norm(new_time_range.time_values))
# Test Class Constant pickling
pkl_origin = pickle.dumps(model.grid.origin)
new_origin = pickle.loads(pkl_origin)
for a, b in zip(model.grid.origin, new_origin):
    assert a.compare(b) == 0
# Test Class TimeDimension pickling
time_dim = TimeDimension(name='time', spacing=Constant(name='dt', dtype=np.float32))
pkl_time_dim = pickle.dumps(time_dim)
new_time_dim = pickle.loads(pkl_time_dim)
assert time_dim.spacing._value == new_time_dim.spacing._value
# Test Class SteppingDimension
stepping_dim = SteppingDimension(name='t', parent=time_dim)
pkl_stepping_dim = pickle.dumps(stepping_dim)
new_stepping_dim = pickle.loads(pkl_stepping_dim)
assert stepping_dim.is_Time == new_stepping_dim.is_Time
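
# A minimal sketch extending the same round-trip pattern to SpaceDimension
# (this block is an addition, assuming the usual Devito constructors):
space_dim = SpaceDimension(name='x', spacing=Constant(name='h_x', value=1.0))
pkl_space_dim = pickle.dumps(space_dim)
new_space_dim = pickle.loads(pkl_space_dim)
assert space_dim.name == new_space_dim.name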
# Teardown for a test class that cached cloudpickled objects in redis:
# every stored blob must still deserialize cleanly
@classmethod
def tearDownClass(cls):
    cloudpickle.loads(cls.r.get('env'))
    cloudpickle.loads(cls.r.get('traj'))
    cloudpickle.loads(cls.r.get('gpol'))
    cloudpickle.loads(cls.r.get('dpol'))
    cloudpickle.loads(cls.r.get('mpcpol'))
    cloudpickle.loads(cls.r.get('qfunc'))
    cloudpickle.loads(cls.r.get('aqpol'))
    cloudpickle.loads(cls.r.get('vfunc'))
    cloudpickle.loads(cls.r.get('mcpol'))
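
# For context, a hedged sketch of the setup side this teardown pairs with:
# a redis-py client caching cloudpickled objects (the payload here is
# illustrative, and a locally running redis server is assumed):
import cloudpickle
import redis

r = redis.Redis()
r.set('env', cloudpickle.dumps({'illustrative': 'payload'}))
assert cloudpickle.loads(r.get('env')) == {'illustrative': 'payload'}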
# start_func/exit_func back the on_start/on_exit callbacks asserted below;
# the environment under test is assumed to be a DaskKubernetesEnvironment
start_func = MagicMock()
exit_func = MagicMock()
environment = DaskKubernetesEnvironment(on_start=start_func, on_exit=exit_func)

flow_runner = MagicMock()
monkeypatch.setattr(
    "prefect.engine.get_default_flow_runner_class",
    MagicMock(return_value=flow_runner),
)
kube_cluster = MagicMock()
monkeypatch.setattr("dask_kubernetes.KubeCluster", kube_cluster)
with tempfile.TemporaryDirectory() as directory:
    # Serialize a trivial flow to the path run_flow will read from
    flow = prefect.Flow("test")
    flow_path = os.path.join(directory, "flow_env.prefect")
    with open(flow_path, "wb") as f:
        cloudpickle.dump(flow, f)

    with set_temporary_config({"cloud.auth_token": "test"}):
        with prefect.context(flow_file_path=flow_path):
            environment.run_flow()

assert flow_runner.call_args[1]["flow"].name == "test"
assert start_func.called
assert exit_func.called
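
# Why call_args[1] recovers the flow: MagicMock records each invocation, and
# call_args[1] is the keyword-argument dict. A standalone illustration:
from unittest.mock import MagicMock

m = MagicMock()
m(flow="not-a-real-flow")
assert m.call_args[1]["flow"] == "not-a-real-flow"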
def test_pickle_english(EN):
    # cloudpickle's output is a standard pickle stream, so the stdlib
    # pickle module can load what cloudpickle dumped
    file_ = io.BytesIO()
    cloudpickle.dump(EN, file_)
    file_.seek(0)
    loaded = pickle.load(file_)
    assert loaded is not None
def load_bytes(fname):
    # Deserialize a cloudpickled callable from disk and call it
    import cloudpickle

    with open(fname, "rb") as f:
        obj = cloudpickle.load(f)
    return obj(1, 2, 3)
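
# A hedged sketch of the writing side load_bytes expects: any callable taking
# three arguments, serialized with cloudpickle (the helper and file names are
# illustrative, not from the original):
import cloudpickle

def save_bytes(fname, func):
    with open(fname, "wb") as f:
        cloudpickle.dump(func, f)

save_bytes("func.pkl", lambda a, b, c: a + b + c)
assert load_bytes("func.pkl") == 6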
def _conda_env():
    # NB: mlflow itself must be a dependency in the environment; it is
    # installed from the local source tree via pip ("-e") rather than
    # through the install_mlflow flag
    return _mlflow_conda_env(
        additional_conda_deps=None,
        install_mlflow=False,
        additional_pip_deps=[
            "-e " + os.path.dirname(mlflow.__path__[0]),
            "cloudpickle=={}".format(cloudpickle.__version__),
            "scikit-learn=={}".format(sklearn.__version__),
        ],
        additional_conda_channels=None,
    )
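
# A minimal usage sketch for the environment above (assumes a fitted
# scikit-learn estimator `model`; mlflow.sklearn.log_model is the real API,
# the artifact path "model" is an assumption):
import mlflow
import mlflow.sklearn

with mlflow.start_run():
    mlflow.sklearn.log_model(model, "model", conda_env=_conda_env())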
def run_flow(self) -> None:
    # Call on_start callback if specified, mirroring the on_exit handling below
    if self.on_start:
        self.on_start()

    try:
        from prefect.engine import (
            get_default_flow_runner_class,
            get_default_executor_class,
        )

        # Load the serialized flow from file and run it with the default
        # flow runner and executor
        with open(
            prefect.context.get(
                "flow_file_path", "/root/.prefect/flow_env.prefect"
            ),
            "rb",
        ) as f:
            flow = cloudpickle.load(f)

        runner_cls = get_default_flow_runner_class()
        executor = get_default_executor_class()()  # look up the class, then instantiate
        runner_cls(flow=flow).run(executor=executor)
    except Exception as exc:
        self.logger.exception(
            "Unexpected error raised during flow run: {}".format(exc)
        )
        raise
    finally:
        # Call on_exit callback if specified
        if self.on_exit:
            self.on_exit()
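
# A hedged sketch of how the callbacks above are typically wired up, assuming
# a prefect environment class that accepts on_start/on_exit callables:
def announce_start():
    print("flow starting")

def announce_exit():
    print("flow finished")

environment = DaskKubernetesEnvironment(on_start=announce_start, on_exit=announce_exit)
environment.run_flow()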
def driver():
    warnings.filterwarnings("ignore")
    # Pin the dependencies needed to deploy the model
    pytorch_index = "https://download.pytorch.org/whl/"
    pytorch_version = "cpu/torch-1.1.0-cp36-cp36m-linux_x86_64.whl"
    deps = [
        "cloudpickle=={}".format(cloudpickle.__version__),
        pytorch_index + pytorch_version,
        "torchvision=={}".format(torchvision.__version__),
        "Pillow==6.0.0",
    ]
    with mlflow.start_run() as run:
        model = Net().to(device)
        optimizer = optim.SGD(
            model.parameters(),
            lr=args.lr,
            momentum=args.momentum)
        for epoch in range(1, args.epochs + 1):
            train(args, model, device, train_loader, optimizer, epoch)
            test(args, model, device, test_loader)
        # Log the model to run history using MLflow
        if args.save_model:
            model_env = _mlflow_conda_env(additional_pip_deps=deps)
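            # A hedged sketch of the likely next step: log the trained network
            # with the pinned environment (mlflow.pytorch.log_model is a real
            # MLflow API; the artifact path "model" is an assumption)
            mlflow.pytorch.log_model(model, "model", conda_env=model_env)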
def _serialize_function(function):
    # Note: in Python 3, cloudpickle and base64 work on bytes, so the result
    # must be decoded into a string to be JSON-dumpable later
    cp_version = getattr(cloudpickle, "__version__", None)
    if cp_version != "0.4.0":  # also covers the missing-attribute (None) case
        warn(
            "You must use version 0.4.0 of cloudpickle for compatibility "
            "with the Tasks client. {} found.".format(cp_version)
        )
    encoded_bytes = base64.b64encode(cloudpickle.dumps(function))
    return encoded_bytes.decode("ascii")
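
# The inverse operation, as a minimal sketch (the helper name is illustrative;
# cloudpickle.loads restores what cloudpickle.dumps produced):
def _deserialize_function(encoded):
    import base64
    import cloudpickle
    return cloudpickle.loads(base64.b64decode(encoded.encode("ascii")))

assert _deserialize_function(_serialize_function(lambda x: x + 1))(41) == 42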
def __getstate__(self):
    # Delegate to cloudpickle so self.x can be something the stdlib pickle
    # cannot handle by itself (e.g. a lambda or a locally defined function)
    import cloudpickle
    return cloudpickle.dumps(self.x)
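
# A matching __setstate__, as a minimal sketch: cloudpickle's output is a
# standard pickle stream, so the stdlib pickle module can restore it
def __setstate__(self, state):
    import pickle
    self.x = pickle.loads(state)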