How to use cloudpickle - 10 common examples

To help you get started, we've selected ten cloudpickle examples, drawn from popular ways the library is used in public projects.
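
Before the project snippets, here is a minimal sketch of what cloudpickle adds over the standard library: it serializes lambdas, closures, and interactively defined functions by value, which the stdlib pickler rejects.

import pickle
import cloudpickle

square = lambda x: x * x

try:
    pickle.dumps(square)  # the stdlib pickler refuses lambdas...
except Exception as exc:
    print("pickle failed:", exc)

# ...while cloudpickle serializes the function itself, by value.
payload = cloudpickle.dumps(square)
assert cloudpickle.loads(payload)(4) == 16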

devitocodes/devito: tests/test_pickle.py (view on GitHub)
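    # NB: this test module presumably imports cloudpickle under the stdlib
    # name ("import cloudpickle as pickle"), so the pickle.dumps/pickle.loads
    # calls below all go through cloudpickle.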
    assert np.isclose(np.linalg.norm(model.vp - new_model.vp), 0)

    f0 = .010
    dt = model.critical_dt
    t0 = 0.0
    tn = 350.0
    time_range = TimeAxis(start=t0, stop=tn, step=dt)

    # Test TimeAxis pickling
    pkl_time_range = pickle.dumps(time_range)
    new_time_range = pickle.loads(pkl_time_range)
    assert np.isclose(np.linalg.norm(time_range.time_values),
                      np.linalg.norm(new_time_range.time_values))

    # Test Class Constant pickling
    pkl_origin = pickle.dumps(model.grid.origin)
    new_origin = pickle.loads(pkl_origin)

    for a, b in zip(model.grid.origin, new_origin):
        assert a.compare(b) == 0

    # Test Class TimeDimension pickling
    time_dim = TimeDimension(name='time', spacing=Constant(name='dt', dtype=np.float32))
    pkl_time_dim = pickle.dumps(time_dim)
    new_time_dim = pickle.loads(pkl_time_dim)
    assert time_dim.spacing._value == new_time_dim.spacing._value

    # Test Class SteppingDimension
    stepping_dim = SteppingDimension(name='t', parent=time_dim)
    pkl_stepping_dim = pickle.dumps(stepping_dim)
    new_stepping_dim = pickle.loads(pkl_stepping_dim)
    assert stepping_dim.is_Time == new_stepping_dim.is_Time

DeepX-inc/machina: tests/test_cloud_pickle.py (view on GitHub)
    @classmethod
    def tearDownClass(cls):
        # Load back every object the tests stored in Redis; any payload
        # that did not survive the round trip raises here.
        cloudpickle.loads(cls.r.get('env'))
        cloudpickle.loads(cls.r.get('traj'))
        cloudpickle.loads(cls.r.get('gpol'))
        cloudpickle.loads(cls.r.get('dpol'))
        cloudpickle.loads(cls.r.get('mpcpol'))
        cloudpickle.loads(cls.r.get('qfunc'))
        cloudpickle.loads(cls.r.get('aqpol'))
        cloudpickle.loads(cls.r.get('vfunc'))
        cloudpickle.loads(cls.r.get('mcpol'))
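
The teardown above shows only the read side: earlier in the suite, each object was cloudpickled into Redis. A minimal sketch of that round trip, assuming a Redis server on localhost (the key and the dummy object are placeholders, not machina's):

import cloudpickle
import redis

r = redis.StrictRedis(host="localhost", port=6379)

# Serialize by value and stash the raw bytes under a key...
r.set("env", cloudpickle.dumps({"name": "dummy-env", "steps": 100}))

# ...so any process holding the key can rebuild the object, exactly as
# tearDownClass does above.
restored = cloudpickle.loads(r.get("env"))
assert restored["steps"] == 100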

PrefectHQ/prefect: tests/environments/execution/test_dask_k8s_environment.py (view on GitHub)
    flow_runner = MagicMock()
    monkeypatch.setattr(
        "prefect.engine.get_default_flow_runner_class",
        MagicMock(return_value=flow_runner),
    )

    kube_cluster = MagicMock()
    monkeypatch.setattr("dask_kubernetes.KubeCluster", kube_cluster)

    with tempfile.TemporaryDirectory() as directory:
        with open(os.path.join(directory, "flow_env.prefect"), "w+") as env:
            # NB: `env` is never used; the flow is written through the
            # separate binary-mode handle below.
            flow = prefect.Flow("test")
            flow_path = os.path.join(directory, "flow_env.prefect")
            with open(flow_path, "wb") as f:
                cloudpickle.dump(flow, f)

        with set_temporary_config({"cloud.auth_token": "test"}):
            with prefect.context(
                flow_file_path=os.path.join(directory, "flow_env.prefect")
            ):
                environment.run_flow()

        assert flow_runner.call_args[1]["flow"].name == "test"

    assert start_func.called
    assert exit_func.called

explosion/spaCy: tests/test_pickle.py (view on GitHub)
def test_pickle_english(EN):
    # EN is a test fixture providing a loaded English pipeline.
    file_ = io.BytesIO()
    cloudpickle.dump(EN, file_)

    file_.seek(0)

    # cloudpickle writes a standard pickle stream, so the stdlib pickle
    # module can read it back.
    loaded = pickle.load(file_)
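
The asymmetry above (dump with cloudpickle, load with plain pickle) is deliberate: cloudpickle emits an ordinary pickle stream. A self-contained illustration, no spaCy required:

import io
import pickle
import cloudpickle

buf = io.BytesIO()
cloudpickle.dump(["any", "object"], buf)  # stands in for the pipeline
buf.seek(0)
assert pickle.load(buf) == ["any", "object"]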

PrefectHQ/prefect: tests/utilities/test_notifications.py (view on GitHub)
    def load_bytes(fname):
        import cloudpickle

        # Deserialize a callable from disk and invoke it.
        with open(fname, "rb") as f:
            obj = cloudpickle.load(f)
        return obj(1, 2, 3)
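
For that loader to find anything, a matching writer must have run first. A hypothetical dump side (the helper and file names are illustrative, not Prefect's):

import cloudpickle

def dump_bytes(fname, fn):
    # Persist a callable, even a lambda, by value.
    with open(fname, "wb") as f:
        cloudpickle.dump(fn, f)

dump_bytes("callback.pkl", lambda a, b, c: a + b + c)
# load_bytes("callback.pkl") above would then return 1 + 2 + 3 == 6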

mlflow/mlflow: tests/pyfunc/test_model_export_with_class_and_artifacts.py (view on GitHub)
def _conda_env():
    # NB: We need mlflow as a dependency in the environment.
    return _mlflow_conda_env(
        additional_conda_deps=None,
        install_mlflow=False,
        additional_pip_deps=[
            "-e " + os.path.dirname(mlflow.__path__[0]),
            "cloudpickle=={}".format(cloudpickle.__version__),
            "scikit-learn=={}".format(sklearn.__version__)
        ],
        additional_conda_channels=None)
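
Note the exact pin on cloudpickle's version: cloudpickle does not promise that pickles written by one release load under another, so an environment that will deserialize a model should install the same version that serialized it. The PyTorch deployment script below follows the same convention.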

PrefectHQ/prefect: src/prefect/environments/execution/fargate/fargate_task.py (view on GitHub)
        self.on_start()

        try:
            from prefect.engine import (
                get_default_flow_runner_class,
                get_default_executor_class,
            )

            # Load serialized flow from file and run it with the executor
            with open(
                prefect.context.get(
                    "flow_file_path", "/root/.prefect/flow_env.prefect"
                ),
                "rb",
            ) as f:
                flow = cloudpickle.load(f)

                runner_cls = get_default_flow_runner_class()
                executor_cls = get_default_executor_class()()  # resolve the class, then instantiate it
                runner_cls(flow=flow).run(executor=executor_cls)
        except Exception as exc:
            self.logger.exception(
                "Unexpected error raised during flow run: {}".format(exc)
            )
            raise exc
        finally:
            # Call on_exit callback if specified
            if self.on_exit:
                self.on_exit()
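
This is the consumer half of the pattern from the Dask-Kubernetes test earlier on this page: the flow is written to a .prefect file with cloudpickle.dump when the environment is built, then loaded with cloudpickle.load inside the container and handed to a flow runner.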

Azure/MachineLearningNotebooks: how-to-use-azureml/track-and-monitor-experiments/using-mlflow/train-deploy-pytorch/scripts/train.py (view on GitHub)
def driver():
    warnings.filterwarnings("ignore")
    # Dependencies for deploying the model
    pytorch_index = "https://download.pytorch.org/whl/"
    pytorch_version = "cpu/torch-1.1.0-cp36-cp36m-linux_x86_64.whl"
    deps = [
        "cloudpickle=={}".format(cloudpickle.__version__),
        pytorch_index + pytorch_version,
        "torchvision=={}".format(torchvision.__version__),
        "Pillow=={}".format("6.0.0")
    ]
    with mlflow.start_run() as run:
        model = Net().to(device)
        optimizer = optim.SGD(
            model.parameters(),
            lr=args.lr,
            momentum=args.momentum)
        for epoch in range(1, args.epochs + 1):
            train(args, model, device, train_loader, optimizer, epoch)
            test(args, model, device, test_loader)
        # Log model to run history using MLflow
        if args.save_model:
            model_env = _mlflow_conda_env(additional_pip_deps=deps)

descarteslabs/descarteslabs-python: descarteslabs/client/services/tasks/tasks.py (view on GitHub)
def _serialize_function(function):
    # Note: in Python 3, cloudpickle and base64 produce bytes objects, so we
    # decode to a string so the result can be JSON-dumped later.
    cp_version = getattr(cloudpickle, "__version__", None)
    if cp_version is None or cp_version != "0.4.0":
        warn(
            (
                "You must use version 0.4.0 of cloudpickle for compatibility with the Tasks client. {} found."
            ).format(cp_version)
        )

    encoded_bytes = base64.b64encode(cloudpickle.dumps(function))
    return encoded_bytes.decode("ascii")
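
The inverse operation is not shown in this snippet; a sketch of it (my naming, not the Tasks client's) simply reverses the two steps, decoding the ASCII string back to bytes and letting cloudpickle rebuild the function:

import base64
import cloudpickle

def _deserialize_function(encoded):
    # Hypothetical inverse of _serialize_function above.
    return cloudpickle.loads(base64.b64decode(encoded))

round_tripped = _deserialize_function(_serialize_function(lambda x: x + 1))
assert round_tripped(1) == 2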

medipixel/rl_algorithms: algorithms/common/env/multiprocessing_env.py (view on GitHub)
    def __getstate__(self):
        import cloudpickle

        # Hand multiprocessing a cloudpickle payload instead of self.x, so
        # objects the stdlib pickler rejects (e.g. closures) can still
        # cross process boundaries.
        return cloudpickle.dumps(self.x)
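
A __getstate__ like this conventionally pairs with a __setstate__ that decodes the payload on the receiving process. A sketch of the full wrapper under that assumption (the class name and __setstate__ body follow the common pattern, not this repo's exact code):

import pickle

class CloudpickleWrapper:
    """Makes multiprocessing serialize the wrapped object via cloudpickle."""

    def __init__(self, x):
        self.x = x

    def __getstate__(self):
        import cloudpickle

        return cloudpickle.dumps(self.x)

    def __setstate__(self, state):
        # cloudpickle output is plain pickle data, so the stdlib can
        # rebuild the object on the worker side.
        self.x = pickle.loads(state)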