How to use the kfp.compiler.Compiler class in kfp

To help you get started, we’ve selected a few kfp examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github kubeflow / pipelines / samples / core / loop_static / loop_static.py View on Github external
name="my-in-coop2",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo op2 %s" % item.B_b],
        )

    op_out = dsl.ContainerOp(
        name="my-out-cop",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo %s" % my_pipe_param],
    )


if __name__ == '__main__':
    # Compile the pipeline function into an Argo workflow package
    # written next to this script.
    package_path = __file__ + '.yaml'
    kfp.compiler.Compiler().compile(pipeline, package_path)
github kubeflow / pipelines / samples / core / volume_ops / volume_ops.py View on Github external
name="create-pvc",
        resource_name="my-pvc",
        modes=dsl.VOLUME_MODE_RWO,
        size=size
    )

    cop = dsl.ContainerOp(
        name="cop",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo foo > /mnt/file1"],
        pvolumes={"/mnt": vop.volume}
    )

if __name__ == '__main__':
    # Emit the compiled pipeline YAML alongside this source file.
    target = __file__ + '.yaml'
    kfp.compiler.Compiler().compile(volumeop_basic, target)
github kubeflow / pipelines / samples / contrib / volume_snapshot_ops / volume_snapshotop_rokurl.py View on Github external
resource_name="vol3",
        data_source=step2_snap.snapshot,
        size=step2_snap.outputs["size"]
    )

    step3 = dsl.ContainerOp(
        name="step3_output",
        image="library/bash:4.4.23",
        command=["cat", "/data/full"],
        pvolumes={"/data": vop3.volume}
    )


if __name__ == "__main__":
    # Deferred import: the compiler is only needed when run as a script.
    from kfp.compiler import Compiler
    out_package = __file__ + ".tar.gz"
    Compiler().compile(volume_snapshotop_rokurl, out_package)
github tensorflow / tfx / tfx / orchestration / kubeflow / kubeflow_dag_runner.py View on Github external
output_dir: An optional output directory into which to output the pipeline
        definition files. Defaults to the current working directory.
      output_filename: An optional output file name for the pipeline definition
        file. Defaults to pipeline_name.tar.gz when compiling a TFX pipeline.
        Currently supports .tar.gz, .tgz, .zip, .yaml, .yml formats. See
        https://github.com/kubeflow/pipelines/blob/181de66cf9fa87bcd0fe9291926790c400140783/sdk/python/kfp/compiler/compiler.py#L851
          for format restriction.
      config: An optional KubeflowDagRunnerConfig object to specify runtime
        configuration when running the pipeline under Kubeflow.
    """
    if config and not isinstance(config, KubeflowDagRunnerConfig):
      raise TypeError('config must be type of KubeflowDagRunnerConfig.')
    super(KubeflowDagRunner, self).__init__(config or KubeflowDagRunnerConfig())
    self._output_dir = output_dir or os.getcwd()
    self._output_filename = output_filename
    self._compiler = compiler.Compiler()
    self._params = []  # List of dsl.PipelineParam used in this pipeline.
    self._deduped_parameter_names = set()  # Set of unique param names used.
github kubeflow / pipelines / samples / core / sequential / sequential.py View on Github external
command=['sh', '-c'],
        arguments=['echo "$0"', text]
    )

@dsl.pipeline(
    name='Sequential pipeline',
    description='A pipeline with two sequential steps.'
)
def sequential_pipeline(url='gs://ml-pipeline-playground/shakespeare1.txt'):
    """Download a GCS object, then echo the downloaded content.

    Consuming the download step's output from the echo step is what
    sequences the two ops one after the other.
    """
    fetched = gcs_download_op(url)
    echo_op(fetched.output)

if __name__ == '__main__':
    # Compile the two-step pipeline into a YAML package next to this file.
    output_file = __file__ + '.yaml'
    kfp.compiler.Compiler().compile(sequential_pipeline, output_file)
github kubeflow / pipelines / contrib / samples / openvino / predict / numpy_predict.py View on Github external
image='gcr.io/constant-cubist-173123/inference_server/ml_predict:5',
     command=['python3', 'predict.py'],
     arguments=[
         '--model_bin', model_bin,
         '--model_xml', model_xml,
         '--input_numpy_file', input_numpy_file,
         '--label_numpy_file', label_numpy_file,
         '--batch_size', batch_size,
         '--scale_div', scale_div,
         '--scale_sub', scale_sub,
         '--output_folder', generated_model_dir],
     file_outputs={})

if __name__ == '__main__':
    # Import lazily so the compiler is only loaded for script execution.
    from kfp.compiler import Compiler
    Compiler().compile(openvino_predict, __file__ + '.tar.gz')
github kubeflow-kale / kale / backend / kale / converter.py View on Github external
def deploy_pipeline_to_kfp(self):
        """Compile the auto-generated pipeline and prepare a KFP run.

        Imports the generated pipeline module from the temp folder,
        compiles it into a ``<pipeline_name>.pipeline.tar.gz`` package,
        then connects to the Kubeflow Pipelines instance at
        ``self.kfp_url`` and picks (or creates) an experiment for the run.
        """
        import kfp.compiler as compiler
        import kfp

        # import the generated pipeline code
        # add temp folder to PYTHONPATH
        # NOTE(review): attribute name 'temp_dirdirpath' looks like a typo
        # (temp_dir / temp_dirpath?) — confirm against the class definition.
        sys.path.append(self.temp_dirdirpath)
        from pipeline_code import auto_generated_pipeline

        # Package written to the current working directory.
        pipeline_filename = self.pipeline_name + '.pipeline.tar.gz'
        compiler.Compiler().compile(auto_generated_pipeline, pipeline_filename)

        # Get or create an experiment and submit a pipeline run
        client = kfp.Client(host=self.kfp_url)
        list_experiments_response = client.list_experiments()
        experiments = list_experiments_response.experiments

        print(experiments)

        if not experiments:
            # The user does not have any experiments available. Creating a new one
            experiment = client.create_experiment(self.pipeline_name + ' experiment')
        else:
            experiment = experiments[-1]  # Using the last experiment

        # Submit a pipeline run
        # NOTE(review): run_name is unused in the visible code — the actual
        # run submission presumably follows in lines cut from this excerpt.
        run_name = self.pipeline_name + ' run'
github kubeflow / pipelines / samples / contrib / seldon / mnist_tf_volume.py View on Github external
}    
""")
    seldon_serving_json = seldon_serving_json_template.substitute({ 'dockerreposerving': str(docker_repo_serving),'dockertagserving': str(docker_tag_serving),'modelpvc': modelvolop.outputs["name"]})

    seldon_deployment = json.loads(seldon_serving_json)

    serve = dsl.ResourceOp(
        name='serve',
        k8s_resource=seldon_deployment,
        success_condition='status.state == Available'
    ).after(train)


if __name__ == "__main__":
    # Deferred import keeps kfp.compiler out of module import time.
    from kfp import compiler
    package = __file__ + ".tar.gz"
    compiler.Compiler().compile(mnist_tf_volume, package)