How to use the kfp.dsl.pipeline function in kfp

To help you get started, we’ve selected a few kfp.dsl.pipeline examples, based on popular ways the function is used in public projects.
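The @dsl.pipeline decorator marks a Python function as a pipeline definition: the function body wires ops together, and each keyword argument becomes a runtime pipeline parameter. A minimal sketch, assuming the v1 SDK used in the snippets below (the pipeline name, the echo op, and the output filename are illustrative placeholders):

import kfp
from kfp import dsl


@dsl.pipeline(
    name='Echo pipeline',
    description='A minimal dsl.pipeline example.'
)
def echo_pipeline(message='hello'):
    # a single step that just echoes the pipeline parameter
    echo = dsl.ContainerOp(
        name='echo',
        image='library/bash:4.4.23',
        command=['sh', '-c'],
        arguments=['echo %s' % message])


if __name__ == '__main__':
    # compile the decorated function into a deployable workflow package
    kfp.compiler.Compiler().compile(echo_pipeline, 'echo_pipeline.yaml')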

github kubeflow / pipelines / samples / core / preemptible_tpu_gpu / preemptible_tpu_gpu.py
@dsl.pipeline(
    name='pipeline flip coin', description='shows how to use dsl.Condition.')
def flipcoin():
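  # FlipCoinOp (defined earlier in the sample) returns a ContainerOp;
  # the step runs on a preemptible GCP node pool, requests one NVIDIA
  # GPU, and retries up to five times, since preemptible nodes can be
  # reclaimed at any time.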
  flip = FlipCoinOp().apply(gcp.use_preemptible_nodepool()).set_gpu_limit(
      1, 'nvidia').set_retry(5)
github kubeflow / pipelines / samples / contrib / volume_ops / volumeop_parallel.py
@dsl.pipeline(
    name="VolumeOp Parallel",
    description="The first example of the design doc."
)
def volumeop_parallel():
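    # create a PersistentVolumeClaim; VOLUME_MODE_RWM is ReadWriteMany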
    vop = dsl.VolumeOp(
        name="create_pvc",
        resource_name="my-pvc",
        size="10Gi",
        modes=dsl.VOLUME_MODE_RWM
    )

    step1 = dsl.ContainerOp(
        name="step1",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo 1 | tee /mnt/file1"],
        pvolumes={"/mnt": vop.volume}  # mount the PVC created above at /mnt
    )
github xuw10 / kubeflow-tfx-workshop / kubeflow-pipelines / taxi / taxi-cab-classification-pipeline.py
@dsl.pipeline(
  name='TFX Taxi Cab Classification Pipeline Example',
  description='Example pipeline that does classification with model analysis based on a public BigQuery dataset.'
)
def taxi_cab_classification(
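#    the commented-out defaults show GCS / MinIO alternatives to the local /mnt paths used below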
#    output='minio://minio-service:9000/blah/',
#    output='gs://pipelineai-kubeflow/blah',
    output='/mnt',
    project='taxi-cab-classification-pipeline',
#    column_names='gs://ml-pipeline-playground/tfx/taxi-cab-classification/column-names.json',
    column_names='/mnt/kubeflow-pipelines/taxi/column-names.json',
    key_columns='trip_start_timestamp',
#    train='gs://ml-pipeline-playground/tfx/taxi-cab-classification/train.csv',
    train='/mnt/kubeflow-pipelines/taxi/train.csv',
#    evaluation='gs://ml-pipeline-playground/tfx/taxi-cab-classification/eval.csv',
    evaluation='/mnt/kubeflow-pipelines/taxi/eval.csv',
    mode='local',
github kubeflow / pipelines / samples / contrib / volume_ops / volumeop_dag.py
@dsl.pipeline(
    name="Volume Op DAG",
    description="The second example of the design doc."
)
def volume_op_dag():
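    # same PVC setup as above; in the full sample a downstream step mounts
    # vop.volume.after(step1, step2), so the DAG is expressed via volume deps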
    vop = dsl.VolumeOp(
        name="create_pvc",
        resource_name="my-pvc",
        size="10Gi",
        modes=dsl.VOLUME_MODE_RWM
    )

    step1 = dsl.ContainerOp(
        name="step1",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo 1 | tee /mnt/file1"],
        pvolumes={"/mnt": vop.volume}  # mount the PVC created above at /mnt
    )
github kubeflow / pipelines / samples / basic / immediate_value.py
@dsl.pipeline(
  name='Immediate Value',
  description='A pipeline with parameter values hard coded'
)
def immediate_value_pipeline():
  # "url" is a pipeline parameter with value being hard coded.
  # It is useful in case for some component you want to hard code a parameter instead
  # of exposing it as a pipeline parameter.
  url=dsl.PipelineParam(name='url', value='gs://ml-pipeline-playground/shakespeare1.txt')
  op1 = dsl.ContainerOp(
     name='download',
     image='google/cloud-sdk:216.0.0',
     command=['sh', '-c'],
     arguments=['gsutil cat %s | tee /tmp/results.txt' % url],
     file_outputs={'downloaded': '/tmp/results.txt'})
  op2 = dsl.ContainerOp(
     name='echo',
     image='library/bash:4.4.23',
     command=['sh', '-c'],
     arguments=['echo %s' % op1.output])  # consume op1's file output
github kubeflow / pipelines / samples / core / kubeflow_training_classification / kubeflow_training_classification.py
@dsl.pipeline(
    name='TF training and prediction pipeline',
    description=''
)
def kubeflow_training(output, project,
    evaluation='gs://ml-pipeline-playground/flower/eval100.csv',
    train='gs://ml-pipeline-playground/flower/train200.csv',
    schema='gs://ml-pipeline-playground/flower/schema.json',
    learning_rate=0.1,
    hidden_layer_size='100,50',
    steps=2000,
    target='label',
    workers=0,
    pss=0,
    preprocess_mode='local',
    predict_mode='local',
):
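    # each keyword default above becomes a pipeline parameter that can be overridden per run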
github xuw10 / kubeflow-tfx-workshop / kubeflow-pipelines / basic / artifact_location.py
@dsl.pipeline(
    name="custom_artifact_location_pipeline",
    description="""A pipeline to demonstrate how to configure the artifact 
    location for all the ops in the pipeline.""",
)
def custom_artifact_location(
    tag: str, namespace: str = "kubeflow", bucket: str = "mybucket"
):
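    # V1SecretKeySelector is imported from the kubernetes Python client:
    # from kubernetes.client.models import V1SecretKeySelector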

    # configures artifact location
    pipeline_artifact_location = dsl.ArtifactLocation.s3(
        bucket=bucket,
        endpoint="minio-service.%s:9000" % namespace,  # parameterize minio-service endpoint
        insecure=True,
        access_key_secret=V1SecretKeySelector(name="minio", key="accesskey"),
        secret_key_secret={"name": "minio", "key": "secretkey"},  # accepts dict also
    )
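    # downstream ops can consume this via ContainerOp's artifact_location argument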
github kubeflow / pipelines / samples / contrib / seldon / mnist_tf.py
@dsl.pipeline(
    name="Seldon MNIST TF",
    description="Example of training and serving seldon MNIST TF model. Requires docker secret as per kubeflow/example-seldon. Simpler version is mnist_tf_nopush.py"
)
# Example derived from https://github.com/kubeflow/example-seldon
# This example uses TF, but the R and SKLearn flows are similar - see kubeflow/example-seldon
# Push access to the chosen docker repo is needed - see the note below on the secret
# Requires seldon v0.3.0 or higher
def mnist_tf(docker_secret='docker-config',
             training_repo='https://github.com/kubeflow/example-seldon.git',
             training_branch='master',
             training_files='./example-seldon/models/tf_mnist/train/*',
             docker_repo_training='seldonio/deepmnistclassifier_trainer',
             docker_tag_training='0.3',
             serving_repo='https://github.com/kubeflow/example-seldon.git',
             serving_branch='master',
github kubeflow / pipelines / samples / contrib / volume_ops / volumeop_sequential.py
@dsl.pipeline(
    name="VolumeOp Sequential",
    description="The third example of the design doc."
)
def volumeop_sequential():
    vop = dsl.VolumeOp(
        name="mypvc",
        resource_name="newpvc",
        size="10Gi",
        modes=dsl.VOLUME_MODE_RWM
    )

    step1 = dsl.ContainerOp(
        name="step1",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo 1|tee /data/file1"],
        pvolumes={"/data": vop.volume}  # mount the PVC created above at /data
    )