How to use the kfp.dsl.ContainerOp class in kfp

To help you get started, we’ve selected a few kfp.dsl.ContainerOp examples, based on popular ways it is used in public projects.

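All of the snippets below assume the KFP v1 SDK, where dsl.ContainerOp describes a single containerized pipeline step. As a minimal, self-contained sketch of the usual pattern (the image and names here are illustrative): define a pipeline function, create ContainerOp steps inside it, and compile it to an Argo workflow YAML.

import kfp
from kfp import dsl

@dsl.pipeline(name='echo-pipeline', description='Minimal ContainerOp example.')
def echo_pipeline(message='hello'):
    # One ContainerOp = one workflow step: a name, an image, a command,
    # and (optionally) files to capture as named outputs.
    dsl.ContainerOp(
        name='echo',
        image='library/bash:4.4.23',
        command=['sh', '-c'],
        arguments=['echo "%s" | tee /tmp/out.txt' % message],
        file_outputs={'out': '/tmp/out.txt'},
    )

if __name__ == '__main__':
    kfp.compiler.Compiler().compile(echo_pipeline, 'echo_pipeline.yaml')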

github amygdala / code-snippets / ml / kubeflow-pipelines / samples / kubeflow-tf / workflow1.py
cmleop = dsl.ContainerOp(
      name = 'cmleop',
      image = 'gcr.io/google-samples/ml-pipeline-cmle-op',
      arguments = ["--gcs-path", '%s/%s/tf/serving_model_dir/export/chicago-taxi' % (working_dir, '{{workflow.name}}'),
          "--version-name", '{{workflow.name}}',
          "--project", project]
      ).apply(gcp.use_gcp_secret('user-gcp-sa'))
  cmleop2 = dsl.ContainerOp(
      name = 'cmleop2',
      image = 'gcr.io/google-samples/ml-pipeline-cmle-op',
      arguments = ["--gcs-path", '%s/%s/tf2/serving_model_dir/export/chicago-taxi' % (working_dir, '{{workflow.name}}'),
          "--version-name", '{{workflow.name}}_2',
          "--project", project]
      ).apply(gcp.use_gcp_secret('user-gcp-sa'))

  tfserving = dsl.ContainerOp(
      name = 'tfserving',
      image = 'gcr.io/google-samples/ml-pipeline-kubeflow-tfserve-taxi',
      arguments = ["--model_name", '{{workflow.name}}',
          "--model_path", '%s/%s/tf/serving_model_dir/export/chicago-taxi' % (working_dir, '{{workflow.name}}')]
      )
  tfserving2 = dsl.ContainerOp(
      name = 'tfserving2',
      image = 'gcr.io/google-samples/ml-pipeline-kubeflow-tfserve-taxi',
      arguments = ["--model_name", '{{workflow.name}}-2',
          "--model_path", '%s/%s/tf2/serving_model_dir/export/chicago-taxi' % (working_dir, '{{workflow.name}}')]
      )

  analyze.after(train)
  analyze2.after(train2)
  cmleop.after(train)
  cmleop2.after(train2)
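Two details worth noting above: .after(op) adds an explicit ordering edge between steps that don't exchange data, and .apply(gcp.use_gcp_secret('user-gcp-sa')) (from kfp.gcp) mounts the user-gcp-sa Kubernetes secret into the step so it can authenticate against Google Cloud.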
github kubeflow / pipelines / samples / contrib / nvidia-resnet / pipeline / src / pipeline.py
def TrainOp(name, input_dir, output_dir, model_name, model_version, epochs):
    return dsl.ContainerOp(
        name=name,
        image='',  # left blank in the sample; supply your training image here
        arguments=[
            '--input_dir', input_dir,
            '--output_dir', output_dir,
            '--model_name', model_name,
            '--model_version', model_version,
            '--epochs', epochs
        ],
        file_outputs={'output': '/output.txt'}
    )
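Because file_outputs maps the key 'output' to /output.txt, downstream steps can consume whatever the training container writes there via .outputs['output']. A hypothetical wiring inside a pipeline function (the report op and the argument values are illustrative, not part of the sample):

    train = TrainOp('train', '/mnt/input', '/mnt/output', 'resnet', '1', 10)
    report = dsl.ContainerOp(
        name='report',
        image='library/bash:4.4.23',
        command=['sh', '-c'],
        # Referencing train.outputs['output'] also makes report run after train.
        arguments=['echo training produced: %s' % train.outputs['output']],
    )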
github kubeflow / pipelines / samples / core / recursion / recursion.py
def flip_coin_op():
    """Flip a coin and output heads or tails randomly."""
    return dsl.ContainerOp(
        name='Flip coin',
        image='python:alpine3.6',
        command=['sh', '-c'],
        arguments=['python -c "import random; result = \'heads\' if random.randint(0,1) == 0 '
                  'else \'tails\'; print(result)" | tee /tmp/output'],
        file_outputs={'output': '/tmp/output'}
    )
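The captured flip result is what drives the branching in the recursion sample: since the op has a single file output, it is reachable as .output and can feed a dsl.Condition. A sketch of that pattern, reusing the print_op helper shown at the bottom of this page:

@dsl.pipeline(name='flip-coin')
def flip_coin_pipeline():
    flip = flip_coin_op()
    # The branch only runs when the captured output equals 'heads'.
    with dsl.Condition(flip.output == 'heads'):
        print_op('it was heads')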
github kubeflow / pipelines / samples / core / loop_output / loop_output.py
def pipeline():
    op0 = dsl.ContainerOp(
        name="my-out-cop0",
        image='python:alpine3.6',
        command=["sh", "-c"],
        arguments=[
            'python -c "import json; import sys; json.dump([i for i in range(20, 31)], open(\'/tmp/out.json\', \'w\'))"'],
        file_outputs={'out': '/tmp/out.json'},
    )

    with dsl.ParallelFor(op0.output) as item:
        op1 = dsl.ContainerOp(
            name="my-in-cop1",
            image="library/bash:4.4.23",
            command=["sh", "-c"],
            arguments=["echo do output op1 item: %s" % item],
        )

    op_out = dsl.ContainerOp(
        name="my-out-cop2",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo do output op2, outp: %s" % op0.output],
    )
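Here op0 writes a JSON list to /tmp/out.json, and dsl.ParallelFor(op0.output) fans out one my-in-cop1 instance per element, binding each element to item. op_out sits outside the loop, so it receives the whole serialized list rather than a single item.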
github SeldonIO / seldon-core / examples / kubeflow / train_pipeline / nlp_pipeline.py
    vectorize_step = dsl.ContainerOp(
        name='vectorize',
        image='tfidf_vectorizer:0.1',
        command="python",
        arguments=[
            "/microservice/pipeline_step.py",
            "--in-path", spacy_tokens_path,
            "--out-path", tfidf_vectors_path,
            "--max-features", tfidf_max_features,
            "--ngram-range", tfidf_ngram_range,
            "--action", "train",
            "--model-path", tfidf_model_path,
        ],
        pvolumes={"/mnt": tokenize_step.pvolume}
    )

    predict_step = dsl.ContainerOp(
        name='predictor',
        image='lr_text_classifier:0.1',
        command="python",
        arguments=[
            "/microservice/pipeline_step.py",
            "--in-path", tfidf_vectors_path,
            "--labels-path", labels_path,
            "--out-path", lr_prediction_path,
            "--c-param", lr_c_param,
            "--action", "train",
            "--model-path", lr_model_path,
        ],
        pvolumes={"/mnt": vectorize_step.pvolume}
    )

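In this Seldon pipeline the steps exchange data through a shared persistent volume: each op mounts its predecessor's pvolume at /mnt and reads and writes the *_path files that live on it, which carries the intermediate artifacts from step to step.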
github amygdala / code-snippets / ml / kubeflow-pipelines / samples / kubeflow-tf / workflow1.py
  tfteval2 = dsl.ContainerOp(
      name = 'tft-eval2',
      image = 'gcr.io/google-samples/ml-pipeline-dataflow-tftbq-taxi',
      arguments = [ "--input_handle", input_handle_eval, "--outfile_prefix", outfile_prefix_eval,
          "--working_dir", '%s/%s/tft-eval2' % (working_dir, '{{workflow.name}}'),
          "--project", project,
          "--mode", preprocess_mode,
          "--setup_file", tft_setup_file,
          "--max_rows", '5000',
          "--ts1", ts1,
          "--ts2", ts2,
          "--stage", "eval",
          "--preprocessing_module", preprocessing_module2]
      ).apply(gcp.use_gcp_secret('user-gcp-sa'))
  tfttrain2 = dsl.ContainerOp(
      name = 'tft-train2',
      image = 'gcr.io/google-samples/ml-pipeline-dataflow-tftbq-taxi',
      arguments = [ "--input_handle", input_handle_train, "--outfile_prefix", outfile_prefix_train,
          "--working_dir", '%s/%s/tft-train2' % (working_dir, '{{workflow.name}}'),
          "--project", project,
          "--mode", preprocess_mode,
          "--setup_file", tft_setup_file,
          "--max_rows", max_rows,
          "--ts1", ts1,
          "--ts2", ts2,
          "--stage", "train",
          "--preprocessing_module", preprocessing_module2]
      ).apply(gcp.use_gcp_secret('user-gcp-sa'))

  train = dsl.ContainerOp(
      name = 'train',
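The '{{workflow.name}}' placeholder used throughout the working_dir paths is an Argo runtime variable, so each pipeline run writes its preprocessing output under its own unique directory.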
github kubeflow / pipelines / samples / core / artifact_location / artifact_location.py
    # configures artifact location
    pipeline_artifact_location = dsl.ArtifactLocation.s3(
        bucket=bucket,
        endpoint="minio-service.%s:9000" % namespace,  # parameterize minio-service endpoint
        insecure=True,
        access_key_secret=V1SecretKeySelector(name=secret_name, key="accesskey"),
        secret_key_secret={"name": secret_name, "key": "secretkey"},  # accepts dict also
    )

    # set pipeline level artifact location
    dsl.get_pipeline_conf().set_artifact_location(pipeline_artifact_location)

    # artifacts in this op are stored to endpoint `minio-service.<namespace>:9000`
    op = dsl.ContainerOp(name="foo", image="busybox:%s" % tag,
                         command=['sh', '-c', 'echo hello > /tmp/output.txt'],
                         file_outputs={'output': '/tmp/output.txt'})
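dsl.ArtifactLocation.s3 overrides where Argo stores the pipeline's output artifacts; note that the access- and secret-key selectors accept either a V1SecretKeySelector or a plain dict, and that the MinIO endpoint is assembled from the namespace pipeline parameter.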
github kubeflow / pipelines / samples / core / volume_snapshot_ops / volume_snapshot_ops.py
def volume_snapshotop_sequential(url):
    vop = dsl.VolumeOp(
        name="create_volume",
        resource_name="vol1",
        size="1Gi",
        modes=dsl.VOLUME_MODE_RWM
    )

    step1 = dsl.ContainerOp(
        name="step1_ingest",
        image="google/cloud-sdk:272.0.0",
        command=["sh", "-c"],
        arguments=["mkdir /data/step1 && "
                   "gsutil cat %s | gzip -c >/data/step1/file1.gz" % url],
        pvolumes={"/data": vop.volume}
    )

    step1_snap = dsl.VolumeSnapshotOp(
        name="step1_snap",
        resource_name="step1_snap",
        volume=step1.pvolume
    )

    step2 = dsl.ContainerOp(
        name="step2_gunzip",
github kubeflow / pipelines / samples / contrib / volume_ops / volumeop_sequential.py
def volumeop_sequential():
    vop = dsl.VolumeOp(
        name="mypvc",
        resource_name="newpvc",
        size="10Gi",
        modes=dsl.VOLUME_MODE_RWM
    )

    step1 = dsl.ContainerOp(
        name="step1",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["echo 1|tee /data/file1"],
        pvolumes={"/data": vop.volume}
    )

    step2 = dsl.ContainerOp(
        name="step2",
        image="library/bash:4.4.23",
        command=["sh", "-c"],
        arguments=["cp /data/file1 /data/file2"],
        pvolumes={"/data": step1.pvolume}
    )

    step3 = dsl.ContainerOp(
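Notice that none of these steps call .after(): mounting step1.pvolume into step2 (and so on down the chain) already encodes the sequential dependency, and kfp orders the steps accordingly.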
github kubeflow / pipelines / samples / core / condition / condition.py
def print_op(msg):
    """Print a message."""
    return dsl.ContainerOp(
        name='Print',
        image='alpine:3.6',
        command=['echo', msg],
    )
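Unlike the sh -c examples above, this op passes msg directly to echo as part of command, with no shell in between; msg may be a plain string or a PipelineParam, which is serialized as a placeholder and resolved when the workflow runs.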