How to use the `sagemaker.fw_utils.tar_and_upload_dir` function in the SageMaker Python SDK

To help you get started, we've selected a few SageMaker examples based on popular ways the function is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Example from GitHub — aws/sagemaker-pytorch-container, file `test/integ/utils.py` (view on GitHub, external link):
def create_docker_services(region, command, tmpdir, hosts, image, additional_volumes, additional_env_vars, customer_script,
                           source_dir, entrypoint):
    """Build the environment-variable list for running a SageMaker container locally.

    For ``command == 'train'`` the standard opt-ml directories are selected; for
    ``command == 'serve'`` the default hosting env vars are added and, when a
    customer script is supplied, its source directory is tarred and uploaded to
    S3 so the container can fetch it via SAGEMAKER_SUBMIT_DIRECTORY.

    NOTE(review): this snippet appears truncated — the code that consumes
    ``environment``/``optml_dirs`` and the parameters ``tmpdir``, ``hosts``,
    ``image``, ``additional_volumes`` and ``entrypoint`` is not visible here.

    Raises:
        ValueError: if ``command`` is neither 'train' nor 'serve'.
    """
    environment = []
    # Region-scoped session: used for the S3 upload and to export credentials below.
    session = boto3.Session(region_name=region)

    optml_dirs = set()
    if command == 'train':
        # Training containers use the standard /opt/ml input and output dirs.
        optml_dirs = {'output', 'input'}

    elif command == 'serve':
        environment.extend(DEFAULT_HOSTING_ENV)

        if customer_script:
            # tar_and_upload_dir returns an UploadedCode tuple; element [0] is
            # the S3 prefix of the uploaded sourcedir.tar.gz.
            s3_script_path = fw_utils.tar_and_upload_dir(session=session,
                                                         bucket=default_bucket(session),
                                                         s3_key_prefix='test',
                                                         script=customer_script,
                                                         directory=source_dir)[0]

            environment.extend([
                'SAGEMAKER_PROGRAM={}'.format(os.path.basename(customer_script)),
                'SAGEMAKER_SUBMIT_DIRECTORY={}'.format(s3_script_path)
            ])
    else:
        raise ValueError('Unexpected command: {}'.format(command))

    # Export the session's AWS credentials into the container environment.
    environment.extend(credentials_to_env(session))

    environment.extend(additional_env_vars)
Example from GitHub — aws/sagemaker-pytorch-container, file `test/utils/local_mode.py` (view on GitHub, external link):
def create_docker_services(command, tmpdir, hosts, image, additional_volumes, additional_env_vars,
                           customer_script,
                           source_dir, entrypoint, use_gpu):
    """Build the environment-variable list for a local-mode SageMaker container run.

    Same shape as the integ-test variant, but the boto3 session uses the default
    region and the S3 key prefix is made unique with a SageMaker timestamp.

    NOTE(review): this snippet appears truncated — the code that consumes
    ``environment``/``optml_dirs`` and the parameters ``tmpdir``, ``hosts``,
    ``image``, ``additional_volumes``, ``entrypoint`` and ``use_gpu`` is not
    visible here.

    Raises:
        ValueError: if ``command`` is neither 'train' nor 'serve'.
    """
    environment = []
    session = boto3.Session()

    optml_dirs = set()
    if command == 'train':
        # Training containers use the standard /opt/ml input and output dirs.
        optml_dirs = {'output', 'input'}

    elif command == 'serve':
        environment.extend(DEFAULT_HOSTING_ENV)

        if customer_script:
            # Unique prefix per run so concurrent/repeated runs don't collide in S3.
            timestamp = utils.sagemaker_timestamp()
            # tar_and_upload_dir returns an UploadedCode tuple; element [0] is
            # the S3 prefix of the uploaded sourcedir.tar.gz.
            s3_script_path = fw_utils.tar_and_upload_dir(session=session,
                                                         bucket=default_bucket(session),
                                                         s3_key_prefix='test-{}'.format(timestamp),
                                                         script=customer_script,
                                                         directory=source_dir)[0]

            environment.extend([
                'SAGEMAKER_PROGRAM={}'.format(os.path.basename(customer_script)),
                'SAGEMAKER_SUBMIT_DIRECTORY={}'.format(s3_script_path)
            ])
    else:
        raise ValueError('Unexpected command: {}'.format(command))

    # Export the session's AWS credentials into the container environment.
    environment.extend(credentials_to_env(session))

    environment.extend(additional_env_vars)
Example from GitHub — aws/sagemaker-tensorflow-container, file `test/integ/test_layers_prediction.py` (view on GitHub, external link):
def test_layers_prediction(docker_image, sagemaker_session, opt_ml, processor):
    """End-to-end training run for the MNIST layers example.

    Stages the resources into the opt-ml tree, uploads the source dir to S3,
    trains in the container, and checks the expected model/output artifacts.
    """
    mnist_resources = os.path.join(SCRIPT_PATH, '../resources/mnist')
    code_dir = os.path.join(mnist_resources, 'code')

    # Stage code and data where the container expects to find them.
    copy_resource(mnist_resources, opt_ml, 'code')
    copy_resource(mnist_resources, opt_ml, 'data', 'input/data')

    uploaded = fw_utils.tar_and_upload_dir(session=sagemaker_session.boto_session,
                                           bucket=sagemaker_session.default_bucket(),
                                           s3_key_prefix='test_job',
                                           script='mnist.py',
                                           directory=code_dir)

    hyperparameters = {'training_steps': 1, 'evaluation_steps': 1}
    create_config_files('mnist.py', uploaded.s3_prefix, opt_ml, hyperparameters)
    os.makedirs(os.path.join(opt_ml, 'model'))

    train(docker_image, opt_ml, processor)

    assert file_exists(opt_ml, 'model/export/Servo'), 'model was not exported'
    assert file_exists(opt_ml, 'model/checkpoint'), 'checkpoint was not created'
    assert file_exists(opt_ml, 'output/success'), 'Success file was not created'
    assert not file_exists(opt_ml, 'output/failure'), 'Failure happened'
Example from GitHub — aws/sagemaker-chainer-container, file `test/utils/local_mode.py` (view on GitHub, external link):
def create_docker_services(command, tmpdir, hosts, image, additional_volumes, additional_env_vars,
                           customer_script, source_dir, entrypoint, use_gpu=False):
    """Build the environment-variable list for a local-mode Chainer container run.

    Mirrors the pytorch local-mode helper: selects opt-ml dirs for 'train', and
    for 'serve' uploads the customer script dir to S3 under a timestamped prefix
    and exports SAGEMAKER_PROGRAM / SAGEMAKER_SUBMIT_DIRECTORY.

    NOTE(review): this snippet appears truncated — the code that consumes
    ``environment``/``optml_dirs`` and the parameters ``tmpdir``, ``hosts``,
    ``image``, ``additional_volumes``, ``entrypoint`` and ``use_gpu`` is not
    visible here.

    Raises:
        ValueError: if ``command`` is neither 'train' nor 'serve'.
    """

    environment = []
    session = boto3.Session()

    optml_dirs = set()
    if command == 'train':
        # Training containers use the standard /opt/ml input and output dirs.
        optml_dirs = {'output', 'input'}

    elif command == 'serve':
        environment.extend(DEFAULT_HOSTING_ENV)

        if customer_script:
            # Unique prefix per run so concurrent/repeated runs don't collide in S3.
            timestamp = utils.sagemaker_timestamp()
            # tar_and_upload_dir returns an UploadedCode tuple; element [0] is
            # the S3 prefix of the uploaded sourcedir.tar.gz.
            s3_script_path = fw_utils.tar_and_upload_dir(session=session,
                                                         bucket=default_bucket(session),
                                                         s3_key_prefix='test-{}'.format(timestamp),
                                                         script=customer_script,
                                                         directory=source_dir)[0]

            environment.extend([
                'SAGEMAKER_PROGRAM={}'.format(os.path.basename(customer_script)),
                'SAGEMAKER_SUBMIT_DIRECTORY={}'.format(s3_script_path)
            ])
    else:
        raise ValueError('Unexpected command: {}'.format(command))

    # Export the session's AWS credentials into the container environment.
    environment.extend(credentials_to_env(session))

    environment.extend(additional_env_vars)
Example from GitHub — aws/sagemaker-python-sdk, file `tests/unit/test_fw_utils.py` (view on GitHub, external link):
def test_tar_and_upload_dir_with_subdirectory(sagemaker_session, tmpdir):
    """tar_and_upload_dir keeps subdirectory structure inside sourcedir.tar.gz."""
    file_tree(tmpdir, ["src-dir/sub/train.py"])
    src_dir = os.path.join(str(tmpdir), "src-dir")

    # Patch out the temp-dir cleanup so the produced archive can still be
    # inspected by list_source_dir_files below.
    with patch("shutil.rmtree"):
        uploaded = fw_utils.tar_and_upload_dir(
            sagemaker_session, "bucket", "prefix", "train.py", src_dir
        )

    expected = fw_utils.UploadedCode(
        s3_prefix="s3://bucket/prefix/sourcedir.tar.gz", script_name="train.py"
    )
    assert uploaded == expected

    assert list_source_dir_files(sagemaker_session, tmpdir) == {"/sub/train.py"}
Example from GitHub — aws/sagemaker-python-sdk, file `tests/unit/test_fw_utils.py` (view on GitHub, external link):
def test_tar_and_upload_dir_not_s3(sagemaker_session):
    """tar_and_upload_dir on a plain local directory returns an UploadedCode
    pointing at s3://<bucket>/<prefix>/sourcedir.tar.gz with the script name.
    """
    bucket = "mybucket"
    s3_key_prefix = "something/source"
    script = os.path.basename(__file__)
    # Use this test file's own directory as the source directory. Equivalent to
    # the previous inspect.getfile(inspect.currentframe()) construction, but
    # simpler and consistent with the ``script`` line above.
    directory = os.path.dirname(os.path.abspath(__file__))
    result = fw_utils.tar_and_upload_dir(
        sagemaker_session, bucket, s3_key_prefix, script, directory
    )
    assert result == fw_utils.UploadedCode(
        "s3://{}/{}/sourcedir.tar.gz".format(bucket, s3_key_prefix), script
    )
Example from GitHub — aws/sagemaker-mxnet-container, file `test/integ/test_py_version.py` (view on GitHub, external link):
def test_train_py_version(docker_image, sagemaker_session, py_version, opt_ml, processor):
    """Integration test: run a training job whose user module asserts the Python version.

    Uploads the py_version resource code to S3, writes the SageMaker config files
    with the expected version passed as hyperparameters, and trains in the
    container; the user module itself fails training on a version mismatch.

    NOTE(review): the snippet ends at the trailing comment below — any further
    assertions in the original test are not visible in this excerpt.
    """
    resource_path = 'test/resources/py_version/code'

    # tar_and_upload_dir returns an UploadedCode tuple; s3_prefix locates the
    # uploaded sourcedir.tar.gz. A timestamped key prefix avoids S3 collisions.
    s3_source_archive = fw_utils.tar_and_upload_dir(session=sagemaker_session.boto_session,
                                                    bucket=sagemaker_session.default_bucket(),
                                                    s3_key_prefix=sagemaker_timestamp(),
                                                    script='usermodule.py',
                                                    directory=resource_path)

    hp = _py_version_dict(py_version)

    utils.create_config_files('usermodule.py', s3_source_archive.s3_prefix, opt_ml,
                              additional_hp=hp)
    os.makedirs(os.path.join(opt_ml, 'model'))
    docker_utils.train(docker_image, opt_ml, processor)

    # The usermodule.py train_fn will assert on the expected
    # python versions passed in through hyperparameters,
    # and training will fail if they are incorrect.