How to use blobfile - common examples

To help you get started, we've selected a couple of blobfile examples based on popular ways it is used in public projects.


From openai/procgen (procgen-build/procgen_build/build_package.py): uploading freshly built wheels to a GCS bucket with bf.listdir, bf.join, and bf.copy.
os.environ["CIBW_ENVIRONMENT"] = (
                os.environ["CIBW_ENVIRONMENT"]
                + " TRAVIS_TAG=" + os.environ["TRAVIS_TAG"]
            )
    elif platform.system() == "Windows":
        init_vsvars()

    run("pip install cibuildwheel==1.0.0")
    run("cibuildwheel --output-dir wheelhouse")

    if have_credentials:
        print("upload wheels", platform.system())
        input_dir = "wheelhouse"
        output_dir = f"gs://{GCS_BUCKET}/builds/"
        for filename in bf.listdir(input_dir):
            src = bf.join(input_dir, filename)
            dst = bf.join(output_dir, filename)
            print(src, "=>", dst)
            bf.copy(src, dst, overwrite=True)
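
Outside of this build script, the same upload pattern is only a few lines. A minimal, self-contained sketch; the function name, directory, and bucket here are placeholders, not values from the procgen project:

import blobfile as bf

def upload_dir(input_dir: str, output_dir: str) -> None:
    # copy every file in input_dir into output_dir; either side may be local or gs://
    for filename in bf.listdir(input_dir):
        src = bf.join(input_dir, filename)
        dst = bf.join(output_dir, filename)
        print(src, "=>", dst)
        bf.copy(src, dst, overwrite=True)

# hypothetical usage:
# upload_dir("wheelhouse", "gs://my-example-bucket/builds/")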
From openai/procgen (procgen-build/procgen_build/build_qt.py): caching a built directory as a tar archive in GCS, using bf.join, bf.exists, and bf.BlobFile.
def cache_folder(name, dirpath, options, build_fn):
    if os.path.exists(dirpath):
        print(f"cache for {name} found locally")
        return

    options_hash = hashlib.md5("|".join(options).encode("utf8")).hexdigest()
    cache_path = bf.join(f"gs://{GCS_BUCKET}", "cache", f"{name}-{options_hash}.tar")
    if "GOOGLE_APPLICATION_CREDENTIALS" not in os.environ:
        # we don't have any credentials to do the caching, always build in this case
        print(f"building without cache for {name}")
        start = time.time()
        build_fn()
        print(f"build elapsed {time.time() - start}")
    elif bf.exists(cache_path):
        # cache hit: stream the tar archive out of GCS and unpack it
        print(f"downloading cache for {name}: {cache_path}")
        start = time.time()
        with bf.BlobFile(cache_path, "rb") as f:
            with tarfile.open(fileobj=f, mode="r") as tf:
                tf.extractall()
        print(f"download elapsed {time.time() - start}")
    else:
        # cache miss: build, then stream a tar archive of the result back into GCS
        print(f"building cache for {name}")
        start = time.time()
        build_fn()
        print(f"cache build elapsed {time.time() - start}")
        print(f"uploading cache for {name}")
        start = time.time()
        if not bf.exists(cache_path):
            with bf.BlobFile(cache_path, "wb") as f:
                with tarfile.open(fileobj=f, mode="w") as tf:
                    tf.add(dirpath)
        print(f"upload elapsed {time.time() - start}")
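
The blobfile-specific piece here is that bf.BlobFile returns a file-like object for a gs:// path, so it can be handed straight to tarfile just like a local file handle. A stripped-down sketch of the same round trip, assuming a hypothetical bucket path you can write to:

import tarfile
import blobfile as bf

# hypothetical values; replace with a bucket you control and a real directory
cache_path = "gs://my-example-bucket/cache/example.tar"
dirpath = "build_output"

# upload: stream a tar archive of the directory directly into the blob
with bf.BlobFile(cache_path, "wb") as f:
    with tarfile.open(fileobj=f, mode="w") as tf:
        tf.add(dirpath)

# download: stream the archive back and unpack it into the current directory
if bf.exists(cache_path):
    with bf.BlobFile(cache_path, "rb") as f:
        with tarfile.open(fileobj=f, mode="r") as tf:
            tf.extractall()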

blobfile

Read GCS, Azure Blob Storage (ABS) and local paths with the same interface; a clone of tensorflow.io.gfile.
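
That unified interface is the point of the examples above: the same call works whether a path is local, gs://, or az://. A small sketch, assuming hypothetical bucket and container names you have access to:

import blobfile as bf

paths = [
    "notes.txt",                              # local file
    "gs://my-example-bucket/notes.txt",       # hypothetical GCS path
    "az://myaccount/mycontainer/notes.txt",   # hypothetical Azure Blob Storage path
]

for path in paths:
    with bf.BlobFile(path, "w") as f:         # text-mode write works for all three
        f.write("hello\n")
    print(path, bf.exists(path), bf.stat(path).size)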

License: Unlicense
Latest version published 3 months ago

Package Health Score: 80 / 100