How to use storage - 10 common examples

To help you get started, we’ve selected ten storage examples based on popular ways storage is used in public open-source projects.

github googleapis / google-cloud-python / storage / unit_tests / test_blob.py
def test_upload_from_string_w_bytes(self):
        from six.moves.http_client import OK
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit
        from google.cloud.streaming import http_wrapper

        BLOB_NAME = 'blob-name'
        UPLOAD_URL = 'http://example.com/upload/name/key'
        DATA = b'ABCDEF'
        loc_response = {'status': OK, 'location': UPLOAD_URL}
        chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                           'range': 'bytes 0-4'}
        chunk2_response = {'status': OK}
        connection = _Connection(
            (loc_response, '{}'),
            (chunk1_response, ''),
            (chunk2_response, ''),
        )
        client = _Client(connection)
        bucket = _Bucket(client)
        blob = self._make_one(BLOB_NAME, bucket=bucket)
        blob._CHUNK_SIZE_MULTIPLE = 1
        blob.chunk_size = 5
        blob.upload_from_string(DATA)
        rq = connection.http._requested
        self.assertEqual(len(rq), 1)
        self.assertEqual(rq[0]['method'], 'POST')
        uri = rq[0]['uri']
        scheme, netloc, path, qs, _ = urlsplit(uri)
        self.assertEqual(scheme, 'http')
        self.assertEqual(netloc, 'example.com')
        self.assertEqual(path, '/b/name/o')
        self.assertEqual(dict(parse_qsl(qs)),
                         {'uploadType': 'media', 'name': BLOB_NAME})
        headers = {
            x.title(): str(y) for x, y in rq[0]['headers'].items()}
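
The test above drives Blob.upload_from_string through a mocked connection in order to simulate a chunked (resumable) upload. Against the real google-cloud-storage client the same call is much shorter; the following is a minimal sketch with placeholder bucket and object names, assuming credentials are configured in the environment.

from google.cloud import storage

client = storage.Client()
bucket = client.bucket("my-example-bucket")  # placeholder bucket name
blob = bucket.blob("blob-name")

# Setting a chunk_size forces a chunked, resumable upload, which is what the
# mocked loc/chunk responses above simulate. Real values must be a multiple
# of 256 KiB.
blob.chunk_size = 256 * 1024
blob.upload_from_string(b"ABCDEF")
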
github inasafe / inasafe / engine / test_engine.py
            plugin_list = get_plugins(plugin_name)
            assert len(plugin_list) == 1
            assert plugin_list[0].keys()[0] == plugin_name

            IF = plugin_list[0][plugin_name]

            impact_vector = calculate_impact(layers=[H, E],
                                             impact_fcn=IF)
            impact_filename = impact_vector.get_filename()

            # Read input data
            hazard_raster = read_layer(haz_filename)
            A = hazard_raster.get_data()
            mmi_min, mmi_max = hazard_raster.get_extrema()

            exposure_vector = read_layer(exp_filename)
            coordinates = exposure_vector.get_geometry()
            attributes = exposure_vector.get_data()

            # Extract calculated result
            icoordinates = impact_vector.get_geometry()
            iattributes = impact_vector.get_data()

            # First check that interpolated MMI was done as expected
            fid = open('%s/test_buildings_percentage_loss_and_mmi.txt'
                       % TESTDATA)
            reference_points = []
            MMI = []
            DAM = []
            for line in fid.readlines()[1:]:
                fields = line.strip().split(',')
github googleapis / google-cloud-python / storage / unit_tests / test_blob.py
def test_upload_from_string_w_bytes(self):
        from six.moves.http_client import OK
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit
        from google.cloud.streaming import http_wrapper

        BLOB_NAME = 'blob-name'
        UPLOAD_URL = 'http://example.com/upload/name/key'
        DATA = b'ABCDEF'
        loc_response = {'status': OK, 'location': UPLOAD_URL}
        chunk1_response = {'status': http_wrapper.RESUME_INCOMPLETE,
                           'range': 'bytes 0-4'}
        chunk2_response = {'status': OK}
        connection = _Connection(
            (loc_response, '{}'),
            (chunk1_response, ''),
            (chunk2_response, ''),
        )
        client = _Client(connection)
        bucket = _Bucket(client)
        blob = self._make_one(BLOB_NAME, bucket=bucket)
        blob._CHUNK_SIZE_MULTIPLE = 1
        blob.chunk_size = 5
        blob.upload_from_string(DATA)
        rq = connection.http._requested
        self.assertEqual(len(rq), 1)
        self.assertEqual(rq[0]['method'], 'POST')
        uri = rq[0]['uri']
        scheme, netloc, path, qs, _ = urlsplit(uri)
        self.assertEqual(scheme, 'http')
github googleapis / google-cloud-python / storage / unit_tests / test_blob.py
def test_create_resumable_upload_session(self):
        from six.moves.http_client import OK
        from six.moves.urllib.parse import parse_qsl
        from six.moves.urllib.parse import urlsplit

        BLOB_NAME = 'blob-name'
        UPLOAD_URL = 'http://example.com/upload/name/key'
        loc_response = {'status': OK, 'location': UPLOAD_URL}
        connection = _Connection(
            (loc_response, '{}'),
        )
        client = _Client(connection)
        bucket = _Bucket(client=client)
        blob = self._make_one(BLOB_NAME, bucket=bucket)

        resumable_url = blob.create_resumable_upload_session()

        self.assertEqual(resumable_url, UPLOAD_URL)

        rq = connection.http._requested
        self.assertEqual(len(rq), 1)
        self.assertEqual(rq[0]['method'], 'POST')

        uri = rq[0]['uri']
        scheme, netloc, path, qs, _ = urlsplit(uri)
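
The method under test is public API: Blob.create_resumable_upload_session returns a session URL that a separate client or process can then use to upload the object data. A minimal sketch against the real library, again with placeholder names and assuming configured credentials:

from google.cloud import storage

client = storage.Client()
blob = client.bucket("my-example-bucket").blob("blob-name")  # placeholder names

# The returned URL accepts the follow-up upload requests for this object.
session_url = blob.create_resumable_upload_session(content_type="application/octet-stream")
print(session_url)
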
github googleapis / google-cloud-python / storage / unit_tests / test_blob.py
def test_exists_hit(self):
        from six.moves.http_client import OK

        BLOB_NAME = 'blob-name'
        found_response = ({'status': OK}, b'')
        connection = _Connection(found_response)
        client = _Client(connection)
        bucket = _Bucket(client)
        blob = self._make_one(BLOB_NAME, bucket=bucket)
        bucket._blobs[BLOB_NAME] = 1
        self.assertTrue(blob.exists())
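
The exists() call being checked here is likewise public API; a short sketch with placeholder names:

from google.cloud import storage

# Returns True when the object is present in the bucket.
if storage.Client().bucket("my-example-bucket").blob("blob-name").exists():
    print("object is present")
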
github adafruit / rosie-ci / tester.py
if "circuitpython_tests" in tests:
        # First find our CircuitPython disk.
        start_time = time.monotonic()
        disk_path = None
        while not disk_path and time.monotonic() - start_time < 10:
            for disk in os.listdir("/dev/disk/by-path"):
                if board["path"] in disk and disk.endswith("part1"):
                    disk_path = disk
        if not disk_path:
            raise RuntimeError("Cannot find CIRCUITPY disk for device: " + board["path"])

        disk_path = "/dev/disk/by-path/" + disk_path
        disk_device = os.path.basename(os.readlink(disk_path))[:-1]

        with storage.mount(storage.NativeFileSystem(disk_path), "/media/cpy-" + board["path"]):
            mountpoint = "/media/cpy-" + board["path"]
            redis_log(log_key, "Successfully mounted CIRCUITPY disk at {0}\n".format(mountpoint))

            # Now find the serial.
            serial_device_name = None
            for port in list_ports.comports():
                if port.location and port.location.split(":")[0][2:] == board["path"]:
                    serial_device_name = port.name
            if not serial_device_name:
                raise RuntimeError("No CircuitPython serial connection found at path: " + board["path"])
            with serial.Serial("/dev/" + serial_device_name, 115200, write_timeout=4, timeout=4) as conn:
                tests_ok = run_circuitpython_tests(log_key, board["board"], board["test_env"], mountpoint, disk_device, conn, tests["circuitpython_tests"]) and tests_ok


    return tests_ok
github apache / thrift / contrib / zeromq / test-client.py
def main(args):
    endpoint = "tcp://127.0.0.1:9090"
    socktype = zmq.REQ
    incr = 0
    if len(args) > 1:
        incr = int(args[1])
        if incr:
            socktype = zmq.DOWNSTREAM
            endpoint = "tcp://127.0.0.1:9091"

    ctx = zmq.Context()
    transport = TZmqClient.TZmqClient(ctx, endpoint, socktype)
    protocol = thrift.protocol.TBinaryProtocol.TBinaryProtocolAccelerated(transport)
    client = storage.Storage.Client(protocol)
    transport.open()

    if incr:
        client.incr(incr)
        time.sleep(0.05)
    else:
        value = client.get()
        print value
github robotology / yarp / extern / thrift / thrift / contrib / zeromq / test-server.py
def main():
    handler = StorageHandler()
    processor = storage.Storage.Processor(handler)

    ctx = zmq.Context()
    reqrep_server = TZmqServer.TZmqServer(processor, ctx, "tcp://0.0.0.0:9090", zmq.REP)
    oneway_server = TZmqServer.TZmqServer(processor, ctx, "tcp://0.0.0.0:9091", zmq.PULL)
    multiserver = TZmqServer.TZmqMultiServer()
    multiserver.servers.append(reqrep_server)
    multiserver.servers.append(oneway_server)
    multiserver.serveForever()
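
The server snippet relies on a StorageHandler defined elsewhere in the example. Judging only from the calls made by the client above (client.incr(n) and client.get()), a handler implementing that service interface could look roughly like the sketch below; the real storage.thrift definition in the Thrift contrib example may differ in details.

class StorageHandler(object):
    # Hypothetical handler: the method names mirror the client calls shown above.
    def __init__(self):
        self.value = 0

    def incr(self, amount):
        # Reached through the one-way (zmq.PULL) endpoint in the server example.
        self.value += amount

    def get(self):
        # Reached through the request/reply (zmq.REP) endpoint.
        return self.value
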
github ustudio / storage / tests / test_local_storage.py
def test_load_from_file_creates_intermediate_dirs(
            self, mock_exists: mock.Mock, mock_makedirs: mock.Mock, mock_copy: mock.Mock) -> None:
        mock_exists.return_value = False

        storage = get_storage("file:///foo/bar/file")
        storage.load_from_filename("input_file")

        mock_exists.assert_called_with("/foo/bar")
        mock_makedirs.assert_called_with("/foo/bar")
        mock_copy.assert_called_with("input_file", "/foo/bar/file")
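
Outside the mocked test, the same ustudio storage calls work against real paths. A minimal sketch using only the names that appear in these tests (get_storage, load_from_filename, save_to_filename), with placeholder file paths and assuming the package exposes get_storage at the top level as the tests suggest:

from storage import get_storage

store = get_storage("file:///tmp/example/output_file")  # placeholder URI

# Copy a local file into the storage target; intermediate directories are
# created as needed, which is what the test above verifies.
store.load_from_filename("input_file")

# Copy the stored object back out to a local file.
store.save_to_filename("/tmp/downloaded_copy")
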
github ustudio / storage / tests / test_swift_storage.py
def test_save_to_filename_raises_internal_server_exception(self) -> None:
        self.auth_failure = "500 Internal Server Error"

        self.add_container_object("/v2.0/1234/CONTAINER", "/path/to/file.mp4", b"FOOBAR")

        swift_uri = self._generate_storage_uri("/path/to/file.mp4")
        storage_object = get_storage(swift_uri)

        tmp_file = tempfile.NamedTemporaryFile()

        with self.run_services():
            with self.assertRaises(InternalServerError):
                storage_object.save_to_filename(tmp_file.name)