How to use the aiofiles.open function in aiofiles

To help you get started, we’ve selected a few aiofiles examples based on popular ways it is used in public projects.

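All of the examples below share the same basic pattern: aiofiles.open returns an asynchronous context manager, and the file methods (read, write) are coroutines that must be awaited. A minimal, self-contained sketch of that pattern (the filename example.txt is a placeholder):

import asyncio

import aiofiles


async def main():
    # Write a file asynchronously; the mode/encoding kwargs mirror the builtin open().
    async with aiofiles.open('example.txt', mode='w') as f:
        await f.write('hello\n')

    # Read it back; read() is a coroutine and must be awaited.
    async with aiofiles.open('example.txt') as f:
        print(await f.read())


asyncio.run(main())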

github Python-Tools / aioorm / test / test_scv.py View on Github external
2,u2,17,f
3,u3,16,m
4,u4,15,f
"""
    with StringIO() as f:
        f.write(head + content)
        await aioload_csv(User_csv_f, f, pk_in_csv=True)
        all_cols = SQL('*')
        query0 = User_csv_f.select(all_cols).order_by(User_csv_f.id)
        user0 = await alist(query0)
        s = (os.linesep).join([str(i) for i in user0]).strip()
        r = content.strip()
        assert s == r
    await User_csv_f.drop_table()

    async with aiofiles.open(str(filepath)) as ff:
        er = await aioload_csv(User_csv_f, ff, pk_in_csv=False)
        all_cols = SQL('*')
        query0 = User_csv_f.select(all_cols).order_by(User_csv_f.id)
        user0 = await alist(query0)
        s = (os.linesep).join([str(i) for i in user0]).strip()
        r = content.strip()
        assert s == r

    await User_csv_f.drop_table()

    await aioload_csv(User_csv_f, '0_user_out.csv', pk_in_csv=True)
    all_cols = SQL('*')
    query0 = User_csv_f.select(all_cols).order_by(User_csv_f.id)
    user0 = await alist(query0)
    s = (os.linesep).join([str(i) for i in user0]).strip()
    r = content.strip()
github odrling / peony-twitter / tests / test_client.py View on Github external
async def test_chunked_upload_async_input(dummy_peony_client, medias):
    async with aiofiles.open(str(medias['bloom'].cache), 'rb') as aiofile:
        await chunked_upload(dummy_peony_client, medias['bloom'], aiofile)
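Here the media file is opened with aiofiles in binary mode and handed to the upload helper as an asynchronous input. A sketch of how a consumer might pull fixed-size chunks from such a handle (a hypothetical helper, not peony's actual internals):

async def iter_chunks(aiofile, chunk_size=1024 * 1024):
    # Pull fixed-size chunks from an async file handle until EOF.
    while True:
        chunk = await aiofile.read(chunk_size)
        if not chunk:
            break
        yield chunk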
github steemit / sbds / sbds / storages / db / scripts / populate.py View on Github external
async def local_fetch_blocks_and_ops_in_blocks(local_path, block_nums):
    try:
        results = []
        for block_num in block_nums:
            # aiofiles.open returns an async context manager, so it must be
            # entered with "async with" rather than a plain "with"
            async with aiofiles.open(f'{local_path}/{block_num}/block.json') as f:
                raw_block = await f.read()
            async with aiofiles.open(f'{local_path}/{block_num}/ops.json') as f:
                raw_ops = await f.read()
            block = json.loads(raw_block)
            ops = json.loads(raw_ops)
            results.append((block_num, block, ops))
        assert len(results) == len(block_nums)
        return results
    except Exception as e:
        logger.exception('error locally fetching block and/or ops in block',
                         e=e, local_path=local_path)
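The loop above awaits the two reads for each block one after the other. Because aiofiles dispatches file operations to a thread pool, the two reads could also be overlapped with asyncio.gather; a minimal sketch under the same directory-layout assumption (read_json and fetch_block are hypothetical helpers):

import asyncio
import json

import aiofiles


async def read_json(path):
    # Read and decode one JSON file asynchronously.
    async with aiofiles.open(path) as f:
        return json.loads(await f.read())


async def fetch_block(local_path, block_num):
    # Fetch block.json and ops.json for one block concurrently.
    block, ops = await asyncio.gather(
        read_json(f'{local_path}/{block_num}/block.json'),
        read_json(f'{local_path}/{block_num}/ops.json'),
    )
    return block_num, block, ops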
github PyPlanet / PyPlanet / pyplanet / core / storage / drivers / local / __init__.py View on Github external
	async def open(self, filename: str, mode: str = 'r', **kwargs):
		fh = await aiofiles.open(self.absolute(filename), mode, **kwargs)
		await async_generator.yield_(fh)
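async_generator.yield_ comes from the async_generator backport library, which predates native async generators. On Python 3.7+, the same wrapper could be written with contextlib.asynccontextmanager; a sketch under that assumption:

from contextlib import asynccontextmanager

import aiofiles


@asynccontextmanager
async def open_file(path, mode='r', **kwargs):
    # Hand the aiofiles handle to the caller and close it when their
    # "async with" block exits.
    fh = await aiofiles.open(path, mode, **kwargs)
    try:
        yield fh
    finally:
        await fh.close()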
github virtool / virtool / virtool / app.py View on Github external
    loop = asyncio.get_event_loop()

    # "output" must be defined even when the git call below fails.
    output = None

    try:
        output = await loop.run_in_executor(None, subprocess.check_output, ["git", "describe", "--tags"])
        output = output.decode().rstrip()
    except (subprocess.CalledProcessError, FileNotFoundError):
        pass

    if output and "Not a git repository" not in output:
        return output

    try:
        version_file_path = os.path.join(install_path, "VERSION")

        async with aiofiles.open(version_file_path, "r") as version_file:
            content = await version_file.read()
            return content.rstrip()

    except FileNotFoundError:
        logger.critical("Could not determine software version.")
        return "Unknown"
github DavidFW1960 / home-assistant / custom_components / hacs / hacsstorage.py View on Github external
async def get(self, raw=False):
        """Read HACS data to storage."""
        from .blueprints import (
            HacsRepositoryAppDaemon,
            HacsRepositoryIntegration,
            HacsRepositoryPlugin,
            HacsRepositoryPythonScripts,
            HacsRepositoryThemes,
        )

        datastore = "{}/.storage/{}".format(self.config_dir, STORENAME)
        _LOGGER.debug("Reading from datastore %s.", datastore)

        self.data["task_running"] = True
        try:
            async with aiofiles.open(
                datastore, mode="r", encoding="utf-8", errors="ignore"
            ) as datafile:
                store_data = await datafile.read()
                store_data = json.loads(store_data)
                # No explicit close() needed: "async with" closes the file, and
                # aiofiles' close() is a coroutine that would have to be awaited.
        except Exception:
            # Issues reading the file (if it exists).
            return False

        if raw:
            return store_data

        # Restore data about HACS
        self.data["hacs"]["schema"] = store_data["hacs"].get("schema")
        self.data["hacs"]["view"] = store_data["hacs"].get("view")
github pinnaculum / galacteek / galacteek / core / multihashmetadb.py View on Github external
async def get(self, rscPath):
        containerPath, metaPath, exists = self.path(rscPath)
        if metaPath and exists:
            await asyncio.sleep(0)
            # "with await lock" is the deprecated pre-3.8 idiom; acquire the
            # lock with "async with" instead.
            async with self._lock:
                try:
                    async with aiofiles.open(metaPath, 'rt') as fd:
                        data = await fd.read()
                        return json.loads(data)
                except BaseException as err:
                    # Error reading metadata
                    log.debug('Error reading metadata for {0}: {1}'.format(
                        rscPath, str(err)))
                    os.unlink(metaPath)
github RobertoPrevato / BlackSheep / blacksheep / server / responses.py View on Github external
    async def data_provider():
        async with aiofiles.open(file_path, mode='rb') as f:
            while True:
                chunk = await f.read(1024 * 64)

                if not chunk:
                    break

                yield chunk
            yield b''
    return data_provider
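The factory returns an async generator, so a consumer iterates it with async for; a minimal (hypothetical) example:

async def total_size(data_provider):
    # Stream the file chunk by chunk without holding it all in memory.
    size = 0
    async for chunk in data_provider():
        size += len(chunk)
    return size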
github pioneers / PieCentral / runtime / runtime / supervisor.py View on Github external
srv_config_path: The path to the service configuration.
        dev_schema_path: The path to the device schema.
        max_retries: The maximum number of times to restart all the
            subprocesses before the supervisor exits.
        retry_interval: The duration between retries.
    """
    try:
        await initialize_log(log_level, log_pretty, log_frontend, log_backend)
        LOGGER.debug(f'Runtime v{get_version()}')
        LOGGER.debug('Configured logging', level=log_level, pretty=log_pretty)

        async with aiofiles.open(srv_config_path) as config_file:
            service_config = yaml.safe_load(await config_file.read())
        LOGGER.debug('Read configuration from disk', srv_config_path=srv_config_path)

        async with aiofiles.open(dev_schema_path) as schema_file:
            # Use safe_load for consistency with the service config above;
            # bare yaml.load now requires an explicit Loader argument.
            dev_schema = yaml.safe_load(await schema_file.read())
        load_device_types(dev_schema)
        LOGGER.debug('Read device schema from disk', dev_schema_path=dev_schema_path)

        make_retryable = backoff.on_exception(
            backoff.constant,
            Exception,
            interval=retry_interval,
            max_tries=max_retries,
            logger=LOGGER,
        )
        await make_retryable(spin)(service_config)
    except Exception as exc:
        LOGGER.critical('Error reached the top of the call stack')
        raise exc
    finally: