Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async def test_parallel_writer(temp_file, uuid):
    """Schedule 2000 concurrent writes of the same UUID, then verify readback.

    Every chunk read back must decode to the original UUID string and
    exactly 2000 chunks must be present.
    """
    w_file = thread_aio_file(temp_file, 'w')
    r_file = thread_aio_file(temp_file, 'r')

    futures = list()

    for i in range(2000):
        futures.append(w_file.write(uuid, i * len(uuid), 0))

    # asyncio.wait() no longer accepts bare coroutines (deprecated in 3.8,
    # TypeError since 3.11); gather() schedules them and propagates the
    # first failure instead of silently collecting it in a result set.
    # This also matches the other parallel-writer test in this file.
    await asyncio.gather(*futures)
    await w_file.fsync()

    count = 0
    for async_chunk in Reader(r_file, chunk_size=len(uuid)):
        chunk = await async_chunk

        if not chunk:
            break

        assert chunk.decode() == uuid
        count += 1

    assert count == 2000
def test_parallel_writer(temp_file, uuid):
    """Legacy generator-coroutine variant: 2000 parallel writes, then readback.

    Uses the pre-async/await ``yield from`` coroutine protocol.
    """
    writer = thread_aio_file(temp_file, 'w')
    reader = thread_aio_file(temp_file, 'r')

    chunk_len = len(uuid)
    pending = [writer.write(uuid, idx * chunk_len, 0) for idx in range(2000)]

    yield from asyncio.wait(pending)
    yield from writer.fsync()

    seen = 0
    for awaitable in Reader(reader, chunk_size=chunk_len):
        data = yield from awaitable

        if not data:
            break

        assert data.decode() == uuid
        seen += 1

    assert seen == 2000
async def test_md5_hasher(self):
    """Hash a 40 MiB region via region_read and compare with a full-read MD5."""
    async with AIOFile('40MiBempty.img', mode='rb') as afp:
        whole_file = await afp.read()
        expected = md5(whole_file).hexdigest()

        region = ss.region_read(afp, 0, 40 * 1000 * 1000)
        actual = await ss.md5_hasher(region)

        self.assertEqual(actual, expected)
def posix_aio_file(name, mode, **kwargs):
    """Point AIOFile at the POSIX AIO backend, then open *name* with it.

    Extra keyword arguments are forwarded to the AIOFile constructor.
    """
    backend = {
        'OPERATION_CLASS': posix_aio.AIOOperation,
        'IO_READ': posix_aio.IO_READ,
        'IO_NOP': posix_aio.IO_NOP,
        'IO_WRITE': posix_aio.IO_WRITE,
    }
    for attr, value in backend.items():
        setattr(AIOFile, attr, value)

    return AIOFile(name, mode, **kwargs)
def thread_aio_file(name, mode, **kwargs):
    """Configure AIOFile for the threaded backend and open *name*.

    Extra keyword arguments are forwarded to the AIOFile constructor,
    matching the signature of ``posix_aio_file`` (backward compatible:
    existing two-argument callers are unaffected).
    """
    AIOFile.OPERATION_CLASS = ThreadedAIOOperation
    AIOFile.IO_READ = IO_READ
    AIOFile.IO_NOP = IO_NOP
    AIOFile.IO_WRITE = IO_WRITE
    return AIOFile(name, mode, **kwargs)
def thread_aio_file(name, mode):
    """Switch AIOFile to the thread-pool backend and open *name* with it."""
    for attr, value in (
        ('OPERATION_CLASS', ThreadedAIOOperation),
        ('IO_READ', IO_READ),
        ('IO_NOP', IO_NOP),
        ('IO_WRITE', IO_WRITE),
    ):
        setattr(AIOFile, attr, value)

    return AIOFile(name, mode)
def test_reader_writer(temp_file, uuid):
    """Legacy generator-coroutine round trip: 100 sequential writes, then readback."""
    src = thread_aio_file(temp_file, 'r')
    dst = thread_aio_file(temp_file, 'w')

    write = Writer(dst)

    for _ in range(100):
        yield from write(uuid)

    yield from dst.fsync()

    total = 0
    for awaitable in Reader(src, chunk_size=len(uuid)):
        piece = yield from awaitable

        if not piece:
            break

        assert piece.decode() == uuid
        total += 1

    assert total == 100
async def test_parallel_writer(aio_file_maker, temp_file, uuid):
    """Fire 1000 out-of-order writes concurrently, then verify the readback."""
    w_file = await aio_file_maker(temp_file, 'w')
    r_file = await aio_file_maker(temp_file, 'r')

    chunk_len = len(uuid)
    ops = [w_file.write(uuid, idx * chunk_len) for idx in range(1000)]

    # Randomize submission order: completion order must not matter.
    shuffle(ops)

    await asyncio.gather(*ops)
    await w_file.fsync()

    total = 0
    async for piece in Reader(r_file, chunk_size=chunk_len):
        assert piece == uuid
        total += 1

    assert total == 1000
async def test_reader_writer(loop, temp_file, uuid):
    """Sequentially write 100 UUID chunks, then read back and verify each one.

    Unlike the sibling reader/writer tests, the original never counted the
    chunks read back, so an empty or truncated file passed vacuously; the
    final count assertion closes that gap.
    """
    r_file = thread_aio_file(temp_file, 'r')
    w_file = thread_aio_file(temp_file, 'w')

    writer = Writer(w_file)

    for _ in range(100):
        await writer(uuid)

    await w_file.fsync()

    count = 0
    async for chunk in Reader(r_file, chunk_size=len(uuid)):
        assert chunk.decode() == uuid
        count += 1

    # Without this, the loop body could run zero times and still "pass".
    assert count == 100
async def test_reader_writer(aio_file_maker, temp_file, uuid):
    """Round trip 100 UUID chunks through Writer/Reader and verify each."""
    r_file = await aio_file_maker(temp_file, 'r')
    w_file = await aio_file_maker(temp_file, 'w')

    write_chunk = Writer(w_file)

    for _ in range(100):
        await write_chunk(uuid)

    await w_file.fsync()

    seen = 0
    # This Reader variant is a plain (sync) iterator yielding awaitables,
    # so it is consumed with `for` + `await`, not `async for`.
    for pending in Reader(r_file, chunk_size=len(uuid)):
        data = await pending
        assert data == uuid
        seen += 1

    assert seen == 100