# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
#
# This module is part of asyncpg and is released under
# the Apache 2.0 License: http://www.apache.org/licenses/LICENSE-2.0
import asyncio
import json
from asyncpg import _testbase as tb
from asyncpg import connection as apg_con
# Upper bound, in seconds, for timing-sensitive checks -- presumably compared
# against measured call durations in tests elsewhere in the file; TODO confirm.
MAX_RUNTIME = 0.1
class SlowIntrospectionConnection(apg_con.Connection):
    """Connection subclass whose type introspection is artificially slow.

    Used by tests to provoke races between concurrent introspection runs.
    """

    # Class-level counter; the augmented assignment below shadows it with an
    # instance attribute on first use.
    introspect_count = 0

    async def _introspect_types(self, *args, **kwargs):
        # Count the attempt, stall long enough for concurrent callers to
        # interleave, then fall through to the real introspection.
        self.introspect_count += 1
        await asyncio.sleep(0.4)
        result = await super()._introspect_types(*args, **kwargs)
        return result
class TestIntrospection(tb.ConnectedTestCase):
# Introspection tests; a per-class admin connection provisions a scratch
# database used by the tests.
@classmethod
def setUpClass(cls):
super().setUpClass()
# Dedicated admin connection kept alive for the whole test class.
cls.adminconn = cls.loop.run_until_complete(cls.connect())
# NOTE(review): the matching DROP DATABASE / connection close is not visible
# in this chunk -- confirm tearDownClass cleans up 'asyncpg_intro_test'.
cls.loop.run_until_complete(
cls.adminconn.execute('CREATE DATABASE asyncpg_intro_test'))
def create_pool(self, pool_class=pg_pool.Pool,
                connection_class=pg_connection.Connection, **kwargs):
    """Build a connection pool for this test case and track it for teardown.

    Keyword arguments are folded into the connection spec before the pool
    is created.
    """
    spec = self.get_connection_spec(kwargs)
    new_pool = create_pool(loop=self.loop, pool_class=pool_class,
                           connection_class=connection_class, **spec)
    # Remember the pool so the test harness can close it during cleanup.
    self._pools.append(new_pool)
    return new_pool
# Travis' macOS workers are notoriously slow, so timing-sensitive pool tests
# get extra headroom there.
POOL_NOMINAL_TIMEOUT = 0.5 if os.environ.get('TRAVIS_OS_NAME') == 'osx' else 0.1
class SlowResetConnection(pg_connection.Connection):
    """Connection whose reset() is artificially delayed.

    Lets tests simulate races with Connection.reset().
    """

    async def reset(self, *, timeout=None):
        # Stall first so a competing operation can interleave with the reset.
        await asyncio.sleep(0.2)
        outcome = await super().reset(timeout=timeout)
        return outcome
class SlowCancelConnection(pg_connection.Connection):
    """Connection whose query cancellation is artificially delayed.

    Lets tests simulate races with Connection._cancel().
    """

    async def _cancel(self, waiter):
        # Stall first so a competing operation can interleave with the cancel.
        await asyncio.sleep(0.2)
        outcome = await super()._cancel(waiter)
        return outcome
class TestPool(tb.ConnectedTestCase):
# NOTE(review): this test body is truncated in this chunk -- the worker()
# coroutine and the remainder of test_pool_01 are cut off mid-definition;
# consult the full file before modifying.
async def test_pool_01(self):
# Exercise a range of concurrent task counts; set literal means the
# iteration order is arbitrary, which is fine for independent subtests.
for n in {1, 5, 10, 20, 100}:
with self.subTest(tasksnum=n):
pool = await self.create_pool(database='postgres',
min_size=5, max_size=10)
async def worker():
con = await pool.acquire()
async def test_introspection_no_stmt_cache_03(self):
# max_cacheable_statement_size will disable caching for
# the user query but not for the introspection query.
old_uid = apg_con._uid
# The 10k-character literal pushes the user query past the cacheable size,
# so only the introspection query should allocate a new statement id.
await self.con.fetchval(
"SELECT $1::int[], '{foo}'".format(foo='a' * 10000), [1, 2])
# Exactly one new prepared-statement uid: the introspection query's.
self.assertEqual(apg_con._uid, old_uid + 1)
import pkg_resources  # kept: unseen parts of this file may still use it

# importlib.metadata is the stdlib replacement for the deprecated (and
# import-time expensive) pkg_resources.get_distribution(...).version lookup.
from importlib.metadata import version as _dist_version

# Version of the installed "guillotina" distribution.
__version__ = _dist_version("guillotina")

# create logging
logger = glogging.getLogger("guillotina")
# Opt-in query instrumentation: only active when GDEBUG is truthy in the
# environment, so production code paths stay unpatched.
if os.environ.get("GDEBUG", "").lower() in ("true", "t", "1"): # pragma: no cover
# patches for extra debugging....
import asyncpg
import time
# Keep a reference to the unpatched coroutine so the timing wrapper
# installed later can delegate to the real implementation.
original_execute = asyncpg.connection.Connection._do_execute
logger.error("RUNNING IN DEBUG MODE")
def _record(query, duration):
    """Accumulate per-query statistics on the active transaction.

    For each distinct query string, ``txn._queries[query]`` holds a
    two-element list ``[call_count, total_duration_seconds]``.

    Best-effort: AttributeError is swallowed so that instrumentation can
    never break a live request.
    """
    try:
        # Imported lazily: this helper only runs in GDEBUG mode.
        from guillotina.transactions import get_transaction
        txn = get_transaction()
        if txn:
            if not hasattr(txn, "_queries"):
                txn._queries = {}
            if query not in txn._queries:
                txn._queries[query] = [0, 0.0]
            txn._queries[query][0] += 1
            txn._queries[query][1] += duration
    except AttributeError:
        # The source chunk was truncated here; the duplicated copy of this
        # helper later in the file ends with a bare `pass`, restored here so
        # the except clause has a body and the function parses.
        pass
async def get_connection(self, name: str = None) -> asyncpg.connection.Connection:
    """Open and return a fresh asyncpg connection for database *name*.

    Connection options are derived from this object's config; the DSN is
    resolved per-name via get_dsn().
    """
    options = _get_connection_options(self.config)
    return await asyncpg.connect(dsn=self.get_dsn(name), **options)
async def _get_new_connection(self):
# NOTE(review): this definition is truncated in this chunk -- the
# connect_utils._connect_addr(...) call below is cut off mid-argument-list;
# consult the full file before modifying.
if self._working_addr is None:
# First connection attempt on this pool.
con = await connection.connect(
*self._connect_args,
loop=self._loop,
connection_class=self._connection_class,
**self._connect_kwargs)
# Cache the resolved address, config and params so subsequent pool
# connections can skip DSN parsing and address resolution.
self._working_addr = con._addr
self._working_config = con._config
self._working_params = con._params
else:
# We've connected before and have a resolved address,
# and parsed options and config.
con = await connect_utils._connect_addr(
loop=self._loop,
addr=self._working_addr,
timeout=self._working_params.connect_timeout,
# NOTE(review): orphaned fragment -- this looks like a duplicated interior of
# the _record() helper defined earlier in the file (paste/extraction artifact,
# starts mid-definition); verify against the original file before touching it.
txn._queries = {}
if query not in txn._queries:
txn._queries[query] = [0, 0.0]
txn._queries[query][0] += 1
txn._queries[query][1] += duration
except AttributeError:
pass
async def _do_execute(self, query, *args, **kwargs):
    # Timed wrapper: delegate to the saved original Connection._do_execute
    # and record the wall-clock duration against the active transaction.
    began = time.time()
    outcome = await original_execute(self, query, *args, **kwargs)
    _record(query, time.time() - began)
    return outcome

# Install the wrapper in place of the original implementation.
asyncpg.connection.Connection._do_execute = _do_execute  # type: ignore
# Keep a handle on the unpatched (name-mangled) prepared-statement executor.
original_bind_execute = asyncpg.prepared_stmt.PreparedStatement._PreparedStatement__bind_execute

async def __bind_execute(self, *args, **kwargs):
    # Timed wrapper: delegate to the original executor and record the
    # wall-clock duration against the active transaction.
    began = time.time()
    outcome = await original_bind_execute(self, *args, **kwargs)
    _record(self._query, time.time() - began)
    return outcome

# Install the wrapper under the same name-mangled attribute.
asyncpg.prepared_stmt.PreparedStatement._PreparedStatement__bind_execute = __bind_execute  # type: ignore
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# we purposefully do not support these options for performance
# Replace the capabilities object set up by super().__init__() with a
# restricted one; only the plpgsql flag is carried over from the original,
# so this read must happen before the reassignment (order-sensitive).
self._server_caps = asyncpg.connection.ServerCapabilities(
advisory_locks=False,
notifications=False,
sql_reset=False,
sql_close_all=False,
plpgsql=self._server_caps.plpgsql
)
async def fetch(self, sql: str, rows: int = 1, params: dict = None):
"""
Fetches rows from the database, using the specified query.
.. warning::
It is not recommended to use this method - use :meth:`.AsyncpgTransaction.execute` instead!
:param sql: The SQL query to execute.
:param rows: The maximum number of rows to fetch from the cursor.
:param params: A mapping of named parameters to substitute into the query.
:return: The list of records fetched from the cursor.
"""
async with self.get_connection() as conn:
assert isinstance(conn, asyncpg.connection.Connection)
# Parse the sql and the params.
new_sql, p_tup = get_param_query(sql, params)
self.logger.debug("Fetching `{}`".format(new_sql))
self.logger.debug("{}".format(p_tup))
# Open up a new transaction.
# NOTE(review): asyncpg cursors must run inside a transaction --
# presumably why the explicit transaction block is used; confirm.
async with conn.transaction():
# Create a cursor.
cursor = await conn.cursor(new_sql, *p_tup)
items = await cursor.fetch(rows)
return items