How to use the psycopg2.extras module in psycopg2

To help you get started, we’ve selected a few psycopg2 examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github psycopg / psycopg2 / tests / test_ipaddress.py View on Github external
def test_inet_cast(self):
        """Verify register_ipaddress() maps inet columns to ipaddress objects.

        Covers three cases: SQL NULL comes back as None, an IPv4 value
        becomes an IPv4Interface, and an IPv6 value becomes an IPv6Interface.

        Uses assertIsNone/assertIsInstance/assertEqual: the original
        assert_()/assertEquals() aliases are deprecated and were removed
        in Python 3.12.
        """
        cur = self.conn.cursor()
        psycopg2.extras.register_ipaddress(cur)

        # NULL must not be wrapped in an interface object.
        cur.execute("select null::inet")
        self.assertIsNone(cur.fetchone()[0])

        cur.execute("select '127.0.0.1/24'::inet")
        obj = cur.fetchone()[0]
        self.assertIsInstance(obj, ip.IPv4Interface, repr(obj))
        self.assertEqual(obj, ip.ip_interface('127.0.0.1/24'))

        cur.execute("select '::ffff:102:300/128'::inet")
        obj = cur.fetchone()[0]
        self.assertIsInstance(obj, ip.IPv6Interface, repr(obj))
        self.assertEqual(obj, ip.ip_interface('::ffff:102:300/128'))
github CloverHealth / temporal-sqlalchemy / temporal_sqlalchemy / bases.py View on Github external
def make_clock(effective_lower: dt.datetime,
                   vclock_lower: int,
                   **kwargs) -> _ClockSet:
        """Build a _ClockSet from lower bounds plus optional upper bounds.

        Upper bounds are read from the ``effective_upper`` and
        ``vclock_upper`` keyword arguments; when absent they default to
        None, producing open-ended ranges.
        """
        effective_range = psql_extras.DateTimeTZRange(
            effective_lower,
            kwargs.get('effective_upper'),
        )
        vclock_range = psql_extras.NumericRange(
            vclock_lower,
            kwargs.get('vclock_upper'),
        )
        return _ClockSet(effective_range, vclock_range)
github waymarkedtrails / waymarked-trails-site / django / src / routemap / sites / hiking / __init__.py View on Github external
def set_schema(sender, connection, **kwargs):
    """Signal handler: point search_path at the hiking schema and register
    hstore support globally for this connection."""
    cur = connection.cursor()
    cur.execute("SET search_path TO hiking,public;")
    psycopg2.extras.register_hstore(cur, globally=True, unicode=True)
github amillb / pgMapMatch / tools.py View on Github external
def cur(self):
        """Create, store, and return a new database cursor.

        The cursor is a lightweight object and can be deleted after each
        use; creating one frequently is fine and might help with postgres
        memory use.  The latest cursor is always available as self.cursor,
        but the only intended use is outside calls of the form:
            thisobject.cur().execute('pg command')

        Returns:
            The newly created cursor (also bound to ``self.cursor``).

        Raises:
            ValueError: if ``self.curType`` is not a recognized type.
        """
        if self.curType == 'DictCursor':
            # Everything coming from the database will be dict-like rows.
            new_cursor = self.connection.cursor(
                cursor_factory=psycopg2.extras.DictCursor)
        elif self.curType == 'default':
            # Plain tuples - easier to convert to pandas.
            new_cursor = self.connection.cursor()
        else:
            # Previously an unknown curType fell through to a confusing
            # NameError on an unbound local; fail loudly instead.
            raise ValueError('Unknown curType: %r' % (self.curType,))
        # Plain rebinding replaces any previous cursor.  The original's
        # explicit ``del self.cursor`` could raise AttributeError if the
        # attribute had never been set.
        self.cursor = new_cursor
        return new_cursor
github ianmiell / alert-on-change / context / db.py View on Github external
def send(test=True):
	"""Iterate alert_on_change rows using a server-side cursor.

	NOTE(review): this snippet is truncated - the per-row processing
	continues past the visible lines, and the effect of the ``test``
	flag is not visible in this fragment.
	"""
	conn = _get_db_conn()
	# HERE IS THE IMPORTANT PART, by specifying a name for the cursor
	# psycopg2 creates a server-side cursor, which prevents all of the
	# records from being downloaded at once from the server.
	cursor = conn.cursor('cursor_unique_name', cursor_factory=psycopg2.extras.DictCursor)
	# execute our Query
	cursor.execute("select alert_on_change_id, command, output, common_threshold, email_address, description, last_updated, cadence, ignore_output, ok_exit_codes, follow_on_command from alert_on_change")

	# Because cursor objects are iterable we can just call 'for - in' on
	# the cursor object and the cursor will automatically advance itself
	# each iteration.
	for row in cursor:
		# Unpack the selected columns positionally into named locals.
		alert_on_change_id = row[0]
		command            = row[1]
		output             = row[2]
		common_threshold   = row[3]
		email_address      = row[4]
		description        = row[5]
		last_updated       = row[6]
		cadence            = row[7]
		# Turn buffer into a string.
github zalando / PGObserver / frontend / src / sprocdata.py View on Github external
def getActiveSprocsOrderedBy( hostId, order = " ORDER BY SUM(delta_total_time) DESC"):
    """Return the names of active stored procedures on a host.

    Args:
        hostId: monitored host id; escaped via psycopg2's adapt().
        order:  trailing ORDER BY clause appended verbatim to the query.

    Returns:
        List of sproc_name strings, in the requested order.

    NOTE(review): the SQL is assembled by string concatenation.  hostId
    is quoted with adapt(), but ``order`` (and the text returned by
    viewSprocs()) is interpolated raw - never pass untrusted input here.
    """
    sql = """SELECT sproc_name
               FROM ( """ + viewSprocs(hostId) + """ ) t JOIN monitor_data.sprocs ON sp_sproc_id = sproc_id
               WHERE sproc_host_id = """ + str(adapt(hostId)) + """
               GROUP BY sproc_name
             """ + order + """;
          """

    conn = datadb.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

    # try/finally guarantees the cursor and connection are released even
    # if the query fails (the original leaked them on exception).
    try:
        cur.execute( sql )
        # Don't shadow the builtin ``list``; collect the names directly.
        sproc_names = [row['sproc_name'] for row in cur]
    finally:
        cur.close()
        datadb.closeDataConnection(conn)
    return sproc_names
github zalando / PGObserver / frontend / src / hosts.py View on Github external
def getAllHostsData():
    """Fetch every row of monitor_data.hosts, keyed by host_id.

    Each value is a RealDictCursor row augmented with ``uishortname``
    (lowercased short name with dashes stripped) and ``uilongname``.
    """
    conn = datadb.getDataConnection()
    cur = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

    cur.execute("SELECT host_id, host_name, host_port, host_db, host_settings, host_group_id, host_enabled,"
                " host_ui_shortname, host_ui_longname"
                " FROM monitor_data.hosts ORDER BY host_id ASC;")

    hosts = {}
    for row in cur:
        row['uishortname'] = row['host_ui_shortname'].lower().replace('-','')
        row['uilongname'] = row['host_ui_longname']
        hosts[row['host_id']] = row

    cur.close()
    conn.close()
    return hosts
github kylejmcintyre / pypostgreports / data_dictionary.py View on Github external
#!/usr/bin/env python

import report, sys

import psycopg2.extras

# Build the shared CLI parser and add a report-title option.
parser = report.get_parser(sys.argv[0])
parser.add_argument('--title', '-t', required=False, dest='title', default="Data Dictionary", help='Report Title')

# Parse args, open the database connection, and create a dict-row cursor
# shared at module level by the report functions below.
args = parser.parse_args()
conn = report.get_connection(args)
curs = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)

def get_dictionary():
    """Query per-schema table counts (with pg_description text).

    Uses the module-level ``curs`` cursor; system schemas
    (information_schema, pg_catalog) are excluded.
    NOTE(review): this snippet appears truncated - the function body
    likely continues past the visible lines.
    """
    q = """
    select t1.nspname as schema, t3.description, count(*) as count
    from pg_namespace t1
    join information_schema.tables t2 on t1.nspname = t2.table_schema
    left outer join pg_description t3 on t1.oid = t3.objoid
    where t1.nspname not in ('information_schema', 'pg_catalog')
    group by schema, description
    order by schema
    """

    curs.execute(q)

    # All result rows as dict-like rows (schema, description, count).
    schemas = curs.fetchall()
github akrherz / iem / htdocs / plotting / auto / scripts100 / p106.py View on Github external
def plotter(fdict):
    """ Go """
    # NOTE(review): truncated snippet - the function continues past the
    # visible lines.
    pgconn = get_dbconn("asos")
    cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
    # Pull the standard autoplot context values for this plot.
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx["zstation"]
    threshold = ctx["threshold"]
    opt = ctx["opt"]
    month = ctx["month"]

    # Map the requested month option to the list of month numbers.
    if month == "all":
        months = range(1, 13)
    elif month == "fall":
        months = [9, 10, 11]
    elif month == "winter":
        months = [12, 1, 2]
    elif month == "spring":
        months = [3, 4, 5]
    elif month == "summer":
        months = [6, 7, 8]
github akrherz / iem / cgi-bin / precip / catSNET.py View on Github external
"""
Generate web output for precip data
"""
from io import StringIO
import datetime

import psycopg2.extras
from paste.request import parse_formvars
from pyiem.network import Table as NetworkTable
from pyiem.util import get_dbconn

# Station metadata for the KCCI/KIMIT/KELO networks.
nt = NetworkTable(("KCCI", "KIMIT", "KELO"))
# Module-level connection/cursor to the IEM database; rows come back
# dict-like via DictCursor.
IEM = get_dbconn("iem")
icursor = IEM.cursor(cursor_factory=psycopg2.extras.DictCursor)


# 25 hour slots -- presumably flags for which hours are required in the
# output grid; TODO confirm against the code that fills it.
requireHrs = [0] * 25
# Per-station precip data and totals, populated elsewhere in the script.
stData = {}
totp = {}


# Return the Date we will be looking for...
def doHeader(environ, start_response, sio):
    """header please"""
    start_response("200 OK", [("Content-type", "text/html")])
    sio.write(
        """


  <title>IEM | Hourly Precip Grid</title>