How to use the sqlalchemy.MetaData class in SQLAlchemy

To help you get started, we’ve selected a few SQLAlchemy examples based on popular ways sqlalchemy.MetaData is used in public projects.

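The common thread in all of the examples below is the MetaData object: a registry that holds a collection of Table objects and the schema constructs attached to them. A minimal sketch of the basic pattern (the table name and columns here are illustrative, not taken from any of the projects below):

from sqlalchemy import MetaData, Table, Column, Integer, String, create_engine

metadata = MetaData()

# Define a table against the MetaData registry.
users = Table(
    "users",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("name", String(50)),
)

# Emit CREATE TABLE for everything registered on `metadata`.
engine = create_engine("sqlite:///:memory:")
metadata.create_all(engine)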

github sqlalchemy / sqlalchemy / test / dialect / postgresql / test_compiler.py
def test_create_index_with_with(self):
        m = MetaData()
        tbl = Table("testtbl", m, Column("data", String))

        idx1 = Index("test_idx1", tbl.c.data)
        idx2 = Index(
            "test_idx2", tbl.c.data, postgresql_with={"fillfactor": 50}
        )
        idx3 = Index(
            "test_idx3",
            tbl.c.data,
            postgresql_using="gist",
            postgresql_with={"buffering": "off"},
        )

        self.assert_compile(
            schema.CreateIndex(idx1),
            "CREATE INDEX test_idx1 ON testtbl (data)",
        )

github geoalchemy / geoalchemy2 / tests / test_functional.py
def test_reflection(self):
        t = Table(
            'lake',
            MetaData(),
            schema='gis',
            autoload=True,
            autoload_with=engine)
        type_ = t.c.geom.type
        assert isinstance(type_, Geometry)
        if postgis_version.startswith('1.'):
            assert type_.geometry_type == 'GEOMETRY'
            assert type_.srid == -1
        else:
            assert type_.geometry_type == 'LINESTRING'
            assert type_.srid == 4326
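
The autoload=True / autoload_with=engine pair shown here is the pre-1.4 reflection spelling; the separate autoload flag was deprecated in SQLAlchemy 1.4 and removed in 2.0. A minimal sketch of the modern equivalent, assuming a PostGIS database with the gis.lake table (the DSN here is hypothetical; the test module builds its own engine fixture):

import geoalchemy2  # importing geoalchemy2 registers the Geometry type for reflection
from sqlalchemy import MetaData, Table, create_engine

engine = create_engine("postgresql://user:pass@localhost/gis_test")  # hypothetical DSN

# autoload_with alone triggers reflection in SQLAlchemy 1.4+.
lake = Table("lake", MetaData(), schema="gis", autoload_with=engine)
print(lake.c.geom.type)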

github TurboGears / tg2 / tests / test_stack / rendering / test_pagination.py
            pager = p.pager(onclick='goto($page)')

            assert 'goto(1)' in pager
            assert 'goto(4)' in pager
            assert 'goto(6)' in pager
            assert 'goto(10)' in pager


try:
    import sqlite3
except ImportError:
    import pysqlite2
from sqlalchemy import (MetaData, Table, Column, ForeignKey, Integer, String)
from sqlalchemy.orm import create_session, mapper, relation

metadata = MetaData('sqlite:///:memory:')

test1 = Table('test1', metadata,
    Column('id', Integer, primary_key=True),
    Column('val', String(8)))

test2 = Table('test2', metadata,
    Column('id', Integer, primary_key=True),
    Column('test1id', Integer, ForeignKey('test1.id')),
    Column('val', String(8)))

test3 = Table('test3', metadata,
    Column('id', Integer, primary_key=True),
    Column('val', String(8)))

test4 = Table('test4', metadata,
    Column('id', Integer, primary_key=True))
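
Note that MetaData('sqlite:///:memory:') above relies on "bound metadata", which was deprecated in SQLAlchemy 1.4 and removed in 2.0. A minimal sketch of the modern spelling, keeping the MetaData unbound and passing the engine explicitly:

from sqlalchemy import create_engine, MetaData

engine = create_engine('sqlite:///:memory:')
metadata = MetaData()

# ...define test1 through test4 against `metadata` exactly as above...

# Pass the engine explicitly instead of binding it to the MetaData.
metadata.create_all(engine)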

github sqlalchemy / sqlalchemy / test / dialect / test_postgresql.py
def test_array_literal_insert(self):
        m = MetaData()
        t = Table('t', m, Column('data', postgresql.ARRAY(Integer)))
        self.assert_compile(
            t.insert().values(data=array([1, 2, 3])),
            "INSERT INTO t (data) VALUES (ARRAY[%(param_1)s, "
            "%(param_2)s, %(param_3)s])",
        )

github sqlalchemy / sqlalchemy / test / dialect / mssql / test_types.py
            # (excerpt begins mid-loop: each candidate column's type is
            # resolved against the dialect, then the Column is collected
            # into the Table() argument list)
            testing.db.dialect.type_descriptor(c.type)
            table_args.append(c)
        dates_table = Table(*table_args)
        gen = testing.db.dialect.ddl_compiler(
            testing.db.dialect, schema.CreateTable(dates_table)
        )
        for col in dates_table.c:
            index = int(col.name[1:])
            testing.eq_(
                gen.get_column_specification(col),
                "%s %s" % (col.name, columns[index][3]),
            )
            self.assert_(repr(col))
        dates_table.create(checkfirst=True)
        reflected_dates = Table(
            "test_mssql_dates", MetaData(testing.db), autoload=True
        )
        for col in reflected_dates.c:
            self.assert_types_base(col, dates_table.c[col.key])

github openstack / nova / nova / db / sqlalchemy / utils.py
def check_shadow_table(migrate_engine, table_name):
    """This method checks that table with ``table_name`` and
    corresponding shadow table have same columns.
    """
    meta = MetaData()
    meta.bind = migrate_engine

    table = Table(table_name, meta, autoload=True)
    shadow_table = Table(db._SHADOW_TABLE_PREFIX + table_name, meta,
                         autoload=True)

    columns = {c.name: c for c in table.columns}
    shadow_columns = {c.name: c for c in shadow_table.columns}

    for name, column in columns.items():
        if name not in shadow_columns:
            raise exception.NovaException(
                _("Missing column %(table)s.%(column)s in shadow table")
                        % {'column': name, 'table': shadow_table.name})
        shadow_column = shadow_columns[name]
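
The excerpt cuts off here; presumably the loop goes on to compare each pair of columns. A hedged sketch of how such a check could continue (the condition and message text are illustrative, not nova's exact code):

        # Illustrative continuation; nova's actual wording may differ.
        if not isinstance(shadow_column.type, type(column.type)):
            raise exception.NovaException(
                _("Different types in %(table)s.%(column)s and its "
                  "shadow table") % {'column': name, 'table': table.name})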

github openstack / glance / glance / db / sqlalchemy / migrate_repo / versions / 027_checksum_index.py
def upgrade(migrate_engine):
    meta = MetaData()
    meta.bind = migrate_engine

    images = Table('images', meta, autoload=True)

    # INDEX_NAME is a module-level constant defined earlier in this migration
    index = Index(INDEX_NAME, images.c.checksum)
    index.create(migrate_engine)

github openstack / keystone / keystone / common / sql / migrate_repo / versions / 011_endpoints_v3.py
def downgrade(migrate_engine):
    """Replace API-version specific endpoint tables with one based on v2."""
    meta = sql.MetaData()
    meta.bind = migrate_engine

    new_table = sql.Table('endpoint_v3', meta, autoload=True)
    new_table.drop()

    legacy_table = sql.Table('endpoint_v2', meta, autoload=True)

    renames = {'endpoint': legacy_table}
    service_table = sql.Table('service', meta, autoload=True)
    constraints = [{'table': legacy_table,
                    'fk_column': 'service_id',
                    'ref_column': service_table.c.id}]
    migration_helpers.rename_tables_with_constraints(renames, constraints,
                                                     migrate_engine)

github trakt / Plex-Trakt-Scrobbler / Trakttv.bundle / Contents / Libraries / Shared / shove / stores / db.py
def __init__(self, engine, **kw):
        super(DBStore, self).__init__(engine, **kw)
        # make store table
        self._store = Table(
            # get tablename
            kw.get('tablename', 'store'),
            MetaData(engine),
            Column('key', String(255), primary_key=True, nullable=False),
            Column('value', Binary, nullable=False),
        )
        # create store table if it does not exist
        if not self._store.exists():
            self._store.create()
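
The Table.exists() call used here was removed in SQLAlchemy 1.4. A minimal sketch of the modern equivalent, where store_table stands in for self._store and the engine is hypothetical:

from sqlalchemy import create_engine, inspect, MetaData, Table, Column, String, LargeBinary

engine = create_engine("sqlite:///:memory:")  # hypothetical engine
store_table = Table(
    "store",
    MetaData(),
    Column("key", String(255), primary_key=True, nullable=False),
    Column("value", LargeBinary, nullable=False),
)

# checkfirst=True issues the existence check and creates only if missing...
store_table.create(bind=engine, checkfirst=True)

# ...or ask the inspector explicitly.
print(inspect(engine).has_table("store"))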

github alephdata / aleph / aleph / migrate / versions / 294b8f9f9478_links_and_identity.py
def upgrade():
    bind = op.get_bind()
    meta = sa.MetaData()
    meta.bind = bind
    meta.reflect()
    table = meta.tables['entity_identity']
    bind.execute(table.delete())

    op.drop_column('collection', 'generate_entities')

    op.create_table('link',
        sa.Column('id', sa.String(length=32), nullable=False),
        sa.Column('created_at', sa.DateTime(), nullable=True),
        sa.Column('updated_at', sa.DateTime(), nullable=True),
        sa.Column('deleted_at', sa.DateTime(), nullable=True),
        sa.Column('type', sa.String(length=255), nullable=True),
        sa.Column('source_id', sa.String(length=254), nullable=True),
        sa.Column('target_id', sa.String(length=254), nullable=True),
        sa.Column('foreign_ids', postgresql.ARRAY(sa.Unicode()), nullable=True),
        # (remaining column and constraint definitions are truncated in this excerpt)
    )