How to use the datasette.utils.sqlite3 module in datasette

To help you get started, we’ve selected a few datasette examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github simonw / datasette / tests / fixtures.py View on Github external
filename="fixtures.db",
    is_immutable=False,
    extra_databases=None,
    inspect_data=None,
    static_mounts=None,
    template_dir=None,
):
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        open(os.path.join(plugins_dir, "my_plugin.py"), "w").write(PLUGIN1)
        open(os.path.join(plugins_dir, "my_plugin_2.py"), "w").write(PLUGIN2)
        config = config or {}
        config.update(
github simonw / datasette / tests / fixtures.py View on Github external
filepath = os.path.join(tmpdir, filename)
        if is_immutable:
            files = []
            immutables = [filepath]
        else:
            files = [filepath]
            immutables = []
        conn = sqlite3.connect(filepath)
        conn.executescript(TABLES)
        for sql, params in TABLE_PARAMETERIZED_SQL:
            with conn:
                conn.execute(sql, params)
        if extra_databases is not None:
            for extra_filename, extra_sql in extra_databases.items():
                extra_filepath = os.path.join(tmpdir, extra_filename)
                sqlite3.connect(extra_filepath).executescript(extra_sql)
                files.append(extra_filepath)
        os.chdir(os.path.dirname(filepath))
        plugins_dir = os.path.join(tmpdir, "plugins")
        os.mkdir(plugins_dir)
        open(os.path.join(plugins_dir, "my_plugin.py"), "w").write(PLUGIN1)
        open(os.path.join(plugins_dir, "my_plugin_2.py"), "w").write(PLUGIN2)
        config = config or {}
        config.update(
            {
                "default_page_size": 50,
                "max_returned_rows": max_returned_rows or 100,
                "sql_time_limit_ms": sql_time_limit_ms or 200,
            }
        )
        ds = Datasette(
            files,
github simonw / datasette / tests / test_plugins.py View on Github external
def test_plugins_async_template_function(restore_working_directory):
    """An awaitable template function's result should appear in rendered HTML.

    The custom template renders the output of an async plugin-provided
    function into a <pre> element; it should equal the SQLite version
    reported by a direct in-memory connection.
    """
    templates = str(pathlib.Path(__file__).parent / "test_templates")
    for client in make_app_client(template_dir=templates):
        response = client.get("/-/metadata")
        assert response.status == 200
        soup = Soup(response.body, "html.parser")
        rendered = soup.select("pre.extra_from_awaitable_function")[0].text
        version = (
            sqlite3.connect(":memory:")
            .execute("select sqlite_version()")
            .fetchone()[0]
        )
        assert version == rendered
github simonw / datasette / tests / test_utils.py View on Github external
"qSpecies" TEXT
    );
    CREATE TABLE "Street_Tree_List" (
      "TreeID" INTEGER,
      "qSpecies" TEXT,
      "qAddress" TEXT,
      "SiteOrder" INTEGER,
      "qSiteInfo" TEXT,
      "PlantType" TEXT,
      "qCaretaker" TEXT
    );
    CREATE VIEW Test_View AS SELECT * FROM Dumb_Table;
    CREATE VIRTUAL TABLE "Street_Tree_List_fts" USING FTS4 ("qAddress", "qCaretaker", "qSpecies", content="Street_Tree_List");
    CREATE VIRTUAL TABLE r USING rtree(a, b, c);
    """
    conn = utils.sqlite3.connect(":memory:")
    conn.executescript(sql)
    assert None is utils.detect_fts(conn, "Dumb_Table")
    assert None is utils.detect_fts(conn, "Test_View")
    assert None is utils.detect_fts(conn, "r")
    assert "Street_Tree_List_fts" == utils.detect_fts(conn, "Street_Tree_List")
github simonw / datasette / datasette / database.py View on Github external
def connect(self):
    """Open and return a new sqlite3 connection for this database.

    An in-memory database gets a plain ":memory:" connection. A
    file-backed database is opened through a URI filename: read-only
    ("mode=ro") when the file may change underneath us, or with the
    stronger "immutable=1" flag when it never will.
    """
    if self.is_memory:
        return sqlite3.connect(":memory:")
    # Choose the URI query string based on mutability.
    qs = "mode=ro" if self.is_mutable else "immutable=1"
    uri = "file:{}?{}".format(self.path, qs)
    return sqlite3.connect(uri, uri=True, check_same_thread=False)
github simonw / datasette / datasette / inspect.py View on Github external
def inspect_tables(conn, database_metadata):
    " List tables and their row counts, excluding uninteresting tables. "
    tables = {}
    table_names = [
        r["name"]
        for r in conn.execute('select * from sqlite_master where type="table"')
    ]

    for table in table_names:
        table_metadata = database_metadata.get("tables", {}).get(table, {})

        try:
            count = conn.execute(
                "select count(*) from {}".format(escape_sqlite(table))
            ).fetchone()[0]
        except sqlite3.OperationalError:
            # This can happen when running against a FTS virtual table
            # e.g. "select count(*) from some_fts;"
            count = 0

        column_names = table_columns(conn, table)

        tables[table] = {
            "name": table,
            "columns": column_names,
            "primary_keys": detect_primary_keys(conn, table),
            "count": count,
            "hidden": table_metadata.get("hidden") or False,
            "fts_table": detect_fts(conn, table),
        }

    foreign_keys = get_all_foreign_keys(conn)
github simonw / datasette / datasette / app.py View on Github external
def prepare_connection(self, conn):
    """Configure a freshly opened SQLite connection for Datasette.

    Sets the row and text factories, registers custom SQL functions,
    loads any configured SQLite extensions, applies the cache-size
    setting, then lets plugins further customise the connection.
    """
    conn.row_factory = sqlite3.Row
    # Decode TEXT columns as UTF-8, replacing undecodable bytes instead
    # of raising, so badly encoded data still renders.
    conn.text_factory = lambda x: str(x, "utf-8", "replace")
    for name, num_args, func in self.sqlite_functions:
        conn.create_function(name, num_args, func)
    if self.sqlite_extensions:
        conn.enable_load_extension(True)
        for extension in self.sqlite_extensions:
            # Bind the extension path as a parameter rather than
            # interpolating it into the SQL: a path containing a quote
            # would otherwise break (or inject into) the statement.
            conn.execute("SELECT load_extension(?)", [extension])
    cache_size_kb = self.config("cache_size_kb")
    if cache_size_kb:
        # PRAGMA statements cannot take bound parameters; coerce the
        # configured value to int so only a number is ever interpolated.
        # A negative cache_size means "size in KiB" to SQLite.
        conn.execute("PRAGMA cache_size=-{}".format(int(cache_size_kb)))
    # pylint: disable=no-member
    pm.hook.prepare_connection(conn=conn)
github simonw / datasette / datasette / views / base.py View on Github external
# Some quick sanity checks
            if not self.ds.config("allow_csv_stream"):
                raise DatasetteError("CSV streaming is disabled", status=400)
            if request.args.get("_next"):
                raise DatasetteError("_next not allowed for CSV streaming", status=400)
            kwargs["_size"] = "max"
        # Fetch the first page
        try:
            response_or_template_contexts = await self.data(
                request, database, hash, **kwargs
            )
            if isinstance(response_or_template_contexts, Response):
                return response_or_template_contexts
            else:
                data, _, _ = response_or_template_contexts
        except (sqlite3.OperationalError, InvalidSql) as e:
            raise DatasetteError(str(e), title="Invalid SQL", status=400)

        except (sqlite3.OperationalError) as e:
            raise DatasetteError(str(e))

        except DatasetteError:
            raise

        # Convert rows and columns to CSV
        headings = data["columns"]
        # if there are expanded_columns we need to add additional headings
        expanded_columns = set(data.get("expanded_columns") or [])
        if expanded_columns:
            headings = []
            for column in data["columns"]:
                headings.append(column)
github simonw / datasette / datasette / app.py View on Github external
async def run_sanity_checks(self):
        """Probe every attached database for problems before serving.

        Currently the only check is for SpatiaLite: running
        ``PRAGMA table_info`` against a table backed by SpatiaLite's
        virtual-table machinery fails with OperationalError when the
        module is not loaded, which is converted into a friendly
        ``click.UsageError`` telling the user how to load it.
        """
        # Only one check right now, for Spatialite
        for database_name, database in self.databases.items():
            # Run pragma_info on every table
            for table in await database.table_names():
                try:
                    await self.execute(
                        database_name,
                        "PRAGMA table_info({});".format(escape_sqlite(table)),
                    )
                except sqlite3.OperationalError as e:
                    # This exact message is what SQLite emits when a
                    # SpatiaLite table is queried without the extension.
                    if e.args[0] == "no such module: VirtualSpatialIndex":
                        raise click.UsageError(
                            "It looks like you're trying to load a SpatiaLite"
                            " database without first loading the SpatiaLite module."
                            "\n\nRead more: https://datasette.readthedocs.io/en/latest/spatialite.html"
                        )
                    else:
                        # Any other operational error is unexpected; re-raise.
                        raise