From 18d021dbfe835965daccee414f5aecff3216ef35 Mon Sep 17 00:00:00 2001 From: Cody Fincher Date: Sat, 24 Jan 2026 20:47:36 +0000 Subject: [PATCH 1/2] docs: enhance framework examples and SQL playground - Fix framework documentation with correct driver types and extension_config patterns - Add multi-database examples for FastAPI, Flask, and Starlette - Enhance SQL playground with sortable datatable, blinking cursor, and improved styling - Fix sphinx_datatables extension error by ensuring _static directory exists - Update Litestar dependency injection docs with DuckDB advanced configuration --- docs/conf.py | 12 + .../extensions/litestar/plugin_setup.py | 11 +- .../frameworks/fastapi/basic_setup.py | 6 +- .../frameworks/fastapi/multi_database.py | 57 ++++ docs/examples/frameworks/flask/basic_setup.py | 26 +- .../frameworks/flask/multi_database.py | 57 ++++ .../frameworks/starlette/basic_setup.py | 9 +- .../frameworks/starlette/multi_database.py | 60 ++++ .../litestar/dependency_injection.rst | 136 ++++++++-- tools/sphinx_ext/playground_template.html | 256 ++++++++++++++---- 10 files changed, 541 insertions(+), 89 deletions(-) create mode 100644 docs/examples/frameworks/fastapi/multi_database.py create mode 100644 docs/examples/frameworks/flask/multi_database.py create mode 100644 docs/examples/frameworks/starlette/multi_database.py diff --git a/docs/conf.py b/docs/conf.py index a57e72cb..2c766b03 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -320,6 +320,18 @@ def update_html_context( context["generate_toctree_html"] = partial(context["generate_toctree_html"], startdepth=0) +def _ensure_static_dir(app: Sphinx, exception: Any) -> None: + """Ensure _static directory exists for extensions that write to it.""" + if exception is None and hasattr(app.builder, "outdir"): + from pathlib import Path + + static_dir = Path(app.builder.outdir) / "_static" + static_dir.mkdir(parents=True, exist_ok=True) + + def setup(app: Sphinx) -> dict[str, bool]: app.setup_extension("shibuya") + # Ensure _static exists before sphinx_datatables tries to write to it + # Use priority < 500 to run before sphinx_datatables' finish handler + app.connect("build-finished", _ensure_static_dir, priority=100) return {"parallel_read_safe": True, "parallel_write_safe": True} diff --git a/docs/examples/extensions/litestar/plugin_setup.py b/docs/examples/extensions/litestar/plugin_setup.py index edcce1b2..b7751a81 100644 --- a/docs/examples/extensions/litestar/plugin_setup.py +++ b/docs/examples/extensions/litestar/plugin_setup.py @@ -8,10 +8,10 @@ def test_litestar_plugin_setup() -> None: pytest.importorskip("litestar") # start-example - from litestar import Litestar + from litestar import Litestar, get from sqlspec import SQLSpec - from sqlspec.adapters.sqlite import SqliteConfig + from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriver from sqlspec.extensions.litestar import SQLSpecPlugin sqlspec = SQLSpec() @@ -21,7 +21,12 @@ def test_litestar_plugin_setup() -> None: ) ) - app = Litestar(plugins=[SQLSpecPlugin(sqlspec=sqlspec)]) + @get("/health") + def health_check(db_session: SqliteDriver) -> dict[str, str]: + result = db_session.execute("SELECT 'ok' as status") + return result.one() + + app = Litestar(route_handlers=[health_check], plugins=[SQLSpecPlugin(sqlspec=sqlspec)]) # end-example assert app is not None diff --git a/docs/examples/frameworks/fastapi/basic_setup.py b/docs/examples/frameworks/fastapi/basic_setup.py index e62f6129..1ddae273 100644 --- a/docs/examples/frameworks/fastapi/basic_setup.py +++ 
b/docs/examples/frameworks/fastapi/basic_setup.py @@ -1,5 +1,3 @@ -from __future__ import annotations - from typing import Annotated, Any import pytest @@ -14,7 +12,7 @@ def test_fastapi_basic_setup() -> None: from fastapi import Depends, FastAPI from sqlspec import SQLSpec - from sqlspec.adapters.aiosqlite import AiosqliteConfig + from sqlspec.adapters.aiosqlite import AiosqliteConfig, AiosqliteDriver from sqlspec.extensions.fastapi import SQLSpecPlugin sqlspec = SQLSpec() @@ -24,7 +22,7 @@ def test_fastapi_basic_setup() -> None: db_ext = SQLSpecPlugin(sqlspec, app) @app.get("/teams") - async def list_teams(db: Annotated[Any, Depends(db_ext.provide_session())]) -> Any: + async def list_teams(db: Annotated[AiosqliteDriver, Depends(db_ext.provide_session())]) -> dict[str, Any]: result = await db.execute("select 1 as ok") return result.one() diff --git a/docs/examples/frameworks/fastapi/multi_database.py b/docs/examples/frameworks/fastapi/multi_database.py new file mode 100644 index 00000000..f1370894 --- /dev/null +++ b/docs/examples/frameworks/fastapi/multi_database.py @@ -0,0 +1,57 @@ +from typing import Annotated + +import pytest + +__all__ = ("test_fastapi_multi_database",) + + +def test_fastapi_multi_database() -> None: + pytest.importorskip("fastapi") + pytest.importorskip("aiosqlite") + # start-example + from fastapi import Depends, FastAPI + + from sqlspec import SQLSpec + from sqlspec.adapters.aiosqlite import AiosqliteConfig, AiosqliteDriver + from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriver + from sqlspec.extensions.fastapi import SQLSpecPlugin + + sqlspec = SQLSpec() + + # Primary async database + sqlspec.add_config( + AiosqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={ + "starlette": {"session_key": "db", "connection_key": "db_connection", "pool_key": "db_pool"} + }, + ) + ) + + # ETL sync database (e.g., DuckDB pattern) + sqlspec.add_config( + SqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={ + "starlette": {"session_key": "etl_db", "connection_key": "etl_connection", "pool_key": "etl_pool"} + }, + ) + ) + + app = FastAPI() + db_plugin = SQLSpecPlugin(sqlspec, app) + + @app.get("/report") + async def report( + db: Annotated[AiosqliteDriver, Depends(db_plugin.provide_session("db"))], + etl_db: Annotated[SqliteDriver, Depends(db_plugin.provide_session("etl_db"))], + ) -> dict[str, list]: + # Async query to primary database + users = await db.select("SELECT 1 as id, 'Alice' as name") + # Sync query to ETL database + metrics = etl_db.select("SELECT 'metric1' as name, 100 as value") + return {"users": users, "metrics": metrics} + + # end-example + + assert app is not None diff --git a/docs/examples/frameworks/flask/basic_setup.py b/docs/examples/frameworks/flask/basic_setup.py index f0a55a33..62bbc8f3 100644 --- a/docs/examples/frameworks/flask/basic_setup.py +++ b/docs/examples/frameworks/flask/basic_setup.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Any - import pytest __all__ = ("test_flask_basic_setup",) @@ -13,21 +11,29 @@ def test_flask_basic_setup() -> None: from flask import Flask from sqlspec import SQLSpec - from sqlspec.adapters.sqlite import SqliteConfig + from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriver from sqlspec.extensions.flask import SQLSpecPlugin + # Create SQLSpec and plugin at module level sqlspec = SQLSpec() sqlspec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) + plugin = SQLSpecPlugin(sqlspec) + + def create_app() -> 
Flask: + """Application factory pattern.""" + app = Flask(__name__) + plugin.init_app(app) - app = Flask(__name__) - plugin = SQLSpecPlugin(sqlspec, app) + @app.get("/health") + def health() -> dict[str, int]: + db: SqliteDriver = plugin.get_session() + result = db.execute("select 1 as ok") + return result.one() - @app.get("/health") - def health() -> Any: - session = plugin.get_session() - result = session.execute("select 1 as ok") - return result.one() + return app + app = create_app() # end-example assert plugin is not None + assert app is not None diff --git a/docs/examples/frameworks/flask/multi_database.py b/docs/examples/frameworks/flask/multi_database.py new file mode 100644 index 00000000..1a5fe41a --- /dev/null +++ b/docs/examples/frameworks/flask/multi_database.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import pytest + +__all__ = ("test_flask_multi_database",) + + +def test_flask_multi_database() -> None: + pytest.importorskip("flask") + # start-example + from flask import Flask + + from sqlspec import SQLSpec + from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriver + from sqlspec.extensions.flask import SQLSpecPlugin + + sqlspec = SQLSpec() + + # Primary database + sqlspec.add_config( + SqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={"flask": {"session_key": "db", "connection_key": "db_connection", "pool_key": "db_pool"}}, + ) + ) + + # ETL database with custom keys + sqlspec.add_config( + SqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={ + "flask": {"session_key": "etl_db", "connection_key": "etl_connection", "pool_key": "etl_pool"} + }, + ) + ) + + plugin = SQLSpecPlugin(sqlspec) + + def create_app() -> Flask: + app = Flask(__name__) + plugin.init_app(app) + + @app.get("/report") + def report() -> dict[str, list]: + db: SqliteDriver = plugin.get_session("db") + etl_db: SqliteDriver = plugin.get_session("etl_db") + + users = db.select("SELECT 1 as id, 'Alice' as name") + metrics = etl_db.select("SELECT 'metric1' as name, 100 as value") + return {"users": users, "metrics": metrics} + + return app + + app = create_app() + # end-example + + assert app is not None diff --git a/docs/examples/frameworks/starlette/basic_setup.py b/docs/examples/frameworks/starlette/basic_setup.py index d506c228..8ece3a18 100644 --- a/docs/examples/frameworks/starlette/basic_setup.py +++ b/docs/examples/frameworks/starlette/basic_setup.py @@ -15,19 +15,22 @@ def test_starlette_basic_setup() -> None: from starlette.routing import Route from sqlspec import SQLSpec - from sqlspec.adapters.aiosqlite import AiosqliteConfig + from sqlspec.adapters.aiosqlite import AiosqliteConfig, AiosqliteDriver from sqlspec.extensions.starlette import SQLSpecPlugin sqlspec = SQLSpec() sqlspec.add_config(AiosqliteConfig(connection_config={"database": ":memory:"})) + # Create plugin at module level + db_plugin = SQLSpecPlugin(sqlspec) + async def health(request: Request) -> JSONResponse: - db = request.app.state.sqlspec.get_session(request) + db: AiosqliteDriver = db_plugin.get_session(request) result = await db.execute("select 1 as ok") return JSONResponse(result.one()) app = Starlette(routes=[Route("/health", health)]) - app.state.sqlspec = SQLSpecPlugin(sqlspec, app) + db_plugin.init_app(app) # Initialize plugin with app # end-example assert app is not None diff --git a/docs/examples/frameworks/starlette/multi_database.py b/docs/examples/frameworks/starlette/multi_database.py new file mode 100644 index 00000000..1616af81 --- /dev/null 
+++ b/docs/examples/frameworks/starlette/multi_database.py @@ -0,0 +1,60 @@ +from __future__ import annotations + +import pytest + +__all__ = ("test_starlette_multi_database",) + + +def test_starlette_multi_database() -> None: + pytest.importorskip("starlette") + pytest.importorskip("aiosqlite") + # start-example + from starlette.applications import Starlette + from starlette.requests import Request + from starlette.responses import JSONResponse + from starlette.routing import Route + + from sqlspec import SQLSpec + from sqlspec.adapters.aiosqlite import AiosqliteConfig, AiosqliteDriver + from sqlspec.adapters.sqlite import SqliteConfig, SqliteDriver + from sqlspec.extensions.starlette import SQLSpecPlugin + + sqlspec = SQLSpec() + + # Primary async database + sqlspec.add_config( + AiosqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={ + "starlette": {"session_key": "db", "connection_key": "db_connection", "pool_key": "db_pool"} + }, + ) + ) + + # ETL sync database + sqlspec.add_config( + SqliteConfig( + connection_config={"database": ":memory:"}, + extension_config={ + "starlette": {"session_key": "etl_db", "connection_key": "etl_connection", "pool_key": "etl_pool"} + }, + ) + ) + + db_plugin = SQLSpecPlugin(sqlspec) + + async def report(request: Request) -> JSONResponse: + db: AiosqliteDriver = db_plugin.get_session(request, "db") + etl_db: SqliteDriver = db_plugin.get_session(request, "etl_db") + + # Async query to primary database + users = await db.select("SELECT 1 as id, 'Alice' as name") + # Sync query to ETL database + metrics = etl_db.select("SELECT 'metric1' as name, 100 as value") + return JSONResponse({"users": users, "metrics": metrics}) + + app = Starlette(routes=[Route("/report", report)]) + db_plugin.init_app(app) + # end-example + + assert app is not None diff --git a/docs/usage/frameworks/litestar/dependency_injection.rst b/docs/usage/frameworks/litestar/dependency_injection.rst index 3fabadfd..7e98ea3b 100644 --- a/docs/usage/frameworks/litestar/dependency_injection.rst +++ b/docs/usage/frameworks/litestar/dependency_injection.rst @@ -3,27 +3,35 @@ Dependency Injection ====================== The SQLSpec plugin integrates with Litestar's dependency injection system. By default, -it provides a session under the key ``db``. You can customize this key or register +it provides a session under the key ``db_session``. You can customize this key or register multiple databases with distinct keys. Default Injection ----------------- -When you add ``SQLSpecPlugin`` to your app, handlers can request ``db`` to receive +When you add ``SQLSpecPlugin`` to your app, handlers can request ``db_session`` to receive a session scoped to the request: .. code-block:: python + from sqlspec.adapters.aiosqlite import AiosqliteDriver + @get("/users") - async def list_users(db: AsyncSession) -> list[User]: - result = await db.execute("SELECT * FROM users") + async def list_users(db_session: AiosqliteDriver) -> list[User]: + result = await db_session.execute("SELECT * FROM users") return result.all(schema_type=User) +.. note:: + + Use the driver type that matches your config. For example, ``SqliteDriver`` for + ``SqliteConfig``, ``AiosqliteDriver`` for ``AiosqliteConfig``, or ``AsyncpgDriver`` + for ``AsyncpgConfig``. + Custom Keys ----------- -Use ``session_dependency_key`` and ``config_dependency_key`` to customize injection keys. -This is useful when connecting to multiple databases. 
+Use ``extension_config`` to customize the dependency injection keys for each database.
+Set ``session_key``, ``connection_key``, and ``pool_key`` to unique values when using multiple databases.
 
 .. literalinclude:: /examples/extensions/litestar/dependency_keys.py
    :language: python
@@ -36,22 +44,114 @@
 Multiple Databases
 ------------------
 
-Register separate plugins for each database, each with its own key:
+Configure each database with unique keys in its ``extension_config``, then use a single plugin.
+You can mix async and sync adapters; for example, an async PostgreSQL primary database with a
+sync DuckDB for ETL operations:
 
 .. code-block:: python
 
-    primary_plugin = SQLSpecPlugin(
-        sqlspec,
-        config_name="primary",
-        session_dependency_key="primary_db",
+    from litestar import Litestar, get
+    from sqlspec import SQLSpec
+    from sqlspec.adapters.asyncpg import AsyncpgConfig, AsyncpgDriver
+    from sqlspec.adapters.duckdb import DuckDBConfig, DuckDBDriver
+    from sqlspec.extensions.litestar import SQLSpecPlugin
+
+    sqlspec = SQLSpec()
+
+    # Primary async PostgreSQL database
+    sqlspec.add_config(
+        AsyncpgConfig(
+            connection_config={
+                "host": "localhost",
+                "port": 5432,
+                "database": "app",
+                "user": "app",
+                "password": "secret",
+            },
+            extension_config={"litestar": {"session_key": "db"}}
+        )
     )
-    analytics_plugin = SQLSpecPlugin(
-        sqlspec,
-        config_name="analytics",
-        session_dependency_key="analytics_db",
+
+    # ETL sync DuckDB database with custom keys
+    sqlspec.add_config(
+        DuckDBConfig(
+            connection_config={"database": "/tmp/etl.db"},
+            extension_config={
+                "litestar": {
+                    "session_key": "etl_db",
+                    "connection_key": "etl_connection",
+                    "pool_key": "etl_pool",
+                }
+            }
+        )
     )
 
     @get("/report")
-    async def report(primary_db: AsyncSession, analytics_db: AsyncSession) -> dict:
-        # Use both databases in one handler
-        ...
+    async def report(db: AsyncpgDriver, etl_db: DuckDBDriver) -> dict:
+        # Async query to primary PostgreSQL
+        users = await db.select("SELECT * FROM users")
+        # Sync query to DuckDB ETL database
+        metrics = etl_db.select("SELECT * FROM analytics")
+        return {"users": users, "metrics": metrics}
+
+    app = Litestar(
+        route_handlers=[report],
+        plugins=[SQLSpecPlugin(sqlspec=sqlspec)]  # Single plugin handles all configs
+    )
+
+Advanced DuckDB Configuration
+-----------------------------
+
+DuckDB supports extensions and connection hooks for advanced use cases like attaching
+external PostgreSQL databases. Use ``driver_features`` to configure extensions and
+``on_connection_create`` for custom connection initialization:
+
+.. 
code-block:: python + + from typing import Any + from sqlspec import SQLSpec + from sqlspec.adapters.duckdb import DuckDBConfig, DuckDBExtensionConfig + + def on_connection_create(connection: Any) -> None: + """Configure DuckDB connection with PostgreSQL attachment.""" + # Load postgres extension and attach external database + connection.execute("LOAD postgres") + connection.execute( + "ATTACH 'dbname=app user=app password=secret host=localhost' " + "AS pg (TYPE POSTGRES, SCHEMA 'public')" + ) + + sqlspec = SQLSpec() + sqlspec.add_config( + DuckDBConfig( + connection_config={ + "database": "/tmp/analytics.db", + "temp_directory": "/tmp", + }, + driver_features={ + "extensions": [ + DuckDBExtensionConfig(name="postgres"), + DuckDBExtensionConfig(name="encodings"), + ], + "on_connection_create": on_connection_create, + }, + extension_config={ + "litestar": { + "session_key": "etl_db", + "connection_key": "etl_connection", + } + } + ) + ) + +This pattern enables querying PostgreSQL tables directly from DuckDB SQL: + +.. code-block:: python + + @get("/sync-users") + def sync_users(etl_db: DuckDBDriver) -> dict: + # Query PostgreSQL via DuckDB's postgres extension + result = etl_db.execute( + "INSERT INTO local_users SELECT * FROM pg.users RETURNING *" + ) + return {"synced": result.rowcount} diff --git a/tools/sphinx_ext/playground_template.html b/tools/sphinx_ext/playground_template.html index 6883f7ea..9239b37c 100644 --- a/tools/sphinx_ext/playground_template.html +++ b/tools/sphinx_ext/playground_template.html @@ -343,6 +343,14 @@ background: rgba(237, 182, 65, 0.08); } + /* CodeMirror - Cursor */ + .sqlspec-playground .CodeMirror-cursor { + border-left: 2px solid #EDB641; + } + html.dark .sqlspec-playground .CodeMirror-cursor { + border-left: 2px solid #EDB641; + } + /* Dialog */ .sqlspec-playground__tip-dialog { border: 1px solid rgba(237, 182, 65, 0.3); @@ -401,9 +409,43 @@ } } + /* Inline status indicator */ + .sqlspec-playground__inline-status { + display: inline-flex; + align-items: center; + gap: 0.35rem; + font-size: 0.75rem; + color: #64748b; + margin-left: 0.5rem; + } + html.dark .sqlspec-playground__inline-status { + color: #94a3b8; + } + .sqlspec-playground__inline-status--running { + color: #EDB641; + } + .sqlspec-playground__inline-status--success { + color: #22c55e; + } + .sqlspec-playground__inline-status--error { + color: #ef4444; + } + .sqlspec-playground__spinner { + width: 12px; + height: 12px; + border: 2px solid currentColor; + border-top-color: transparent; + border-radius: 50%; + animation: spin 0.8s linear infinite; + } + @keyframes spin { + to { transform: rotate(360deg); } + } + /* Table Output */ .sqlspec-playground__table-wrapper { overflow-x: auto; + max-height: 320px; margin-top: 1rem; border-radius: 6px; border: 1px solid #e2e8f0; @@ -434,10 +476,56 @@ .sqlspec-playground__table th { background: #f1f5f9; + color: #202235; font-weight: 600; + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.05em; + position: sticky; + top: 0; + z-index: 10; + cursor: pointer; + user-select: none; + border-bottom: 2px solid #EDB641; } html.dark .sqlspec-playground__table th { background: #1e293b; + color: #e6edf3; + border-bottom-color: #EDB641; + } + .sqlspec-playground__table th:hover { + background: #e2e8f0; + } + html.dark .sqlspec-playground__table th:hover { + background: #334155; + } + .sqlspec-playground__table th .sort-indicator { + margin-left: 0.35rem; + opacity: 0.5; + } + .sqlspec-playground__table th.sorted-asc .sort-indicator::after { + 
content: "▲"; + opacity: 1; + } + .sqlspec-playground__table th.sorted-desc .sort-indicator::after { + content: "▼"; + opacity: 1; + } + + .sqlspec-playground__table tbody tr:nth-child(even) { + background: rgba(0, 0, 0, 0.02); + } + html.dark .sqlspec-playground__table tbody tr:nth-child(even) { + background: rgba(255, 255, 255, 0.02); + } + + .sqlspec-playground__row-count { + margin-top: 0.5rem; + font-size: 0.75rem; + color: #64748b; + } + html.dark .sqlspec-playground__row-count { + color: #94a3b8; } /* Error Output */ @@ -460,13 +548,12 @@
Python
+ @@ -480,23 +567,26 @@ config = spec.add_config(SqliteConfig(connection_config={"database": ":memory:"})) with spec.provide_session(config) as session: - session.execute("CREATE TABLE IF NOT EXISTS teams (id INTEGER PRIMARY KEY, name TEXT)") - session.execute("INSERT INTO teams (name) VALUES ('Litestar'), ('SQLSpec')") - result = session.select("SELECT * FROM teams") - # Return list of dictionaries for table rendering - print(result) + session.execute_script(""" + CREATE TABLE IF NOT EXISTS teams (id INTEGER PRIMARY KEY, name TEXT); + INSERT INTO teams (name) VALUES ('Litestar'), ('SQLSpec'), ('Starlite'); + """) + results = session.select("SELECT * FROM teams") + +results
- Output + Results
@@ -521,7 +611,9 @@
const root = document.getElementById("sqlspec-playground-{{ id }}"); const outputEl = root.querySelector("[data-output]"); const tableOutputEl = root.querySelector("[data-table-output]"); + const rowCountEl = root.querySelector("[data-row-count]"); const errorOutputEl = root.querySelector("[data-error-output]"); + const inlineStatusEl = root.querySelector("[data-inline-status]"); const pythonTextarea = root.querySelector("[data-python-editor]"); const runPythonButton = root.querySelector("[data-run-python]"); const resetPythonButton = root.querySelector("[data-reset-python]"); @@ -530,9 +622,10 @@
const openTipsButton = root.querySelector("[data-open-tips]"); const closeTipsButton = root.querySelector("[data-close-tips]"); - const statusEl = root.querySelector("[data-status]"); - const pythonDefault = pythonTextarea.value.trim(); + let currentData = null; + let sortColumn = null; + let sortDirection = 'asc'; const pythonEditor = CodeMirror.fromTextArea(pythonTextarea, { mode: "python", @@ -541,49 +634,88 @@
tabSize: 4, indentWithTabs: false, viewportMargin: Infinity, - styleActiveLine: true + styleActiveLine: true, + cursorBlinkRate: 530 }); let pyodide = null; - const writeOutput = (text) => { - outputEl.textContent += `${text}\n`; - outputEl.scrollTop = outputEl.scrollHeight; - }; - - const setStatus = (text, type = "default") => { - if (statusEl) { - statusEl.textContent = text; - statusEl.className = "sqlspec-playground__status"; - if (type === "ready") { - statusEl.classList.add("sqlspec-playground__status--ready"); - } + const setInlineStatus = (text, type) => { + if (!inlineStatusEl) return; + inlineStatusEl.className = "sqlspec-playground__inline-status"; + while (inlineStatusEl.firstChild) inlineStatusEl.removeChild(inlineStatusEl.firstChild); + if (type === "running") { + const spinner = document.createElement('span'); + spinner.className = 'sqlspec-playground__spinner'; + inlineStatusEl.appendChild(spinner); + inlineStatusEl.appendChild(document.createTextNode(' ' + text)); + inlineStatusEl.classList.add("sqlspec-playground__inline-status--running"); + } else if (type === "success") { + inlineStatusEl.textContent = '\u2713 ' + text; + inlineStatusEl.classList.add("sqlspec-playground__inline-status--success"); + } else if (type === "error") { + inlineStatusEl.textContent = '\u2717 ' + text; + inlineStatusEl.classList.add("sqlspec-playground__inline-status--error"); + } else { + inlineStatusEl.textContent = text; } }; const clearOutput = () => { outputEl.textContent = ""; - tableOutputEl.innerHTML = ""; + outputEl.hidden = true; + while (tableOutputEl.firstChild) tableOutputEl.removeChild(tableOutputEl.firstChild); tableOutputEl.hidden = true; + if (rowCountEl) rowCountEl.hidden = true; errorOutputEl.textContent = ""; errorOutputEl.hidden = true; + currentData = null; + sortColumn = null; + }; + + const sortData = (data, column, direction) => { + return [...data].sort((a, b) => { + const aVal = a[column], bVal = b[column]; + if (aVal === bVal) return 0; + if (aVal == null) return 1; + if (bVal == null) return -1; + return (aVal < bVal ? -1 : 1) * (direction === 'asc' ? 1 : -1); + }); }; const renderTable = (data) => { if (!Array.isArray(data) || data.length === 0 || typeof data[0] !== 'object') { - return; // Not tabular data + return false; } - + currentData = data; + const displayData = sortColumn ? sortData(data, sortColumn, sortDirection) : data; const headers = Object.keys(data[0]); + while (tableOutputEl.firstChild) tableOutputEl.removeChild(tableOutputEl.firstChild); + const table = document.createElement('table'); table.className = 'sqlspec-playground__table'; - // Header + // Header with sort const thead = document.createElement('thead'); const trHead = document.createElement('tr'); headers.forEach(header => { const th = document.createElement('th'); th.textContent = header; + const indicator = document.createElement('span'); + indicator.className = 'sort-indicator'; + th.appendChild(indicator); + if (header === sortColumn) { + th.classList.add(sortDirection === 'asc' ? 'sorted-asc' : 'sorted-desc'); + } + th.addEventListener('click', () => { + if (sortColumn === header) { + sortDirection = sortDirection === 'asc' ? 'desc' : 'asc'; + } else { + sortColumn = header; + sortDirection = 'asc'; + } + renderTable(currentData); + }); trHead.appendChild(th); }); thead.appendChild(trHead); @@ -591,11 +723,12 @@
// Body const tbody = document.createElement('tbody'); - data.forEach(row => { + displayData.forEach(row => { const tr = document.createElement('tr'); headers.forEach(header => { const td = document.createElement('td'); - td.textContent = row[header]; + const val = row[header]; + td.textContent = val === null ? 'null' : val; tr.appendChild(td); }); tbody.appendChild(tr); @@ -603,6 +736,11 @@
table.appendChild(tbody); tableOutputEl.appendChild(table); tableOutputEl.hidden = false; + if (rowCountEl) { + rowCountEl.textContent = data.length + ' row' + (data.length !== 1 ? 's' : ''); + rowCountEl.hidden = false; + } + return true; }; const showError = (error) => { @@ -612,27 +750,24 @@
const initializePlayground = async () => { try { - setStatus("Loading Pyodide..."); + setInlineStatus("Loading...", "running"); pyodide = await loadPyodide({ indexURL: "https://cdn.jsdelivr.net/pyodide/v0.29.0/full/" }); - pyodide.setStdout({ batched: writeOutput }); - pyodide.setStderr({ batched: writeOutput }); + const stdoutBuffer = []; + pyodide.setStdout({ batched: (t) => stdoutBuffer.push(t) }); + pyodide.setStderr({ batched: (t) => stdoutBuffer.push(t) }); - setStatus("Loading libraries..."); await pyodide.loadPackage(["sqlite3", "micropip"]); const micropip = pyodide.pyimport("micropip"); - await micropip.install("sqlspec"); - setStatus("Ready", "ready"); - clearOutput(); - writeOutput("Ready! Click 'Run' to execute the code."); + setInlineStatus("Ready", "success"); runPythonButton.disabled = false; } catch (err) { - setStatus("Failed to load"); - showError(`Failed to initialize: ${err}`); + setInlineStatus("Failed", "error"); + showError('Failed to initialize: ' + err); console.error(err); } }; @@ -641,33 +776,52 @@
runPythonButton.addEventListener("click", async () => { if (!pyodide) { - writeOutput("Pyodide is not ready yet."); + setInlineStatus("Not ready", "error"); return; } clearOutput(); - writeOutput("Running...\n"); + setInlineStatus("Running...", "running"); + + const stdoutBuffer = []; + pyodide.setStdout({ batched: (t) => stdoutBuffer.push(t) }); + pyodide.setStderr({ batched: (t) => stdoutBuffer.push(t) }); + try { const result = await pyodide.runPythonAsync(pythonEditor.getValue()); + let tableRendered = false; + if (result && typeof result.toJs === 'function') { - const jsResult = result.toJs({dict_converter: Object.fromEntries}); - if (Array.isArray(jsResult)) { - renderTable(jsResult); - } else { - writeOutput(String(result)); - } - } else if (result !== undefined) { - writeOutput(String(result)); + const jsResult = result.toJs({dict_converter: Object.fromEntries}); + if (Array.isArray(jsResult)) { + tableRendered = renderTable(jsResult); + } } + if (tableRendered) { + setInlineStatus("Done", "success"); + } else if (stdoutBuffer.length > 0) { + outputEl.textContent = stdoutBuffer.join('\n'); + outputEl.hidden = false; + setInlineStatus("Done", "success"); + } else if (result !== undefined && result !== null) { + outputEl.textContent = String(result); + outputEl.hidden = false; + setInlineStatus("Done", "success"); + } else { + setInlineStatus("Done", "success"); + } } catch (err) { showError(String(err)); + setInlineStatus("Error", "error"); console.error(err); } }); resetPythonButton.addEventListener("click", () => { pythonEditor.setValue(pythonDefault); + clearOutput(); + setInlineStatus("Reset", "success"); }); clearOutputButton.addEventListener("click", () => { From 00c360e3258d6a0912aba72052f19bc1fc11cf5b Mon Sep 17 00:00:00 2001 From: Cody Fincher Date: Mon, 26 Jan 2026 19:54:05 +0000 Subject: [PATCH 2/2] docs: update .gitignore to include .agent and .geminiignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 3728caca..42ab1062 100644 --- a/.gitignore +++ b/.gitignore @@ -61,3 +61,5 @@ specs/ .playwright-mcp .geminiignore uv.toml +.agent/ +.geminiignore \ No newline at end of file