issue_comments: 997324666
html_url | issue_url | id | node_id | user | created_at | updated_at | author_association | reactions | issue | performed_via_github_app
---|---|---|---|---|---|---|---|---|---|---
https://github.com/simonw/datasette/issues/1555#issuecomment-997324666 | https://api.github.com/repos/simonw/datasette/issues/1555 | 997324666 | IC_kwDOBm6k_c47cfd6 | 9599 | 2021-12-19T03:47:51Z | 2021-12-19T03:48:09Z | OWNER | {"total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0} | 1079149656 | 

body:

Here's a hacked-together prototype of running all of that stuff inside a single function passed to `.execute_fn()`:

```diff
diff --git a/datasette/utils/internal_db.py b/datasette/utils/internal_db.py
index 95055d8..58f9982 100644
--- a/datasette/utils/internal_db.py
+++ b/datasette/utils/internal_db.py
@@ -1,4 +1,5 @@
 import textwrap
+from datasette.utils import table_column_details
 
 
 async def init_internal_db(db):
@@ -70,49 +71,70 @@ async def populate_schema_tables(internal_db, db):
         "DELETE FROM tables WHERE database_name = ?", [database_name], block=True
     )
     tables = (await db.execute("select * from sqlite_master WHERE type = 'table'")).rows
-    tables_to_insert = []
-    columns_to_delete = []
-    columns_to_insert = []
-    foreign_keys_to_delete = []
-    foreign_keys_to_insert = []
-    indexes_to_delete = []
-    indexes_to_insert = []
-    for table in tables:
-        table_name = table["name"]
-        tables_to_insert.append(
-            (database_name, table_name, table["rootpage"], table["sql"])
-        )
-        columns_to_delete.append((database_name, table_name))
-        columns = await db.table_column_details(table_name)
-        columns_to_insert.extend(
-            {
-                **{"database_name": database_name, "table_name": table_name},
-                **column._asdict(),
-            }
-            for column in columns
-        )
-        foreign_keys_to_delete.append((database_name, table_name))
-        foreign_keys = (
-            await db.execute(f"PRAGMA foreign_key_list([{table_name}])")
-        ).rows
-        foreign_keys_to_insert.extend(
-            {
-                **{"database_name": database_name, "table_name": table_name},
-                **dict(foreign_key),
-            }
-            for foreign_key in foreign_keys
-        )
-        indexes_to_delete.append((database_name, table_name))
-        indexes = (await db.execute(f"PRAGMA index_list([{table_name}])")).rows
-        indexes_to_insert.extend(
-            {
-                **{"database_name": database_name, "table_name": table_name},
-                **dict(index),
-            }
-            for index in indexes
+    def collect_info(conn):
+        tables_to_insert = []
+        columns_to_delete = []
+        columns_to_insert = []
+        foreign_keys_to_delete = []
+        foreign_keys_to_insert = []
+        indexes_to_delete = []
+        indexes_to_insert = []
+
+        for table in tables:
+            table_name = table["name"]
+            tables_to_insert.append(
+                (database_name, table_name, table["rootpage"], table["sql"])
+            )
+            columns_to_delete.append((database_name, table_name))
+            columns = table_column_details(conn, table_name)
+            columns_to_insert.extend(
+                {
+                    **{"database_name": database_name, "table_name": table_name},
+                    **column._asdict(),
+                }
+                for column in columns
+            )
+            foreign_keys_to_delete.append((database_name, table_name))
+            foreign_keys = conn.execute(
+                f"PRAGMA foreign_key_list([{table_name}])"
+            ).fetchall()
+            foreign_keys_to_insert.extend(
+                {
+                    **{"database_name": database_name, "table_name": table_name},
+                    **dict(foreign_key),
+                }
+                for foreign_key in foreign_keys
+            )
+            indexes_to_delete.append((database_name, table_name))
+            indexes = conn.execute(f"PRAGMA index_list([{table_name}])").fetchall()
+            indexes_to_insert.extend(
+                {
+                    **{"database_name": database_name, "table_name": table_name},
+                    **dict(index),
+                }
+                for index in indexes
+            )
+        return (
+            tables_to_insert,
+            columns_to_delete,
+            columns_to_insert,
+            foreign_keys_to_delete,
+            foreign_keys_to_insert,
+            indexes_to_delete,
+            indexes_to_insert,
         )
+
+    (
+        tables_to_insert,
+        columns_to_delete,
+        columns_to_insert,
+        foreign_keys_to_delete,
+        foreign_keys_to_insert,
+        indexes_to_delete,
+        indexes_to_insert,
+    ) = await db.execute_fn(collect_info)
+
     await internal_db.execute_write_many(
         """
             INSERT INTO tables (database_name, table_name, rootpage, sql)
```
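For reference, `db.execute_fn()` is the existing Datasette internals API that runs an ordinary function against the raw `sqlite3` connection inside the database's dedicated thread. Here is a minimal, hypothetical sketch of the pattern the prototype exploits (not the real `populate_schema_tables()` code):

```python
# Hypothetical illustration of the execute_fn() pattern - not code from the
# prototype above. `db` is assumed to be a datasette.database.Database.
async def count_rows_per_table(db):
    def collect_counts(conn):
        # conn is the underlying sqlite3.Connection. Everything in this
        # function runs synchronously inside the database thread, so the
        # whole batch costs one thread hand-off instead of one per query.
        names = [
            row[0]
            for row in conn.execute(
                "select name from sqlite_master where type = 'table'"
            ).fetchall()
        ]
        return {
            name: conn.execute(f"select count(*) from [{name}]").fetchone()[0]
            for name in names
        }

    # execute_fn() schedules collect_counts in the database thread and
    # awaits its return value.
    return await db.execute_fn(collect_counts)
```

That is exactly what `collect_info()` does in the diff: the per-table `PRAGMA foreign_key_list` and `PRAGMA index_list` queries run directly against `conn` instead of each taking a separate trip through `await db.execute(...)`.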
First impressions: it looks like this helps **a lot** - as far as I can tell this is now taking around 21ms to get to the point at which all of those internal database tables have been populated, where previously it took more than 180ms.

![CleanShot 2021-12-18 at 19 47 22@2x](https://user-images.githubusercontent.com/9599/146663192-bba098d5-e7bd-4e2e-b525-2270867888a0.png)
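A closing note on the write side: the diff above cuts off inside the call to Datasette's `execute_write_many()`, which bulk-inserts the lists returned by `collect_info()`. Here is a hedged sketch of that shape, assuming `tables_to_insert` holds the `(database_name, table_name, rootpage, sql)` tuples built in `collect_info()`; the SQL past the truncation point is not shown in the diff, so the `VALUES` clause here is an assumption:

```python
# Sketch only: the diff truncates before this point, so the VALUES clause
# below is an assumption based on the visible INSERT column list.
async def write_tables(internal_db, tables_to_insert):
    await internal_db.execute_write_many(
        """
        INSERT INTO tables (database_name, table_name, rootpage, sql)
        VALUES (?, ?, ?, ?)
        """,
        tables_to_insert,
        block=True,  # wait for the write thread to apply the inserts
    )
```

Like `execute_fn()` on the read side, this sends the whole batch into the database's write thread in one go (it uses `conn.executemany()` under the hood) rather than queuing a separate write per row.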