issue_comments: 1353707828
field | value |
---|---|
html_url | https://github.com/simonw/datasette/issues/1959#issuecomment-1353707828 |
issue_url | https://api.github.com/repos/simonw/datasette/issues/1959 |
id | 1353707828 |
node_id | IC_kwDOBm6k_c5Qr_E0 |
user | 9599 |
created_at | 2022-12-15T21:06:29Z |
updated_at | 2022-12-15T21:06:29Z |
author_association | OWNER |
reactions | {"total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0} |
issue | 1499081664 |
performed_via_github_app |  |

body:

Previous, abandoned attempt at this work (for #1843):

```diff
diff --git a/datasette/app.py b/datasette/app.py
index 7e682498..cf35c3a2 100644
--- a/datasette/app.py
+++ b/datasette/app.py
@@ -228,7 +228,7 @@ class Datasette:
         template_dir=None,
         plugins_dir=None,
         static_mounts=None,
-        memory=False,
+        memory=None,
         settings=None,
         secret=None,
         version_note=None,
@@ -238,6 +238,7 @@ class Datasette:
         nolock=False,
     ):
         self._startup_invoked = False
+        self._extra_on_startup = []
         assert config_dir is None or isinstance(
             config_dir, Path
         ), "config_dir= should be a pathlib.Path"
@@ -278,7 +279,7 @@ class Datasette:
                 raise
         self.crossdb = crossdb
         self.nolock = nolock
-        if memory or crossdb or not self.files:
+        if memory or crossdb or (not self.files and memory is not False):
             self.add_database(
                 Database(self, is_mutable=False, is_memory=True), name="_memory"
             )
@@ -391,6 +392,9 @@ class Datasette:
             self._root_token = secrets.token_hex(32)
         self.client = DatasetteClient(self)
 
+    def _add_on_startup(self, fn):
+        self._extra_on_startup.append(fn)
+
     async def refresh_schemas(self):
         if self._refresh_schemas_lock.locked():
             return
@@ -431,6 +435,8 @@ class Datasette:
         # This must be called for Datasette to be in a usable state
         if self._startup_invoked:
             return
+        for fn in self._extra_on_startup:
+            await fn()
         # Register permissions, but watch out for duplicate name/abbr
         names = {}
         abbrs = {}
@@ -1431,9 +1437,9 @@ class Datasette:
         )
         if self.setting("trace_debug"):
             asgi = AsgiTracer(asgi)
-        asgi = AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup])
         for wrapper in pm.hook.asgi_wrapper(datasette=self):
             asgi = wrapper(asgi)
+        asgi = AsgiRunOnFirstRequest(asgi, on_startup=[setup_db, self.invoke_startup])
         return asgi
diff --git a/datasette/utils/asgi.py b/datasette/utils/asgi.py
index 56690251..986755cb 100644
--- a/datasette/utils/asgi.py
+++ b/datasette/utils/asgi.py
@@ -423,9 +423,9 @@ class AsgiFileDownload:
 
 
 class AsgiRunOnFirstRequest:
-    def __init__(self, asgi, on_startup):
+    def __init__(self, app, on_startup):
         assert isinstance(on_startup, list)
-        self.asgi = asgi
+        self.app = app
         self.on_startup = on_startup
         self._started = False
 
@@ -434,4 +434,4 @@ class AsgiRunOnFirstRequest:
             self._started = True
             for hook in self.on_startup:
                 await hook()
-        return await self.asgi(scope, receive, send)
+        return await self.app(scope, receive, send)
diff --git a/tests/conftest.py b/tests/conftest.py
index cd735e12..d1301943 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -23,6 +23,15 @@ UNDOCUMENTED_PERMISSIONS = {
 }
 
 
+# @pytest.fixture(autouse=True)
+# def log_name_of_test_before_test(request):
+#     # To help identify tests that are hanging
+#     name = str(request.node)
+#     with open("/tmp/test.log", "a") as f:
+#         f.write(name + "\n")
+#     yield
+
+
 def pytest_report_header(config):
     return "SQLite: {}".format(
         sqlite3.connect(":memory:").execute("select sqlite_version()").fetchone()[0]
diff --git a/tests/fixtures.py b/tests/fixtures.py
index a6700239..18d3f1b7 100644
--- a/tests/fixtures.py
+++ b/tests/fixtures.py
@@ -101,6 +101,19 @@ EXPECTED_PLUGINS = [
 ]
 
 
+def _populate_connection(conn):
+    # Drop any tables and views that exist
+    to_drop = conn.execute(
+        "SELECT name, type FROM sqlite_master where type in ('table', 'view')"
+    ).fetchall()
+    for name, type in to_drop:
+        conn.execute(f"DROP {type} IF EXISTS [{name}]")
+    conn.executescript(TABLES)
+    for sql, params in TABLE_PARAMETERIZED_SQL:
+        with conn:
+            conn.execute(sql, params)
+
+
 @contextlib.contextmanager
 def make_app_client(
     sql_time_limit_ms=None,
@@ -117,45 +130,22 @@ def make_app_client(
     metadata=None,
     crossdb=False,
 ):
-    with tempfile.TemporaryDirectory() as tmpdir:
-        filepath = os.path.join(tmpdir, filename)
-        if is_immutable:
-            files = []
-            immutables = [filepath]
-        else:
-            files = [filepath]
-            immutables = []
-        conn = sqlite3.connect(filepath)
-        conn.executescript(TABLES)
-        for sql, params in TABLE_PARAMETERIZED_SQL:
-            with conn:
-                conn.execute(sql, params)
-        # Close the connection to avoid "too many open files" errors
-        conn.close()
-        if extra_databases is not None:
-            for extra_filename, extra_sql in extra_databases.items():
-                extra_filepath = os.path.join(tmpdir, extra_filename)
-                c2 = sqlite3.connect(extra_filepath)
-                c2.executescript(extra_sql)
-                c2.close()
-                # Insert at start to help test /-/databases ordering:
-                files.insert(0, extra_filepath)
-        os.chdir(os.path.dirname(filepath))
-        settings = settings or {}
-        for key, value in {
-            "default_page_size": 50,
-            "max_returned_rows": max_returned_rows or 100,
-            "sql_time_limit_ms": sql_time_limit_ms or 200,
-            # Default is 3 but this results in "too many open files"
-            # errors when running the full test suite:
-            "num_sql_threads": 1,
-        }.items():
-            if key not in settings:
-                settings[key] = value
+    settings = settings or {}
+    for key, value in {
+        "default_page_size": 50,
+        "max_returned_rows": max_returned_rows or 100,
+        "sql_time_limit_ms": sql_time_limit_ms or 200,
+        # Default is 3 but this results in "too many open files"
+        # errors when running the full test suite:
+        "num_sql_threads": 1,
+    }.items():
+        if key not in settings:
+            settings[key] = value
+    # We can use an in-memory database, but only if we're not doing anything
+    # with is_immutable or extra_databases and filename is the default
+    if not is_immutable and not extra_databases and filename == "fixtures.db":
         ds = Datasette(
-            files,
-            immutables=immutables,
-            memory=memory,
+            memory=memory or False,
             cors=cors,
             metadata=metadata or METADATA,
             plugins_dir=PLUGINS_DIR,
@@ -165,12 +155,57 @@ def make_app_client(
             template_dir=template_dir,
             crossdb=crossdb,
         )
+        db = ds.add_memory_database("fixtures")
+
+        async def populate_fixtures():
+            print("Here we go... populating fixtures")
+            await db.execute_write_fn(_populate_connection)
+
+        ds._add_on_startup(populate_fixtures)
         yield TestClient(ds)
-        # Close as many database connections as possible
-        # to try and avoid too many open files error
-        for db in ds.databases.values():
-            if not db.is_memory:
-                db.close()
+    else:
+        with tempfile.TemporaryDirectory() as tmpdir:
+            filepath = os.path.join(tmpdir, filename)
+            if is_immutable:
+                files = []
+                immutables = [filepath]
+            else:
+                files = [filepath]
+                immutables = []
+
+            conn = sqlite3.connect(filepath)
+            _populate_connection(conn)
+            # Close the connection to reduce "too many open files" errors
+            conn.close()
+
+            if extra_databases is not None:
+                for extra_filename, extra_sql in extra_databases.items():
+                    extra_filepath = os.path.join(tmpdir, extra_filename)
+                    c2 = sqlite3.connect(extra_filepath)
+                    c2.executescript(extra_sql)
+                    c2.close()
+                    # Insert at start to help test /-/databases ordering:
+                    files.insert(0, extra_filepath)
+            os.chdir(os.path.dirname(filepath))
+            ds = Datasette(
+                files,
+                immutables=immutables,
+                memory=memory,
+                cors=cors,
+                metadata=metadata or METADATA,
+                plugins_dir=PLUGINS_DIR,
+                settings=settings,
+                inspect_data=inspect_data,
+                static_mounts=static_mounts,
+                template_dir=template_dir,
+                crossdb=crossdb,
+            )
+            yield TestClient(ds)
+            # Close as many database connections as possible
+            # to try and avoid too many open files error
+            for db in ds.databases.values():
+                if not db.is_memory:
+                    db.close()
 
 
 @pytest.fixture(scope="session")
diff --git a/tests/test_cli.py b/tests/test_cli.py
index d3e015fa..d9e4e457 100644
--- a/tests/test_cli.py
+++ b/tests/test_cli.py
@@ -1,5 +1,6 @@
 from .fixtures import (
     app_client,
+    app_client_with_cors,
     make_app_client,
     TestClient as _TestClient,
     EXPECTED_PLUGINS,
@@ -38,7 +39,7 @@ def test_inspect_cli(app_client):
         assert expected_count == database["tables"][table_name]["count"]
 
 
-def test_inspect_cli_writes_to_file(app_client):
+def test_inspect_cli_writes_to_file(app_client_with_cors):
     runner = CliRunner()
     result = runner.invoke(
         cli, ["inspect", "fixtures.db", "--inspect-file", "foo.json"]
```
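For context, a minimal self-contained sketch of the lazy-startup pattern the patch leans on: an ASGI wrapper that runs its `on_startup` callables exactly once, immediately before forwarding the first request. It mirrors `AsgiRunOnFirstRequest` from the diff above; the `inner_app`, `populate_fixtures` and `main` names here are made up for illustration. Moving the wrapper to the outside of the `asgi_wrapper` plugin chain (the last `app.py` change) presumably ensures startup has already run before any plugin-supplied wrapper sees a request.

```python
import asyncio


class RunOnFirstRequest:
    # Simplified stand-in for AsgiRunOnFirstRequest: run startup hooks once,
    # just before the first request reaches the wrapped ASGI app.
    def __init__(self, app, on_startup):
        assert isinstance(on_startup, list)
        self.app = app
        self.on_startup = on_startup
        self._started = False

    async def __call__(self, scope, receive, send):
        if not self._started:
            self._started = True
            for hook in self.on_startup:
                await hook()
        return await self.app(scope, receive, send)


# Hypothetical inner app and startup hook, just to exercise the wrapper
async def inner_app(scope, receive, send):
    await send({"type": "http.response.start", "status": 200, "headers": []})
    await send({"type": "http.response.body", "body": b"hello"})


async def populate_fixtures():
    print("populating fixtures (runs once, on first request)")


async def main():
    app = RunOnFirstRequest(inner_app, on_startup=[populate_fixtures])

    async def receive():
        return {"type": "http.request", "body": b"", "more_body": False}

    async def send(message):
        print("sent:", message["type"])

    scope = {"type": "http", "method": "GET", "path": "/"}
    await app(scope, receive, send)  # startup hook fires here
    await app(scope, receive, send)  # already started; hook is skipped


asyncio.run(main())
```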