id,seq,job,job_label,repo,repo_label,uses,name,with,run,env,if
133396,1,26772,mirror,107914493,datasette,zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94,"Mirror to ""master""","{""target-branch"": ""master"", ""force"": false}",,,
133397,2,26772,mirror,107914493,datasette,zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94,"Mirror to ""main""","{""target-branch"": ""main"", ""force"": false}",,,
136963,1,27487,deploy-branch-preview,107914493,datasette,actions/checkout@v3,,,,,
136964,2,27487,deploy-branch-preview,107914493,datasette,actions/setup-python@v4,Set up Python 3.11,"{""python-version"": ""3.11""}",,,
136965,3,27487,deploy-branch-preview,107914493,datasette,,Install dependencies,,"pip install datasette-publish-vercel ",,
136966,4,27487,deploy-branch-preview,107914493,datasette,,Deploy the preview,,"export BRANCH=""${{ github.event.inputs.branch }}"" wget https://latest.datasette.io/fixtures.db datasette publish vercel fixtures.db \ --branch $BRANCH \ --project ""datasette-preview-$BRANCH"" \ --token $VERCEL_TOKEN \ --scope datasette \ --about ""Preview of $BRANCH"" \ --about_url ""https://github.com/simonw/datasette/tree/$BRANCH"" ","{""VERCEL_TOKEN"": ""${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}""}",
136967,1,27488,deploy,107914493,datasette,actions/checkout@v3,Check out datasette,,,,
136968,2,27488,deploy,107914493,datasette,actions/setup-python@v4,Set up Python,"{""python-version"": ""3.9""}",,,
136969,3,27488,deploy,107914493,datasette,actions/cache@v3,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-pip-\n""}",,,
136970,4,27488,deploy,107914493,datasette,,Install Python dependencies,,"python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 ",,
136971,5,27488,deploy,107914493,datasette,,Run tests,,"pytest -n auto -m ""not serial"" pytest -m ""serial"" ",,${{ github.ref == 'refs/heads/main' }}
136972,6,27488,deploy,107914493,datasette,,Build fixtures.db,,python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db,,
136973,7,27488,deploy,107914493,datasette,,Build docs.db,,"cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd ..",,${{ github.ref == 'refs/heads/main' }}
136974,8,27488,deploy,107914493,datasette,,Set up the alternate-route demo,,"echo ' from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.get_database(""fixtures2"") db.route = ""alternative-route"" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db ",,
136975,9,27488,deploy,107914493,datasette,,Make some modifications to metadata.json,,"cat fixtures.json | \ jq '.databases |= . + {""ephemeral"": {""allow"": {""id"": ""*""}}}' | \ jq '.plugins |= . + {""datasette-ephemeral-tables"": {""table_ttl"": 900}}' \ > metadata.json cat metadata.json ",,
136976,10,27488,deploy,107914493,datasette,google-github-actions/setup-gcloud@v0,Set up Cloud Run,"{""version"": ""318.0.0"", ""service_account_email"": ""${{ secrets.GCP_SA_EMAIL }}"", ""service_account_key"": ""${{ secrets.GCP_SA_KEY }}""}",,,
136977,11,27488,deploy,107914493,datasette,,Deploy to Cloud Run,,"gcloud config set run/region us-central1 gcloud config set project datasette-222320 export SUFFIX=""-${GITHUB_REF#refs/heads/}"" export SUFFIX=${SUFFIX#-main} # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options=""--setting template_debug 1 --setting trace_debug 1 --crossdb"" \ --install 'datasette-ephemeral-tables>=0.2.2' \ --service ""datasette-latest$SUFFIX"" \ --secret $LATEST_DATASETTE_SECRET","{""LATEST_DATASETTE_SECRET"": ""${{ secrets.LATEST_DATASETTE_SECRET }}""}",
136978,12,27488,deploy,107914493,datasette,,Deploy to docs as well (only for main),,"# Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options=""--setting template_debug 1"" \ --service=datasette-docs-latest",,${{ github.ref == 'refs/heads/main' }}
136979,1,27489,documentation-links,107914493,datasette,readthedocs/actions/preview@v1,,"{""project-slug"": ""datasette""}",,,
136980,1,27490,prettier,107914493,datasette,actions/checkout@v2,Check out repo,,,,
136981,2,27490,prettier,107914493,datasette,actions/cache@v2,Configure npm caching,"{""path"": ""~/.npm"", ""key"": ""${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}"", ""restore-keys"": ""${{ runner.OS }}-npm-\n""}",,,
136982,3,27490,prettier,107914493,datasette,,Install dependencies,,npm ci,,
136983,4,27490,prettier,107914493,datasette,,Run prettier,,npm run prettier -- --check,,
136984,1,27491,test,107914493,datasette,actions/checkout@v3,,,,,
136985,2,27491,test,107914493,datasette,actions/setup-python@v4,Set up Python ${{ matrix.python-version }},"{""python-version"": ""${{ matrix.python-version }}""}",,,
136986,3,27491,test,107914493,datasette,actions/cache@v3,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-pip-\n""}",,,
136987,4,27491,test,107914493,datasette,,Install dependencies,,"pip install -e '.[test]' ",,
136988,5,27491,test,107914493,datasette,,Run tests,,"pytest ",,
136989,1,27492,deploy,107914493,datasette,actions/checkout@v3,,,,,
136990,2,27492,deploy,107914493,datasette,actions/setup-python@v4,Set up Python,"{""python-version"": ""3.11""}",,,
136991,3,27492,deploy,107914493,datasette,actions/cache@v3,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-publish-pip-\n""}",,,
136992,4,27492,deploy,107914493,datasette,,Install dependencies,,"pip install setuptools wheel twine ",,
136993,5,27492,deploy,107914493,datasette,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
136994,1,27493,deploy_static_docs,107914493,datasette,actions/checkout@v2,,,,,
136995,2,27493,deploy_static_docs,107914493,datasette,actions/setup-python@v2,Set up Python,"{""python-version"": ""3.9""}",,,
136996,3,27493,deploy_static_docs,107914493,datasette,actions/cache@v2,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-publish-pip-\n""}",,,
136997,4,27493,deploy_static_docs,107914493,datasette,,Install dependencies,,"python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 ",,
136998,5,27493,deploy_static_docs,107914493,datasette,,Build docs.db,,"cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd ..",,
136999,6,27493,deploy_static_docs,107914493,datasette,google-github-actions/setup-gcloud@v0,Set up Cloud Run,"{""version"": ""318.0.0"", ""service_account_email"": ""${{ secrets.GCP_SA_EMAIL }}"", ""service_account_key"": ""${{ secrets.GCP_SA_KEY }}""}",,,
137000,7,27493,deploy_static_docs,107914493,datasette,,Deploy stable-docs.datasette.io to Cloud Run,,"gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun docs.db \ --service=datasette-docs-stable",,
137001,1,27494,deploy_docker,107914493,datasette,actions/checkout@v2,,,,,
137002,2,27494,deploy_docker,107914493,datasette,,Build and push to Docker Hub,,"sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest","{""DOCKER_USER"": ""${{ secrets.DOCKER_USER }}"", ""DOCKER_PASS"": ""${{ secrets.DOCKER_PASS }}""}",
137003,1,27495,deploy_docker,107914493,datasette,actions/checkout@v2,,,,,
137004,2,27495,deploy_docker,107914493,datasette,,Build and push to Docker Hub,,"docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG}","{""DOCKER_USER"": ""${{ secrets.DOCKER_USER }}"", ""DOCKER_PASS"": ""${{ secrets.DOCKER_PASS }}"", ""VERSION_TAG"": ""${{ github.event.inputs.version_tag }}""}",
137005,1,27496,spellcheck,107914493,datasette,actions/checkout@v2,,,,,
137006,2,27496,spellcheck,107914493,datasette,actions/setup-python@v2,Set up Python ${{ matrix.python-version }},"{""python-version"": 3.9}",,,
137007,3,27496,spellcheck,107914493,datasette,actions/cache@v2,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-pip-\n""}",,,
137008,4,27496,spellcheck,107914493,datasette,,Install dependencies,,"pip install -e '.[docs]' ",,
137009,5,27496,spellcheck,107914493,datasette,,Check spelling,,"codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt ",,
137010,1,27497,test,107914493,datasette,actions/checkout@v2,Check out datasette,,,,
137011,2,27497,test,107914493,datasette,actions/setup-python@v2,Set up Python,"{""python-version"": 3.9}",,,
137012,3,27497,test,107914493,datasette,actions/cache@v2,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-pip-\n""}",,,
137013,4,27497,test,107914493,datasette,,Install Python dependencies,,"python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov ",,
137014,5,27497,test,107914493,datasette,,Run tests,,"ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah",,
137015,6,27497,test,107914493,datasette,codecov/codecov-action@v1,Upload coverage report,"{""token"": ""${{ secrets.CODECOV_TOKEN }}"", ""file"": ""coverage.xml""}",,,
137016,1,27498,test,107914493,datasette,actions/checkout@v3,,,,,
137017,2,27498,test,107914493,datasette,actions/setup-python@v3,Set up Python 3.10,"{""python-version"": ""3.10"", ""cache"": ""pip"", ""cache-dependency-path"": ""**/setup.py""}",,,
137018,3,27498,test,107914493,datasette,actions/cache@v2,Cache Playwright browsers,"{""path"": ""~/.cache/ms-playwright/"", ""key"": ""${{ runner.os }}-browsers""}",,,
137019,4,27498,test,107914493,datasette,,Install Playwright dependencies,,"pip install shot-scraper build shot-scraper install ",,
137020,5,27498,test,107914493,datasette,,Run test,,"./test-in-pyodide-with-shot-scraper.sh ",,
137021,1,27499,test,107914493,datasette,actions/checkout@v3,,,,,
137022,2,27499,test,107914493,datasette,actions/setup-python@v4,Set up Python ${{ matrix.python-version }},"{""python-version"": ""${{ matrix.python-version }}""}",,,
137023,3,27499,test,107914493,datasette,actions/cache@v3,Configure pip caching,"{""path"": ""~/.cache/pip"", ""key"": ""${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}"", ""restore-keys"": ""${{ runner.os }}-pip-\n""}",,,
137024,4,27499,test,107914493,datasette,,Build extension for --load-extension test,,(cd tests && gcc ext.c -fPIC -shared -o ext.so),,
137025,5,27499,test,107914493,datasette,,Install dependencies,,"pip install -e '.[test,docs]' pip freeze ",,
137026,6,27499,test,107914493,datasette,,Run tests,,"pytest -n auto -m ""not serial"" pytest -m ""serial"" # And the test that exercises a localhost HTTPS server tests/test_datasette_https_server.sh ",,
137027,7,27499,test,107914493,datasette,,Check if cog needs to be run,,"cog --check docs/*.rst ",,
137028,8,27499,test,107914493,datasette,,Check if blacken-docs needs to be run,,"# This fails on syntax errors, or if a diff was applied blacken-docs -l 60 docs/*.rst ",,
137029,1,27500,build,107914493,datasette,actions/checkout@v2,,,,,
137030,2,27500,build,107914493,datasette,mxschmitt/action-tmate@v3,Setup tmate session,,,,
137031,1,27501,build,107914493,datasette,actions/checkout@v2,,,,,
137032,2,27501,build,107914493,datasette,mxschmitt/action-tmate@v3,Setup tmate session,,,,