github
id | seq | job | repo | uses | name | with | run | env | if |
---|---|---|---|---|---|---|---|---|---|
137073 | 13 | 27508 | 140912432 | | Check if cog needs to be run | | cog --check README.md docs/*.rst | | |
137072 | 12 | 27508 | 140912432 | | Check formatting | | black . --check | | |
137071 | 11 | 27508 | 140912432 | | run flake8 if Python 3.8 or higher | | flake8 | | matrix.python-version >= 3.8 |
137070 | 10 | 27508 | 140912432 | | run mypy | | mypy sqlite_utils tests | | |
137069 | 9 | 27508 | 140912432 | | Run tests | | pytest -v | | |
137068 | 8 | 27508 | 140912432 | | Build extension for --load-extension test | | (cd tests && gcc ext.c -fPIC -shared -o ext.so && ls -lah) | | matrix.os == 'ubuntu-latest' |
137067 | 7 | 27508 | 140912432 | | On macOS with Python 3.10 test with sqlean.py | | pip install sqlean.py sqlite-dump | | matrix.os == 'macos-latest' && matrix.python-version == '3.10' |
137066 | 6 | 27508 | 140912432 | | Install SpatiaLite | | sudo apt-get install libsqlite3-mod-spatialite | | matrix.os == 'ubuntu-latest' |
137065 | 5 | 27508 | 140912432 | | Optionally install numpy | | pip install numpy | | matrix.numpy == 1 |
137064 | 4 | 27508 | 140912432 | | Install dependencies | | pip install -e '.[test,mypy,flake8,tui]' | | |
137063 | 3 | 27508 | 140912432 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137062 | 2 | 27508 | 140912432 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | {"python-version": "${{ matrix.python-version }}"} | |||
137061 | 1 | 27508 | 140912432 | actions/checkout@v3 | |||||
137060 | 7 | 27507 | 140912432 | codecov/codecov-action@v1 | Upload coverage report | {"token": "${{ secrets.CODECOV_TOKEN }}", "file": "coverage.xml"} | |||
137059 | 6 | 27507 | 140912432 | | Run tests | | ls -lah pytest --cov=sqlite_utils --cov-report xml:coverage.xml --cov-report term ls -lah | | |
137058 | 5 | 27507 | 140912432 | | Install Python dependencies | | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | | |
137057 | 4 | 27507 | 140912432 | | Install SpatiaLite | | sudo apt-get install libsqlite3-mod-spatialite | | |
137056 | 3 | 27507 | 140912432 | actions/cache@v2 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137055 | 2 | 27507 | 140912432 | actions/setup-python@v2 | Set up Python | {"python-version": 3.9} | |||
137054 | 1 | 27507 | 140912432 | actions/checkout@v2 | Check out repo | ||||
137053 | 5 | 27506 | 140912432 | | Check spelling | | codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell sqlite_utils --ignore-words docs/codespell-ignore-words.txt | | |
137052 | 4 | 27506 | 140912432 | | Install dependencies | | pip install -e '.[docs]' | | |
137051 | 3 | 27506 | 140912432 | actions/cache@v2 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137050 | 2 | 27506 | 140912432 | actions/setup-python@v2 | Set up Python ${{ matrix.python-version }} | {"python-version": 3.9} | |||
137049 | 1 | 27506 | 140912432 | actions/checkout@v2 | |||||
137048 | 5 | 27505 | 140912432 | | Publish | | python setup.py sdist bdist_wheel twine upload dist/* | {"TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}"} | |
137047 | 4 | 27505 | 140912432 | | Install dependencies | | pip install setuptools wheel twine | | |
137046 | 3 | 27505 | 140912432 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n"} | |||
137045 | 2 | 27505 | 140912432 | actions/setup-python@v4 | Set up Python | {"python-version": "3.11"} | |||
137044 | 1 | 27505 | 140912432 | actions/checkout@v3 | |||||
137043 | 5 | 27504 | 140912432 | | Run tests | | pytest | | |
137042 | 4 | 27504 | 140912432 | | Install dependencies | | pip install -e '.[test,tui]' | | |
137041 | 3 | 27504 | 140912432 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137040 | 2 | 27504 | 140912432 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | {"python-version": "${{ matrix.python-version }}"} | |||
137039 | 1 | 27504 | 140912432 | actions/checkout@v3 | |||||
137038 | 1 | 27503 | 140912432 | readthedocs/actions/preview@v1 | | {"project-slug": "sqlite-utils"} | | | |
137037 | 5 | 27502 | 140912432 | github/codeql-action/analyze@v1 | Perform CodeQL Analysis | ||||
137036 | 4 | 27502 | 140912432 | github/codeql-action/autobuild@v1 | Autobuild | ||||
137035 | 3 | 27502 | 140912432 | github/codeql-action/init@v1 | Initialize CodeQL | {"languages": "${{ matrix.language }}"} | |||
137034 | 2 | 27502 | 140912432 | | | | git checkout HEAD^2 | | ${{ github.event_name == 'pull_request' }} |
137033 | 1 | 27502 | 140912432 | actions/checkout@v2 | Checkout repository | {"fetch-depth": 2} | |||
137032 | 2 | 27501 | 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | ||||
137031 | 1 | 27501 | 107914493 | actions/checkout@v2 | |||||
137030 | 2 | 27500 | 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | ||||
137029 | 1 | 27500 | 107914493 | actions/checkout@v2 | |||||
137028 | 8 | 27499 | 107914493 | | Check if blacken-docs needs to be run | | # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst | | |
137027 | 7 | 27499 | 107914493 | | Check if cog needs to be run | | cog --check docs/*.rst | | |
137026 | 6 | 27499 | 107914493 | | Run tests | | pytest -n auto -m "not serial" pytest -m "serial" # And the test that exceeds a localhost HTTPS server tests/test_datasette_https_server.sh | | |
137025 | 5 | 27499 | 107914493 | | Install dependencies | | pip install -e '.[test,docs]' pip freeze | | |
137024 | 4 | 27499 | 107914493 | | Build extension for --load-extension test | | (cd tests && gcc ext.c -fPIC -shared -o ext.so) | | |
137023 | 3 | 27499 | 107914493 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137022 | 2 | 27499 | 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | {"python-version": "${{ matrix.python-version }}"} | |||
137021 | 1 | 27499 | 107914493 | actions/checkout@v3 | |||||
137020 | 5 | 27498 | 107914493 | | Run test | | ./test-in-pyodide-with-shot-scraper.sh | | |
137019 | 4 | 27498 | 107914493 | | Install Playwright dependencies | | pip install shot-scraper build shot-scraper install | | |
137018 | 3 | 27498 | 107914493 | actions/cache@v2 | Cache Playwright browsers | {"path": "~/.cache/ms-playwright/", "key": "${{ runner.os }}-browsers"} | |||
137017 | 2 | 27498 | 107914493 | actions/setup-python@v3 | Set up Python 3.10 | {"python-version": "3.10", "cache": "pip", "cache-dependency-path": "**/setup.py"} | |||
137016 | 1 | 27498 | 107914493 | actions/checkout@v3 | |||||
137015 | 6 | 27497 | 107914493 | codecov/codecov-action@v1 | Upload coverage report | {"token": "${{ secrets.CODECOV_TOKEN }}", "file": "coverage.xml"} | |||
137014 | 5 | 27497 | 107914493 | | Run tests | | ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah | | |
137013 | 4 | 27497 | 107914493 | | Install Python dependencies | | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | | |
137012 | 3 | 27497 | 107914493 | actions/cache@v2 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137011 | 2 | 27497 | 107914493 | actions/setup-python@v2 | Set up Python | {"python-version": 3.9} | |||
137010 | 1 | 27497 | 107914493 | actions/checkout@v2 | Check out datasette | ||||
137009 | 5 | 27496 | 107914493 | | Check spelling | | codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt | | |
137008 | 4 | 27496 | 107914493 | | Install dependencies | | pip install -e '.[docs]' | | |
137007 | 3 | 27496 | 107914493 | actions/cache@v2 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
137006 | 2 | 27496 | 107914493 | actions/setup-python@v2 | Set up Python ${{ matrix.python-version }} | {"python-version": 3.9} | |||
137005 | 1 | 27496 | 107914493 | actions/checkout@v2 | |||||
137004 | 2 | 27495 | 107914493 | | Build and push to Docker Hub | | docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG} | {"DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}", "VERSION_TAG": "${{ github.event.inputs.version_tag }}"} | |
137003 | 1 | 27495 | 107914493 | actions/checkout@v2 | |||||
137002 | 2 | 27494 | 107914493 | | Build and push to Docker Hub | | sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest | {"DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}"} | |
137001 | 1 | 27494 | 107914493 | actions/checkout@v2 | |||||
137000 | 7 | 27493 | 107914493 | | Deploy stable-docs.datasette.io to Cloud Run | | gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun docs.db \ --service=datasette-docs-stable | | |
136999 | 6 | 27493 | 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | {"version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}"} | |||
136998 | 5 | 27493 | 107914493 | | Build docs.db | | cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. | | |
136997 | 4 | 27493 | 107914493 | | Install dependencies | | python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | | |
136996 | 3 | 27493 | 107914493 | actions/cache@v2 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n"} | |||
136995 | 2 | 27493 | 107914493 | actions/setup-python@v2 | Set up Python | {"python-version": "3.9"} | |||
136994 | 1 | 27493 | 107914493 | actions/checkout@v2 | |||||
136993 | 5 | 27492 | 107914493 | | Publish | | python setup.py sdist bdist_wheel twine upload dist/* | {"TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}"} | |
136992 | 4 | 27492 | 107914493 | | Install dependencies | | pip install setuptools wheel twine | | |
136991 | 3 | 27492 | 107914493 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n"} | |||
136990 | 2 | 27492 | 107914493 | actions/setup-python@v4 | Set up Python | {"python-version": "3.11"} | |||
136989 | 1 | 27492 | 107914493 | actions/checkout@v3 | |||||
136988 | 5 | 27491 | 107914493 | | Run tests | | pytest | | |
136987 | 4 | 27491 | 107914493 | | Install dependencies | | pip install -e '.[test]' | | |
136986 | 3 | 27491 | 107914493 | actions/cache@v3 | Configure pip caching | {"path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n"} | |||
136985 | 2 | 27491 | 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | {"python-version": "${{ matrix.python-version }}"} | |||
136984 | 1 | 27491 | 107914493 | actions/checkout@v3 | |||||
136983 | 4 | 27490 | 107914493 | | Run prettier | | npm run prettier -- --check | | |
136982 | 3 | 27490 | 107914493 | | Install dependencies | | npm ci | | |
136981 | 2 | 27490 | 107914493 | actions/cache@v2 | Configure npm caching | {"path": "~/.npm", "key": "${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}", "restore-keys": "${{ runner.OS }}-npm-\n"} | |||
136980 | 1 | 27490 | 107914493 | actions/checkout@v2 | Check out repo | ||||
136979 | 1 | 27489 | 107914493 | readthedocs/actions/preview@v1 | {"project-slug": "datasette"} | ||||
136978 | 12 | 27488 | 107914493 | | Deploy to docs as well (only for main) | | # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest | | ${{ github.ref == 'refs/heads/main' }} |
136977 | 11 | 27488 | 107914493 | | Deploy to Cloud Run | | gcloud config set run/region us-central1 gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install 'datasette-ephemeral-tables>=0.2.2' \ --service "datasette-latest$SUFFIX" \ --secret $LATEST_DATASETTE_SECRET | {"LATEST_DATASETTE_SECRET": "${{ secrets.LATEST_DATASETTE_SECRET }}"} | |
136976 | 10 | 27488 | 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | {"version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}"} | |||
136975 | 9 | 27488 | 107914493 | | Make some modifications to metadata.json | | cat fixtures.json \| \ jq '.databases \|= . + {"ephemeral": {"allow": {"id": "*"}}}' \| \ jq '.plugins \|= . + {"datasette-ephemeral-tables": {"table_ttl": 900}}' \ > metadata.json cat metadata.json | | |
136974 | 8 | 27488 | 107914493 | | Set up the alternate-route demo | | echo ' from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.get_database("fixtures2") db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db | | |
136973 | 7 | 27488 | 107914493 | | Build docs.db | | cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. | | ${{ github.ref == 'refs/heads/main' }} |