id,seq,job,repo,uses,name,with,run,env,if
138655,3,27827,611552758,,Install dependencies,,"pip install '.[test]' ",,
138656,4,27827,611552758,,Run tests,,"pytest ",,
138659,3,27828,611552758,,Install dependencies,,"pip install setuptools wheel twine build ",,
138660,4,27828,611552758,,Publish,,"python -m build twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138663,3,27829,611552758,,Install dependencies,,"pip install '.[test]' ",,
138664,4,27829,611552758,,Run tests,,"pytest ",,
138665,5,27829,611552758,,Check if README is up-to-date,,"cog --check README.md ",,
138669,4,27830,197431109,,Install dependencies,,"pip install -e '.[test]' ",,
138670,5,27830,197431109,,Run tests,,"pytest -v ",,
138674,4,27831,197431109,,Install dependencies,,"pip install setuptools wheel twine ",,
138675,5,27831,197431109,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138679,4,27832,197431109,,Install dependencies,,"pip install -e '.[test]' ",,
138680,5,27832,197431109,,Run tests,,"pytest -v ",,
138684,4,27833,256834907,,Install dependencies,,"python -m pip install --upgrade pip pip install -e .[test] ",,
138685,5,27833,256834907,,Test with pytest,,"pytest ",,
138689,4,27834,303218369,,Install dependencies,,"pip install -e '.[test]' ",,
138690,5,27834,303218369,,Run tests,,"pytest ",,
138694,4,27835,303218369,,Install dependencies,,"pip install setuptools wheel twine ",,
138695,5,27835,303218369,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138699,4,27836,303218369,,Install dependencies,,"pip install -e '.[test]' ",,
138700,5,27836,303218369,,Run tests,,"pytest ",,
138704,4,27837,207052882,,Install Python dependencies,,"python -m pip install --upgrade pip pip install sqlite-utils>=3.1 sqlite-utils --version pip install -e . pip install datasette pip install bs4 ",,
138705,5,27837,207052882,,Create auth.json,,"echo ""{\""github_personal_token\"": \""$GITHUB_ACCESS_TOKEN\""}"" > auth.json ","{""GITHUB_ACCESS_TOKEN"": ""${{ secrets.GH_TOKEN }}""}",
138706,6,27837,207052882,,Fetch previous copy of database,,"# So we can keep track of when we first saw each dependent repo wget https://github-to-sqlite.dogsheep.net/github.db",,"!contains(github.event.head_commit.message, 'REFRESH_DB')"
138707,7,27837,207052882,,Fetch the repos,,"github-to-sqlite repos github.db dogsheep sqlite-utils tables --counts github.db",,
138708,8,27837,207052882,,"Fetch releases, commits, issues, contributors",,"sqlite-utils github.db ""select full_name from repos where owner = 53015001 union select 'simonw/datasette' as full_name union select 'simonw/sqlite-utils' as full_name"" \ --csv --no-headers | while read repo; do github-to-sqlite releases \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite commits \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite tags \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite contributors \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite issues \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite pull-requests \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite issue-comments \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite stargazers \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite workflows \ github.db $(echo $repo | tr -d '\r'); sleep 10; done; # Scrape dependents github-to-sqlite scrape-dependents github.db simonw/datasette simonw/sqlite-utils -v sqlite-utils tables --counts github.db # Delete email addresses from raw_authors sqlite-utils github.db ""update raw_authors set email = ''"" # Fetch emojis github-to-sqlite emojis github.db --fetch # Rebuild FTS tables sqlite-utils rebuild-fts github.db # Populate _analyze_tables_ table sqlite-utils analyze-tables github.db --save",,
138711,11,27837,207052882,,Deploy to Cloud Run,,"gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun github.db \ -m demo-metadata.json \ --service github-to-sqlite \ --install=datasette-search-all>=0.3 \ --install=datasette-render-markdown>=1.1.2 \ --install=datasette-pretty-json>=0.2.2 \ --install=datasette-json-html \ --install=datasette-vega \ --install=datasette-render-images \ --install=datasette-graphql \ --install=datasette-atom",,
138715,4,27838,207052882,,Install dependencies,,"pip install -e '.[test]' ",,
138716,5,27838,207052882,,Run tests,,"pytest ",,
138720,4,27839,207052882,,Install dependencies,,"pip install setuptools wheel twine ",,
138721,5,27839,207052882,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138723,2,27840,207052882,,Update TOC,,npx markdown-toc README.md -i,,
138724,3,27840,207052882,,Commit and push if README changed,,"git diff git config --global user.email ""readme-bot@example.com"" git config --global user.name ""README-bot"" git diff --quiet || (git add README.md && git commit -m ""Updated README"") git push",,
138728,4,27841,207052882,,Install dependencies,,"pip install -e '.[test]' ",,
138729,5,27841,207052882,,Run tests,,"pytest ",,
138733,4,27842,248903544,,Install dependencies,,"pip install -e '.[test]' ",,
138734,5,27842,248903544,,Run tests,,"pytest ",,
138738,4,27843,248903544,,Install dependencies,,"pip install setuptools wheel twine ",,
138739,5,27843,248903544,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138743,4,27844,248903544,,Install dependencies,,"pip install -e '.[test]' ",,
138744,5,27844,248903544,,Run tests,,"pytest ",,
138748,4,27845,197882382,,Install dependencies,,"pip install -e '.[test]' ",,
138749,5,27845,197882382,,Run tests,,"pytest ",,
138753,4,27846,197882382,,Install dependencies,,"pip install setuptools wheel twine ",,
138754,5,27846,197882382,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138758,4,27847,197882382,,Install dependencies,,"pip install -e '.[test]' ",,
138759,5,27847,197882382,,Run tests,,"pytest ",,
138762,3,27848,213286752,,Install dependencies,,"pip install '.[test]' ",,
138763,4,27848,213286752,,Run tests,,"pytest ",,
138766,3,27849,213286752,,Install dependencies,,"pip install setuptools wheel twine build ",,
138767,4,27849,213286752,,Publish,,"python -m build twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138770,3,27850,213286752,,Install dependencies,,"pip install '.[test]' ",,
138771,4,27850,213286752,,Run tests,,"pytest ",,
138774,3,27851,205429375,,Install dependencies,,"pip install -e '.[test]' ",,
138775,4,27851,205429375,,Run tests,,"pytest ",,
138778,3,27852,205429375,,Install dependencies,,"pip install setuptools wheel twine ",,
138779,4,27852,205429375,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138782,3,27853,205429375,,Install dependencies,,"pip install -e '.[test]' ",,
138783,4,27853,205429375,,Run tests,,"pytest ",,
138787,4,27854,206156866,,Install dependencies,,"pip install -e '.[test]' ",,
138788,5,27854,206156866,,Run tests,,"pytest ",,
138792,4,27855,206156866,,Install dependencies,,"pip install setuptools wheel twine ",,
138793,5,27855,206156866,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138795,2,27856,206156866,,Update TOC,,npx markdown-toc README.md -i,,
138796,3,27856,206156866,,Commit and push if README changed,,"git diff git config --global user.email ""readme-bot@example.com"" git config --global user.name ""README-bot"" git diff --quiet || (git add README.md && git commit -m ""Updated README"") git push",,
138800,4,27857,206156866,,Install dependencies,,"pip install -e '.[test]' ",,
138801,5,27857,206156866,,Run tests,,"pytest ",,
138804,3,27858,107914493,,Install dependencies,,"pip install datasette-publish-vercel ",,
138805,4,27858,107914493,,Deploy the preview,,"export BRANCH=""${{ github.event.inputs.branch }}"" wget https://latest.datasette.io/fixtures.db datasette publish vercel fixtures.db \ --branch $BRANCH \ --project ""datasette-preview-$BRANCH"" \ --token $VERCEL_TOKEN \ --scope datasette \ --about ""Preview of $BRANCH"" \ --about_url ""https://github.com/simonw/datasette/tree/$BRANCH"" ","{""VERCEL_TOKEN"": ""${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}""}",
138809,4,27859,107914493,,Install Python dependencies,,"python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 ",,
138810,5,27859,107914493,,Run tests,,"pytest -n auto -m ""not serial"" pytest -m ""serial"" ",,${{ github.ref == 'refs/heads/main' }}
138811,6,27859,107914493,,Build fixtures.db and other files needed to deploy the demo,,"python tests/fixtures.py \ fixtures.db \ fixtures-config.json \ fixtures-metadata.json \ plugins \ --extra-db-filename extra_database.db",,
138812,7,27859,107914493,,Build docs.db,,"cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd ..",,${{ github.ref == 'refs/heads/main' }}
138813,8,27859,107914493,,Set up the alternate-route demo,,"echo ' from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.get_database(""fixtures2"") db.route = ""alternative-route"" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db ",,
138814,9,27859,107914493,,And the counters writable canned query demo,,"cat > plugins/counters.py <=0.2.2' \ --service ""datasette-latest$SUFFIX"" \ --secret $LATEST_DATASETTE_SECRET","{""LATEST_DATASETTE_SECRET"": ""${{ secrets.LATEST_DATASETTE_SECRET }}""}",
138817,12,27859,107914493,,Deploy to docs as well (only for main),,"# Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options=""--setting template_debug 1"" \ --service=datasette-docs-latest",,${{ github.ref == 'refs/heads/main' }}
138821,3,27861,107914493,,Install dependencies,,npm ci,,
138822,4,27861,107914493,,Run prettier,,npm run prettier -- --check,,
138826,4,27862,107914493,,Install dependencies,,"pip install -e '.[test]' ",,
138827,5,27862,107914493,,Run tests,,"pytest ",,
138831,4,27863,107914493,,Install dependencies,,"pip install setuptools wheel twine ",,
138832,5,27863,107914493,,Publish,,"python setup.py sdist bdist_wheel twine upload dist/* ","{""TWINE_USERNAME"": ""__token__"", ""TWINE_PASSWORD"": ""${{ secrets.PYPI_TOKEN }}""}",
138836,4,27864,107914493,,Install dependencies,,"python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 ",,
138837,5,27864,107914493,,Build docs.db,,"cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd ..",,
138839,7,27864,107914493,,Deploy stable-docs.datasette.io to Cloud Run,,"gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun docs.db \ --service=datasette-docs-stable",,
138841,2,27865,107914493,,Build and push to Docker Hub,,"sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest","{""DOCKER_USER"": ""${{ secrets.DOCKER_USER }}"", ""DOCKER_PASS"": ""${{ secrets.DOCKER_PASS }}""}",
138843,2,27866,107914493,,Build and push to Docker Hub,,"docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG}","{""DOCKER_USER"": ""${{ secrets.DOCKER_USER }}"", ""DOCKER_PASS"": ""${{ secrets.DOCKER_PASS }}"", ""VERSION_TAG"": ""${{ github.event.inputs.version_tag }}""}",
138846,3,27867,107914493,,Install dependencies,,"pip install -e '.[docs]' ",,
138847,4,27867,107914493,,Check spelling,,"codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt ",,
138851,4,27868,107914493,,Install Python dependencies,,"python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov ",,
138852,5,27868,107914493,,Run tests,,"ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah",,
138857,4,27869,107914493,,Install Playwright dependencies,,"pip install shot-scraper build shot-scraper install ",,
138858,5,27869,107914493,,Run test,,"./test-in-pyodide-with-shot-scraper.sh ",,
138862,4,27870,107914493,,Build extension for --load-extension test,,(cd tests && gcc ext.c -fPIC -shared -o ext.so),,
138863,5,27870,107914493,,Install dependencies,,"pip install -e '.[test]' pip freeze ",,
138864,6,27870,107914493,,Run tests,,"pytest -n auto -m ""not serial"" pytest -m ""serial"" # And the test that exercises a localhost HTTPS server tests/test_datasette_https_server.sh ",,
138865,7,27870,107914493,,Install docs dependencies on Python 3.9+,,"pip install -e '.[docs]' ",,matrix.python-version != '3.8'
138866,8,27870,107914493,,Check if cog needs to be run,,"cog --check docs/*.rst ",,matrix.python-version != '3.8'
138867,9,27870,107914493,,Check if blacken-docs needs to be run,,"# This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst ",,matrix.python-version != '3.8'
138868,10,27870,107914493,,Test DATASETTE_LOAD_PLUGINS,,"pip install datasette-init datasette-json-html tests/test-datasette-load-plugins.sh ",,