steps
264 rows sorted by run descending
This data as json, CSV (advanced)
Suggested facets: seq, repo, uses, with, if
env 6 ✖
- {"TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}"} 11
- {"DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}", "VERSION_TAG": "${{ github.event.inputs.version_tag }}"} 1
- {"DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}"} 1
- {"GITHUB_ACCESS_TOKEN": "${{ secrets.GH_TOKEN }}"} 1
- {"LATEST_DATASETTE_SECRET": "${{ secrets.LATEST_DATASETTE_SECRET }}"} 1
- {"VERCEL_TOKEN": "${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}"} 1
id | seq | job | repo | uses | name | with | run ▲ | env | if |
---|---|---|---|---|---|---|---|---|---|
138897 | 4 | test 27878 | sqlite-utils 140912432 | Install SpatiaLite | sudo apt-get install libsqlite3-mod-spatialite | ||||
138907 | 7 | test 27879 | sqlite-utils 140912432 | Install SpatiaLite | sudo apt-get install libsqlite3-mod-spatialite | matrix.os == 'ubuntu-latest' | |||
138708 | 8 | scheduled 27837 | github-to-sqlite 207052882 | Fetch releases, commits, issues, contributors | sqlite-utils github.db "select full_name from repos where owner = 53015001 union select 'simonw/datasette' as full_name union select 'simonw/sqlite-utils' as full_name" \ --csv --no-headers | while read repo; do github-to-sqlite releases \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite commits \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite tags \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite contributors \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite issues \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite pull-requests \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite issue-comments \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite stargazers \ github.db $(echo $repo | tr -d '\r'); sleep 10; github-to-sqlite workflows \ github.db $(echo $repo | tr -d '\r'); sleep 10; done; # Scrape dependents github-to-sqlite scrape-dependents github.db simonw/datasette simonw/sqlite-utils -v sqlite-utils tables --counts github.db # Delete email addresses from raw_authors sqlite-utils github.db "update raw_authors set email = ''" # Fetch emojis github-to-sqlite emojis github.db --fetch # Rebuild FTS tables sqlite-utils rebuild-fts github.db # Populate _analyze_tables_ table sqlite-utils analyze-tables github.db --save | ||||
138841 | 2 | deploy_docker 27865 | datasette 107914493 | Build and push to Docker Hub | sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest | { "DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}" } |
|||
138811 | 6 | deploy 27859 | datasette 107914493 | Build fixtures.db and other files needed to deploy the demo | python tests/fixtures.py \ fixtures.db \ fixtures-config.json \ fixtures-metadata.json \ plugins \ --extra-db-filename extra_database.db | ||||
138675 | 5 | deploy 27831 | dogsheep-beta 197431109 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138695 | 5 | deploy 27835 | evernote-to-sqlite 303218369 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138721 | 5 | deploy 27839 | github-to-sqlite 207052882 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138739 | 5 | deploy 27843 | hacker-news-to-sqlite 248903544 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138754 | 5 | deploy 27846 | healthkit-to-sqlite 197882382 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138779 | 4 | deploy 27852 | swarm-to-sqlite 205429375 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138793 | 5 | deploy 27855 | twitter-to-sqlite 206156866 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138832 | 5 | deploy 27863 | datasette 107914493 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138888 | 5 | deploy 27876 | sqlite-utils 140912432 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138836 | 4 | deploy_static_docs 27864 | datasette 107914493 | Install dependencies | python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | ||||
138851 | 4 | test 27868 | datasette 107914493 | Install Python dependencies | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | ||||
138898 | 5 | test 27878 | sqlite-utils 140912432 | Install Python dependencies | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | ||||
138809 | 4 | deploy 27859 | datasette 107914493 | Install Python dependencies | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | ||||
138704 | 4 | scheduled 27837 | github-to-sqlite 207052882 | Install Python dependencies | python -m pip install --upgrade pip pip install sqlite-utils>=3.1 sqlite-utils --version pip install -e . pip install datasette pip install bs4 | ||||
138684 | 4 | build 27833 | dogsheep-photos 256834907 | Install dependencies | python -m pip install --upgrade pip pip install -e .[test] | ||||
138660 | 4 | deploy 27828 | apple-notes-to-sqlite 611552758 | Publish | python -m build twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138767 | 4 | deploy 27849 | pocket-to-sqlite 213286752 | Publish | python -m build twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } |
|||
138670 | 5 | test 27830 | dogsheep-beta 197431109 | Run tests | pytest -v | ||||
138680 | 5 | test 27832 | dogsheep-beta 197431109 | Run tests | pytest -v | ||||
138910 | 10 | test 27879 | sqlite-utils 140912432 | Run tests | pytest -v | ||||
138864 | 6 | test 27870 | datasette 107914493 | Run tests | pytest -n auto -m "not serial" pytest -m "serial" # And the test that exceeds a localhost HTTPS server tests/test_datasette_https_server.sh | ||||
138810 | 5 | deploy 27859 | datasette 107914493 | Run tests | pytest -n auto -m "not serial" pytest -m "serial" | ${{ github.ref == 'refs/heads/main' }} | |||
138656 | 4 | test 27827 | apple-notes-to-sqlite 611552758 | Run tests | pytest | ||||
138664 | 4 | test 27829 | apple-notes-to-sqlite 611552758 | Run tests | pytest | ||||
138685 | 5 | build 27833 | dogsheep-photos 256834907 | Test with pytest | pytest | ||||
138690 | 5 | test 27834 | evernote-to-sqlite 303218369 | Run tests | pytest | ||||
138700 | 5 | test 27836 | evernote-to-sqlite 303218369 | Run tests | pytest | ||||
138716 | 5 | test 27838 | github-to-sqlite 207052882 | Run tests | pytest | ||||
138729 | 5 | test 27841 | github-to-sqlite 207052882 | Run tests | pytest | ||||
138734 | 5 | test 27842 | hacker-news-to-sqlite 248903544 | Run tests | pytest | ||||
138744 | 5 | test 27844 | hacker-news-to-sqlite 248903544 | Run tests | pytest | ||||
138749 | 5 | test 27845 | healthkit-to-sqlite 197882382 | Run tests | pytest | ||||
138759 | 5 | test 27847 | healthkit-to-sqlite 197882382 | Run tests | pytest | ||||
138763 | 4 | test 27848 | pocket-to-sqlite 213286752 | Run tests | pytest | ||||
138771 | 4 | test 27850 | pocket-to-sqlite 213286752 | Run tests | pytest | ||||
138775 | 4 | test 27851 | swarm-to-sqlite 205429375 | Run tests | pytest | ||||
138783 | 4 | test 27853 | swarm-to-sqlite 205429375 | Run tests | pytest | ||||
138788 | 5 | test 27854 | twitter-to-sqlite 206156866 | Run tests | pytest | ||||
138801 | 5 | test 27857 | twitter-to-sqlite 206156866 | Run tests | pytest | ||||
138827 | 5 | test 27862 | datasette 107914493 | Run tests | pytest | ||||
138883 | 5 | test 27875 | sqlite-utils 140912432 | Run tests | pytest | ||||
138908 | 8 | test 27879 | sqlite-utils 140912432 | On macOS with Python 3.10 test with sqlean.py | pip install sqlean.py sqlite-dump | matrix.os == 'macos-latest' && matrix.python-version == '3.10' | |||
138857 | 4 | test 27869 | datasette 107914493 | Install Playwright dependencies | pip install shot-scraper build shot-scraper install | ||||
138659 | 3 | deploy 27828 | apple-notes-to-sqlite 611552758 | Install dependencies | pip install setuptools wheel twine build | ||||
138766 | 3 | deploy 27849 | pocket-to-sqlite 213286752 | Install dependencies | pip install setuptools wheel twine build | ||||
138674 | 4 | deploy 27831 | dogsheep-beta 197431109 | Install dependencies | pip install setuptools wheel twine | ||||
138694 | 4 | deploy 27835 | evernote-to-sqlite 303218369 | Install dependencies | pip install setuptools wheel twine | ||||
138720 | 4 | deploy 27839 | github-to-sqlite 207052882 | Install dependencies | pip install setuptools wheel twine | ||||
138738 | 4 | deploy 27843 | hacker-news-to-sqlite 248903544 | Install dependencies | pip install setuptools wheel twine | ||||
138753 | 4 | deploy 27846 | healthkit-to-sqlite 197882382 | Install dependencies | pip install setuptools wheel twine | ||||
138778 | 3 | deploy 27852 | swarm-to-sqlite 205429375 | Install dependencies | pip install setuptools wheel twine | ||||
138792 | 4 | deploy 27855 | twitter-to-sqlite 206156866 | Install dependencies | pip install setuptools wheel twine | ||||
138831 | 4 | deploy 27863 | datasette 107914493 | Install dependencies | pip install setuptools wheel twine | ||||
138887 | 4 | deploy 27876 | sqlite-utils 140912432 | Install dependencies | pip install setuptools wheel twine | ||||
138906 | 6 | test 27879 | sqlite-utils 140912432 | Optionally install numpy | pip install numpy | matrix.numpy == 1 | |||
138804 | 3 | deploy-branch-preview 27858 | datasette 107914493 | Install dependencies | pip install datasette-publish-vercel | ||||
138868 | 10 | test 27870 | datasette 107914493 | Test DATASETTE_LOAD_PLUGINS | pip install datasette-init datasette-json-html tests/test-datasette-load-plugins.sh | ||||
138905 | 5 | test 27879 | sqlite-utils 140912432 | Optionally install tui dependencies (not 3.7) | pip install -e '.[tui]' | matrix.python-version != '3.7' | |||
138863 | 5 | test 27870 | datasette 107914493 | Install dependencies | pip install -e '.[test]' pip freeze | ||||
138669 | 4 | test 27830 | dogsheep-beta 197431109 | Install dependencies | pip install -e '.[test]' | ||||
138679 | 4 | test 27832 | dogsheep-beta 197431109 | Install dependencies | pip install -e '.[test]' | ||||
138689 | 4 | test 27834 | evernote-to-sqlite 303218369 | Install dependencies | pip install -e '.[test]' | ||||
138699 | 4 | test 27836 | evernote-to-sqlite 303218369 | Install dependencies | pip install -e '.[test]' | ||||
138715 | 4 | test 27838 | github-to-sqlite 207052882 | Install dependencies | pip install -e '.[test]' | ||||
138728 | 4 | test 27841 | github-to-sqlite 207052882 | Install dependencies | pip install -e '.[test]' | ||||
138733 | 4 | test 27842 | hacker-news-to-sqlite 248903544 | Install dependencies | pip install -e '.[test]' | ||||
138743 | 4 | test 27844 | hacker-news-to-sqlite 248903544 | Install dependencies | pip install -e '.[test]' | ||||
138748 | 4 | test 27845 | healthkit-to-sqlite 197882382 | Install dependencies | pip install -e '.[test]' | ||||
138758 | 4 | test 27847 | healthkit-to-sqlite 197882382 | Install dependencies | pip install -e '.[test]' | ||||
138774 | 3 | test 27851 | swarm-to-sqlite 205429375 | Install dependencies | pip install -e '.[test]' | ||||
138782 | 3 | test 27853 | swarm-to-sqlite 205429375 | Install dependencies | pip install -e '.[test]' | ||||
138787 | 4 | test 27854 | twitter-to-sqlite 206156866 | Install dependencies | pip install -e '.[test]' | ||||
138800 | 4 | test 27857 | twitter-to-sqlite 206156866 | Install dependencies | pip install -e '.[test]' | ||||
138826 | 4 | test 27862 | datasette 107914493 | Install dependencies | pip install -e '.[test]' | ||||
138882 | 4 | test 27875 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[test]' | ||||
138904 | 4 | test 27879 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[test,mypy,flake8]' | ||||
138846 | 3 | spellcheck 27867 | datasette 107914493 | Install dependencies | pip install -e '.[docs]' | ||||
138865 | 7 | test 27870 | datasette 107914493 | Install docs dependencies on Python 3.9+ | pip install -e '.[docs]' | matrix.python-version != '3.8' | |||
138892 | 4 | spellcheck 27877 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[docs]' | ||||
138655 | 3 | test 27827 | apple-notes-to-sqlite 611552758 | Install dependencies | pip install '.[test]' | ||||
138663 | 3 | test 27829 | apple-notes-to-sqlite 611552758 | Install dependencies | pip install '.[test]' | ||||
138762 | 3 | test 27848 | pocket-to-sqlite 213286752 | Install dependencies | pip install '.[test]' | ||||
138770 | 3 | test 27850 | pocket-to-sqlite 213286752 | Install dependencies | pip install '.[test]' | ||||
138723 | 2 | build 27840 | github-to-sqlite 207052882 | Update TOC | npx markdown-toc README.md -i | ||||
138795 | 2 | build 27856 | twitter-to-sqlite 206156866 | Update TOC | npx markdown-toc README.md -i | ||||
138822 | 4 | prettier 27861 | datasette 107914493 | Run prettier | npm run prettier -- --check | ||||
138821 | 3 | prettier 27861 | datasette 107914493 | Install dependencies | npm ci | ||||
138911 | 11 | test 27879 | sqlite-utils 140912432 | run mypy | mypy sqlite_utils tests | ||||
138899 | 6 | test 27878 | sqlite-utils 140912432 | Run tests | ls -lah pytest --cov=sqlite_utils --cov-report xml:coverage.xml --cov-report term ls -lah | ||||
138852 | 5 | test 27868 | datasette 107914493 | Run tests | ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah | ||||
138707 | 7 | scheduled 27837 | github-to-sqlite 207052882 | Fetch the repos | github-to-sqlite repos github.db dogsheep sqlite-utils tables --counts github.db | ||||
138724 | 3 | build 27840 | github-to-sqlite 207052882 | Commit and push if README changed | git diff git config --global user.email "readme-bot@example.com" git config --global user.name "README-bot" git diff --quiet || (git add README.md && git commit -m "Updated README") git push | ||||
138796 | 3 | build 27856 | twitter-to-sqlite 206156866 | Commit and push if README changed | git diff git config --global user.email "readme-bot@example.com" git config --global user.name "README-bot" git diff --quiet || (git add README.md && git commit -m "Updated README") git push | ||||
138874 | 2 | Analyze 27873 | sqlite-utils 140912432 | git checkout HEAD^2 | ${{ github.event_name == 'pull_request' }} | ||||
138816 | 11 | deploy 27859 | datasette 107914493 | Deploy to Cloud Run | gcloud config set run/region us-central1 gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures-metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install 'datasette-ephemeral-tables>=0.2.2' \ --service "datasette-latest$SUFFIX" \ --secret $LATEST_DATASETTE_SECRET | { "LATEST_DATASETTE_SECRET": "${{ secrets.LATEST_DATASETTE_SECRET }}" } |
Advanced export
JSON shape: default, array, newline-delimited, object
-- Schema for the [steps] table: one row per extracted workflow step.
-- NOTE(review): [job]/[repo] are integer FKs into [jobs]/[repos]; the
-- remaining TEXT columns ([uses], [with], [run], [env], [if]) appear to hold
-- raw step fields from workflow YAML — confirm against the loader.
-- Bracket quoting is kept deliberately: [with] and [if] are reserved words.
CREATE TABLE [steps] (
    [id] INTEGER PRIMARY KEY,
    [seq] INTEGER,
    [job] INTEGER REFERENCES [jobs]([id]),
    [repo] INTEGER REFERENCES [repos]([id]),
    [uses] TEXT,
    [name] TEXT,
    [with] TEXT,
    [run] TEXT,
    [env] TEXT,
    [if] TEXT
);

-- Indexes supporting the two FK lookup paths used by the table view.
CREATE INDEX [idx_steps_repo] ON [steps] ([repo]);
CREATE INDEX [idx_steps_job] ON [steps] ([job]);