steps
264 rows sorted by id descending
run (>30 distinct values; see the query sketch after this list)
- pytest 19
- pip install -e '.[test]' 16
- pip install setuptools wheel twine 9
- python setup.py sdist bdist_wheel twine upload dist/* 9
- pip install '.[test]' 4
- pip install -e '.[docs]' 3
- pytest -v 3
- cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. 2
- git diff git config --global user.email "readme-bot@example.com" git config --global user.name "README-bot" git diff --quiet || (git add README.md && git commit -m "Updated README") git push 2
- npx markdown-toc README.md -i 2
- pip install setuptools wheel twine build 2
- python -m build twine upload dist/* 2
- python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov 2
- sudo apt-get install libsqlite3-mod-spatialite 2
- # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest 1
- # So we can keep track of when we first saw each dependent repo wget https://github-to-sqlite.dogsheep.net/github.db 1
- # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst 1
- (cd tests && gcc ext.c -fPIC -shared -o ext.so && ls -lah) 1
- (cd tests && gcc ext.c -fPIC -shared -o ext.so) 1
- ./test-in-pyodide-with-shot-scraper.sh 1
- black . --check 1
- cat > plugins/counters.py <<EOF from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.add_memory_database("counters") async def inner(): await db.execute_write("create table if not exists counters (name text primary key, value integer)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_a', 0)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_b', 0)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_c', 0)") return inner @hookimpl def canned_queries(database): if database == "counters": queries = {} for name in ("counter_a", "counter_b", "counter_c"): queries["increment_{}".format(name)] = { "sql": "update counters set value = value + 1 where name = '{}'".format(name), "on_success_message_sql": "select 'Counter {name} incremented to ' || value from counters where name = '{name}'".format(name=name), "write": True, } queries["decrement_{}".format(name)] = { "sql": "update counters set value = value - 1 where name = '{}'".format(name), "on_success_message_sql": "select 'Counter {name} decremented to ' || value from counters where name = '{name}'".format(name=name), "write": True, } return queries EOF 1
- codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt 1
- codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell sqlite_utils --ignore-words docs/codespell-ignore-words.txt 1
- cog --check README.md 1
- cog --check README.md docs/*.rst 1
- cog --check docs/*.rst 1
- docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG} 1
- echo "{\"github_personal_token\": \"$GITHUB_ACCESS_TOKEN\"}" > auth.json 1
- echo ' from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.get_database("fixtures2") db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db 1
- …
uses (19 distinct values; see the query sketch after this list)
- actions/checkout@v2 32
- actions/cache@v2 24
- actions/setup-python@v2 22
- actions/checkout@v3 18
- actions/setup-python@v4 18
- actions/cache@v3 7
- google-github-actions/setup-gcloud@v0 3
- actions/cache@v1 2
- actions/setup-python@v1 2
- codecov/codecov-action@v1 2
- mxschmitt/action-tmate@v3 2
- readthedocs/actions/preview@v1 2
- zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 2
- actions/checkout@v4 1
- actions/setup-python@v3 1
- actions/upload-artifact@v2 1
- github/codeql-action/analyze@v1 1
- github/codeql-action/autobuild@v1 1
- github/codeql-action/init@v1 1
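The `uses` facet shows the same handful of actions pinned at several different versions (actions/checkout@v2, @v3 and @v4; actions/setup-python@v1 through @v4). A sketch of a query that collapses the `@version` suffix to make that drift easier to read; `instr()` and `substr()` are standard SQLite string functions:

```sql
-- Group the "uses" facet by action, ignoring the @version suffix,
-- to show how many different pinned versions of each action are in use
select
  case when instr(uses, '@') > 0
       then substr(uses, 1, instr(uses, '@') - 1)
       else uses
  end as action,
  count(*) as steps,
  count(distinct uses) as distinct_versions
from steps
where uses is not null
group by action
order by steps desc;
```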
id | seq | job | repo | uses | name | with | run | env | if |
---|---|---|---|---|---|---|---|---|---|
138914 | 14 | test 27879 | sqlite-utils 140912432 | Check if cog needs to be run | cog --check README.md docs/*.rst | matrix.python-version != '3.7' | |||
138913 | 13 | test 27879 | sqlite-utils 140912432 | Check formatting | black . --check | ||||
138912 | 12 | test 27879 | sqlite-utils 140912432 | run flake8 if Python 3.8 or higher | flake8 | matrix.python-version >= 3.8 | |||
138911 | 11 | test 27879 | sqlite-utils 140912432 | run mypy | mypy sqlite_utils tests | ||||
138910 | 10 | test 27879 | sqlite-utils 140912432 | Run tests | pytest -v | ||||
138909 | 9 | test 27879 | sqlite-utils 140912432 | Build extension for --load-extension test | (cd tests && gcc ext.c -fPIC -shared -o ext.so && ls -lah) | matrix.os == 'ubuntu-latest' | |||
138908 | 8 | test 27879 | sqlite-utils 140912432 | On macOS with Python 3.10 test with sqlean.py | pip install sqlean.py sqlite-dump | matrix.os == 'macos-latest' && matrix.python-version == '3.10' | |||
138907 | 7 | test 27879 | sqlite-utils 140912432 | Install SpatiaLite | sudo apt-get install libsqlite3-mod-spatialite | matrix.os == 'ubuntu-latest' | |||
138906 | 6 | test 27879 | sqlite-utils 140912432 | Optionally install numpy | pip install numpy | matrix.numpy == 1 | |||
138905 | 5 | test 27879 | sqlite-utils 140912432 | Optionally install tui dependencies (not 3.7) | pip install -e '.[tui]' | matrix.python-version != '3.7' | |||
138904 | 4 | test 27879 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[test,mypy,flake8]' | ||||
138903 | 3 | test 27879 | sqlite-utils 140912432 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138902 | 2 | test 27879 | sqlite-utils 140912432 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}" } | |||
138901 | 1 | test 27879 | sqlite-utils 140912432 | actions/checkout@v3 | |||||
138900 | 7 | test 27878 | sqlite-utils 140912432 | codecov/codecov-action@v1 | Upload coverage report | { "token": "${{ secrets.CODECOV_TOKEN }}", "file": "coverage.xml" } | |||
138899 | 6 | test 27878 | sqlite-utils 140912432 | Run tests | ls -lah pytest --cov=sqlite_utils --cov-report xml:coverage.xml --cov-report term ls -lah | ||||
138898 | 5 | test 27878 | sqlite-utils 140912432 | Install Python dependencies | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | ||||
138897 | 4 | test 27878 | sqlite-utils 140912432 | Install SpatiaLite | sudo apt-get install libsqlite3-mod-spatialite | ||||
138896 | 3 | test 27878 | sqlite-utils 140912432 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138895 | 2 | test 27878 | sqlite-utils 140912432 | actions/setup-python@v2 | Set up Python | { "python-version": 3.9 } | |||
138894 | 1 | test 27878 | sqlite-utils 140912432 | actions/checkout@v2 | Check out repo | ||||
138893 | 5 | spellcheck 27877 | sqlite-utils 140912432 | Check spelling | codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell sqlite_utils --ignore-words docs/codespell-ignore-words.txt | ||||
138892 | 4 | spellcheck 27877 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[docs]' | ||||
138891 | 3 | spellcheck 27877 | sqlite-utils 140912432 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138890 | 2 | spellcheck 27877 | sqlite-utils 140912432 | actions/setup-python@v2 | Set up Python ${{ matrix.python-version }} | { "python-version": 3.9 } | |||
138889 | 1 | spellcheck 27877 | sqlite-utils 140912432 | actions/checkout@v2 | |||||
138888 | 5 | deploy 27876 | sqlite-utils 140912432 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } | |||
138887 | 4 | deploy 27876 | sqlite-utils 140912432 | Install dependencies | pip install setuptools wheel twine | ||||
138886 | 3 | deploy 27876 | sqlite-utils 140912432 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n" } | |||
138885 | 2 | deploy 27876 | sqlite-utils 140912432 | actions/setup-python@v4 | Set up Python | { "python-version": "3.12" } | |||
138884 | 1 | deploy 27876 | sqlite-utils 140912432 | actions/checkout@v3 | |||||
138883 | 5 | test 27875 | sqlite-utils 140912432 | Run tests | pytest | ||||
138882 | 4 | test 27875 | sqlite-utils 140912432 | Install dependencies | pip install -e '.[test]' | ||||
138881 | 3 | test 27875 | sqlite-utils 140912432 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138880 | 2 | test 27875 | sqlite-utils 140912432 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}" } | |||
138879 | 1 | test 27875 | sqlite-utils 140912432 | actions/checkout@v3 | |||||
138878 | 1 | documentation-links 27874 | sqlite-utils 140912432 | readthedocs/actions/preview@v1 | { "project-slug": "sqlite-utils" } | ||||
138877 | 5 | Analyze 27873 | sqlite-utils 140912432 | github/codeql-action/analyze@v1 | Perform CodeQL Analysis | ||||
138876 | 4 | Analyze 27873 | sqlite-utils 140912432 | github/codeql-action/autobuild@v1 | Autobuild | ||||
138875 | 3 | Analyze 27873 | sqlite-utils 140912432 | github/codeql-action/init@v1 | Initialize CodeQL | { "languages": "${{ matrix.language }}" } | |||
138874 | 2 | Analyze 27873 | sqlite-utils 140912432 | git checkout HEAD^2 | ${{ github.event_name == 'pull_request' }} | ||||
138873 | 1 | Analyze 27873 | sqlite-utils 140912432 | actions/checkout@v2 | Checkout repository | { "fetch-depth": 2 } | |||
138872 | 2 | build 27872 | datasette 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | ||||
138871 | 1 | build 27872 | datasette 107914493 | actions/checkout@v2 | |||||
138870 | 2 | build 27871 | datasette 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | ||||
138869 | 1 | build 27871 | datasette 107914493 | actions/checkout@v2 | |||||
138868 | 10 | test 27870 | datasette 107914493 | Test DATASETTE_LOAD_PLUGINS | pip install datasette-init datasette-json-html tests/test-datasette-load-plugins.sh | ||||
138867 | 9 | test 27870 | datasette 107914493 | Check if blacken-docs needs to be run | # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst | matrix.python-version != '3.8' | |||
138866 | 8 | test 27870 | datasette 107914493 | Check if cog needs to be run | cog --check docs/*.rst | matrix.python-version != '3.8' | |||
138865 | 7 | test 27870 | datasette 107914493 | Install docs dependencies on Python 3.9+ | pip install -e '.[docs]' | matrix.python-version != '3.8' | |||
138864 | 6 | test 27870 | datasette 107914493 | Run tests | pytest -n auto -m "not serial" pytest -m "serial" # And the test that exercises a localhost HTTPS server tests/test_datasette_https_server.sh | ||||
138863 | 5 | test 27870 | datasette 107914493 | Install dependencies | pip install -e '.[test]' pip freeze | ||||
138862 | 4 | test 27870 | datasette 107914493 | Build extension for --load-extension test | (cd tests && gcc ext.c -fPIC -shared -o ext.so) | ||||
138861 | 3 | test 27870 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138860 | 2 | test 27870 | datasette 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}", "allow-prereleases": true } | |||
138859 | 1 | test 27870 | datasette 107914493 | actions/checkout@v3 | |||||
138858 | 5 | test 27869 | datasette 107914493 | Run test | ./test-in-pyodide-with-shot-scraper.sh | ||||
138857 | 4 | test 27869 | datasette 107914493 | Install Playwright dependencies | pip install shot-scraper build shot-scraper install | ||||
138856 | 3 | test 27869 | datasette 107914493 | actions/cache@v2 | Cache Playwright browsers | { "path": "~/.cache/ms-playwright/", "key": "${{ runner.os }}-browsers" } | |||
138855 | 2 | test 27869 | datasette 107914493 | actions/setup-python@v3 | Set up Python 3.10 | { "python-version": "3.10", "cache": "pip", "cache-dependency-path": "**/setup.py" } | |||
138854 | 1 | test 27869 | datasette 107914493 | actions/checkout@v3 | |||||
138853 | 6 | test 27868 | datasette 107914493 | codecov/codecov-action@v1 | Upload coverage report | { "token": "${{ secrets.CODECOV_TOKEN }}", "file": "coverage.xml" } | |||
138852 | 5 | test 27868 | datasette 107914493 | Run tests | ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah | ||||
138851 | 4 | test 27868 | datasette 107914493 | Install Python dependencies | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | ||||
138850 | 3 | test 27868 | datasette 107914493 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138849 | 2 | test 27868 | datasette 107914493 | actions/setup-python@v2 | Set up Python | { "python-version": 3.9 } | |||
138848 | 1 | test 27868 | datasette 107914493 | actions/checkout@v2 | Check out datasette | ||||
138847 | 4 | spellcheck 27867 | datasette 107914493 | Check spelling | codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt | ||||
138846 | 3 | spellcheck 27867 | datasette 107914493 | Install dependencies | pip install -e '.[docs]' | ||||
138845 | 2 | spellcheck 27867 | datasette 107914493 | actions/setup-python@v4 | Set up Python | { "python-version": "3.11", "cache": "pip", "cache-dependency-path": "**/setup.py" } | |||
138844 | 1 | spellcheck 27867 | datasette 107914493 | actions/checkout@v4 | |||||
138843 | 2 | deploy_docker 27866 | datasette 107914493 | Build and push to Docker Hub | docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG} | { "DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}", "VERSION_TAG": "${{ github.event.inputs.version_tag }}" } | |||
138842 | 1 | deploy_docker 27866 | datasette 107914493 | actions/checkout@v2 | |||||
138841 | 2 | deploy_docker 27865 | datasette 107914493 | Build and push to Docker Hub | sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest | { "DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}" } | |||
138840 | 1 | deploy_docker 27865 | datasette 107914493 | actions/checkout@v2 | |||||
138839 | 7 | deploy_static_docs 27864 | datasette 107914493 | Deploy stable-docs.datasette.io to Cloud Run | gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun docs.db \ --service=datasette-docs-stable | ||||
138838 | 6 | deploy_static_docs 27864 | datasette 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | { "version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}" } | |||
138837 | 5 | deploy_static_docs 27864 | datasette 107914493 | Build docs.db | cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. | ||||
138836 | 4 | deploy_static_docs 27864 | datasette 107914493 | Install dependencies | python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | ||||
138835 | 3 | deploy_static_docs 27864 | datasette 107914493 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n" } | |||
138834 | 2 | deploy_static_docs 27864 | datasette 107914493 | actions/setup-python@v2 | Set up Python | { "python-version": "3.9" } | |||
138833 | 1 | deploy_static_docs 27864 | datasette 107914493 | actions/checkout@v2 | |||||
138832 | 5 | deploy 27863 | datasette 107914493 | Publish | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } | |||
138831 | 4 | deploy 27863 | datasette 107914493 | Install dependencies | pip install setuptools wheel twine | ||||
138830 | 3 | deploy 27863 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n" } | |||
138829 | 2 | deploy 27863 | datasette 107914493 | actions/setup-python@v4 | Set up Python | { "python-version": "3.11" } | |||
138828 | 1 | deploy 27863 | datasette 107914493 | actions/checkout@v3 | |||||
138827 | 5 | test 27862 | datasette 107914493 | Run tests | pytest | ||||
138826 | 4 | test 27862 | datasette 107914493 | Install dependencies | pip install -e '.[test]' | ||||
138825 | 3 | test 27862 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | |||
138824 | 2 | test 27862 | datasette 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}" } | |||
138823 | 1 | test 27862 | datasette 107914493 | actions/checkout@v3 | |||||
138822 | 4 | prettier 27861 | datasette 107914493 | Run prettier | npm run prettier -- --check | ||||
138821 | 3 | prettier 27861 | datasette 107914493 | Install dependencies | npm ci | ||||
138820 | 2 | prettier 27861 | datasette 107914493 | actions/cache@v2 | Configure npm caching | { "path": "~/.npm", "key": "${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}", "restore-keys": "${{ runner.OS }}-npm-\n" } | |||
138819 | 1 | prettier 27861 | datasette 107914493 | actions/checkout@v2 | Check out repo | ||||
138818 | 1 | documentation-links 27860 | datasette 107914493 | readthedocs/actions/preview@v1 | { "project-slug": "datasette" } | ||||
138817 | 12 | deploy 27859 | datasette 107914493 | Deploy to docs as well (only for main) | # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest | ${{ github.ref == 'refs/heads/main' }} | |||
138816 | 11 | deploy 27859 | datasette 107914493 | Deploy to Cloud Run | gcloud config set run/region us-central1 gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures-metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install 'datasette-ephemeral-tables>=0.2.2' \ --service "datasette-latest$SUFFIX" \ --secret $LATEST_DATASETTE_SECRET | { "LATEST_DATASETTE_SECRET": "${{ secrets.LATEST_DATASETTE_SECRET }}" } | |||
138815 | 10 | deploy 27859 | datasette 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | { "version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}" } |
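Each block of rows above is one job, with seq giving a step's position within that job. A query sketch for pulling a single job back in execution order, using job 27879 from the top of the table as the example; the with and if columns are quoted because they collide with SQL keywords:

```sql
-- Steps for a single job in execution order, e.g. the sqlite-utils
-- "test" job 27879 whose rows appear at the top of this table
select seq, uses, name, "with", run, env, "if"
from steps
where job = 27879
order by seq;
```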
CREATE TABLE [steps] ( [id] INTEGER PRIMARY KEY, [seq] INTEGER, [job] INTEGER REFERENCES [jobs]([id]), [repo] INTEGER REFERENCES [repos]([id]), [uses] TEXT, [name] TEXT, [with] TEXT, [run] TEXT, [env] TEXT, [if] TEXT );
CREATE INDEX [idx_steps_repo] ON [steps] ([repo]);
CREATE INDEX [idx_steps_job] ON [steps] ([job]);
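The job and repo columns are foreign keys into the jobs and repos tables. A sketch of a join across all three; note that jobs.name and repos.name are assumptions inferred from how the job and repo cells are labelled in the table above ("test 27879", "sqlite-utils 140912432"), not columns confirmed by this page:

```sql
-- Join each step to its parent job and repository via the foreign keys above.
-- jobs.name and repos.name are assumed column names; check the actual schema
-- of the jobs and repos tables before relying on them.
select
  repos.name as repo,
  jobs.name as job,
  steps.seq,
  steps.name as step_name,
  coalesce(steps.uses, steps.run) as action_or_command
from steps
join jobs on jobs.id = steps.job
join repos on repos.id = steps.repo
order by repos.name, jobs.id, steps.seq;
```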