steps
74 rows where repo = 107914493
Suggested facets: seq, job, uses, with, if
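The filtered view translates directly into SQL. A rough equivalent, using the `steps` schema shown at the bottom of this page:

```sql
-- Sketch: the "74 rows where repo = 107914493" filter as a plain query
select * from steps where repo = 107914493;
```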
id | seq | job | repo | uses | name | with | run | env | if |
---|---|---|---|---|---|---|---|---|---|
133396 | 1 | mirror 26772 | datasette 107914493 | zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 | Mirror to "master" | { "target-branch": "master", "force": false } | | | |
133397 | 2 | mirror 26772 | datasette 107914493 | zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 | Mirror to "main" | { "target-branch": "main", "force": false } | | | |
138539 | 1 | deploy-branch-preview 27805 | datasette 107914493 | actions/checkout@v3 | | | | | |
138540 | 2 | deploy-branch-preview 27805 | datasette 107914493 | actions/setup-python@v4 | Set up Python 3.11 | { "python-version": "3.11" } | | | |
138541 | 3 | deploy-branch-preview 27805 | datasette 107914493 | | Install dependencies | | pip install datasette-publish-vercel | | |
138542 | 4 | deploy-branch-preview 27805 | datasette 107914493 | | Deploy the preview | | export BRANCH="${{ github.event.inputs.branch }}" wget https://latest.datasette.io/fixtures.db datasette publish vercel fixtures.db \ --branch $BRANCH \ --project "datasette-preview-$BRANCH" \ --token $VERCEL_TOKEN \ --scope datasette \ --about "Preview of $BRANCH" \ --about_url "https://github.com/simonw/datasette/tree/$BRANCH" | { "VERCEL_TOKEN": "${{ secrets.BRANCH_PREVIEW_VERCEL_TOKEN }}" } | |
138543 | 1 | deploy 27806 | datasette 107914493 | actions/checkout@v3 | Check out datasette | | | | |
138544 | 2 | deploy 27806 | datasette 107914493 | actions/setup-python@v4 | Set up Python | { "python-version": "3.9" } | | | |
138545 | 3 | deploy 27806 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | | | |
138546 | 4 | deploy 27806 | datasette 107914493 | | Install Python dependencies | | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | | |
138547 | 5 | deploy 27806 | datasette 107914493 | | Run tests | | pytest -n auto -m "not serial" pytest -m "serial" | | ${{ github.ref == 'refs/heads/main' }} |
138548 | 6 | deploy 27806 | datasette 107914493 | | Build fixtures.db and other files needed to deploy the demo | | python tests/fixtures.py \ fixtures.db \ fixtures-config.json \ fixtures-metadata.json \ plugins \ --extra-db-filename extra_database.db | | |
138549 | 7 | deploy 27806 | datasette 107914493 | | Build docs.db | | cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. | | ${{ github.ref == 'refs/heads/main' }} |
138550 | 8 | deploy 27806 | datasette 107914493 | | Set up the alternate-route demo | | echo ' from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.get_database("fixtures2") db.route = "alternative-route" ' > plugins/alternative_route.py cp fixtures.db fixtures2.db | | |
138551 | 9 | deploy 27806 | datasette 107914493 | | And the counters writable canned query demo | | cat > plugins/counters.py <<EOF from datasette import hookimpl @hookimpl def startup(datasette): db = datasette.add_memory_database("counters") async def inner(): await db.execute_write("create table if not exists counters (name text primary key, value integer)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_a', 0)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_b', 0)") await db.execute_write("insert or ignore into counters (name, value) values ('counter_c', 0)") return inner @hookimpl def canned_queries(database): if database == "counters": queries = {} for name in ("counter_a", "counter_b", "counter_c"): queries["increment_{}".format(name)] = { "sql": "update counters set value = value + 1 where name = '{}'".format(name), "on_success_message_sql": "select 'Counter {name} incremented to ' || value from counters where name = '{name}'".format(name=name), "write": True, } queries["decrement_{}".format(name)] = { "sql": "update counters set value = value - 1 where name = '{}'".format(name), "on_success_message_sql": "select 'Counter {name} decremented to ' || value from counters where name = '{name}'".format(name=name), "write": True, } return queries EOF | | |
138552 | 10 | deploy 27806 | datasette 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | { "version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}" } | | | |
138553 | 11 | deploy 27806 | datasette 107914493 | | Deploy to Cloud Run | | gcloud config set run/region us-central1 gcloud config set project datasette-222320 export SUFFIX="-${GITHUB_REF#refs/heads/}" export SUFFIX=${SUFFIX#-main} # Replace 1.0 with one-dot-zero in SUFFIX export SUFFIX=${SUFFIX//1.0/one-dot-zero} datasette publish cloudrun fixtures.db fixtures2.db extra_database.db \ -m fixtures-metadata.json \ --plugins-dir=plugins \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \ --install 'datasette-ephemeral-tables>=0.2.2' \ --service "datasette-latest$SUFFIX" \ --secret $LATEST_DATASETTE_SECRET | { "LATEST_DATASETTE_SECRET": "${{ secrets.LATEST_DATASETTE_SECRET }}" } | |
138554 | 12 | deploy 27806 | datasette 107914493 | | Deploy to docs as well (only for main) | | # Deploy docs.db to a different service datasette publish cloudrun docs.db \ --branch=$GITHUB_SHA \ --version-note=$GITHUB_SHA \ --extra-options="--setting template_debug 1" \ --service=datasette-docs-latest | | ${{ github.ref == 'refs/heads/main' }} |
138555 | 1 | documentation-links 27807 | datasette 107914493 | readthedocs/actions/preview@v1 | | { "project-slug": "datasette" } | | | |
138556 | 1 | prettier 27808 | datasette 107914493 | actions/checkout@v2 | Check out repo | | | | |
138557 | 2 | prettier 27808 | datasette 107914493 | actions/cache@v2 | Configure npm caching | { "path": "~/.npm", "key": "${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}", "restore-keys": "${{ runner.OS }}-npm-\n" } | | | |
138558 | 3 | prettier 27808 | datasette 107914493 | | Install dependencies | | npm ci | | |
138559 | 4 | prettier 27808 | datasette 107914493 | | Run prettier | | npm run prettier -- --check | | |
138560 | 1 | test 27809 | datasette 107914493 | actions/checkout@v3 | | | | | |
138561 | 2 | test 27809 | datasette 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}" } | | | |
138562 | 3 | test 27809 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | | | |
138563 | 4 | test 27809 | datasette 107914493 | | Install dependencies | | pip install -e '.[test]' | | |
138564 | 5 | test 27809 | datasette 107914493 | | Run tests | | pytest | | |
138565 | 1 | deploy 27810 | datasette 107914493 | actions/checkout@v3 | | | | | |
138566 | 2 | deploy 27810 | datasette 107914493 | actions/setup-python@v4 | Set up Python | { "python-version": "3.11" } | | | |
138567 | 3 | deploy 27810 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n" } | | | |
138568 | 4 | deploy 27810 | datasette 107914493 | | Install dependencies | | pip install setuptools wheel twine | | |
138569 | 5 | deploy 27810 | datasette 107914493 | | Publish | | python setup.py sdist bdist_wheel twine upload dist/* | { "TWINE_USERNAME": "__token__", "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}" } | |
138570 | 1 | deploy_static_docs 27811 | datasette 107914493 | actions/checkout@v2 | | | | | |
138571 | 2 | deploy_static_docs 27811 | datasette 107914493 | actions/setup-python@v2 | Set up Python | { "python-version": "3.9" } | | | |
138572 | 3 | deploy_static_docs 27811 | datasette 107914493 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-publish-pip-\n" } | | | |
138573 | 4 | deploy_static_docs 27811 | datasette 107914493 | | Install dependencies | | python -m pip install -e .[docs] python -m pip install sphinx-to-sqlite==0.1a1 | | |
138574 | 5 | deploy_static_docs 27811 | datasette 107914493 | | Build docs.db | | cd docs DISABLE_SPHINX_INLINE_TABS=1 sphinx-build -b xml . _build sphinx-to-sqlite ../docs.db _build cd .. | | |
138575 | 6 | deploy_static_docs 27811 | datasette 107914493 | google-github-actions/setup-gcloud@v0 | Set up Cloud Run | { "version": "318.0.0", "service_account_email": "${{ secrets.GCP_SA_EMAIL }}", "service_account_key": "${{ secrets.GCP_SA_KEY }}" } | | | |
138576 | 7 | deploy_static_docs 27811 | datasette 107914493 | | Deploy stable-docs.datasette.io to Cloud Run | | gcloud config set run/region us-central1 gcloud config set project datasette-222320 datasette publish cloudrun docs.db \ --service=datasette-docs-stable | | |
138577 | 1 | deploy_docker 27812 | datasette 107914493 | actions/checkout@v2 | | | | | |
138578 | 2 | deploy_docker 27812 | datasette 107914493 | | Build and push to Docker Hub | | sleep 60 # Give PyPI time to make the new release available docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${GITHUB_REF#refs/tags/} \ --build-arg VERSION=${GITHUB_REF#refs/tags/} . docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest docker push $REPO:${GITHUB_REF#refs/tags/} docker push $REPO:latest | { "DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}" } | |
138579 | 1 | deploy_docker 27813 | datasette 107914493 | actions/checkout@v2 | | | | | |
138580 | 2 | deploy_docker 27813 | datasette 107914493 | | Build and push to Docker Hub | | docker login -u $DOCKER_USER -p $DOCKER_PASS export REPO=datasetteproject/datasette docker build -f Dockerfile \ -t $REPO:${VERSION_TAG} \ --build-arg VERSION=${VERSION_TAG} . docker push $REPO:${VERSION_TAG} | { "DOCKER_USER": "${{ secrets.DOCKER_USER }}", "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}", "VERSION_TAG": "${{ github.event.inputs.version_tag }}" } | |
138581 | 1 | spellcheck 27814 | datasette 107914493 | actions/checkout@v2 | | | | | |
138582 | 2 | spellcheck 27814 | datasette 107914493 | actions/setup-python@v2 | Set up Python ${{ matrix.python-version }} | { "python-version": 3.11 } | | | |
138583 | 3 | spellcheck 27814 | datasette 107914493 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | | | |
138584 | 4 | spellcheck 27814 | datasette 107914493 | | Install dependencies | | pip install -e '.[docs]' | | |
138585 | 5 | spellcheck 27814 | datasette 107914493 | | Check spelling | | codespell README.md --ignore-words docs/codespell-ignore-words.txt codespell docs/*.rst --ignore-words docs/codespell-ignore-words.txt codespell datasette -S datasette/static --ignore-words docs/codespell-ignore-words.txt | | |
138586 | 1 | test 27815 | datasette 107914493 | actions/checkout@v2 | Check out datasette | | | | |
138587 | 2 | test 27815 | datasette 107914493 | actions/setup-python@v2 | Set up Python | { "python-version": 3.9 } | | | |
138588 | 3 | test 27815 | datasette 107914493 | actions/cache@v2 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | | | |
138589 | 4 | test 27815 | datasette 107914493 | | Install Python dependencies | | python -m pip install --upgrade pip python -m pip install -e .[test] python -m pip install pytest-cov | | |
138590 | 5 | test 27815 | datasette 107914493 | | Run tests | | ls -lah cat .coveragerc pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term ls -lah | | |
138591 | 6 | test 27815 | datasette 107914493 | codecov/codecov-action@v1 | Upload coverage report | { "token": "${{ secrets.CODECOV_TOKEN }}", "file": "coverage.xml" } | | | |
138592 | 1 | test 27816 | datasette 107914493 | actions/checkout@v3 | | | | | |
138593 | 2 | test 27816 | datasette 107914493 | actions/setup-python@v3 | Set up Python 3.10 | { "python-version": "3.10", "cache": "pip", "cache-dependency-path": "**/setup.py" } | | | |
138594 | 3 | test 27816 | datasette 107914493 | actions/cache@v2 | Cache Playwright browsers | { "path": "~/.cache/ms-playwright/", "key": "${{ runner.os }}-browsers" } | | | |
138595 | 4 | test 27816 | datasette 107914493 | | Install Playwright dependencies | | pip install shot-scraper build shot-scraper install | | |
138596 | 5 | test 27816 | datasette 107914493 | | Run test | | ./test-in-pyodide-with-shot-scraper.sh | | |
138597 | 1 | test 27817 | datasette 107914493 | actions/checkout@v3 | | | | | |
138598 | 2 | test 27817 | datasette 107914493 | actions/setup-python@v4 | Set up Python ${{ matrix.python-version }} | { "python-version": "${{ matrix.python-version }}", "allow-prereleases": true } | | | |
138599 | 3 | test 27817 | datasette 107914493 | actions/cache@v3 | Configure pip caching | { "path": "~/.cache/pip", "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}", "restore-keys": "${{ runner.os }}-pip-\n" } | | | |
138600 | 4 | test 27817 | datasette 107914493 | | Build extension for --load-extension test | | (cd tests && gcc ext.c -fPIC -shared -o ext.so) | | |
138601 | 5 | test 27817 | datasette 107914493 | | Install dependencies | | pip install -e '.[test]' pip freeze | | |
138602 | 6 | test 27817 | datasette 107914493 | | Run tests | | pytest -n auto -m "not serial" pytest -m "serial" # And the test that exercises a localhost HTTPS server tests/test_datasette_https_server.sh | | |
138603 | 7 | test 27817 | datasette 107914493 | | Install docs dependencies on Python 3.9+ | | pip install -e '.[docs]' | | matrix.python-version != '3.8' |
138604 | 8 | test 27817 | datasette 107914493 | | Check if cog needs to be run | | cog --check docs/*.rst | | matrix.python-version != '3.8' |
138605 | 9 | test 27817 | datasette 107914493 | | Check if blacken-docs needs to be run | | # This fails on syntax errors, or a diff was applied blacken-docs -l 60 docs/*.rst | | matrix.python-version != '3.8' |
138606 | 10 | test 27817 | datasette 107914493 | | Test DATASETTE_LOAD_PLUGINS | | pip install datasette-init datasette-json-html tests/test-datasette-load-plugins.sh | | |
138607 | 1 | build 27818 | datasette 107914493 | actions/checkout@v2 | | | | | |
138608 | 2 | build 27818 | datasette 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | | | | |
138609 | 1 | build 27819 | datasette 107914493 | actions/checkout@v2 | | | | | |
138610 | 2 | build 27819 | datasette 107914493 | mxschmitt/action-tmate@v3 | Setup tmate session | | | | |
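A facet-style aggregate is often the quickest way to summarize a table like this. For example, a sketch of a query counting how many steps use each reusable action (`uses` is null for plain run-only steps):

```sql
-- Which actions appear most often in this repo's workflow steps?
select uses, count(*) as n
from steps
where repo = 107914493 and uses is not null
group by uses
order by n desc;
```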
```sql
CREATE TABLE [steps] (
   [id] INTEGER PRIMARY KEY,
   [seq] INTEGER,
   [job] INTEGER REFERENCES [jobs]([id]),
   [repo] INTEGER REFERENCES [repos]([id]),
   [uses] TEXT,
   [name] TEXT,
   [with] TEXT,
   [run] TEXT,
   [env] TEXT,
   [if] TEXT
);
CREATE INDEX [idx_steps_repo] ON [steps] ([repo]);
CREATE INDEX [idx_steps_job] ON [steps] ([job]);
```
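Note that the schema wraps identifiers in square brackets because the `with` and `if` columns collide with SQL keywords, so ad-hoc queries need the same quoting. A sketch that pulls out the steps gated to the main branch (single quotes inside a SQL string literal are escaped by doubling them):

```sql
-- [if] must be quoted: IF is a SQLite keyword
select [job], [name], [if]
from steps
where repo = 107914493
  and [if] = '${{ github.ref == ''refs/heads/main'' }}';
```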