44 rows where repo = 107914493
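The 44 rows below correspond, roughly, to this SQL against the [steps] table whose schema appears at the end of the page (a sketch; 107914493 is the foreign-key value in [repos] for the datasette repository):

select *
from steps
where repo = 107914493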



id seq job repo uses name with run env if
32468 1 deploy 6655 datasette 107914493 actions/checkout@v2 Check out datasette        
32469 2 deploy 6655 datasette 107914493 actions/setup-python@v2 Set up Python
{
    "python-version": 3.9
}
     
32470 3 deploy 6655 datasette 107914493 actions/cache@v2 Configure pip caching
{
    "path": "~/.cache/pip",
    "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}",
    "restore-keys": "${{ runner.os }}-pip-\n"
}
     
32471 4 deploy 6655 datasette 107914493   Install Python dependencies
    python -m pip install --upgrade pip
    python -m pip install -e .[test]
    python -m pip install -e .[docs]
    python -m pip install sphinx-to-sqlite==0.1a1
32472 5 deploy 6655 datasette 107914493   Run tests   pytest    
32473 6 deploy 6655 datasette 107914493   Build fixtures.db   python tests/fixtures.py fixtures.db fixtures.json plugins --extra-db-filename extra_database.db    
32474 7 deploy 6655 datasette 107914493   Build docs.db
    cd docs
    sphinx-build -b xml . _build
    sphinx-to-sqlite ../docs.db _build
    cd ..
32475 8 deploy 6655 datasette 107914493 google-github-actions/setup-gcloud@master Set up Cloud Run
{
    "version": "275.0.0",
    "service_account_email": "${{ secrets.GCP_SA_EMAIL }}",
    "service_account_key": "${{ secrets.GCP_SA_KEY }}"
}
     
32476 9 deploy 6655 datasette 107914493   Deploy to Cloud Run
    gcloud config set run/region us-central1
    gcloud config set project datasette-222320
    datasette publish cloudrun fixtures.db extra_database.db \
      -m fixtures.json \
      --plugins-dir=plugins \
      --branch=$GITHUB_SHA \
      --version-note=$GITHUB_SHA \
      --extra-options="--setting template_debug 1 --setting trace_debug 1 --crossdb" \
      --install=pysqlite3-binary \
      --service=datasette-latest
    # Deploy docs.db to a different service
    datasette publish cloudrun docs.db \
      --branch=$GITHUB_SHA \
      --version-note=$GITHUB_SHA \
      --extra-options="--setting template_debug 1" \
      --service=datasette-docs-latest
32477 1 mirror 6656 datasette 107914493 zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 Mirror to "master"
{
    "target-branch": "master",
    "force": false
}
     
32478 2 mirror 6656 datasette 107914493 zofrex/mirror-branch@ea152f124954fa4eb26eea3fe0dbe313a3a08d94 Mirror to "main"
{
    "target-branch": "main",
    "force": false
}
     
32479 1 prettier 6657 datasette 107914493 actions/checkout@v2 Check out repo        
32480 2 prettier 6657 datasette 107914493 actions/cache@v2 Configure npm caching
{
    "path": "~/.npm",
    "key": "${{ runner.OS }}-npm-${{ hashFiles('**/package-lock.json') }}",
    "restore-keys": "${{ runner.OS }}-npm-\n"
}
     
32481 3 prettier 6657 datasette 107914493   Install dependencies   npm ci    
32482 4 prettier 6657 datasette 107914493   Run prettier   npm run prettier -- --check    
32483 1 test 6658 datasette 107914493 actions/checkout@v2          
32484 2 test 6658 datasette 107914493 actions/setup-python@v2 Set up Python ${{ matrix.python-version }}
{
    "python-version": "${{ matrix.python-version }}"
}
     
32485 3 test 6658 datasette 107914493 actions/cache@v2 Configure pip caching
{
    "path": "~/.cache/pip",
    "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}",
    "restore-keys": "${{ runner.os }}-pip-\n"
}
     
32486 4 test 6658 datasette 107914493   Install dependencies   pip install -e '.[test]'    
32487 5 test 6658 datasette 107914493   Run tests   pytest    
32488 1 deploy 6659 datasette 107914493 actions/checkout@v2          
32489 2 deploy 6659 datasette 107914493 actions/setup-python@v2 Set up Python
{
    "python-version": "3.9"
}
     
32490 3 deploy 6659 datasette 107914493 actions/cache@v2 Configure pip caching
{
    "path": "~/.cache/pip",
    "key": "${{ runner.os }}-publish-pip-${{ hashFiles('**/setup.py') }}",
    "restore-keys": "${{ runner.os }}-publish-pip-\n"
}
     
32491 4 deploy 6659 datasette 107914493   Install dependencies   pip install setuptools wheel twine    
32492 5 deploy 6659 datasette 107914493   Publish
    python setup.py sdist bdist_wheel
    twine upload dist/*
{
    "TWINE_USERNAME": "__token__",
    "TWINE_PASSWORD": "${{ secrets.PYPI_TOKEN }}"
}
 
32493 1 deploy_docker 6660 datasette 107914493 actions/checkout@v2          
32494 2 deploy_docker 6660 datasette 107914493   Build and push to Docker Hub
    sleep 60 # Give PyPI time to make the new release available
    docker login -u $DOCKER_USER -p $DOCKER_PASS
    export REPO=datasetteproject/datasette
    docker build -f Dockerfile \
      -t $REPO:${GITHUB_REF#refs/tags/} \
      --build-arg VERSION=${GITHUB_REF#refs/tags/} .
    docker tag $REPO:${GITHUB_REF#refs/tags/} $REPO:latest
    docker push $REPO:${VERSION_TAG}
    docker push $REPO:latest
{
    "DOCKER_USER": "${{ secrets.DOCKER_USER }}",
    "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}"
}
 
32495 1 deploy_docker 6661 datasette 107914493 actions/checkout@v2          
32496 2 deploy_docker 6661 datasette 107914493   Build and push to Docker Hub
    docker login -u $DOCKER_USER -p $DOCKER_PASS
    export REPO=datasetteproject/datasette
    docker build -f Dockerfile \
      -t $REPO:${VERSION_TAG} \
      --build-arg VERSION=${VERSION_TAG} .
    docker push $REPO:${VERSION_TAG}
{
    "DOCKER_USER": "${{ secrets.DOCKER_USER }}",
    "DOCKER_PASS": "${{ secrets.DOCKER_PASS }}",
    "VERSION_TAG": "${{ github.event.inputs.version_tag }}"
}
 
32497 1 test 6662 datasette 107914493 actions/checkout@v2 Check out datasette        
32498 2 test 6662 datasette 107914493 actions/setup-python@v2 Set up Python
{
    "python-version": 3.9
}
     
32499 3 test 6662 datasette 107914493 actions/cache@v2 Configure pip caching
{
    "path": "~/.cache/pip",
    "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}",
    "restore-keys": "${{ runner.os }}-pip-\n"
}
     
32500 4 test 6662 datasette 107914493   Install Python dependencies
    python -m pip install --upgrade pip
    python -m pip install -e .[test]
    python -m pip install pytest-cov
32501 5 test 6662 datasette 107914493   Run tests
    ls -lah
    cat .coveragerc
    pytest --cov=datasette --cov-config=.coveragerc --cov-report xml:coverage.xml --cov-report term
    ls -lah
32502 6 test 6662 datasette 107914493 codecov/codecov-action@v1 Upload coverage report
{
    "token": "${{ secrets.CODECOV_TOKEN }}",
    "file": "coverage.xml"
}
     
32503 1 test 6663 datasette 107914493 actions/checkout@v2          
32504 2 test 6663 datasette 107914493 actions/setup-python@v2 Set up Python ${{ matrix.python-version }}
{
    "python-version": "${{ matrix.python-version }}"
}
     
32505 3 test 6663 datasette 107914493 actions/cache@v2 Configure pip caching
{
    "path": "~/.cache/pip",
    "key": "${{ runner.os }}-pip-${{ hashFiles('**/setup.py') }}",
    "restore-keys": "${{ runner.os }}-pip-\n"
}
     
32506 4 test 6663 datasette 107914493   Install dependencies   pip install -e '.[test]'    
32507 5 test 6663 datasette 107914493   Run tests
    pytest -n auto -m "not serial"
    pytest -m "serial"
32508 1 build 6664 datasette 107914493 actions/checkout@v2          
32509 2 build 6664 datasette 107914493 mxschmitt/action-tmate@v3 Setup tmate session        
32510 1 build 6665 datasette 107914493 actions/checkout@v2          
32511 2 build 6665 datasette 107914493 mxschmitt/action-tmate@v3 Setup tmate session        
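Because each row carries both a [job] foreign key and a per-job [seq] number, the grouped, ordered listing above can be reproduced with a query along these lines (a sketch: it assumes the [jobs] table has a [name] column, which is what job labels such as "deploy 6655" rendered above suggest):

select jobs.name as job, steps.seq, steps.name, steps.uses, steps.run
from steps
join jobs on jobs.id = steps.job
where steps.repo = 107914493
order by steps.job, steps.seq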

CREATE TABLE [steps] (
   [id] INTEGER PRIMARY KEY,
   [seq] INTEGER,
   [job] INTEGER REFERENCES [jobs]([id]),
   [repo] INTEGER REFERENCES [repos]([id]),
   [uses] TEXT,
   [name] TEXT,
   [with] TEXT,
   [run] TEXT,
   [env] TEXT,
   [if] TEXT
);
CREATE INDEX [idx_steps_repo]
    ON [steps] ([repo]);
CREATE INDEX [idx_steps_job]
    ON [steps] ([job]);
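
As a usage sketch against this schema, a facet-style aggregate over the [uses] column shows which actions the workflows in this repo rely on most:

select uses, count(*) as step_count
from steps
where repo = 107914493
  and uses is not null
group by uses
order by step_count desc

Depending on how the data was loaded, steps without a [uses] value may be stored as empty strings rather than nulls, in which case the filter would need adjusting.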