[^/]+?$)"", table),
]
```
I'll use a `/t/` prefix for the moment, but this is probably something I'll fix in Datasette itself later.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",648435885,
https://github.com/simonw/datasette/issues/1519#issuecomment-974420619,https://api.github.com/repos/simonw/datasette/issues/1519,974420619,IC_kwDOBm6k_c46FHqL,9599,2021-11-19T20:25:19Z,2021-11-19T20:25:19Z,OWNER,"The implementations of `path_with_removed_args` and `path_with_format`:
https://github.com/simonw/datasette/blob/85849935292e500ab7a99f8fe0f9546e903baad3/datasette/utils/__init__.py#L228-L254
https://github.com/simonw/datasette/blob/85849935292e500ab7a99f8fe0f9546e903baad3/datasette/utils/__init__.py#L710-L729","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058790545,
https://github.com/simonw/datasette/issues/1519#issuecomment-974398399,https://api.github.com/repos/simonw/datasette/issues/1519,974398399,IC_kwDOBm6k_c46FCO_,9599,2021-11-19T20:08:20Z,2021-11-19T20:22:02Z,OWNER,"The relevant test is this one: https://github.com/simonw/datasette/blob/30255055150d7bc0affc8156adc18295495020ff/tests/test_html.py#L1608-L1649
I modified that test to add `""/fixtures/facetable?sql=select+1""` as one of the tested paths, and dropped in an `assert False` to pause it in the debugger:
```
@pytest.mark.parametrize(
""path"",
[
""/"",
""/fixtures"",
""/fixtures/compound_three_primary_keys"",
""/fixtures/compound_three_primary_keys/a,a,a"",
""/fixtures/paginated_view"",
""/fixtures/facetable"",
""/fixtures?sql=select+1"",
],
)
def test_base_url_config(app_client_base_url_prefix, path):
client = app_client_base_url_prefix
response = client.get(""/prefix/"" + path.lstrip(""/""))
soup = Soup(response.body, ""html.parser"")
if path == ""/fixtures?sql=select+1"":
> assert False
E assert False
```
BUT... in the debugger:
```
(Pdb) print(soup)
...
This data as
json,
testall,
testnone,
testresponse,
CSV
```
Those all have the correct prefix! But that's not what I'm seeing in my `Dockerfile` reproduction of the issue.
Something very weird is going on here.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058790545,
https://github.com/simonw/datasette/issues/1520#issuecomment-974308215,https://api.github.com/repos/simonw/datasette/issues/1520,974308215,IC_kwDOBm6k_c46EsN3,9599,2021-11-19T18:29:26Z,2021-11-19T18:29:26Z,OWNER,"The solution that jumps to mind first is that it would be neat if routes could return something that meant ""actually my bad, I can't handle this after all - move to the next one in the list"".
A related idea: it might be useful for custom views like my one here to say ""no actually call the default view for this, but give me back the response so I can modify it in some way"". Kind of like Django or ASGI middleware.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058803238,
https://github.com/simonw/datasette/issues/1521#issuecomment-974336020,https://api.github.com/repos/simonw/datasette/issues/1521,974336020,IC_kwDOBm6k_c46EzAU,9599,2021-11-19T19:10:48Z,2021-11-19T19:10:48Z,OWNER,"There's a promising looking minimal Apache 2 proxy config here: https://stackoverflow.com/questions/26474476/minimal-configuration-for-apache-reverse-proxy-in-docker-container
","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058815557,
https://github.com/simonw/datasette/issues/1521#issuecomment-974433520,https://api.github.com/repos/simonw/datasette/issues/1521,974433520,IC_kwDOBm6k_c46FKzw,9599,2021-11-19T20:32:29Z,2021-11-19T20:32:29Z,OWNER,This configuration works great.,"{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058815557,
https://github.com/simonw/datasette/issues/878#issuecomment-973635157,https://api.github.com/repos/simonw/datasette/issues/878,973635157,IC_kwDOBm6k_c46CH5V,9599,2021-11-19T01:07:08Z,2021-11-19T01:07:08Z,OWNER,"This exercise is proving so useful in getting my head around how the enormous and complex `TableView` class works again.
Here's where I've got to now - I'm systematically working through the variables that are returned for HTML and for JSON copying across code to get it to work:
```python
from datasette.database import QueryInterrupted
from datasette.utils import escape_sqlite
from datasette.utils.asgi import Response, NotFound, Forbidden
from datasette.views.base import DatasetteError
from datasette import hookimpl
from asyncinject import AsyncInject, inject
from pprint import pformat
class Table(AsyncInject):
@inject
async def database(self, request, datasette):
# TODO: all that nasty hash resolving stuff can go here
db_name = request.url_vars[""db_name""]
try:
db = datasette.databases[db_name]
except KeyError:
raise NotFound(f""Database '{db_name}' does not exist"")
return db
@inject
async def table_and_format(self, request, database, datasette):
table_and_format = request.url_vars[""table_and_format""]
# TODO: be a lot smarter here
if ""."" in table_and_format:
return table_and_format.split(""."", 2)
else:
return table_and_format, ""html""
@inject
async def main(self, request, database, table_and_format, datasette):
# TODO: if this is actually a canned query, dispatch to it
table, format = table_and_format
is_view = bool(await database.get_view_definition(table))
table_exists = bool(await database.table_exists(table))
if not is_view and not table_exists:
raise NotFound(f""Table not found: {table}"")
await check_permissions(
datasette,
request,
[
(""view-table"", (database.name, table)),
(""view-database"", database.name),
""view-instance"",
],
)
private = not await datasette.permission_allowed(
None, ""view-table"", (database.name, table), default=True
)
pks = await database.primary_keys(table)
table_columns = await database.table_columns(table)
specified_columns = await columns_to_select(datasette, database, table, request)
select_specified_columns = "", "".join(
escape_sqlite(t) for t in specified_columns
)
select_all_columns = "", "".join(escape_sqlite(t) for t in table_columns)
use_rowid = not pks and not is_view
if use_rowid:
select_specified_columns = f""rowid, {select_specified_columns}""
select_all_columns = f""rowid, {select_all_columns}""
order_by = ""rowid""
order_by_pks = ""rowid""
else:
order_by_pks = "", "".join([escape_sqlite(pk) for pk in pks])
order_by = order_by_pks
if is_view:
order_by = """"
nocount = request.args.get(""_nocount"")
nofacet = request.args.get(""_nofacet"")
if request.args.get(""_shape"") in (""array"", ""object""):
nocount = True
nofacet = True
# Next, a TON of SQL to build where_params and filters and suchlike
# skipping that and jumping straight to...
where_clauses = []
where_clause = """"
if where_clauses:
where_clause = f""where {' and '.join(where_clauses)} ""
from_sql = ""from {table_name} {where}"".format(
table_name=escape_sqlite(table),
where=(""where {} "".format("" and "".join(where_clauses)))
if where_clauses
else """",
)
from_sql_params ={}
params = {}
count_sql = f""select count(*) {from_sql}""
sql_no_order_no_limit = (
""select {select_all_columns} from {table_name} {where}"".format(
select_all_columns=select_all_columns,
table_name=escape_sqlite(table),
where=where_clause,
)
)
page_size = 100
offset = "" offset 0""
sql = ""select {select_specified_columns} from {table_name} {where}{order_by} limit {page_size}{offset}"".format(
select_specified_columns=select_specified_columns,
table_name=escape_sqlite(table),
where=where_clause,
order_by=order_by,
page_size=page_size + 1,
offset=offset,
)
# Fetch rows
results = await database.execute(sql, params, truncate=True)
columns = [r[0] for r in results.description]
rows = list(results.rows)
# Fetch count
filtered_table_rows_count = None
if count_sql:
try:
count_rows = list(await database.execute(count_sql, from_sql_params))
filtered_table_rows_count = count_rows[0][0]
except QueryInterrupted:
pass
vars = {
""json"": {
# THIS STUFF is from the regular JSON
""database"": database.name,
""table"": table,
""is_view"": is_view,
# ""human_description_en"": human_description_en,
""rows"": rows[:page_size],
""truncated"": results.truncated,
""filtered_table_rows_count"": filtered_table_rows_count,
# ""expanded_columns"": expanded_columns,
# ""expandable_columns"": expandable_columns,
""columns"": columns,
""primary_keys"": pks,
# ""units"": units,
""query"": {""sql"": sql, ""params"": params},
# ""facet_results"": facet_results,
# ""suggested_facets"": suggested_facets,
# ""next"": next_value and str(next_value) or None,
# ""next_url"": next_url,
""private"": private,
""allow_execute_sql"": await datasette.permission_allowed(
request.actor, ""execute-sql"", database, default=True
),
},
""html"": {
# ... this is the HTML special stuff
# ""table_actions"": table_actions,
# ""supports_search"": bool(fts_table),
# ""search"": search or """",
""use_rowid"": use_rowid,
# ""filters"": filters,
# ""display_columns"": display_columns,
# ""filter_columns"": filter_columns,
# ""display_rows"": display_rows,
# ""facets_timed_out"": facets_timed_out,
# ""sorted_facet_results"": sorted(
# facet_results.values(),
# key=lambda f: (len(f[""results""]), f[""name""]),
# reverse=True,
# ),
# ""show_facet_counts"": special_args.get(""_facet_size"") == ""max"",
# ""extra_wheres_for_ui"": extra_wheres_for_ui,
# ""form_hidden_args"": form_hidden_args,
# ""is_sortable"": any(c[""sortable""] for c in display_columns),
# ""path_with_replaced_args"": path_with_replaced_args,
# ""path_with_removed_args"": path_with_removed_args,
# ""append_querystring"": append_querystring,
""request"": request,
# ""sort"": sort,
# ""sort_desc"": sort_desc,
""disable_sort"": is_view,
# ""custom_table_templates"": [
# f""_table-{to_css_class(database)}-{to_css_class(table)}.html"",
# f""_table-table-{to_css_class(database)}-{to_css_class(table)}.html"",
# ""_table.html"",
# ],
# ""metadata"": metadata,
# ""view_definition"": await db.get_view_definition(table),
# ""table_definition"": await db.get_table_definition(table),
},
}
# I'm just trying to get HTML to work for the moment
if format == ""json"":
return Response.json(dict(vars, locals=locals()), default=repr)
else:
return Response.html(repr(vars[""html""]))
async def view(self, request, datasette):
return await self.main(request=request, datasette=datasette)
@hookimpl
def register_routes():
return [
        (r""/t/(?P<db_name>[^/]+)/(?P<table_and_format>[^/]+?$)"", Table().view),
]
async def check_permissions(datasette, request, permissions):
""""""permissions is a list of (action, resource) tuples or 'action' strings""""""
for permission in permissions:
if isinstance(permission, str):
action = permission
resource = None
elif isinstance(permission, (tuple, list)) and len(permission) == 2:
action, resource = permission
else:
assert (
False
), ""permission should be string or tuple of two items: {}"".format(
repr(permission)
)
ok = await datasette.permission_allowed(
request.actor,
action,
resource=resource,
default=None,
)
if ok is not None:
if ok:
return
else:
raise Forbidden(action)
async def columns_to_select(datasette, database, table, request):
table_columns = await database.table_columns(table)
pks = await database.primary_keys(table)
columns = list(table_columns)
if ""_col"" in request.args:
columns = list(pks)
_cols = request.args.getlist(""_col"")
bad_columns = [column for column in _cols if column not in table_columns]
if bad_columns:
raise DatasetteError(
""_col={} - invalid columns"".format("", "".join(bad_columns)),
status=400,
)
# De-duplicate maintaining order:
columns.extend(dict.fromkeys(_cols))
if ""_nocol"" in request.args:
# Return all columns EXCEPT these
bad_columns = [
column
for column in request.args.getlist(""_nocol"")
if (column not in table_columns) or (column in pks)
]
if bad_columns:
raise DatasetteError(
""_nocol={} - invalid columns"".format("", "".join(bad_columns)),
status=400,
)
tmp_columns = [
column for column in columns if column not in request.args.getlist(""_nocol"")
]
columns = tmp_columns
return columns
```","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",648435885,
https://github.com/simonw/datasette/issues/1522#issuecomment-974506401,https://api.github.com/repos/simonw/datasette/issues/1522,974506401,IC_kwDOBm6k_c46Fcmh,9599,2021-11-19T22:11:51Z,2021-11-19T22:11:51Z,OWNER,"This is frustrating: I have the following Dockerfile:
```dockerfile
FROM python:3-alpine
RUN apk add --no-cache \
apache2 \
apache2-proxy \
bash
RUN pip install datasette
ENV TINI_VERSION v0.18.0
ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini-static /tini
RUN chmod +x /tini
# Append this to the end of the default httpd.conf file
RUN echo $'ServerName localhost\n\
\n\
\n\
Order deny,allow\n\
Allow from all\n\
\n\
\n\
ProxyPass /prefix/ http://localhost:8001/\n\
Header add X-Proxied-By ""Apache2""' >> /etc/apache2/httpd.conf
RUN echo $'Datasette' > /var/www/localhost/htdocs/index.html
WORKDIR /app
ADD https://latest.datasette.io/fixtures.db /app/fixtures.db
RUN echo $'#!/usr/bin/env bash\n\
set -e\n\
\n\
httpd -D FOREGROUND &\n\
datasette fixtures.db --setting base_url ""/prefix/"" -h 0.0.0.0 -p 8001 &\n\
\n\
wait -n' > /app/start.sh
RUN chmod +x /app/start.sh
EXPOSE 80
ENTRYPOINT [""/tini"", ""--"", ""/app/start.sh""]
```
It works fine when I run it locally:
```
docker build -t datasette-apache-proxy-demo .
docker run -p 5000:80 datasette-apache-proxy-demo
```
But when I deploy it to Cloud Run with the following script:
```bash
#!/bin/bash
# https://til.simonwillison.net/cloudrun/ship-dockerfile-to-cloud-run
NAME=""datasette-apache-proxy-demo""
PROJECT=$(gcloud config get-value project)
IMAGE=""gcr.io/$PROJECT/$NAME""
gcloud builds submit --tag $IMAGE
gcloud run deploy \
--allow-unauthenticated \
--platform=managed \
--image $IMAGE $NAME \
--port 80
```
It serves the `/` page successfully, but hits to `/prefix/` return the following 503 error:
> Service Unavailable
>
> The server is temporarily unable to service your request due to maintenance downtime or capacity problems. Please try again later.
>
> Apache/2.4.51 (Unix) Server at datasette-apache-proxy-demo-j7hipcg4aq-uc.a.run.app Port 80
Cloud Run logs:
","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058896236,
https://github.com/simonw/datasette/issues/878#issuecomment-973568285,https://api.github.com/repos/simonw/datasette/issues/878,973568285,IC_kwDOBm6k_c46B3kd,9599,2021-11-19T00:29:20Z,2021-11-19T00:29:20Z,OWNER,"This is working!
```python
from datasette.utils.asgi import Response
from datasette import hookimpl
import html
from asyncinject import AsyncInject, inject
class Table(AsyncInject):
@inject
async def database(self, request):
return request.url_vars[""db_name""]
@inject
async def main(self, request, database):
return Response.html(""Database: {}"".format(
html.escape(database)
))
async def view(self, request):
return await self.main(request=request)
@hookimpl
def register_routes():
return [
        (r""/t/(?P<db_name>[^/]+)/(?P<table_and_format>[^/]+?$)"", Table().view),
]
```
This project will definitely show me if I actually like the `asyncinject` patterns or not.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",648435885,
https://github.com/simonw/datasette/issues/1521#issuecomment-974321391,https://api.github.com/repos/simonw/datasette/issues/1521,974321391,IC_kwDOBm6k_c46Evbv,9599,2021-11-19T18:49:15Z,2021-11-19T18:57:18Z,OWNER,"This pattern looks like it can help: https://ahmet.im/blog/cloud-run-multiple-processes-easy-way/ - see example in https://github.com/ahmetb/multi-process-container-lazy-solution
I got that demo working locally like this:
```bash
cd /tmp
git clone https://github.com/ahmetb/multi-process-container-lazy-solution
cd multi-process-container-lazy-solution
docker build -t multi-process-container-lazy-solution .
docker run -p 5000:8080 --rm multi-process-container-lazy-solution
```
I want to use `apache2` rather than `nginx` though. I found a few relevant examples of Apache in Alpine:
- https://github.com/Hacking-Lab/alpine-apache2-reverse-proxy/blob/master/Dockerfile
- https://www.sentiatechblog.com/running-apache-in-a-docker-container
- https://github.com/search?l=Dockerfile&q=alpine+apache2&type=code
","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058815557,
https://github.com/simonw/datasette/issues/1518#issuecomment-974285803,https://api.github.com/repos/simonw/datasette/issues/1518,974285803,IC_kwDOBm6k_c46Emvr,9599,2021-11-19T17:56:48Z,2021-11-19T18:14:30Z,OWNER,"Very confused by this piece of code here: https://github.com/simonw/datasette/blob/1c13e1af0664a4dfb1e69714c56523279cae09e4/datasette/views/table.py#L37-L63
I added it in https://github.com/simonw/datasette/commit/754836eef043676e84626c4fd3cb993eed0d2976 - in the new world that should probably be replaced by pure JSON.
Aha - this comment explains it: https://github.com/simonw/datasette/issues/521#issuecomment-505279560
> I think the trick is to redefine what a ""cell_row"" is. Each row is currently a list of cells:
>
> https://github.com/simonw/datasette/blob/6341f8cbc7833022012804dea120b838ec1f6558/datasette/views/table.py#L159-L163
>
> I can redefine the row (the `cells` variable in the above example) as a thing-that-iterates-cells (hence behaving like a list) but that also supports `__getitem__` access for looking up cell values if you know the name of the column.
The goal was to support neater custom templates like this:
```html+jinja
{% for row in display_rows %}
{{ row[""First_Name""] }} {{ row[""Last_Name""] }}
...
```
This may be an argument for continuing to allow non-JSON-objects through to the HTML templates. Need to think about that a bit more.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058072543,
https://github.com/simonw/datasette/issues/878#issuecomment-973542284,https://api.github.com/repos/simonw/datasette/issues/878,973542284,IC_kwDOBm6k_c46BxOM,9599,2021-11-19T00:16:44Z,2021-11-19T00:16:44Z,OWNER,"```
Development % cookiecutter gh:simonw/datasette-plugin
You've downloaded /Users/simon/.cookiecutters/datasette-plugin before. Is it okay to delete and re-download it? [yes]: yes
plugin_name []: table-new
description []: New implementation of TableView, see https://github.com/simonw/datasette/issues/878
hyphenated [table-new]:
underscored [table_new]:
github_username []: simonw
author_name []: Simon Willison
include_static_directory []:
include_templates_directory []:
```","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",648435885,
https://github.com/simonw/sqlite-utils/issues/342#issuecomment-973820125,https://api.github.com/repos/simonw/sqlite-utils/issues/342,973820125,IC_kwDOCGYnMM46C1Dd,9599,2021-11-19T07:25:55Z,2021-11-19T07:25:55Z,OWNER,"`alter=True` doesn't make sense to support here either, because `.lookup()` already adds missing columns: https://github.com/simonw/sqlite-utils/blob/3b8abe608796e99e4ffc5f3f4597a85e605c0e9b/sqlite_utils/db.py#L2743-L2746","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058196641,
https://github.com/simonw/datasette/issues/1519#issuecomment-974309591,https://api.github.com/repos/simonw/datasette/issues/1519,974309591,IC_kwDOBm6k_c46EsjX,9599,2021-11-19T18:31:32Z,2021-11-19T18:31:32Z,OWNER,"`base_url` has been a source of so many bugs like this! I often find them quite hard to replicate, likely because I haven't made myself a good Apache `mod_proxy` testing environment yet.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058790545,
https://github.com/simonw/sqlite-utils/issues/342#issuecomment-973801650,https://api.github.com/repos/simonw/sqlite-utils/issues/342,973801650,IC_kwDOCGYnMM46Cwiy,9599,2021-11-19T06:55:56Z,2021-11-19T06:55:56Z,OWNER,"`pk` needs to be an explicit argument to `.lookup()`. The rest could be `**kwargs` passed through to `.insert()`, like this hacked together version (docstring removed for brevity):
```python
def lookup(
self,
lookup_values: Dict[str, Any],
extra_values: Optional[Dict[str, Any]] = None,
pk=""id"",
**insert_kwargs,
):
""""""
assert isinstance(lookup_values, dict)
if extra_values is not None:
assert isinstance(extra_values, dict)
combined_values = dict(lookup_values)
if extra_values is not None:
combined_values.update(extra_values)
if self.exists():
self.add_missing_columns([combined_values])
unique_column_sets = [set(i.columns) for i in self.indexes]
if set(lookup_values.keys()) not in unique_column_sets:
self.create_index(lookup_values.keys(), unique=True)
wheres = [""[{}] = ?"".format(column) for column in lookup_values]
rows = list(
self.rows_where(
"" and "".join(wheres), [value for _, value in lookup_values.items()]
)
)
try:
return rows[0][pk]
except IndexError:
return self.insert(combined_values, pk=pk, **insert_kwargs).last_pk
else:
pk = self.insert(combined_values, pk=pk, **insert_kwargs).last_pk
self.create_index(lookup_values.keys(), unique=True)
return pk
```
I think I'll explicitly list the parameters, mainly so they can be typed and covered by automatic documentation.
I do worry that I'll add more keyword arguments to `.insert()` in the future and forget to mirror them to `.lookup()` though.","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058196641,
https://github.com/simonw/datasette/issues/1519#issuecomment-974450232,https://api.github.com/repos/simonw/datasette/issues/1519,974450232,IC_kwDOBm6k_c46FO44,9599,2021-11-19T20:41:53Z,2021-11-19T20:42:19Z,OWNER,https://docs.datasette.io/en/stable/deploying.html#apache-proxy-configuration says I should use `ProxyPreserveHost on`.,"{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058790545,
https://github.com/simonw/datasette/issues/1521#issuecomment-974371116,https://api.github.com/repos/simonw/datasette/issues/1521,974371116,IC_kwDOBm6k_c46E7ks,9599,2021-11-19T19:45:47Z,2021-11-19T19:45:47Z,OWNER,"https://github.com/krallin/tini says:
> *NOTE: If you are using Docker 1.13 or greater, Tini is included in Docker itself. This includes all versions of Docker CE. To enable Tini, just [pass the `--init` flag to `docker run`](https://docs.docker.com/engine/reference/commandline/run/).*","{""total_count"": 0, ""+1"": 0, ""-1"": 0, ""laugh"": 0, ""hooray"": 0, ""confused"": 0, ""heart"": 0, ""rocket"": 0, ""eyes"": 0}",1058815557,