Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions datasette/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -1041,12 +1041,12 @@ def _prepare_connection(self, conn, database):
for db_name, db in self.databases.items():
if count >= SQLITE_LIMIT_ATTACHED or db.is_memory:
continue
sql = 'ATTACH DATABASE "file:{path}?{qs}" AS [{name}];'.format(
sql = "ATTACH DATABASE ? AS {};".format(escape_sqlite(db_name))
location = "file:{path}?{qs}".format(
path=db.path,
qs="mode=ro" if db.is_mutable else "immutable=1",
name=db_name,
)
conn.execute(sql)
conn.execute(sql, [location])
count += 1

def add_message(self, request, message, type=INFO):
Expand Down
4 changes: 2 additions & 2 deletions datasette/database.py
Original file line number Diff line number Diff line change
Expand Up @@ -628,8 +628,8 @@ async def hidden_table_names(self):
] + [
r[0] for r in (await self.execute("""
select name from sqlite_master
where name like "idx_%"
and type = "table"
where name like 'idx_%'
and type = 'table'
""")).rows
]

Expand Down
2 changes: 1 addition & 1 deletion datasette/facets.py
Original file line number Diff line number Diff line change
Expand Up @@ -479,7 +479,7 @@ async def suggest(self):
suggested_facet_sql = """
select date({column}) from (
select * from ({sql}) limit 100
) where {column} glob "????-??-*"
) where {column} glob '????-??-*'
""".format(column=escape_sqlite(column), sql=self.sql)
try:
results = await self.ds.execute(
Expand Down
10 changes: 7 additions & 3 deletions datasette/filters.py
Original file line number Diff line number Diff line change
Expand Up @@ -352,14 +352,14 @@ class Filters:
TemplatedFilter(
"isblank",
"is blank",
'("{c}" is null or "{c}" = "")',
"""("{c}" is null or "{c}" = '')""",
"{c} is blank",
no_argument=True,
),
TemplatedFilter(
"notblank",
"is not blank",
'("{c}" is not null and "{c}" != "")',
"""("{c}" is not null and "{c}" != '')""",
"{c} is not blank",
no_argument=True,
),
Expand Down Expand Up @@ -408,11 +408,15 @@ def selections(self):
def has_selections(self):
return bool(self.pairs)

def build_where_clauses(self, table):
def build_where_clauses(self, table, table_columns=None):
sql_bits = []
params = {}
i = 0
for column, lookup, value in self.selections():
if column != "rowid" and table_columns and column not in table_columns:
            # Ignore invalid column names; with SQLITE_DQS=0 they no longer
            # degrade to harmless string literal comparisons
continue
filter = self._filters_by_key.get(lookup, None)
if filter:
sql_bit, param = filter.where_clause(table, column, value, i)
Expand Down
8 changes: 4 additions & 4 deletions datasette/inspect.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def inspect_hash(path):
def inspect_views(conn):
"""List views in a database."""
return [
v[0] for v in conn.execute('select name from sqlite_master where type = "view"')
v[0] for v in conn.execute("select name from sqlite_master where type = 'view'")
]


Expand All @@ -38,7 +38,7 @@ def inspect_tables(conn, database_metadata):
tables = {}
table_names = [
r["name"]
for r in conn.execute('select * from sqlite_master where type="table"')
for r in conn.execute("select * from sqlite_master where type='table'")
]

for table in table_names:
Expand Down Expand Up @@ -90,8 +90,8 @@ def inspect_tables(conn, database_metadata):
] + [
r["name"] for r in conn.execute("""
select name from sqlite_master
where name like "idx_%"
and type = "table"
where name like 'idx_%'
and type = 'table'
""")
]

Expand Down
6 changes: 3 additions & 3 deletions datasette/utils/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -614,7 +614,7 @@ def get_all_foreign_keys(conn):
tables = [
r[0]
for r in conn.execute(
'select name from sqlite_master where type="table" order by name'
"select name from sqlite_master where type='table' order by name"
)
]
table_to_foreign_keys = {}
Expand Down Expand Up @@ -651,7 +651,7 @@ def get_all_foreign_keys(conn):

def detect_spatialite(conn):
rows = conn.execute(
'select 1 from sqlite_master where tbl_name = "geometry_columns"'
"select 1 from sqlite_master where tbl_name = 'geometry_columns'"
).fetchall()
return len(rows) > 0

Expand All @@ -673,7 +673,7 @@ def detect_fts_sql(table):
sql like '%VIRTUAL TABLE%USING FTS%content="{table}"%'
or sql like '%VIRTUAL TABLE%USING FTS%content=[{table}]%'
or (
tbl_name = "{table}"
tbl_name = '{table}'
and sql like '%VIRTUAL TABLE%USING FTS%'
)
)
Expand Down
2 changes: 1 addition & 1 deletion datasette/views/table.py
Original file line number Diff line number Diff line change
Expand Up @@ -1034,7 +1034,7 @@ async def table_view_data(

# Build where clauses from query string arguments
filters = Filters(sorted(filter_args))
where_clauses, params = filters.build_where_clauses(table_name)
where_clauses, params = filters.build_where_clauses(table_name, table_columns)

# Execute filters_from_request plugin hooks - including the default
# ones that live in datasette/filters.py
Expand Down
12 changes: 6 additions & 6 deletions docs/json_api.rst
Original file line number Diff line number Diff line change
Expand Up @@ -235,16 +235,16 @@ You can filter the data returned by the table based on column values using a que
Returns rows where the column does not match the value.

``?column__contains=value``
Rows where the string column contains the specified value (``column like "%value%"`` in SQL).
Rows where the string column contains the specified value (``column like '%value%'`` in SQL).

``?column__notcontains=value``
Rows where the string column does not contain the specified value (``column not like "%value%"`` in SQL).
Rows where the string column does not contain the specified value (``column not like '%value%'`` in SQL).

``?column__endswith=value``
Rows where the string column ends with the specified value (``column like "%value"`` in SQL).
Rows where the string column ends with the specified value (``column like '%value'`` in SQL).

``?column__startswith=value``
Rows where the string column starts with the specified value (``column like "value%"`` in SQL).
Rows where the string column starts with the specified value (``column like 'value%'`` in SQL).

``?column__gt=value``
Rows which are greater than the specified value.
Expand Down Expand Up @@ -358,8 +358,8 @@ Special table arguments

Some examples:

* `facetable?_where=_neighborhood like "%c%"&_where=_city_id=3 <https://latest.datasette.io/fixtures/facetable?_where=_neighborhood%20like%20%22%c%%22&_where=_city_id=3>`__
* `facetable?_where=_city_id in (select id from facet_cities where name != "Detroit") <https://latest.datasette.io/fixtures/facetable?_where=_city_id%20in%20(select%20id%20from%20facet_cities%20where%20name%20!=%20%22Detroit%22)>`__
* `facetable?_where=_neighborhood like '%c%'&_where=_city_id=3 <https://latest.datasette.io/fixtures/facetable?_where=_neighborhood%20like%20%27%c%%27&_where=_city_id=3>`__
* `facetable?_where=_city_id in (select id from facet_cities where name != 'Detroit') <https://latest.datasette.io/fixtures/facetable?_where=_city_id%20in%20(select%20id%20from%20facet_cities%20where%20name%20!=%20%27Detroit%27)>`__

``?_through={json}``
This can be used to filter rows via a join against another table.
Expand Down
67 changes: 37 additions & 30 deletions tests/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,13 @@
PLUGINS_DIR = str(pathlib.Path(__file__).parent / "plugins")

EXPECTED_PLUGINS = [
{
"name": "disable_double_quoted_strings.py",
"static": False,
"templates": False,
"version": None,
"hooks": ["prepare_connection"],
},
{
"name": "messages_output_renderer.py",
"static": False,
Expand Down Expand Up @@ -526,12 +533,12 @@ def generate_sortable_rows(num):
INSERT INTO searchable VALUES (1, 'barry cat', 'terry dog', 'panther');
INSERT INTO searchable VALUES (2, 'terry dog', 'sara weasel', 'puma');

INSERT INTO tags VALUES ("canine");
INSERT INTO tags VALUES ("feline");
INSERT INTO tags VALUES ('canine');
INSERT INTO tags VALUES ('feline');

INSERT INTO searchable_tags (searchable_id, tag) VALUES
(1, "feline"),
(2, "canine")
(1, 'feline'),
(2, 'canine')
;

CREATE VIRTUAL TABLE "searchable_fts"
Expand Down Expand Up @@ -585,21 +592,21 @@ def generate_sortable_rows(num):
INSERT INTO facetable
(created, planet_int, on_earth, state, _city_id, _neighborhood, tags, complex_array, distinct_some_null, n)
VALUES
("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one', 'n1'),
("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two', 'n2'),
("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null, null),
("2019-01-14 08:00:00", 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null, null),
("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null, null),
("2019-01-15 08:00:00", 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null, null),
("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null, null),
("2019-01-15 08:00:00", 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null, null),
("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null, null),
("2019-01-16 08:00:00", 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null, null),
("2019-01-16 08:00:00", 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null, null),
("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null, null),
("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null, null),
("2019-01-17 08:00:00", 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null, null),
("2019-01-17 08:00:00", 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null, null)
('2019-01-14 08:00:00', 1, 1, 'CA', 1, 'Mission', '["tag1", "tag2"]', '[{"foo": "bar"}]', 'one', 'n1'),
('2019-01-14 08:00:00', 1, 1, 'CA', 1, 'Dogpatch', '["tag1", "tag3"]', '[]', 'two', 'n2'),
('2019-01-14 08:00:00', 1, 1, 'CA', 1, 'SOMA', '[]', '[]', null, null),
('2019-01-14 08:00:00', 1, 1, 'CA', 1, 'Tenderloin', '[]', '[]', null, null),
('2019-01-15 08:00:00', 1, 1, 'CA', 1, 'Bernal Heights', '[]', '[]', null, null),
('2019-01-15 08:00:00', 1, 1, 'CA', 1, 'Hayes Valley', '[]', '[]', null, null),
('2019-01-15 08:00:00', 1, 1, 'CA', 2, 'Hollywood', '[]', '[]', null, null),
('2019-01-15 08:00:00', 1, 1, 'CA', 2, 'Downtown', '[]', '[]', null, null),
('2019-01-16 08:00:00', 1, 1, 'CA', 2, 'Los Feliz', '[]', '[]', null, null),
('2019-01-16 08:00:00', 1, 1, 'CA', 2, 'Koreatown', '[]', '[]', null, null),
('2019-01-16 08:00:00', 1, 1, 'MI', 3, 'Downtown', '[]', '[]', null, null),
('2019-01-17 08:00:00', 1, 1, 'MI', 3, 'Greektown', '[]', '[]', null, null),
('2019-01-17 08:00:00', 1, 1, 'MI', 3, 'Corktown', '[]', '[]', null, null),
('2019-01-17 08:00:00', 1, 1, 'MI', 3, 'Mexicantown', '[]', '[]', null, null),
('2019-01-17 08:00:00', 2, 0, 'MC', 4, 'Arcadia Planitia', '[]', '[]', null, null)
;

CREATE TABLE binary_data (
Expand All @@ -617,19 +624,19 @@ def generate_sortable_rows(num):
longitude real
);
INSERT INTO roadside_attractions VALUES (
1, "The Mystery Spot", "465 Mystery Spot Road, Santa Cruz, CA 95065", "https://www.mysteryspot.com/",
1, 'The Mystery Spot', '465 Mystery Spot Road, Santa Cruz, CA 95065', 'https://www.mysteryspot.com/',
37.0167, -122.0024
);
INSERT INTO roadside_attractions VALUES (
2, "Winchester Mystery House", "525 South Winchester Boulevard, San Jose, CA 95128", "https://winchestermysteryhouse.com/",
2, 'Winchester Mystery House', '525 South Winchester Boulevard, San Jose, CA 95128', 'https://winchestermysteryhouse.com/',
37.3184, -121.9511
);
INSERT INTO roadside_attractions VALUES (
3, "Burlingame Museum of PEZ Memorabilia", "214 California Drive, Burlingame, CA 94010", null,
3, 'Burlingame Museum of PEZ Memorabilia', '214 California Drive, Burlingame, CA 94010', null,
37.5793, -122.3442
);
INSERT INTO roadside_attractions VALUES (
4, "Bigfoot Discovery Museum", "5497 Highway 9, Felton, CA 95018", "https://www.bigfootdiscoveryproject.com/",
4, 'Bigfoot Discovery Museum', '5497 Highway 9, Felton, CA 95018', 'https://www.bigfootdiscoveryproject.com/',
37.0414, -122.0725
);

Expand All @@ -638,10 +645,10 @@ def generate_sortable_rows(num):
name text
);
INSERT INTO attraction_characteristic VALUES (
1, "Museum"
1, 'Museum'
);
INSERT INTO attraction_characteristic VALUES (
2, "Paranormal"
2, 'Paranormal'
);

CREATE TABLE roadside_attraction_characteristics (
Expand Down Expand Up @@ -693,24 +700,24 @@ def generate_sortable_rows(num):
"""
+ "\n".join(
[
'INSERT INTO no_primary_key VALUES ({i}, "a{i}", "b{i}", "c{i}");'.format(
"INSERT INTO no_primary_key VALUES ({i}, 'a{i}', 'b{i}', 'c{i}');".format(
i=i + 1
)
for i in range(201)
]
)
+ '\nINSERT INTO no_primary_key VALUES ("RENDER_CELL_DEMO", "a202", "b202", "c202");\n'
+ "\nINSERT INTO no_primary_key VALUES ('RENDER_CELL_DEMO', 'a202', 'b202', 'c202');\n"
+ "\n".join(
[
'INSERT INTO compound_three_primary_keys VALUES ("{a}", "{b}", "{c}", "{content}");'.format(
"INSERT INTO compound_three_primary_keys VALUES ('{a}', '{b}', '{c}', '{content}');".format(
a=a, b=b, c=c, content=content
)
for a, b, c, content in generate_compound_rows(1001)
]
)
+ "\n".join(["""INSERT INTO sortable VALUES (
"{pk1}", "{pk2}", "{content}", {sortable},
{sortable_with_nulls}, {sortable_with_nulls_2}, "{text}");
'{pk1}', '{pk2}', '{content}', {sortable},
{sortable_with_nulls}, {sortable_with_nulls_2}, '{text}');
""".format(**row).replace("None", "null") for row in generate_sortable_rows(201)])
)
TABLE_PARAMETERIZED_SQL = [
Expand Down
10 changes: 10 additions & 0 deletions tests/plugins/disable_double_quoted_strings.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
from datasette import hookimpl
from datasette.utils.sqlite import sqlite3


@hookimpl
def prepare_connection(conn):
    """Disable SQLite's double-quoted string literal (DQS) misfeature.

    With DQS off, double quotes are only accepted for identifiers, so
    accidentally string-quoted SQL fails loudly instead of silently
    comparing against a literal.
    """
    # Connection.setconfig() exists only on Python 3.12+, and the
    # SQLITE_DBCONFIG_DQS_* options require SQLite 3.29.0 or newer.
    supported = hasattr(conn, "setconfig") and sqlite3.sqlite_version_info >= (3, 29)
    if supported:
        # Reject double-quoted strings in both DDL and DML statements.
        for option in (sqlite3.SQLITE_DBCONFIG_DQS_DDL, sqlite3.SQLITE_DBCONFIG_DQS_DML):
            conn.setconfig(option, False)
2 changes: 1 addition & 1 deletion tests/test_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -634,7 +634,7 @@ async def test_settings_json(ds_client):
)
async def test_json_columns(ds_client, extra_args, expected):
sql = """
select 1 as intval, "s" as strval, 0.5 as floatval,
select 1 as intval, 's' as strval, 0.5 as floatval,
'{"foo": "bar"}' as jsonval
"""
path = "/fixtures/-/query.json?" + urllib.parse.urlencode(
Expand Down
2 changes: 1 addition & 1 deletion tests/test_internals_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def run(conn):
# Table should exist
assert (
conn.execute(
'select count(*) from sqlite_master where name = "foo"'
"select count(*) from sqlite_master where name = 'foo'"
).fetchone()[0]
== 1
)
Expand Down