This repository was archived by the owner on Nov 3, 2023. It is now read-only.

Commit 05648eb

feat: support None operand in EQUAL operator (apache#21713)

1 parent: 51c54b3
File tree

3 files changed: +90 -42 lines changed

superset/charts/schemas.py (+2 -1)

@@ -819,7 +819,8 @@ class ChartDataFilterSchema(Schema):
     )
     val = fields.Raw(
         description="The value or values to compare against. Can be a string, "
-        "integer, decimal or list, depending on the operator.",
+        "integer, decimal, None or list, depending on the operator.",
+        allow_none=True,
         example=["China", "France", "Japan"],
     )
     grain = fields.String(
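For context on the allow_none=True addition: marshmallow fields reject an explicit None by default, so without this flag a chart-data filter payload carrying "val": None would fail schema validation before ever reaching the query layer. A minimal standalone sketch of that behavior (FilterSchema below is a hypothetical stand-in, not Superset's actual ChartDataFilterSchema):

from marshmallow import Schema, ValidationError, fields

class FilterSchema(Schema):  # hypothetical stand-in for ChartDataFilterSchema
    col = fields.String(required=True)
    op = fields.String(required=True)
    val = fields.Raw(allow_none=True)  # without allow_none=True, an explicit None is rejected

try:
    data = FilterSchema().load({"col": "col4", "op": "==", "val": None})
    print(data)  # loads cleanly, val stays None
except ValidationError as err:
    print(err.messages)  # with allow_none=False this branch would report "Field may not be null."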

superset/connectors/sqla/models.py (+8 -1)

@@ -1615,7 +1615,14 @@ def get_sqla_query( # pylint: disable=too-many-arguments,too-many-locals,too-ma
                 elif op == utils.FilterOperator.IS_FALSE.value:
                     where_clause_and.append(sqla_col.is_(False))
                 else:
-                    if eq is None:
+                    if (
+                        op
+                        not in {
+                            utils.FilterOperator.EQUALS.value,
+                            utils.FilterOperator.NOT_EQUALS.value,
+                        }
+                        and eq is None
+                    ):
                         raise QueryObjectValidationError(
                             _(
                                 "Must specify a value for filters "

tests/integration_tests/sqla_models_tests.py (+80 -40)

@@ -51,6 +51,7 @@
 from tests.integration_tests.test_app import app
 
 from .base_tests import SupersetTestCase
+from .conftest import only_postgresql
 
 VIRTUAL_TABLE_INT_TYPES: Dict[str, Pattern[str]] = {
     "hive": re.compile(r"^INT_TYPE$"),
@@ -659,51 +660,90 @@ def test_filter_on_text_column(text_column_table):
     assert result_object.df["count"][0] == 1
 
 
-def test_should_generate_closed_and_open_time_filter_range():
-    with app.app_context():
-        if backend() != "postgresql":
-            pytest.skip(f"{backend()} has different dialect for datetime column")
-
-        table = SqlaTable(
-            table_name="temporal_column_table",
-            sql=(
-                "SELECT '2021-12-31'::timestamp as datetime_col "
-                "UNION SELECT '2022-01-01'::timestamp "
-                "UNION SELECT '2022-03-10'::timestamp "
-                "UNION SELECT '2023-01-01'::timestamp "
-                "UNION SELECT '2023-03-10'::timestamp "
-            ),
-            database=get_example_database(),
-        )
-        TableColumn(
-            column_name="datetime_col",
-            type="TIMESTAMP",
-            table=table,
-            is_dttm=True,
-        )
-        SqlMetric(metric_name="count", expression="count(*)", table=table)
-        result_object = table.query(
+@only_postgresql
+def test_should_generate_closed_and_open_time_filter_range(login_as_admin):
+    table = SqlaTable(
+        table_name="temporal_column_table",
+        sql=(
+            "SELECT '2021-12-31'::timestamp as datetime_col "
+            "UNION SELECT '2022-01-01'::timestamp "
+            "UNION SELECT '2022-03-10'::timestamp "
+            "UNION SELECT '2023-01-01'::timestamp "
+            "UNION SELECT '2023-03-10'::timestamp "
+        ),
+        database=get_example_database(),
+    )
+    TableColumn(
+        column_name="datetime_col",
+        type="TIMESTAMP",
+        table=table,
+        is_dttm=True,
+    )
+    SqlMetric(metric_name="count", expression="count(*)", table=table)
+    result_object = table.query(
+        {
+            "metrics": ["count"],
+            "is_timeseries": False,
+            "filter": [],
+            "from_dttm": datetime(2022, 1, 1),
+            "to_dttm": datetime(2023, 1, 1),
+            "granularity": "datetime_col",
+        }
+    )
+    """ >>> result_object.query
+        SELECT count(*) AS count
+        FROM
+          (SELECT '2021-12-31'::timestamp as datetime_col
+           UNION SELECT '2022-01-01'::timestamp
+           UNION SELECT '2022-03-10'::timestamp
+           UNION SELECT '2023-01-01'::timestamp
+           UNION SELECT '2023-03-10'::timestamp) AS virtual_table
+        WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
+          AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
+    """
+    assert result_object.df.iloc[0]["count"] == 2
+
+
+def test_none_operand_in_filter(login_as_admin, physical_dataset):
+    expected_results = [
+        {
+            "operator": FilterOperator.EQUALS.value,
+            "count": 10,
+            "sql_should_contain": "COL4 IS NULL",
+        },
+        {
+            "operator": FilterOperator.NOT_EQUALS.value,
+            "count": 0,
+            "sql_should_contain": "COL4 IS NOT NULL",
+        },
+    ]
+    for expected in expected_results:
+        result = physical_dataset.query(
             {
                 "metrics": ["count"],
+                "filter": [{"col": "col4", "val": None, "op": expected["operator"]}],
                 "is_timeseries": False,
-                "filter": [],
-                "from_dttm": datetime(2022, 1, 1),
-                "to_dttm": datetime(2023, 1, 1),
-                "granularity": "datetime_col",
             }
         )
-        """ >>> result_object.query
-            SELECT count(*) AS count
-            FROM
-              (SELECT '2021-12-31'::timestamp as datetime_col
-               UNION SELECT '2022-01-01'::timestamp
-               UNION SELECT '2022-03-10'::timestamp
-               UNION SELECT '2023-01-01'::timestamp
-               UNION SELECT '2023-03-10'::timestamp) AS virtual_table
-            WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
-              AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
-        """
-        assert result_object.df.iloc[0]["count"] == 2
+        assert result.df["count"][0] == expected["count"]
+        assert expected["sql_should_contain"] in result.query.upper()
+
+    with pytest.raises(QueryObjectValidationError):
+        for flt in [
+            FilterOperator.GREATER_THAN,
+            FilterOperator.LESS_THAN,
+            FilterOperator.GREATER_THAN_OR_EQUALS,
+            FilterOperator.LESS_THAN_OR_EQUALS,
+            FilterOperator.LIKE,
+            FilterOperator.ILIKE,
+        ]:
+            physical_dataset.query(
+                {
+                    "metrics": ["count"],
+                    "filter": [{"col": "col4", "val": None, "op": flt.value}],
+                    "is_timeseries": False,
+                }
+            )
 
 
 @pytest.mark.parametrize(
