 from tests.integration_tests.test_app import app

 from .base_tests import SupersetTestCase
+from .conftest import only_postgresql

 VIRTUAL_TABLE_INT_TYPES: Dict[str, Pattern[str]] = {
     "hive": re.compile(r"^INT_TYPE$"),
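The `only_postgresql` marker imported above replaces the in-test `backend()` skip that the hunk below removes. A minimal sketch of what such a marker could look like, assuming `tests/integration_tests/conftest.py` builds it as a `pytest.mark.skipif` keyed on the configured database URI (the actual definition, and the `SUPERSET__SQLALCHEMY_DATABASE_URI` variable name, are assumptions here, not taken from this diff):

```python
import os

import pytest

# Hypothetical sketch of the marker imported from conftest; the real
# definition and the env var name may differ.
only_postgresql = pytest.mark.skipif(
    "postgresql" not in os.environ.get("SUPERSET__SQLALCHEMY_DATABASE_URI", ""),
    reason="Only run test case in Postgresql",
)
```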
@@ -659,51 +660,90 @@ def test_filter_on_text_column(text_column_table):
     assert result_object.df["count"][0] == 1


-def test_should_generate_closed_and_open_time_filter_range():
-    with app.app_context():
-        if backend() != "postgresql":
-            pytest.skip(f"{backend()} has different dialect for datetime column")
-
-        table = SqlaTable(
-            table_name="temporal_column_table",
-            sql=(
-                "SELECT '2021-12-31'::timestamp as datetime_col "
-                "UNION SELECT '2022-01-01'::timestamp "
-                "UNION SELECT '2022-03-10'::timestamp "
-                "UNION SELECT '2023-01-01'::timestamp "
-                "UNION SELECT '2023-03-10'::timestamp "
-            ),
-            database=get_example_database(),
-        )
-        TableColumn(
-            column_name="datetime_col",
-            type="TIMESTAMP",
-            table=table,
-            is_dttm=True,
-        )
-        SqlMetric(metric_name="count", expression="count(*)", table=table)
-        result_object = table.query(
+@only_postgresql
+def test_should_generate_closed_and_open_time_filter_range(login_as_admin):
+    table = SqlaTable(
+        table_name="temporal_column_table",
+        sql=(
+            "SELECT '2021-12-31'::timestamp as datetime_col "
+            "UNION SELECT '2022-01-01'::timestamp "
+            "UNION SELECT '2022-03-10'::timestamp "
+            "UNION SELECT '2023-01-01'::timestamp "
+            "UNION SELECT '2023-03-10'::timestamp "
+        ),
+        database=get_example_database(),
+    )
+    TableColumn(
+        column_name="datetime_col",
+        type="TIMESTAMP",
+        table=table,
+        is_dttm=True,
+    )
+    SqlMetric(metric_name="count", expression="count(*)", table=table)
+    result_object = table.query(
+        {
+            "metrics": ["count"],
+            "is_timeseries": False,
+            "filter": [],
+            "from_dttm": datetime(2022, 1, 1),
+            "to_dttm": datetime(2023, 1, 1),
+            "granularity": "datetime_col",
+        }
+    )
+    """ >>> result_object.query
+    SELECT count(*) AS count
+    FROM
+      (SELECT '2021-12-31'::timestamp as datetime_col
+       UNION SELECT '2022-01-01'::timestamp
+       UNION SELECT '2022-03-10'::timestamp
+       UNION SELECT '2023-01-01'::timestamp
+       UNION SELECT '2023-03-10'::timestamp) AS virtual_table
+    WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
+      AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
+    """
+    assert result_object.df.iloc[0]["count"] == 2
+
+
+def test_none_operand_in_filter(login_as_admin, physical_dataset):
+    expected_results = [
+        {
+            "operator": FilterOperator.EQUALS.value,
+            "count": 10,
+            "sql_should_contain": "COL4 IS NULL",
+        },
+        {
+            "operator": FilterOperator.NOT_EQUALS.value,
+            "count": 0,
+            "sql_should_contain": "COL4 IS NOT NULL",
+        },
+    ]
+    for expected in expected_results:
+        result = physical_dataset.query(
             {
                 "metrics": ["count"],
+                "filter": [{"col": "col4", "val": None, "op": expected["operator"]}],
                 "is_timeseries": False,
-                "filter": [],
-                "from_dttm": datetime(2022, 1, 1),
-                "to_dttm": datetime(2023, 1, 1),
-                "granularity": "datetime_col",
             }
         )
-        """ >>> result_object.query
-        SELECT count(*) AS count
-        FROM
-          (SELECT '2021-12-31'::timestamp as datetime_col
-           UNION SELECT '2022-01-01'::timestamp
-           UNION SELECT '2022-03-10'::timestamp
-           UNION SELECT '2023-01-01'::timestamp
-           UNION SELECT '2023-03-10'::timestamp) AS virtual_table
-        WHERE datetime_col >= TO_TIMESTAMP('2022-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
-          AND datetime_col < TO_TIMESTAMP('2023-01-01 00:00:00.000000', 'YYYY-MM-DD HH24:MI:SS.US')
-        """
-        assert result_object.df.iloc[0]["count"] == 2
+        assert result.df["count"][0] == expected["count"]
+        assert expected["sql_should_contain"] in result.query.upper()
+
+    for flt in [
+        FilterOperator.GREATER_THAN,
+        FilterOperator.LESS_THAN,
+        FilterOperator.GREATER_THAN_OR_EQUALS,
+        FilterOperator.LESS_THAN_OR_EQUALS,
+        FilterOperator.LIKE,
+        FilterOperator.ILIKE,
+    ]:
+        with pytest.raises(QueryObjectValidationError):
+            physical_dataset.query(
+                {
+                    "metrics": ["count"],
+                    "filter": [{"col": "col4", "val": None, "op": flt.value}],
+                    "is_timeseries": False,
+                }
+            )


 @pytest.mark.parametrize(
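Why the rewritten time-range test expects `count == 2`: the generated predicate shown in its docstring is closed on `from_dttm` and open on `to_dttm` (`>= start AND < end`), so of the five timestamps only 2022-01-01 and 2022-03-10 fall inside [2022-01-01, 2023-01-01). A quick standalone sketch of the same arithmetic, using only values taken from the test:

```python
from datetime import datetime

# The five rows the virtual table produces in the test above.
rows = [
    datetime(2021, 12, 31),
    datetime(2022, 1, 1),
    datetime(2022, 3, 10),
    datetime(2023, 1, 1),
    datetime(2023, 3, 10),
]
start, end = datetime(2022, 1, 1), datetime(2023, 1, 1)

# Closed start / open end, mirroring "datetime_col >= ... AND datetime_col < ...".
matched = [row for row in rows if start <= row < end]
assert len(matched) == 2  # 2022-01-01 and 2022-03-10
```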