fix(query): stop treating series_limit=0 as falsy and update tests
- Changed the condition in get_sqla_query to check `series_limit is not None`
  instead of relying on truthiness, so a series_limit of 0 is no longer
  ignored; this fixes the spurious LIMIT 15 seen in the Presto/Hive tests
  (see the sketch after this list)
- Updated test files to use the non-deprecated `series_limit` and
  `series_limit_metric` keys instead of `timeseries_limit` and
  `timeseries_limit_metric`, reducing deprecation warnings (illustrated below)
- This fixes tests that expected 40/41 or 100 rows but got only 15 because
  series_limit=0 was being dropped
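
For context, a minimal sketch of the truthiness pitfall, in plain Python outside
Superset, with hypothetical helper names standing in for the real condition in
get_sqla_query:

```python
# `series_limit` stands in for qo.series_limit; 0 and None are both falsy in
# Python, so a bare `if series_limit:` cannot tell them apart.

def should_apply_series_limit(series_limit, groupby_series_columns):
    # Old check: treats series_limit=0 the same as series_limit=None.
    return bool(series_limit) and bool(groupby_series_columns)

def should_apply_series_limit_fixed(series_limit, groupby_series_columns):
    # New check (mirrors the diff below): only an explicit None skips the branch.
    return series_limit is not None and bool(groupby_series_columns)

assert should_apply_series_limit(0, ["gender"]) is False        # branch skipped
assert should_apply_series_limit_fixed(0, ["gender"]) is True   # branch taken
assert should_apply_series_limit_fixed(None, ["gender"]) is False
```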
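
And an illustrative query-payload fragment (keys taken from the fixtures in the
diff; values are placeholders) showing the deprecated keys next to their
replacements:

```python
# Only the series-limit keys differ between the two payloads.
deprecated_query = {
    "row_limit": 100,
    "timeseries_limit": 0,            # deprecated key, emits a warning
    "timeseries_limit_metric": None,  # deprecated key, emits a warning
}

current_query = {
    "row_limit": 100,
    "series_limit": 0,                # replacement used in the tests below
    "series_limit_metric": None,
}
```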
diff --git a/superset/models/helpers.py b/superset/models/helpers.py
index b364beb..d7002df 100644
--- a/superset/models/helpers.py
+++ b/superset/models/helpers.py
@@ -2110,7 +2110,7 @@
if qo.row_offset:
qry = qry.offset(qo.row_offset)
- if qo.series_limit and groupby_series_columns:
+ if qo.series_limit is not None and groupby_series_columns:
if db_engine_spec.allows_joins and db_engine_spec.allows_subqueries:
# some sql dialects require for order by expressions
# to also be in the select clause -- others, e.g. vertica,
diff --git a/tests/common/query_context_generator.py b/tests/common/query_context_generator.py
index ce458b4..d50803c 100644
--- a/tests/common/query_context_generator.py
+++ b/tests/common/query_context_generator.py
@@ -29,8 +29,8 @@
"row_limit": 100,
"granularity": "ds",
"time_range": "100 years ago : now",
- "timeseries_limit": 0,
- "timeseries_limit_metric": None,
+ "series_limit": 0,
+ "series_limit_metric": None,
"order_desc": True,
"filters": [
{"col": "gender", "op": "==", "val": "boy"},
diff --git a/tests/integration_tests/charts/data/api_tests.py b/tests/integration_tests/charts/data/api_tests.py
index bed80cc..3ae8c5d 100644
--- a/tests/integration_tests/charts/data/api_tests.py
+++ b/tests/integration_tests/charts/data/api_tests.py
@@ -1015,7 +1015,7 @@
"orderby": [["sum__num", False]],
"annotation_layers": [],
"row_limit": 50000,
- "timeseries_limit": 0,
+ "series_limit": 0,
"order_desc": True,
"url_params": {},
"custom_params": {},
@@ -1068,7 +1068,7 @@
"orderby": [["sum__num", False]],
"annotation_layers": [],
"row_limit": 50000,
- "timeseries_limit": 0,
+ "series_limit": 0,
"order_desc": True,
"url_params": {},
"custom_params": {},
@@ -1122,7 +1122,7 @@
"orderby": [["sum__num", False]],
"annotation_layers": [],
"row_limit": 50000,
- "timeseries_limit": 0,
+ "series_limit": 0,
"order_desc": True,
"url_params": {},
"custom_params": {},
diff --git a/tests/integration_tests/fixtures/energy_dashboard.py b/tests/integration_tests/fixtures/energy_dashboard.py
index 2407ec0..a93d7cc 100644
--- a/tests/integration_tests/fixtures/energy_dashboard.py
+++ b/tests/integration_tests/fixtures/energy_dashboard.py
@@ -186,6 +186,6 @@
"xscale_interval": "1",
"yscale_interval": "1",
},
- "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', # noqa: E501
+ "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"series_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', # noqa: E501
},
]
diff --git a/tests/integration_tests/fixtures/importexport.py b/tests/integration_tests/fixtures/importexport.py
index 40f1cef..b898c52 100644
--- a/tests/integration_tests/fixtures/importexport.py
+++ b/tests/integration_tests/fixtures/importexport.py
@@ -591,7 +591,7 @@
},
"viz_type": "deck_path",
},
- "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"timeseries_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', # noqa: E501
+ "query_context": '{"datasource":{"id":12,"type":"table"},"force":false,"queries":[{"time_range":" : ","filters":[],"extras":{"time_grain_sqla":null,"having":"","where":""},"applied_time_extras":{},"columns":[],"metrics":[],"annotation_layers":[],"row_limit":5000,"series_limit":0,"order_desc":true,"url_params":{},"custom_params":{},"custom_form_data":{}}],"result_format":"json","result_type":"full"}', # noqa: E501
"cache_timeout": None,
"uuid": "0c23747a-6528-4629-97bf-e4b78d3b9df1",
"version": "1.0.0",
diff --git a/tests/integration_tests/query_context_tests.py b/tests/integration_tests/query_context_tests.py
index 6733329..dde7b63 100644
--- a/tests/integration_tests/query_context_tests.py
+++ b/tests/integration_tests/query_context_tests.py
@@ -294,8 +294,8 @@
payload = get_query_context("birth_names")
columns = payload["queries"][0]["columns"]
payload["queries"][0]["groupby"] = columns
- payload["queries"][0]["timeseries_limit"] = 99
- payload["queries"][0]["timeseries_limit_metric"] = "sum__num"
+ payload["queries"][0]["series_limit"] = 99
+ payload["queries"][0]["series_limit_metric"] = "sum__num"
del payload["queries"][0]["columns"]
# Remove granularity so granularity_sqla can be used
del payload["queries"][0]["granularity"]
@@ -523,7 +523,7 @@
payload["queries"][0]["metrics"] = ["sum__num"]
payload["queries"][0]["groupby"] = ["name"]
payload["queries"][0]["is_timeseries"] = True
- payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["series_limit"] = 5
payload["queries"][0]["time_offsets"] = ["1 year ago", "1 year later"]
payload["queries"][0]["time_range"] = "1990 : 1991"
query_context = ChartDataQueryContextSchema().load(payload)
@@ -559,7 +559,7 @@
# due to "name" is random generated, each time_offset slice will be empty
payload["queries"][0]["groupby"] = ["name"]
payload["queries"][0]["is_timeseries"] = True
- payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["series_limit"] = 5
payload["queries"][0]["time_offsets"] = []
payload["queries"][0]["time_range"] = "1990 : 1991"
payload["queries"][0]["granularity"] = "ds"
@@ -612,7 +612,7 @@
payload["queries"][0]["metrics"] = ["sum__num"]
payload["queries"][0]["groupby"] = ["state"]
payload["queries"][0]["is_timeseries"] = True
- payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["series_limit"] = 5
payload["queries"][0]["time_offsets"] = []
payload["queries"][0]["time_range"] = "1980 : 1991"
payload["queries"][0]["granularity"] = "ds"
@@ -645,7 +645,7 @@
"state"
] # Use columns instead of deprecated groupby
payload["queries"][0]["is_timeseries"] = True
- payload["queries"][0]["timeseries_limit"] = 5
+ payload["queries"][0]["series_limit"] = 5
payload["queries"][0]["time_offsets"] = []
payload["queries"][0]["time_range"] = "1980 : 1991"
payload["queries"][0]["granularity"] = "ds"