chore(GAQ): Remove GLOBAL_ASYNC_QUERIES_REDIS_CONFIG (#30284)

Co-authored-by: Sivarajan Narayanan <narayanan_sivarajan@apple.com>
This commit is contained in:
nsivarajan 2025-01-22 09:33:00 +05:30 committed by GitHub
parent dfb9af36df
commit 78cd635b7a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
7 changed files with 11 additions and 32 deletions

View File

@ -38,6 +38,8 @@ assists people when migrating to a new version.
- [29121](https://github.com/apache/superset/pull/29121) Removed the `css`, `position_json`, and `json_metadata` from the payload of the dashboard list endpoint (`GET api/v1/dashboard`) for performance reasons. - [29121](https://github.com/apache/superset/pull/29121) Removed the `css`, `position_json`, and `json_metadata` from the payload of the dashboard list endpoint (`GET api/v1/dashboard`) for performance reasons.
- [29163](https://github.com/apache/superset/pull/29163) Removed the `SHARE_QUERIES_VIA_KV_STORE` and `KV_STORE` feature flags and changed the way Superset shares SQL Lab queries to use permalinks. The legacy `/kv` API was removed but we still support legacy links in 5.0. In 6.0, only permalinks will be supported. - [29163](https://github.com/apache/superset/pull/29163) Removed the `SHARE_QUERIES_VIA_KV_STORE` and `KV_STORE` feature flags and changed the way Superset shares SQL Lab queries to use permalinks. The legacy `/kv` API was removed but we still support legacy links in 5.0. In 6.0, only permalinks will be supported.
- [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options. - [25166](https://github.com/apache/superset/pull/25166) Changed the default configuration of `UPLOAD_FOLDER` from `/app/static/uploads/` to `/static/uploads/`. It also removed the unused `IMG_UPLOAD_FOLDER` and `IMG_UPLOAD_URL` configuration options.
- [30284](https://github.com/apache/superset/pull/30284) Deprecated `GLOBAL_ASYNC_QUERIES_REDIS_CONFIG` in favor of the new `GLOBAL_ASYNC_QUERIES_CACHE_BACKEND` configuration. To leverage Redis Sentinel, set `CACHE_TYPE` to `RedisSentinelCache`; to use standalone Redis, set it to `RedisCache`.
### Potential Downtime ### Potential Downtime

View File

@ -91,7 +91,7 @@ Note also that `localhost` and `127.0.0.1` are not considered the same host. For
The following config values must contain the same values in both the Flask app config and `config.json`: The following config values must contain the same values in both the Flask app config and `config.json`:
```text ```text
GLOBAL_ASYNC_QUERIES_REDIS_CONFIG GLOBAL_ASYNC_QUERIES_CACHE_BACKEND
GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX
GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME GLOBAL_ASYNC_QUERIES_JWT_COOKIE_NAME
GLOBAL_ASYNC_QUERIES_JWT_SECRET GLOBAL_ASYNC_QUERIES_JWT_SECRET

View File

@ -18,10 +18,9 @@ from __future__ import annotations
import logging import logging
import uuid import uuid
from typing import Any, Literal, Optional, Union from typing import Any, Literal, Optional
import jwt import jwt
import redis
from flask import Flask, Request, request, Response, session from flask import Flask, Request, request, Response, session
from flask_caching.backends.base import BaseCache from flask_caching.backends.base import BaseCache
@ -43,6 +42,10 @@ class AsyncQueryTokenException(Exception): # noqa: N818
pass pass
class UnsupportedCacheBackendError(Exception): # noqa: N818
pass
class AsyncQueryJobException(Exception): # noqa: N818 class AsyncQueryJobException(Exception): # noqa: N818
pass pass
@ -77,7 +80,7 @@ def increment_id(entry_id: str) -> str:
def get_cache_backend( def get_cache_backend(
config: dict[str, Any], config: dict[str, Any],
) -> Union[RedisCacheBackend, RedisSentinelCacheBackend, redis.Redis]: # type: ignore ) -> RedisCacheBackend | RedisSentinelCacheBackend:
cache_config = config.get("GLOBAL_ASYNC_QUERIES_CACHE_BACKEND", {}) cache_config = config.get("GLOBAL_ASYNC_QUERIES_CACHE_BACKEND", {})
cache_type = cache_config.get("CACHE_TYPE") cache_type = cache_config.get("CACHE_TYPE")
@ -87,11 +90,8 @@ def get_cache_backend(
if cache_type == "RedisSentinelCache": if cache_type == "RedisSentinelCache":
return RedisSentinelCacheBackend.from_config(cache_config) return RedisSentinelCacheBackend.from_config(cache_config)
# TODO: Deprecate hardcoded plain Redis code and expand cache backend options. # TODO: Expand cache backend options.
# Maintain backward compatibility with 'GLOBAL_ASYNC_QUERIES_REDIS_CONFIG' until it is deprecated. # noqa: E501 raise UnsupportedCacheBackendError("Unsupported cache backend configuration")
return redis.Redis(
**config["GLOBAL_ASYNC_QUERIES_REDIS_CONFIG"], decode_responses=True
)
class AsyncQueryManager: class AsyncQueryManager:

View File

@ -1719,13 +1719,6 @@ SQLA_TABLE_MUTATOR = lambda table: table # noqa: E731
GLOBAL_ASYNC_QUERY_MANAGER_CLASS = ( GLOBAL_ASYNC_QUERY_MANAGER_CLASS = (
"superset.async_events.async_query_manager.AsyncQueryManager" "superset.async_events.async_query_manager.AsyncQueryManager"
) )
GLOBAL_ASYNC_QUERIES_REDIS_CONFIG = {
"port": 6379,
"host": "127.0.0.1",
"password": "",
"db": 0,
"ssl": False,
}
GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX = "async-events-" GLOBAL_ASYNC_QUERIES_REDIS_STREAM_PREFIX = "async-events-"
GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT = 1000 GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT = 1000
GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT_FIREHOSE = 1000000 GLOBAL_ASYNC_QUERIES_REDIS_STREAM_LIMIT_FIREHOSE = 1000000
@ -1746,7 +1739,6 @@ GLOBAL_ASYNC_QUERIES_WEBSOCKET_URL = "ws://127.0.0.1:8080/"
# Global async queries cache backend configuration options: # Global async queries cache backend configuration options:
# - Set 'CACHE_TYPE' to 'RedisCache' for RedisCacheBackend. # - Set 'CACHE_TYPE' to 'RedisCache' for RedisCacheBackend.
# - Set 'CACHE_TYPE' to 'RedisSentinelCache' for RedisSentinelCacheBackend. # - Set 'CACHE_TYPE' to 'RedisSentinelCache' for RedisSentinelCacheBackend.
# - Set 'CACHE_TYPE' to 'None' to fall back on 'GLOBAL_ASYNC_QUERIES_REDIS_CONFIG'.
GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = { GLOBAL_ASYNC_QUERIES_CACHE_BACKEND = {
"CACHE_TYPE": "RedisCache", "CACHE_TYPE": "RedisCache",
"CACHE_REDIS_HOST": "localhost", "CACHE_REDIS_HOST": "localhost",

View File

@ -18,7 +18,6 @@ from typing import Any, Optional, Type
from unittest import mock from unittest import mock
import pytest import pytest
import redis
from superset.async_events.cache_backend import ( from superset.async_events.cache_backend import (
RedisCacheBackend, RedisCacheBackend,
@ -129,10 +128,6 @@ class TestAsyncEventApi(SupersetTestCase):
RedisSentinelCacheBackend, self._test_events_logic RedisSentinelCacheBackend, self._test_events_logic
) )
@mock.patch("uuid.uuid4", return_value=UUID)
def test_events_redis(self, mock_uuid4):
self.run_test_with_cache_backend(redis.Redis, self._test_events_logic)
def test_events_no_login(self): def test_events_no_login(self):
app._got_first_request = False app._got_first_request = False
async_query_manager.init_app(app) async_query_manager.init_app(app)

View File

@ -20,7 +20,6 @@ from unittest import mock
from uuid import uuid4 from uuid import uuid4
import pytest import pytest
import redis
from celery.exceptions import SoftTimeLimitExceeded from celery.exceptions import SoftTimeLimitExceeded
from parameterized import parameterized from parameterized import parameterized
@ -52,7 +51,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@mock.patch("superset.tasks.async_queries.set_form_data") @mock.patch("superset.tasks.async_queries.set_form_data")
@ -88,7 +86,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@mock.patch.object( @mock.patch.object(
@ -125,7 +122,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@mock.patch.object(ChartDataCommand, "run") @mock.patch.object(ChartDataCommand, "run")
@ -163,7 +159,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices") @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@ -209,7 +204,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@mock.patch.object(async_query_manager, "update_job") @mock.patch.object(async_query_manager, "update_job")
@ -245,7 +239,6 @@ class TestAsyncQueries(SupersetTestCase):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
] ]
) )
@mock.patch.object(ChartDataCommand, "run") @mock.patch.object(ChartDataCommand, "run")

View File

@ -17,7 +17,6 @@
from unittest import mock from unittest import mock
from unittest.mock import ANY, Mock from unittest.mock import ANY, Mock
import redis
from flask import g from flask import g
from jwt import encode from jwt import encode
from pytest import fixture, mark, raises # noqa: PT013 from pytest import fixture, mark, raises # noqa: PT013
@ -84,7 +83,6 @@ def test_parse_channel_id_from_request_bad_jwt(async_query_manager):
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
], ],
) )
@mock.patch("superset.is_feature_enabled") @mock.patch("superset.is_feature_enabled")
@ -129,7 +127,6 @@ def test_submit_chart_data_job_as_guest_user(
[ [
("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)), ("RedisCacheBackend", mock.Mock(spec=RedisCacheBackend)),
("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)), ("RedisSentinelCacheBackend", mock.Mock(spec=RedisSentinelCacheBackend)),
("redis.Redis", mock.Mock(spec=redis.Redis)),
], ],
) )
@mock.patch("superset.is_feature_enabled") @mock.patch("superset.is_feature_enabled")