chore: Bump flask libs (#22355)

This commit is contained in:
EugeneTorap 2023-01-09 13:10:31 +03:00 committed by GitHub
parent 001100ddf0
commit 30dab3a00a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 49 additions and 37 deletions

View File

@@ -68,7 +68,7 @@ dnspython==2.1.0
# via email-validator
email-validator==1.1.3
# via flask-appbuilder
flask==2.0.3
flask==2.1.3
# via
# apache-superset
# flask-appbuilder
@@ -86,11 +86,11 @@ flask-babel==1.0.0
# via flask-appbuilder
flask-caching==1.10.1
# via apache-superset
flask-compress==1.10.1
flask-compress==1.13
# via apache-superset
flask-jwt-extended==4.3.1
# via flask-appbuilder
flask-login==0.4.1
flask-login==0.6.0
# via flask-appbuilder
flask-migrate==3.1.0
# via apache-superset
@@ -98,9 +98,9 @@ flask-sqlalchemy==2.5.1
# via
# flask-appbuilder
# flask-migrate
flask-talisman==0.8.1
flask-talisman==1.0.0
# via apache-superset
flask-wtf==0.14.3
flask-wtf==1.0.1
# via
# apache-superset
# flask-appbuilder
@@ -144,10 +144,11 @@ mako==1.1.4
# via alembic
markdown==3.3.4
# via apache-superset
markupsafe==2.0.1
markupsafe==2.1.1
# via
# jinja2
# mako
# werkzeug
# wtforms
marshmallow==3.13.0
# via
@@ -236,7 +237,6 @@ six==1.16.0
# via
# bleach
# click-repl
# flask-talisman
# isodate
# jsonschema
# paramiko
@@ -278,7 +278,7 @@ wcwidth==0.2.5
# via prompt-toolkit
webencodings==0.5.1
# via bleach
werkzeug==2.0.3
werkzeug==2.1.2
# via
# flask
# flask-jwt-extended

View File

@@ -82,13 +82,14 @@ setup(
"cron-descriptor",
"cryptography>=3.3.2",
"deprecation>=2.1.0, <2.2.0",
"flask>=2.0.0, <3.0.0",
"flask>=2.1.3, <2.2",
"flask-appbuilder>=4.1.6, <5.0.0",
"flask-caching>=1.10.0",
"flask-compress",
"flask-talisman",
"flask-migrate",
"flask-wtf",
"flask-caching>=1.10.1, <1.11",
"flask-compress>=1.13, <2.0",
"flask-talisman>=1.0.0, <2.0",
"flask-login==0.6.0",
"flask-migrate>=3.1.0, <4.0",
"flask-wtf>=1.0.1, <1.1",
"func_timeout",
"geopy",
"graphlib-backport",

View File

@@ -219,7 +219,7 @@ export default function DrillDetailPane({
useEffect(() => {
if (!responseError && !isLoading && !resultsPages.has(pageIndex)) {
setIsLoading(true);
const jsonPayload = getDrillPayload(formData, filters);
const jsonPayload = getDrillPayload(formData, filters) ?? {};
const cachePageLimit = Math.ceil(SAMPLES_ROW_LIMIT / PAGE_SIZE);
getDatasourceSamples(
datasourceType,

View File

@@ -61,7 +61,7 @@ export const SamplesPane = ({
if (isRequest && !cache.has(datasource)) {
setIsLoading(true);
getDatasourceSamples(datasource.type, datasource.id, queryForce)
getDatasourceSamples(datasource.type, datasource.id, queryForce, {})
.then(response => {
setData(ensureIsArray(response.data));
setColnames(ensureIsArray(response.colnames));

View File

@@ -773,7 +773,12 @@ class Superset(BaseSupersetView):  # pylint: disable=too-many-public-methods
query["form_data_key"] = [form_data_key]
url = url._replace(query=parse.urlencode(query, True))
redirect_url = parse.urlunparse(url)
return redirect_url
# Return a relative URL
url = parse.urlparse(redirect_url)
if url.query:
return f"{url.path}?{url.query}"
return url.path
@has_access
@event_logger.log_this

View File

@@ -188,7 +188,11 @@ def example_db_provider() -> Callable[[], Database]:  # type: ignore
def setup_presto_if_needed():
backend = app.config["SQLALCHEMY_EXAMPLES_URI"].split("://")[0]
db_uri = (
app.config.get("SQLALCHEMY_EXAMPLES_URI")
or app.config["SQLALCHEMY_DATABASE_URI"]
)
backend = db_uri.split("://")[0]
database = get_example_database()
extra = database.get_extra()

View File

@@ -629,7 +629,7 @@ class TestCore(SupersetTestCase):
self.login(username="admin")
response = self.client.get(f"/r/{model_url.id}")
assert response.headers["Location"] == "http://localhost/"
assert response.headers["Location"] == "/"
db.session.delete(model_url)
db.session.commit()
@@ -1674,7 +1674,9 @@ class TestCore(SupersetTestCase):
rv = self.client.get(
f"/superset/explore/?form_data={quote(json.dumps(form_data))}"
)
self.assertRedirects(rv, f"/explore/?form_data_key={random_key}")
self.assertEqual(
rv.headers["Location"], f"/explore/?form_data_key={random_key}"
)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_has_table_by_name(self):

View File

@@ -143,7 +143,7 @@ class TestDashboard(SupersetTestCase):
dash_count_after = db.session.query(func.count(Dashboard.id)).first()[0]
self.assertEqual(dash_count_before + 1, dash_count_after)
group = re.match(
r"http:\/\/localhost\/superset\/dashboard\/([0-9]*)\/\?edit=true",
r"\/superset\/dashboard\/([0-9]*)\/\?edit=true",
response.headers["Location"],
)
assert group is not None

View File

@@ -466,9 +466,9 @@ def test_get_samples(test_client, login_as_admin, virtual_dataset):
f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
)
# feeds data
test_client.post(uri)
test_client.post(uri, json={})
# get from cache
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 200
assert len(rv.json["result"]["data"]) == 10
assert QueryCacheManager.has(
@@ -480,9 +480,9 @@ def test_get_samples(test_client, login_as_admin, virtual_dataset):
# 2. should read through cache data
uri2 = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&force=true"
# feeds data
test_client.post(uri2)
test_client.post(uri2, json={})
# force query
rv2 = test_client.post(uri2)
rv2 = test_client.post(uri2, json={})
assert rv2.status_code == 200
assert len(rv2.json["result"]["data"]) == 10
assert QueryCacheManager.has(
@@ -518,7 +518,7 @@ def test_get_samples_with_incorrect_cc(test_client, login_as_admin, virtual_data
uri = (
f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
)
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 422
assert "error" in rv.json
@@ -530,7 +530,7 @@ def test_get_samples_on_physical_dataset(test_client, login_as_admin, physical_d
uri = (
f"/datasource/samples?datasource_id={physical_dataset.id}&datasource_type=table"
)
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 200
assert QueryCacheManager.has(
rv.json["result"]["cache_key"], region=CacheRegion.DATA
@@ -543,7 +543,7 @@ def test_get_samples_with_filters(test_client, login_as_admin, virtual_dataset):
f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
)
rv = test_client.post(uri, json=None)
assert rv.status_code == 200
assert rv.status_code == 400
rv = test_client.post(uri, json={})
assert rv.status_code == 200
@@ -644,7 +644,7 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
uri = (
f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table"
)
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.json["result"]["page"] == 1
assert rv.json["result"]["per_page"] == app.config["SAMPLES_ROW_LIMIT"]
assert rv.json["result"]["total_count"] == 10
@@ -653,28 +653,28 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
per_pages = (app.config["SAMPLES_ROW_LIMIT"] + 1, 0, "xx")
for per_page in per_pages:
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page={per_page}"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 400
# 3. incorrect page or datasource_type
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&page=xx"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 400
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=xx"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.status_code == 400
# 4. turning pages
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=1"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.json["result"]["page"] == 1
assert rv.json["result"]["per_page"] == 2
assert rv.json["result"]["total_count"] == 10
assert [row["col1"] for row in rv.json["result"]["data"]] == [0, 1]
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=2"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.json["result"]["page"] == 2
assert rv.json["result"]["per_page"] == 2
assert rv.json["result"]["total_count"] == 10
@@ -682,7 +682,7 @@ def test_get_samples_pagination(test_client, login_as_admin, virtual_dataset):
# 5. Exceeding the maximum pages
uri = f"/datasource/samples?datasource_id={virtual_dataset.id}&datasource_type=table&per_page=2&page=6"
rv = test_client.post(uri)
rv = test_client.post(uri, json={})
assert rv.json["result"]["page"] == 6
assert rv.json["result"]["per_page"] == 2
assert rv.json["result"]["total_count"] == 10

View File

@@ -368,7 +368,7 @@ class TestThumbnails(SupersetTestCase):
id_, thumbnail_url = self._get_id_and_thumbnail_url(CHART_URL)
rv = self.client.get(f"api/v1/chart/{id_}/thumbnail/1234/")
self.assertEqual(rv.status_code, 302)
self.assertRedirects(rv, thumbnail_url)
self.assertEqual(rv.headers["Location"], thumbnail_url)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@with_feature_flags(THUMBNAILS=True)
@@ -413,4 +413,4 @@ class TestThumbnails(SupersetTestCase):
id_, thumbnail_url = self._get_id_and_thumbnail_url(DASHBOARD_URL)
rv = self.client.get(f"api/v1/dashboard/{id_}/thumbnail/1234/")
self.assertEqual(rv.status_code, 302)
self.assertRedirects(rv, thumbnail_url)
self.assertEqual(rv.headers["Location"], thumbnail_url)