fix: change database save in DatasourceEditor (#9255)

* fix: change database save in DatasourceEditor

This addresses the issue where pointing a datasource to another database
in the datasource editor is not reflected.

Also addresses:
- a minor cosmetic issue in the datasource editor.
- user/owners list not getting populated

* tests
This commit is contained in:
Maxime Beauchemin 2020-03-10 09:20:37 -07:00 committed by GitHub
parent 99242225d6
commit 116200cf73
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 49 additions and 15 deletions

View File

@ -729,11 +729,13 @@ export class DatasourceEditor extends React.PureComponent {
<Tab eventKey={4} title={t('Settings')}>
{activeTabKey === 4 && (
<div>
<div className="change-warning well">
<span className="bold">{t('Be careful.')} </span>
{t(
'Changing these settings will affect all charts using this datasource, including charts owned by other people.',
)}
<div className="m-t-10">
<Alert bsStyle="warning">
<span className="bold">{t('Be careful.')} </span>
{t(
'Changing these settings will affect all charts using this datasource, including charts owned by other people.',
)}
</Alert>
</div>
<Col md={6}>
<FormContainer>{this.renderSettingsFieldset()}</FormContainer>

View File

@ -83,6 +83,7 @@ PermissionModelView.list_widget = SupersetSecurityListWidget
# Limiting routes on FAB model views
UserModelView.include_route_methods = RouteMethod.CRUD_SET | {
RouteMethod.ACTION,
RouteMethod.API_READ,
RouteMethod.ACTION_POST,
"userinfo",
}

View File

@ -745,6 +745,7 @@ class Superset(BaseSupersetView):
try:
dashboard_import_export.import_dashboards(db.session, f.stream)
except DatabaseNotFound as e:
logger.exception(e)
flash(
_(
"Cannot import dashboard: %(db_error)s.\n"

View File

@ -41,24 +41,26 @@ class Datasource(BaseSupersetView):
if not isinstance(data, str):
return json_error_response("Request missing data field.", status="500")
datasource = json.loads(data)
datasource_id = datasource.get("id")
datasource_type = datasource.get("type")
datasource_dict = json.loads(data)
datasource_id = datasource_dict.get("id")
datasource_type = datasource_dict.get("type")
database_id = datasource_dict["database"].get("id")
orm_datasource = ConnectorRegistry.get_datasource(
datasource_type, datasource_id, db.session
)
orm_datasource.database_id = database_id
if "owners" in datasource and orm_datasource.owner_class is not None:
datasource["owners"] = (
if "owners" in datasource_dict and orm_datasource.owner_class is not None:
datasource_dict["owners"] = (
db.session.query(orm_datasource.owner_class)
.filter(orm_datasource.owner_class.id.in_(datasource["owners"]))
.filter(orm_datasource.owner_class.id.in_(datasource_dict["owners"]))
.all()
)
duplicates = [
name
for name, count in Counter(
[col["column_name"] for col in datasource["columns"]]
[col["column_name"] for col in datasource_dict["columns"]]
).items()
if count > 1
]
@ -66,8 +68,7 @@ class Datasource(BaseSupersetView):
return json_error_response(
f"Duplicate column name(s): {','.join(duplicates)}", status="409"
)
orm_datasource.update_from_object(datasource)
orm_datasource.update_from_object(datasource_dict)
data = orm_datasource.data
db.session.commit()

View File

@ -268,7 +268,7 @@ class SupersetTestCase(TestCase):
cls=models.Database,
criteria={"database_name": database_name},
session=db.session,
sqlalchemy_uri="sqlite://test",
sqlalchemy_uri="sqlite:///:memory:",
id=db_id,
extra=extra,
)

View File

@ -18,6 +18,8 @@
import json
from copy import deepcopy
from superset.utils.core import get_or_create_db
from .base_tests import SupersetTestCase
from .fixtures.datasource import datasource_post
@ -61,9 +63,35 @@ class DatasourceTests(SupersetTestCase):
self.compare_lists(datasource_post[k], resp[k], "column_name")
elif k == "metrics":
self.compare_lists(datasource_post[k], resp[k], "metric_name")
elif k == "database":
self.assertEqual(resp[k]["id"], datasource_post[k]["id"])
else:
self.assertEqual(resp[k], datasource_post[k])
def save_datasource_from_dict(self, datasource_dict):
    """POST *datasource_dict* to the datasource save endpoint and return the JSON response.

    Bug fix: the original body serialized the module-global ``datasource_post``
    instead of the ``datasource_dict`` parameter, so callers passing any other
    dict would silently save the wrong payload. Existing callers pass
    ``datasource_post`` itself, so using the parameter is behavior-compatible.
    """
    data = dict(data=json.dumps(datasource_dict))
    return self.get_json_resp("/datasource/save/", data)
def test_change_database(self):
    """Saving a datasource with a new database id must repoint it, and
    pointing it back must restore the original database.

    Exercises the fix where ``database_id`` from the posted payload is now
    written onto the ORM datasource before saving.
    """
    self.login(username="admin")
    tbl = self.get_table_by_name("birth_names")
    tbl_id = tbl.id
    db_id = tbl.database_id

    datasource_post["id"] = tbl_id
    # Point the datasource at a freshly created fake database.
    new_db = self.create_fake_db()
    datasource_post["database"]["id"] = new_db.id
    resp = self.save_datasource_from_dict(datasource_post)
    # assertEqual, not the deprecated assertEquals alias (removed in Python 3.12).
    self.assertEqual(resp["database"]["id"], new_db.id)

    # Point it back at the original database so other tests see clean state.
    datasource_post["database"]["id"] = db_id
    resp = self.save_datasource_from_dict(datasource_post)
    self.assertEqual(resp["database"]["id"], db_id)
    self.delete_fake_db()
def test_save_duplicate_key(self):
self.login(username="admin")
tbl_id = self.get_table_by_name("birth_names").id

View File

@ -18,6 +18,7 @@
datasource_post = {
"id": None,
"column_formats": {"ratio": ".2%"},
"database": {"id": 1},
"description": "Adding a DESCRip",
"default_endpoint": "",
"filter_select_enabled": True,