diff --git a/superset-frontend/src/datasource/DatasourceEditor.jsx b/superset-frontend/src/datasource/DatasourceEditor.jsx index 94b25b2cb..bb56fd253 100644 --- a/superset-frontend/src/datasource/DatasourceEditor.jsx +++ b/superset-frontend/src/datasource/DatasourceEditor.jsx @@ -729,11 +729,13 @@ export class DatasourceEditor extends React.PureComponent { {activeTabKey === 4 && (
-
- {t('Be careful.')} - {t( - 'Changing these settings will affect all charts using this datasource, including charts owned by other people.', - )} +
+ + {t('Be careful.')} + {t( + 'Changing these settings will affect all charts using this datasource, including charts owned by other people.', + )} +
{this.renderSettingsFieldset()} diff --git a/superset/security/manager.py b/superset/security/manager.py index a9ed62309..fe39b1924 100644 --- a/superset/security/manager.py +++ b/superset/security/manager.py @@ -83,6 +83,7 @@ PermissionModelView.list_widget = SupersetSecurityListWidget # Limiting routes on FAB model views UserModelView.include_route_methods = RouteMethod.CRUD_SET | { RouteMethod.ACTION, + RouteMethod.API_READ, RouteMethod.ACTION_POST, "userinfo", } diff --git a/superset/views/core.py b/superset/views/core.py index 527dbb911..8ee024679 100755 --- a/superset/views/core.py +++ b/superset/views/core.py @@ -745,6 +745,7 @@ class Superset(BaseSupersetView): try: dashboard_import_export.import_dashboards(db.session, f.stream) except DatabaseNotFound as e: + logger.exception(e) flash( _( "Cannot import dashboard: %(db_error)s.\n" diff --git a/superset/views/datasource.py b/superset/views/datasource.py index 73e8e4be2..3875062fb 100644 --- a/superset/views/datasource.py +++ b/superset/views/datasource.py @@ -41,24 +41,26 @@ class Datasource(BaseSupersetView): if not isinstance(data, str): return json_error_response("Request missing data field.", status="500") - datasource = json.loads(data) - datasource_id = datasource.get("id") - datasource_type = datasource.get("type") + datasource_dict = json.loads(data) + datasource_id = datasource_dict.get("id") + datasource_type = datasource_dict.get("type") + database_id = datasource_dict["database"].get("id") orm_datasource = ConnectorRegistry.get_datasource( datasource_type, datasource_id, db.session ) + orm_datasource.database_id = database_id - if "owners" in datasource and orm_datasource.owner_class is not None: - datasource["owners"] = ( + if "owners" in datasource_dict and orm_datasource.owner_class is not None: + datasource_dict["owners"] = ( db.session.query(orm_datasource.owner_class) - .filter(orm_datasource.owner_class.id.in_(datasource["owners"])) + 
.filter(orm_datasource.owner_class.id.in_(datasource_dict["owners"])) .all() ) duplicates = [ name for name, count in Counter( - [col["column_name"] for col in datasource["columns"]] + [col["column_name"] for col in datasource_dict["columns"]] ).items() if count > 1 ] @@ -66,8 +68,7 @@ class Datasource(BaseSupersetView): return json_error_response( f"Duplicate column name(s): {','.join(duplicates)}", status="409" ) - - orm_datasource.update_from_object(datasource) + orm_datasource.update_from_object(datasource_dict) data = orm_datasource.data db.session.commit() diff --git a/tests/base_tests.py b/tests/base_tests.py index 09b183d98..7ccbf0a27 100644 --- a/tests/base_tests.py +++ b/tests/base_tests.py @@ -268,7 +268,7 @@ class SupersetTestCase(TestCase): cls=models.Database, criteria={"database_name": database_name}, session=db.session, - sqlalchemy_uri="sqlite://test", + sqlalchemy_uri="sqlite:///:memory:", id=db_id, extra=extra, ) diff --git a/tests/datasource_tests.py b/tests/datasource_tests.py index fa3616c32..9015e6d01 100644 --- a/tests/datasource_tests.py +++ b/tests/datasource_tests.py @@ -18,6 +18,8 @@ import json from copy import deepcopy +from superset.utils.core import get_or_create_db + from .base_tests import SupersetTestCase from .fixtures.datasource import datasource_post @@ -61,9 +63,35 @@ class DatasourceTests(SupersetTestCase): self.compare_lists(datasource_post[k], resp[k], "column_name") elif k == "metrics": self.compare_lists(datasource_post[k], resp[k], "metric_name") + elif k == "database": + self.assertEqual(resp[k]["id"], datasource_post[k]["id"]) else: self.assertEqual(resp[k], datasource_post[k]) + def save_datasource_from_dict(self, datasource_dict): + data = dict(data=json.dumps(datasource_dict)) + resp = self.get_json_resp("/datasource/save/", data) + return resp + + def test_change_database(self): + self.login(username="admin") + tbl = self.get_table_by_name("birth_names") + tbl_id = tbl.id + db_id = tbl.database_id + 
datasource_post["id"] = tbl_id + + new_db = self.create_fake_db() + + datasource_post["database"]["id"] = new_db.id + resp = self.save_datasource_from_dict(datasource_post) + self.assertEqual(resp["database"]["id"], new_db.id) + + datasource_post["database"]["id"] = db_id + resp = self.save_datasource_from_dict(datasource_post) + self.assertEqual(resp["database"]["id"], db_id) + + self.delete_fake_db() + def test_save_duplicate_key(self): self.login(username="admin") tbl_id = self.get_table_by_name("birth_names").id diff --git a/tests/fixtures/datasource.py b/tests/fixtures/datasource.py index f09332dd7..c57c9c3f0 100644 --- a/tests/fixtures/datasource.py +++ b/tests/fixtures/datasource.py @@ -18,6 +18,7 @@ datasource_post = { "id": None, "column_formats": {"ratio": ".2%"}, + "database": {"id": 1}, "description": "Adding a DESCRip", "default_endpoint": "", "filter_select_enabled": True,