feat: add modal to import datasets (#11910)

This commit is contained in:
Beto Dealmeida 2020-12-07 16:20:25 -08:00 committed by GitHub
parent 2b9695c520
commit fbb458fa8b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 447 additions and 22 deletions

View File

@ -0,0 +1,106 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React from 'react';
import thunk from 'redux-thunk';
import configureStore from 'redux-mock-store';
import { styledMount as mount } from 'spec/helpers/theming';
import { ReactWrapper } from 'enzyme';
import ImportDatasetModal from 'src/datasource/components/ImportModal';
import Modal from 'src/common/components/Modal';
// Redux mock store with thunk middleware, matching how the modal is wired in the app.
const mockStore = configureStore([thunk]);
const store = mockStore({});

// Minimal prop set needed to mount the modal in its default (visible) state.
// Callbacks are no-ops; individual tests override nothing here.
const requiredProps = {
  addDangerToast: () => {},
  addSuccessToast: () => {},
  onDatasetImport: () => {},
  show: true,
  onHide: () => {},
};
describe('ImportDatasetModal', () => {
  let wrapper: ReactWrapper;

  beforeEach(() => {
    // Freshly mount before every test so component state never leaks between cases.
    wrapper = mount(<ImportDatasetModal {...requiredProps} />, {
      context: { store },
    });
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('renders', () => {
    expect(wrapper.find(ImportDatasetModal)).toExist();
  });

  it('renders a Modal', () => {
    expect(wrapper.find(Modal)).toExist();
  });

  it('renders "Import Dataset" header', () => {
    expect(wrapper.find('h4').text()).toEqual('Import Dataset');
  });

  it('renders a label and a file input field', () => {
    expect(wrapper.find('input[type="file"]')).toExist();
    expect(wrapper.find('label')).toExist();
  });

  it('should attach the label to the input field', () => {
    // Accessibility: the label's htmlFor must match the input's id.
    const id = 'datasetFile';
    expect(wrapper.find('label').prop('htmlFor')).toBe(id);
    expect(wrapper.find('input').prop('id')).toBe(id);
  });

  it('should render the close, import and cancel buttons', () => {
    expect(wrapper.find('button')).toHaveLength(3);
  });

  it('should render the import button initially disabled', () => {
    // No file selected yet, so the primary action must be disabled.
    expect(wrapper.find('button[children="Import"]').prop('disabled')).toBe(
      true,
    );
  });

  it('should render the import button enabled when a file is selected', () => {
    // Simulate the user picking a zip export; the primary action should unlock.
    const file = new File([new ArrayBuffer(1)], 'dataset_export.zip');
    wrapper.find('input').simulate('change', { target: { files: [file] } });
    expect(wrapper.find('button[children="Import"]').prop('disabled')).toBe(
      false,
    );
  });

  it('should render password fields when needed for import', () => {
    // When the backend reports files needing database passwords, a password
    // input is rendered for each listed file.
    const wrapperWithPasswords = mount(
      <ImportDatasetModal
        {...requiredProps}
        passwordFields={['datasets/examples.yaml']}
      />,
      {
        context: { store },
      },
    );
    expect(wrapperWithPasswords.find('input[type="password"]')).toExist();
  });
});

View File

@ -0,0 +1,186 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import React, { FunctionComponent, useEffect, useRef, useState } from 'react';
import { t } from '@superset-ui/core';
import Modal from 'src/common/components/Modal';
import {
StyledIcon,
StyledInputContainer,
} from 'src/views/CRUD/data/database/DatabaseModal';
import { useImportResource } from 'src/views/CRUD/hooks';
import { DatasetObject } from 'src/views/CRUD/data/dataset/types';
export interface ImportDatasetModalProps {
  /** Shows an error toast with the given message. */
  addDangerToast: (msg: string) => void;
  /** Shows a success toast with the given message. */
  addSuccessToast: (msg: string) => void;
  /** Called after a successful import (e.g. so the parent can refresh its list). */
  onDatasetImport: () => void;
  /** Whether the modal is visible. */
  show: boolean;
  /** Called when the modal is dismissed. */
  onHide: () => void;
  /** File names (within the import bundle) that still need a database password. */
  passwordFields?: string[];
  /** Setter the parent provides to receive the password-needed file list. */
  setPasswordFields?: (passwordFields: string[]) => void;
}
/**
 * Modal that lets a user upload a dataset export bundle and, when the backend
 * reports that some bundled databases need credentials, collects a password
 * per file before retrying the import.
 */
const ImportDatasetModal: FunctionComponent<ImportDatasetModalProps> = ({
  addDangerToast,
  addSuccessToast,
  onDatasetImport,
  show,
  onHide,
  passwordFields = [],
  setPasswordFields = () => {},
}) => {
  // The export file currently selected by the user (null until chosen).
  const [uploadFile, setUploadFile] = useState<File | null>(null);
  const [isHidden, setIsHidden] = useState<boolean>(true);
  // Map of bundle file name -> password entered by the user.
  const [passwords, setPasswords] = useState<Record<string, string>>({});
  const fileInputRef = useRef<HTMLInputElement>(null);

  // Reset all transient state, including the native file input's value
  // (which React cannot clear through props).
  const clearModal = () => {
    setUploadFile(null);
    setPasswordFields([]);
    setPasswords({});
    if (fileInputRef && fileInputRef.current) {
      fileInputRef.current.value = '';
    }
  };

  const handleErrorMsg = (msg: string) => {
    clearModal();
    addDangerToast(msg);
  };

  const {
    state: { passwordsNeeded },
    importResource,
  } = useImportResource<DatasetObject>('dataset', t('dataset'), handleErrorMsg);

  // Propagate the list of files that still need passwords up to the parent,
  // which owns that piece of state.
  // eslint-disable-next-line react-hooks/exhaustive-deps
  useEffect(() => {
    setPasswordFields(passwordsNeeded);
  }, [passwordsNeeded]);

  // Functions
  const hide = () => {
    setIsHidden(true);
    onHide();
  };

  const onUpload = () => {
    if (uploadFile === null) {
      return;
    }
    importResource(uploadFile, passwords).then(result => {
      // result is truthy only on a successful import; on failure the hook
      // invokes handleErrorMsg instead.
      if (result) {
        addSuccessToast(t('The datasets have been imported'));
        clearModal();
        onDatasetImport();
      }
    });
  };

  const changeFile = (event: React.ChangeEvent<HTMLInputElement>) => {
    const { files } = event.target as HTMLInputElement;
    setUploadFile((files && files[0]) || null);
  };

  // Renders one password input per file that the backend flagged as needing
  // database credentials; returns null when none are needed.
  const renderPasswordFields = () => {
    if (passwordFields.length === 0) {
      return null;
    }
    return (
      <>
        {/* Wrapped in t() for i18n, consistent with every other user-facing
            string in this component. */}
        <h5>{t('Database passwords')}</h5>
        <StyledInputContainer>
          <div className="helper">
            {t(
              'The passwords for the databases below are needed in order to ' +
                'import them together with the datasets. Please note that the ' +
                '"Secure Extra" and "Certificate" sections of ' +
                'the database configuration are not present in export files, and ' +
                'should be added manually after the import if they are needed.',
            )}
          </div>
        </StyledInputContainer>
        {passwordFields.map(fileName => (
          <StyledInputContainer key={`password-for-${fileName}`}>
            <div className="control-label">
              {fileName}
              <span className="required">*</span>
            </div>
            <input
              name={`password-${fileName}`}
              autoComplete="off"
              type="password"
              // Default to '' so the input is controlled from the first render;
              // passwords[fileName] is undefined before the first keystroke.
              value={passwords[fileName] || ''}
              onChange={event =>
                setPasswords({ ...passwords, [fileName]: event.target.value })
              }
            />
          </StyledInputContainer>
        ))}
      </>
    );
  };

  // Show/hide: un-hide lazily when the parent asks us to show.
  if (isHidden && show) {
    setIsHidden(false);
  }

  return (
    <Modal
      name="dataset"
      className="dataset-modal"
      disablePrimaryButton={uploadFile === null}
      onHandledPrimaryAction={onUpload}
      onHide={hide}
      primaryButtonName={t('Import')}
      width="750px"
      show={show}
      title={
        <h4>
          <StyledIcon name="table" />
          {t('Import Dataset')}
        </h4>
      }
    >
      <StyledInputContainer>
        <div className="control-label">
          <label htmlFor="datasetFile">
            {t('File')}
            <span className="required">*</span>
          </label>
        </div>
        <input
          ref={fileInputRef}
          data-test="dataset-file-input"
          name="datasetFile"
          id="datasetFile"
          type="file"
          accept=".yaml,.json,.yml,.zip"
          onChange={changeFile}
        />
      </StyledInputContainer>
      {renderPasswordFields()}
    </Modal>
  );
};

export default ImportDatasetModal;

View File

@ -27,7 +27,7 @@ import { createErrorHandler } from 'src/views/CRUD/utils';
type DatasetAddObject = {
id: number;
databse: number;
database: number;
schema: string;
table_name: string;
};

View File

@ -45,6 +45,8 @@ import TooltipWrapper from 'src/components/TooltipWrapper';
import Icon from 'src/components/Icon';
import FacePile from 'src/components/FacePile';
import CertifiedIconWithTooltip from 'src/components/CertifiedIconWithTooltip';
import ImportDatasetModal from 'src/datasource/components/ImportModal/index';
import { isFeatureEnabled, FeatureFlag } from 'src/featureFlags';
import AddDatasetModal from './AddDatasetModal';
const PAGE_SIZE = 25;
@ -114,6 +116,22 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
setDatasetCurrentlyEditing,
] = useState<Dataset | null>(null);
const [importingDataset, showImportModal] = useState<boolean>(false);
const [passwordFields, setPasswordFields] = useState<string[]>([]);
const openDatasetImportModal = () => {
showImportModal(true);
};
const closeDatasetImportModal = () => {
showImportModal(false);
};
const handleDatasetImport = () => {
showImportModal(false);
refreshData();
};
const canEdit = hasPerm('can_edit');
const canDelete = hasPerm('can_delete');
const canCreate = hasPerm('can_add');
@ -453,6 +471,14 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
});
}
if (isFeatureEnabled(FeatureFlag.VERSIONED_EXPORT)) {
buttonArr.push({
name: <Icon name="import" />,
buttonStyle: 'link',
onClick: openDatasetImportModal,
});
}
menuData.buttons = buttonArr;
const closeDatasetDeleteModal = () => {
@ -620,6 +646,16 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
);
}}
</ConfirmStatusChange>
<ImportDatasetModal
show={importingDataset}
onHide={closeDatasetImportModal}
addDangerToast={addDangerToast}
addSuccessToast={addSuccessToast}
onDatasetImport={handleDatasetImport}
passwordFields={passwordFields}
setPasswordFields={setPasswordFields}
/>
</>
);
};

View File

@ -0,0 +1,61 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * A dataset column as carried through the dataset import/export payload.
 * Optional fields mirror nullable column attributes in the export format.
 */
type ColumnObject = {
  id: number;
  column_name: string;
  // Column data type as a string (e.g. a SQL type name) — see ImportV1ColumnSchema.
  type: string;
  verbose_name?: string;
  description?: string;
  expression?: string;
  filterable: boolean;
  groupby: boolean;
  is_active: boolean;
  is_dttm: boolean;
  python_date_format?: string;
  uuid?: string;
};
/**
 * A dataset metric as carried through the dataset import/export payload.
 */
type MetricObject = {
  id: number;
  // SQL expression defining the metric.
  expression?: string;
  description?: string;
  metric_name: string;
  metric_type: string;
  // D3 number format string used when displaying the metric.
  d3format?: string;
  warning_text?: string;
};
/**
 * A dataset (physical table or virtual/SQL-backed view) as handled by the
 * dataset import flow; used as the resource type for useImportResource.
 */
export type DatasetObject = {
  table_name?: string;
  // Present for virtual datasets backed by a SQL query.
  sql?: string;
  filter_select_enabled?: boolean;
  fetch_values_predicate?: string;
  schema?: string;
  description?: string;
  // Main datetime column name.
  main_dttm_col?: string;
  offset?: number;
  default_endpoint?: string;
  cache_timeout?: number;
  is_sqllab_view?: boolean;
  template_params?: string;
  // Owner user ids.
  owners: number[];
  columns: ColumnObject[];
  metrics: MetricObject[];
  extra?: string;
};

View File

@ -15,7 +15,6 @@
# specific language governing permissions and limitations
# under the License.
import urllib.parse
from typing import Any, Dict, List, Optional
from marshmallow import Schema, validate
@ -48,11 +47,9 @@ class ImportDatabasesCommand(BaseCommand):
"""Import databases"""
# pylint: disable=unused-argument
def __init__(
self, contents: Dict[str, str], *args: Any, **kwargs: Any,
):
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.passwords = kwargs.get("passwords") or {}
self.passwords: Dict[str, str] = kwargs.get("passwords") or {}
self._configs: Dict[str, Any] = {}
def _import_bundle(self, session: Session) -> None:
@ -87,6 +84,14 @@ class ImportDatabasesCommand(BaseCommand):
def validate(self) -> None:
exceptions: List[ValidationError] = []
# load existing databases so we can apply the password validation
db_passwords = {
str(uuid): password
for uuid, password in db.session.query(
Database.uuid, Database.password
).all()
}
# verify that the metadata file is present and valid
try:
metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
@ -94,14 +99,20 @@ class ImportDatabasesCommand(BaseCommand):
exceptions.append(exc)
metadata = None
# validate databases and dataset
for file_name, content in self.contents.items():
prefix = file_name.split("/")[0]
schema = schemas.get(f"{prefix}/")
if schema:
try:
config = load_yaml(file_name, content)
# populate passwords from the request or from existing DBs
if file_name in self.passwords:
config["password"] = self.passwords[file_name]
elif prefix == "databases" and config["uuid"] in db_passwords:
config["password"] = db_passwords[config["uuid"]]
schema.load(config)
self._configs[file_name] = config
except ValidationError as exc:

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import json
import logging
from datetime import datetime
from distutils.util import strtobool
@ -656,7 +657,13 @@ class DatasetRestApi(BaseSupersetModelRestApi):
for file_name in bundle.namelist()
}
command = ImportDatasetsCommand(contents)
passwords = (
json.loads(request.form["passwords"])
if "passwords" in request.form
else None
)
command = ImportDatasetsCommand(contents, passwords=passwords)
try:
command.run()
return self.response(200, message="OK")

View File

@ -46,12 +46,14 @@ class ImportDatasetsCommand(BaseCommand):
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.args = args
self.kwargs = kwargs
def run(self) -> None:
# iterate over all commands until we find a version that can
# handle the contents
for version in command_versions:
command = version(self.contents)
command = version(self.contents, *self.args, **self.kwargs)
try:
command.run()
return

View File

@ -284,19 +284,17 @@ class ImportDatasetsCommand(BaseCommand):
in Superset.
"""
# pylint: disable=unused-argument
def __init__(
self,
contents: Dict[str, str],
sync_columns: bool = False,
sync_metrics: bool = False,
self, contents: Dict[str, str], *args: Any, **kwargs: Any,
):
self.contents = contents
self._configs: Dict[str, Any] = {}
self.sync = []
if sync_columns:
if kwargs.get("sync_columns"):
self.sync.append("columns")
if sync_metrics:
if kwargs.get("sync_metrics"):
self.sync.append("metrics")
def run(self) -> None:

View File

@ -35,6 +35,7 @@ from superset.databases.schemas import ImportV1DatabaseSchema
from superset.datasets.commands.exceptions import DatasetImportError
from superset.datasets.commands.importers.v1.utils import import_dataset
from superset.datasets.schemas import ImportV1DatasetSchema
from superset.models.core import Database
schemas: Dict[str, Schema] = {
"databases/": ImportV1DatabaseSchema(),
@ -49,6 +50,7 @@ class ImportDatasetsCommand(BaseCommand):
# pylint: disable=unused-argument
def __init__(self, contents: Dict[str, str], *args: Any, **kwargs: Any):
self.contents = contents
self.passwords: Dict[str, str] = kwargs.get("passwords") or {}
self._configs: Dict[str, Any] = {}
def _import_bundle(self, session: Session) -> None:
@ -88,6 +90,14 @@ class ImportDatasetsCommand(BaseCommand):
def validate(self) -> None:
exceptions: List[ValidationError] = []
# load existing databases so we can apply the password validation
db_passwords = {
str(uuid): password
for uuid, password in db.session.query(
Database.uuid, Database.password
).all()
}
# verify that the metadata file is present and valid
try:
metadata: Optional[Dict[str, str]] = load_metadata(self.contents)
@ -95,12 +105,20 @@ class ImportDatasetsCommand(BaseCommand):
exceptions.append(exc)
metadata = None
# validate datasets and databases
for file_name, content in self.contents.items():
prefix = file_name.split("/")[0]
schema = schemas.get(f"{prefix}/")
if schema:
try:
config = load_yaml(file_name, content)
# populate passwords from the request or from existing DBs
if file_name in self.passwords:
config["password"] = self.passwords[file_name]
elif prefix == "databases" and config["uuid"] in db_passwords:
config["password"] = db_passwords[config["uuid"]]
schema.load(config)
self._configs[file_name] = config
except ValidationError as exc:

View File

@ -126,20 +126,20 @@ class DatasetRelatedObjectsResponse(Schema):
class ImportV1ColumnSchema(Schema):
column_name = fields.String(required=True)
verbose_name = fields.String()
verbose_name = fields.String(allow_none=True)
is_dttm = fields.Boolean()
is_active = fields.Boolean(allow_none=True)
type = fields.String(required=True)
groupby = fields.Boolean()
filterable = fields.Boolean()
expression = fields.String()
expression = fields.String(allow_none=True)
description = fields.String(allow_none=True)
python_date_format = fields.String(allow_none=True)
class ImportV1MetricSchema(Schema):
metric_name = fields.String(required=True)
verbose_name = fields.String()
verbose_name = fields.String(allow_none=True)
metric_type = fields.String(allow_none=True)
expression = fields.String(required=True)
description = fields.String(allow_none=True)
@ -151,12 +151,12 @@ class ImportV1MetricSchema(Schema):
class ImportV1DatasetSchema(Schema):
table_name = fields.String(required=True)
main_dttm_col = fields.String(allow_none=True)
description = fields.String()
default_endpoint = fields.String()
description = fields.String(allow_none=True)
default_endpoint = fields.String(allow_none=True)
offset = fields.Integer()
cache_timeout = fields.Integer()
schema = fields.String()
sql = fields.String()
cache_timeout = fields.Integer(allow_none=True)
schema = fields.String(allow_none=True)
sql = fields.String(allow_none=True)
params = fields.String(allow_none=True)
template_params = fields.String(allow_none=True)
filter_select_enabled = fields.Boolean()