refactor(listviews): use correct filter endpoints for charts and datasets (#10442)

This commit is contained in:
ʈᵃᵢ 2020-07-29 11:00:19 -07:00 committed by GitHub
parent 7f70a241f9
commit 78cad9a4a8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
16 changed files with 100 additions and 306 deletions

View File

@@ -301,6 +301,10 @@ describe('ListView', () => {
expect(wrapper.find(ListViewFilters)).toExist();
});
it('fetched async filter values on mount', () => {
expect(fetchSelectsMock).toHaveBeenCalled();
});
it('calls fetchData on filter', () => {
act(() => {
wrapper

View File

@@ -100,8 +100,8 @@ describe('DatasetList', () => {
it('fetches data', () => {
const callsD = fetchMock.calls(/dataset\/\?q/);
expect(callsD).toHaveLength(1);
expect(callsD[0][0]).toMatchInlineSnapshot(
expect(callsD).toHaveLength(2);
expect(callsD[1][0]).toMatchInlineSnapshot(
`"http://localhost/api/v1/dataset/?q=(order_column:changed_on_delta_humanized,order_direction:desc,page:0,page_size:25)"`,
);
});

View File

@@ -108,6 +108,7 @@ function SelectFilter({
);
setSelectedOption(selected);
};
const fetchAndFormatSelects = async (
inputValue: string,
loadedOptions: SelectOption[],
@@ -119,14 +120,16 @@ function SelectFilter({
if (fetchSelects) {
const selectValues = await fetchSelects(inputValue, page);
// update matching option at initial load
const matchingOption = result.find(x => x.value === initialValue);
if (matchingOption) {
setSelectedOption(matchingOption);
}
if (!selectValues.length) {
hasMore = false;
}
result = [...result, ...selectValues];
const matchingOption = result.find(x => x.value === initialValue);
if (matchingOption) {
setSelectedOption(matchingOption);
}
}
return {
options: result,
@@ -143,13 +146,16 @@ function SelectFilter({
{fetchSelects ? (
<PaginatedSelect
data-test="filters-select"
defaultOptions
themeConfig={filterSelectTheme}
stylesConfig={filterSelectStyles}
// @ts-ignore
value={selectedOption}
// @ts-ignore
onChange={onChange}
// @ts-ignore
loadOptions={fetchAndFormatSelects}
placeholder={emptyLabel}
loadingMessage={() => 'Loading...'}
clearable={false}
additional={{
page: 0,

View File

@@ -22,7 +22,6 @@ import { Col, Row, Alert } from 'react-bootstrap';
import styled from '@superset-ui/style';
import cx from 'classnames';
import Button from 'src/components/Button';
import Loading from 'src/components/Loading';
import IndeterminateCheckbox from 'src/components/IndeterminateCheckbox';
import TableCollection from './TableCollection';
import Pagination from './Pagination';
@@ -302,9 +301,7 @@ const ListView: FunctionComponent<ListViewProps> = ({
}
});
}
if (loading && !data.length) {
return <Loading />;
}
return (
<ListViewStyles>
<div className={`superset-list-view ${className}`}>

View File

@@ -178,6 +178,7 @@ export function useListViewState({
manualFilters: true,
manualPagination: true,
manualSortBy: true,
autoResetFilters: false,
pageCount: Math.ceil(count / initialPageSize),
},
useFilters,

View File

@@ -22,12 +22,17 @@ import { getChartMetadataRegistry } from '@superset-ui/chart';
import PropTypes from 'prop-types';
import React from 'react';
import rison from 'rison';
import { uniqBy } from 'lodash';
import { createFetchRelated, createErrorHandler } from 'src/views/CRUD/utils';
import ConfirmStatusChange from 'src/components/ConfirmStatusChange';
import SubMenu from 'src/components/Menu/SubMenu';
import Icon from 'src/components/Icon';
import ListView, { ListViewProps } from 'src/components/ListView/ListView';
import { FetchDataConfig, Filters } from 'src/components/ListView/types';
import {
FetchDataConfig,
Filters,
SelectOption,
} from 'src/components/ListView/types';
import withToasts from 'src/messageToasts/enhancers/withToasts';
import PropertiesModal, { Slice } from 'src/explore/components/PropertiesModal';
import Chart from 'src/types/Chart';
@@ -50,18 +55,37 @@ interface State {
// In future it would be better to have a unified Chart entity.
sliceCurrentlyEditing: Slice | null;
}
const createFetchDatasets = (
handleError: (err: Response) => void,
) => async () => {
const createFetchDatasets = (handleError: (err: Response) => void) => async (
filterValue = '',
pageIndex?: number,
pageSize?: number,
) => {
// add filters if filterValue
const filters = filterValue
? { filters: [{ col: 'table_name', opr: 'sw', value: filterValue }] }
: {};
try {
const { json = {} } = await SupersetClient.get({
endpoint: '/api/v1/chart/datasources',
const queryParams = rison.encode({
columns: ['datasource_name', 'datasource_id'],
keys: ['none'],
order_by: 'datasource_name',
...(pageIndex ? { page: pageIndex } : {}),
...(pageSize ? { page_size: pageSize } : {}),
...filters,
});
return json?.result?.map((ds: { label: string; value: any }) => ({
...ds,
value: JSON.stringify(ds.value),
}));
const { json = {} } = await SupersetClient.get({
endpoint: `/api/v1/dataset/?q=${queryParams}`,
});
const datasets = json?.result?.map(
({ table_name: tableName, id }: { table_name: string; id: number }) => ({
label: tableName,
value: id,
}),
);
return uniqBy<SelectOption>(datasets, 'value');
} catch (e) {
handleError(e);
}
@@ -135,7 +159,7 @@ class ChartList extends React.PureComponent<Props, State> {
},
}: any) => <a href={dsUrl}>{dsNameTxt}</a>,
Header: t('Datasource'),
accessor: 'datasource_name',
accessor: 'datasource_id',
},
{
Cell: ({
@@ -257,8 +281,8 @@ class ChartList extends React.PureComponent<Props, State> {
.map(k => ({ label: k, value: k })),
},
{
Header: t('Dataset'),
id: 'datasource',
Header: t('Datasource'),
id: 'datasource_id',
input: 'select',
operator: 'eq',
unfilteredLabel: 'All',
@@ -369,29 +393,11 @@ class ChartList extends React.PureComponent<Props, State> {
loading: true,
});
const filterExps = filters
.map(({ id: col, operator: opr, value }) => ({
col,
opr,
value,
}))
.reduce((acc, fltr) => {
if (
fltr.col === 'datasource' &&
fltr.value &&
typeof fltr.value === 'string'
) {
const { datasource_id: dsId, datasource_type: dsType } = JSON.parse(
fltr.value,
);
return [
...acc,
{ ...fltr, col: 'datasource_id', value: dsId },
{ ...fltr, col: 'datasource_type', value: dsType },
];
}
return [...acc, fltr];
}, []);
const filterExps = filters.map(({ id: col, operator: opr, value }) => ({
col,
opr,
value,
}));
const queryParams = rison.encode({
order_column: sortBy[0].id,

View File

@@ -97,8 +97,13 @@ const DatasetModal: FunctionComponent<DatasetModalProps> = ({
onHide();
})
.catch(
createErrorHandler(errMsg =>
addDangerToast(t('Error while saving dataset: %s', errMsg)),
createErrorHandler((errMsg: unknown) =>
addDangerToast(
t(
'Error while saving dataset: %s',
(errMsg as { table_name?: string }).table_name,
),
),
),
);
};

View File

@@ -67,78 +67,41 @@ interface DatasetListProps {
addDangerToast: (msg: string) => void;
addSuccessToast: (msg: string) => void;
}
interface Database {
allow_csv_upload: boolean;
allow_ctas: boolean;
allow_cvas: null | boolean;
allow_dml: boolean;
allow_multi_schema_metadata_fetch: boolean;
allow_run_async: boolean;
allows_cost_estimate: boolean;
allows_subquery: boolean;
allows_virtual_table_explore: boolean;
backend: string;
database_name: string;
explore_database_id: number;
expose_in_sqllab: boolean;
force_ctas_schema: null | boolean;
function_names: string[];
id: number;
}
const createFetchDatabases = (handleError: (err: Response) => void) => async (
filterValue = '',
pageIndex?: number,
pageSize?: number,
) => {
try {
const queryParams = rison.encode({
columns: ['database_name', 'id'],
keys: ['none'],
...(pageIndex ? { page: pageIndex } : {}),
...(pageSize ? { page_size: pageSize } : {}),
...(filterValue ? { filter: filterValue } : {}),
});
const { json = {} } = await SupersetClient.get({
endpoint: `/api/v1/database/?q=${queryParams}`,
});
return json?.result?.map(
({ database_name: label, id: value }: Database) => ({
label,
value,
}),
);
} catch (e) {
handleError(e);
}
return [];
};
export const createFetchSchemas = (
handleError: (error: Response) => void,
) => async (filterValue = '', pageIndex?: number, pageSize?: number) => {
// add filters if filterValue
const filters = filterValue
? { filters: [{ col: 'schema', opr: 'sw', value: filterValue }] }
: {};
try {
const queryParams = rison.encode({
columns: ['schema'],
keys: ['none'],
order_by: 'schema',
...(pageIndex ? { page: pageIndex } : {}),
...(pageSize ? { page_size: pageSize } : {}),
...(filterValue ? { filter: filterValue } : {}),
...filters,
});
const { json = {} } = await SupersetClient.get({
endpoint: `/api/v1/database/schemas/?q=${queryParams}`,
endpoint: `/api/v1/dataset/?q=${queryParams}`,
});
return json?.result?.map(
({ text: label, value }: { text: string; value: any }) => ({
label,
value,
}),
const schemas: string[] = json?.result?.map(
({ schema }: { schema: string }) => schema,
);
// uniqueify schema values and create options
return [...new Set(schemas)]
.filter(schema => Boolean(schema))
.map(schema => ({ label: schema, value: schema }));
} catch (e) {
handleError(e);
}
return [];
};
const DatasetList: FunctionComponent<DatasetListProps> = ({
addDangerToast,
addSuccessToast,
@@ -189,9 +152,14 @@ const DatasetList: FunctionComponent<DatasetListProps> = ({
input: 'select',
operator: 'rel_o_m',
unfilteredLabel: 'All',
fetchSelects: createFetchDatabases(
fetchSelects: createFetchRelated(
'dataset',
'database',
createErrorHandler(errMsg =>
t('An error occurred while fetching datasource values: %s', errMsg),
t(
'An error occurred while fetching dataset datasource values: %s',
errMsg,
),
),
),
paginate: true,

View File

@@ -33,7 +33,7 @@ export const createFetchRelated = (
try {
const queryParams = rison.encode({
...(pageIndex ? { page: pageIndex } : {}),
...(pageSize ? { page_ize: pageSize } : {}),
...(pageSize ? { page_size: pageSize } : {}),
...(filterValue ? { filter: filterValue } : {}),
});
const { json = {} } = await SupersetClient.get({
@@ -52,10 +52,10 @@ export const createFetchRelated = (
return [];
};
export const createErrorHandler = (
handleError: (errMsg?: string) => void,
) => async (e: SupersetClientResponse | string) => {
const parsedError = await getClientErrorObject(e);
console.error(e); // eslint-disable-line no-console
handleError(parsedError.message);
};
export function createErrorHandler(handleErrorFunc: (errMsg?: string) => void) {
return async (e: SupersetClientResponse | string) => {
const parsedError = await getClientErrorObject(e);
console.error(e); // eslint-disable-line no-console
handleErrorFunc(parsedError.message);
};
}

View File

@@ -41,7 +41,6 @@ from superset.charts.commands.exceptions import (
ChartUpdateFailedError,
)
from superset.charts.commands.update import UpdateChartCommand
from superset.charts.dao import ChartDAO
from superset.charts.filters import ChartFilter, ChartNameOrDescriptionFilter
from superset.charts.schemas import (
CHART_SCHEMAS,
@@ -84,7 +83,6 @@ class ChartRestApi(BaseSupersetModelRestApi):
"bulk_delete", # not using RouteMethod since locally defined
"data",
"viz_types",
"datasources",
}
class_permission_name = "SliceModelView"
show_columns = [
@@ -693,42 +691,3 @@ class ChartRestApi(BaseSupersetModelRestApi):
return Response(
FileWrapper(screenshot), mimetype="image/png", direct_passthrough=True
)
@expose("/datasources", methods=["GET"])
@protect()
@safe
def datasources(self) -> Response:
"""Get available datasources
---
get:
description: Get available datasources.
responses:
200:
description: Query result
content:
application/json:
schema:
$ref: "#/components/schemas/ChartGetDatasourceResponseSchema"
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
datasources = ChartDAO.fetch_all_datasources()
if not datasources:
return self.response(200, count=0, result=[])
result = [
{
"label": str(ds),
"value": {"datasource_id": ds.id, "datasource_type": ds.type},
}
for ds in datasources
]
return self.response(200, count=len(result), result=result)

View File

@@ -20,7 +20,6 @@ from typing import List, Optional, TYPE_CHECKING
from sqlalchemy.exc import SQLAlchemyError
from superset.charts.filters import ChartFilter
from superset.connectors.connector_registry import ConnectorRegistry
from superset.dao.base import BaseDAO
from superset.extensions import db
from superset.models.slice import Slice
@@ -57,10 +56,6 @@ class ChartDAO(BaseDAO):
db.session.rollback()
raise ex
@staticmethod
def fetch_all_datasources() -> List["BaseDatasource"]:
return ConnectorRegistry.get_all_datasources(db.session)
@staticmethod
def save(slc: Slice, commit: bool = True) -> None:
db.session.add(slc)

View File

@@ -24,7 +24,6 @@ from superset import event_logger
from superset.databases.decorators import check_datasource_access
from superset.databases.schemas import (
database_schemas_query_schema,
DatabaseSchemaResponseSchema,
SchemasResponseSchema,
SelectStarResponseSchema,
TableMetadataResponseSchema,
@@ -37,15 +36,6 @@ from superset.views.base_api import BaseSupersetModelRestApi, statsd_metrics
from superset.views.database.filters import DatabaseFilter
from superset.views.database.validators import sqlalchemy_uri_validator
get_schemas_schema = {
"type": "object",
"properties": {
"page_size": {"type": "integer"},
"page": {"type": "integer"},
"filter": {"type": "string"},
},
}
def get_foreign_keys_metadata(
database: Database, table_name: str, schema_name: Optional[str]
@@ -129,7 +119,6 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(Database)
include_route_methods = {
"all_schemas",
"get_list",
"table_metadata",
"select_star",
@@ -137,7 +126,6 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
}
class_permission_name = "DatabaseView"
method_permission_name = {
"all_schemas": "list",
"get_list": "list",
"table_metadata": "list",
"select_star": "list",
@@ -171,11 +159,9 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
apispec_parameter_schemas = {
"database_schemas_query_schema": database_schemas_query_schema,
"get_schemas_schema": get_schemas_schema,
}
openapi_spec_tag = "Database"
openapi_spec_component_schemas = (
DatabaseSchemaResponseSchema,
TableMetadataResponseSchema,
SelectStarResponseSchema,
SchemasResponseSchema,
@@ -349,70 +335,3 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
return self.response(404, message="Table not found on the database")
self.incr_stats("success", self.select_star.__name__)
return self.response(200, result=result)
@expose("/schemas/", methods=["GET"])
@protect()
@safe
@statsd_metrics
@rison(get_schemas_schema)
def all_schemas(self, **kwargs: Any) -> FlaskResponse:
"""Get all schemas
---
get:
parameters:
- in: query
name: q
content:
application/json:
schema:
$ref: '#/components/schemas/get_schemas_schema'
responses:
200:
description: Related column data
content:
application/json:
schema:
$ref: "#/components/schemas/DatabaseSchemaResponseSchema"
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
args = kwargs.get("rison", {})
# handle pagination
page, page_size = self._handle_page_args(args)
filter_ = args.get("filter", "")
_, databases = self.datamodel.query(page=page, page_size=page_size)
result = []
count = 0
if databases:
for database in databases:
try:
schemas = database.get_all_schema_names(
cache=database.schema_cache_enabled,
cache_timeout=database.schema_cache_timeout,
force=False,
)
except SQLAlchemyError:
self.incr_stats("error", self.schemas.__name__)
continue
schemas = security_manager.get_schemas_accessible_by_user(
database, schemas
)
count += len(schemas)
for schema in schemas:
if filter_:
if schema.startswith(filter_):
result.append({"text": schema, "value": schema})
else:
result.append({"text": schema, "value": schema})
return self.response(200, count=count, result=result)

View File

@@ -86,13 +86,3 @@ class SelectStarResponseSchema(Schema):
class SchemasResponseSchema(Schema):
result = fields.List(fields.String(description="A database schema name"))
class DatabaseSchemaObjectResponseSchema(Schema):
value = fields.String(description="Schema name")
text = fields.String(description="Schema display name")
class DatabaseSchemaResponseSchema(Schema):
count = fields.Integer(description="The total number of schemas")
result = fields.Nested(DatabaseSchemaObjectResponseSchema)

View File

@@ -82,7 +82,6 @@ class BaseSupersetModelRestApi(ModelRestApi):
"refresh": "edit",
"data": "list",
"viz_types": "list",
"datasources": "list",
"related_objects": "list",
}

View File

@@ -895,14 +895,3 @@ class TestChartApi(SupersetTestCase, ApiOwnersTestCaseMixin):
payload = get_query_context(table.name, table.id, table.type)
rv = self.post_assert_metric(CHART_DATA_URI, payload, "data")
self.assertEqual(rv.status_code, 401)
def test_datasources(self):
"""
Chart API: Test get datasources
"""
self.login(username="admin")
uri = "api/v1/chart/datasources"
rv = self.client.get(uri)
self.assertEqual(rv.status_code, 200)
data = json.loads(rv.data.decode("utf-8"))
self.assertEqual(data["count"], 6)

View File

@@ -225,50 +225,6 @@ class TestDatabaseApi(SupersetTestCase):
rv = self.client.get(uri)
self.assertEqual(rv.status_code, 404)
def test_schemas(self):
"""
Database API: Test get select star not found database
"""
self.login("admin")
dbs = db.session.query(Database).all()
schemas = []
for database in dbs:
schemas += database.get_all_schema_names()
rv = self.client.get("api/v1/database/schemas/")
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(len(schemas), response["count"])
self.assertEqual(schemas[0], response["result"][0]["value"])
rv = self.client.get(
f"api/v1/database/schemas/?q={prison.dumps({'filter': 'foo'})}"
)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(0, len(response["result"]))
rv = self.client.get(
f"api/v1/database/schemas/?q={prison.dumps({'page': 0, 'page_size': 25})}"
)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(len(schemas), len(response["result"]))
rv = self.client.get(
f"api/v1/database/schemas/?q={prison.dumps({'page': 1, 'page_size': 25})}"
)
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(0, len(response["result"]))
@mock.patch("superset.security_manager.get_schemas_accessible_by_user")
def test_schemas_no_access(self, mock_get_schemas_accessible_by_user):
"""
Database API: Test all schemas with no access
"""
mock_get_schemas_accessible_by_user.return_value = []
self.login("admin")
rv = self.client.get("api/v1/database/schemas/")
response = json.loads(rv.data.decode("utf-8"))
self.assertEqual(0, response["count"])
def test_database_schemas(self):
"""
Database API: Test database schemas