feat(SIP-95): catalogs in SQL Lab and datasets (#28376)

This commit is contained in:
Beto Dealmeida 2024-05-08 17:19:36 -04:00 committed by GitHub
parent 07cd1d89d0
commit ce668d46cc
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
71 changed files with 842 additions and 100 deletions

View File

@ -316,6 +316,7 @@ export type Query = {
link?: string; link?: string;
progress: number; progress: number;
resultsKey: string | null; resultsKey: string | null;
catalog?: string | null;
schema?: string; schema?: string;
sql: string; sql: string;
sqlEditorId: string; sqlEditorId: string;

View File

@ -168,6 +168,7 @@ export interface SubMenuProps {
export interface CustomAutoCompleteArgs { export interface CustomAutoCompleteArgs {
queryEditorId: string; queryEditorId: string;
dbId?: string | number; dbId?: string | number;
catalog?: string | null;
schema?: string; schema?: string;
} }

View File

@ -55,6 +55,7 @@ export const REMOVE_QUERY = 'REMOVE_QUERY';
export const EXPAND_TABLE = 'EXPAND_TABLE'; export const EXPAND_TABLE = 'EXPAND_TABLE';
export const COLLAPSE_TABLE = 'COLLAPSE_TABLE'; export const COLLAPSE_TABLE = 'COLLAPSE_TABLE';
export const QUERY_EDITOR_SETDB = 'QUERY_EDITOR_SETDB'; export const QUERY_EDITOR_SETDB = 'QUERY_EDITOR_SETDB';
export const QUERY_EDITOR_SET_CATALOG = 'QUERY_EDITOR_SET_CATALOG';
export const QUERY_EDITOR_SET_SCHEMA = 'QUERY_EDITOR_SET_SCHEMA'; export const QUERY_EDITOR_SET_SCHEMA = 'QUERY_EDITOR_SET_SCHEMA';
export const QUERY_EDITOR_SET_TITLE = 'QUERY_EDITOR_SET_TITLE'; export const QUERY_EDITOR_SET_TITLE = 'QUERY_EDITOR_SET_TITLE';
export const QUERY_EDITOR_SET_AUTORUN = 'QUERY_EDITOR_SET_AUTORUN'; export const QUERY_EDITOR_SET_AUTORUN = 'QUERY_EDITOR_SET_AUTORUN';
@ -326,6 +327,7 @@ export function runQuery(query) {
database_id: query.dbId, database_id: query.dbId,
json: true, json: true,
runAsync: query.runAsync, runAsync: query.runAsync,
catalog: query.catalog,
schema: query.schema, schema: query.schema,
sql: query.sql, sql: query.sql,
sql_editor_id: query.sqlEditorId, sql_editor_id: query.sqlEditorId,
@ -381,6 +383,7 @@ export function runQueryFromSqlEditor(
sql: qe.selectedText || qe.sql, sql: qe.selectedText || qe.sql,
sqlEditorId: qe.id, sqlEditorId: qe.id,
tab: qe.name, tab: qe.name,
catalog: qe.catalog,
schema: qe.schema, schema: qe.schema,
tempTable, tempTable,
templateParams: qe.templateParams, templateParams: qe.templateParams,
@ -556,7 +559,7 @@ export function addNewQueryEditor() {
); );
const dbIds = Object.values(databases).map(database => database.id); const dbIds = Object.values(databases).map(database => database.id);
const firstDbId = dbIds.length > 0 ? Math.min(...dbIds) : undefined; const firstDbId = dbIds.length > 0 ? Math.min(...dbIds) : undefined;
const { dbId, schema, queryLimit, autorun } = { const { dbId, catalog, schema, queryLimit, autorun } = {
...queryEditors[0], ...queryEditors[0],
...activeQueryEditor, ...activeQueryEditor,
...(unsavedQueryEditor.id === activeQueryEditor?.id && ...(unsavedQueryEditor.id === activeQueryEditor?.id &&
@ -578,6 +581,7 @@ export function addNewQueryEditor() {
return dispatch( return dispatch(
addQueryEditor({ addQueryEditor({
dbId: dbId || defaultDbId || firstDbId, dbId: dbId || defaultDbId || firstDbId,
catalog: catalog ?? null,
schema: schema ?? null, schema: schema ?? null,
autorun: autorun ?? false, autorun: autorun ?? false,
sql: `${warning}SELECT ...`, sql: `${warning}SELECT ...`,
@ -600,6 +604,7 @@ export function cloneQueryToNewTab(query, autorun) {
const queryEditor = { const queryEditor = {
name: t('Copy of %s', sourceQueryEditor.name), name: t('Copy of %s', sourceQueryEditor.name),
dbId: query.dbId ? query.dbId : null, dbId: query.dbId ? query.dbId : null,
catalog: query.catalog ? query.catalog : null,
schema: query.schema ? query.schema : null, schema: query.schema ? query.schema : null,
autorun, autorun,
sql: query.sql, sql: query.sql,
@ -656,6 +661,7 @@ export function setTables(tableSchemas) {
return { return {
dbId: tableSchema.database_id, dbId: tableSchema.database_id,
queryEditorId: tableSchema.tab_state_id.toString(), queryEditorId: tableSchema.tab_state_id.toString(),
catalog: tableSchema.catalog,
schema: tableSchema.schema, schema: tableSchema.schema,
name: tableSchema.table, name: tableSchema.table,
expanded: tableSchema.expanded, expanded: tableSchema.expanded,
@ -694,6 +700,7 @@ export function switchQueryEditor(queryEditor, displayLimit) {
autorun: json.autorun, autorun: json.autorun,
dbId: json.database_id, dbId: json.database_id,
templateParams: json.template_params, templateParams: json.template_params,
catalog: json.catalog,
schema: json.schema, schema: json.schema,
queryLimit: json.query_limit, queryLimit: json.query_limit,
remoteId: json.saved_query?.id, remoteId: json.saved_query?.id,
@ -797,6 +804,14 @@ export function queryEditorSetDb(queryEditor, dbId) {
return { type: QUERY_EDITOR_SETDB, queryEditor, dbId }; return { type: QUERY_EDITOR_SETDB, queryEditor, dbId };
} }
export function queryEditorSetCatalog(queryEditor, catalog) {
return {
type: QUERY_EDITOR_SET_CATALOG,
queryEditor: queryEditor || {},
catalog,
};
}
export function queryEditorSetSchema(queryEditor, schema) { export function queryEditorSetSchema(queryEditor, schema) {
return { return {
type: QUERY_EDITOR_SET_SCHEMA, type: QUERY_EDITOR_SET_SCHEMA,
@ -954,12 +969,13 @@ export function mergeTable(table, query, prepend) {
return { type: MERGE_TABLE, table, query, prepend }; return { type: MERGE_TABLE, table, query, prepend };
} }
export function addTable(queryEditor, tableName, schemaName) { export function addTable(queryEditor, tableName, catalogName, schemaName) {
return function (dispatch, getState) { return function (dispatch, getState) {
const query = getUpToDateQuery(getState(), queryEditor, queryEditor.id); const query = getUpToDateQuery(getState(), queryEditor, queryEditor.id);
const table = { const table = {
dbId: query.dbId, dbId: query.dbId,
queryEditorId: query.id, queryEditorId: query.id,
catalog: catalogName,
schema: schemaName, schema: schemaName,
name: tableName, name: tableName,
}; };
@ -983,12 +999,14 @@ export function runTablePreviewQuery(newTable) {
sqlLab: { databases }, sqlLab: { databases },
} = getState(); } = getState();
const database = databases[newTable.dbId]; const database = databases[newTable.dbId];
const { dbId } = newTable; const { dbId, catalog, schema } = newTable;
if (database && !database.disable_data_preview) { if (database && !database.disable_data_preview) {
const dataPreviewQuery = { const dataPreviewQuery = {
id: shortid.generate(), id: shortid.generate(),
dbId, dbId,
catalog,
schema,
sql: newTable.selectStar, sql: newTable.selectStar,
tableName: newTable.name, tableName: newTable.name,
sqlEditorId: null, sqlEditorId: null,
@ -1003,6 +1021,7 @@ export function runTablePreviewQuery(newTable) {
{ {
id: newTable.id, id: newTable.id,
dbId: newTable.dbId, dbId: newTable.dbId,
catalog: newTable.catalog,
schema: newTable.schema, schema: newTable.schema,
name: newTable.name, name: newTable.name,
queryEditorId: newTable.queryEditorId, queryEditorId: newTable.queryEditorId,
@ -1180,6 +1199,7 @@ export function popStoredQuery(urlId) {
addQueryEditor({ addQueryEditor({
name: json.name ? json.name : t('Shared query'), name: json.name ? json.name : t('Shared query'),
dbId: json.dbId ? parseInt(json.dbId, 10) : null, dbId: json.dbId ? parseInt(json.dbId, 10) : null,
catalog: json.catalog ? json.catalog : null,
schema: json.schema ? json.schema : null, schema: json.schema ? json.schema : null,
autorun: json.autorun ? json.autorun : false, autorun: json.autorun ? json.autorun : false,
sql: json.sql ? json.sql : 'SELECT ...', sql: json.sql ? json.sql : 'SELECT ...',
@ -1215,6 +1235,7 @@ export function popQuery(queryId) {
const queryData = json.result; const queryData = json.result;
const queryEditorProps = { const queryEditorProps = {
dbId: queryData.database.id, dbId: queryData.database.id,
catalog: queryData.catalog,
schema: queryData.schema, schema: queryData.schema,
sql: queryData.sql, sql: queryData.sql,
name: t('Copy of %s', queryData.tab_name), name: t('Copy of %s', queryData.tab_name),
@ -1268,12 +1289,13 @@ export function createDatasourceFailed(err) {
export function createDatasource(vizOptions) { export function createDatasource(vizOptions) {
return dispatch => { return dispatch => {
dispatch(createDatasourceStarted()); dispatch(createDatasourceStarted());
const { dbId, schema, datasourceName, sql } = vizOptions; const { dbId, catalog, schema, datasourceName, sql } = vizOptions;
return SupersetClient.post({ return SupersetClient.post({
endpoint: '/api/v1/dataset/', endpoint: '/api/v1/dataset/',
headers: { 'Content-Type': 'application/json' }, headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ body: JSON.stringify({
database: dbId, database: dbId,
catalog,
schema, schema,
sql, sql,
table_name: datasourceName, table_name: datasourceName,

View File

@ -419,6 +419,7 @@ describe('async actions', () => {
queryEditor: { queryEditor: {
name: 'Copy of Dummy query editor', name: 'Copy of Dummy query editor',
dbId: 1, dbId: 1,
catalog: query.catalog,
schema: query.schema, schema: query.schema,
autorun: true, autorun: true,
sql: 'SELECT * FROM something', sql: 'SELECT * FROM something',
@ -481,6 +482,7 @@ describe('async actions', () => {
sql: expect.stringContaining('SELECT ...'), sql: expect.stringContaining('SELECT ...'),
name: `Untitled Query 7`, name: `Untitled Query 7`,
dbId: defaultQueryEditor.dbId, dbId: defaultQueryEditor.dbId,
catalog: defaultQueryEditor.catalog,
schema: defaultQueryEditor.schema, schema: defaultQueryEditor.schema,
autorun: false, autorun: false,
queryLimit: queryLimit:
@ -607,6 +609,24 @@ describe('async actions', () => {
}); });
}); });
describe('queryEditorSetCatalog', () => {
it('updates the tab state in the backend', () => {
expect.assertions(1);
const catalog = 'public';
const store = mockStore({});
const expectedActions = [
{
type: actions.QUERY_EDITOR_SET_CATALOG,
queryEditor,
catalog,
},
];
store.dispatch(actions.queryEditorSetCatalog(queryEditor, catalog));
expect(store.getActions()).toEqual(expectedActions);
});
});
describe('queryEditorSetSchema', () => { describe('queryEditorSetSchema', () => {
it('updates the tab state in the backend', () => { it('updates the tab state in the backend', () => {
expect.assertions(1); expect.assertions(1);
@ -747,6 +767,7 @@ describe('async actions', () => {
describe('addTable', () => { describe('addTable', () => {
it('dispatches table state from unsaved change', () => { it('dispatches table state from unsaved change', () => {
const tableName = 'table'; const tableName = 'table';
const catalogName = null;
const schemaName = 'schema'; const schemaName = 'schema';
const expectedDbId = 473892; const expectedDbId = 473892;
const store = mockStore({ const store = mockStore({
@ -759,12 +780,18 @@ describe('async actions', () => {
}, },
}, },
}); });
const request = actions.addTable(query, tableName, schemaName); const request = actions.addTable(
query,
tableName,
catalogName,
schemaName,
);
request(store.dispatch, store.getState); request(store.dispatch, store.getState);
expect(store.getActions()[0]).toEqual( expect(store.getActions()[0]).toEqual(
expect.objectContaining({ expect.objectContaining({
table: expect.objectContaining({ table: expect.objectContaining({
name: tableName, name: tableName,
catalog: catalogName,
schema: schemaName, schema: schemaName,
dbId: expectedDbId, dbId: expectedDbId,
}), }),
@ -811,6 +838,7 @@ describe('async actions', () => {
}); });
const tableName = 'table'; const tableName = 'table';
const catalogName = null;
const schemaName = 'schema'; const schemaName = 'schema';
const store = mockStore({ const store = mockStore({
...initialState, ...initialState,
@ -829,6 +857,7 @@ describe('async actions', () => {
const request = actions.runTablePreviewQuery({ const request = actions.runTablePreviewQuery({
dbId: 1, dbId: 1,
name: tableName, name: tableName,
catalog: catalogName,
schema: schemaName, schema: schemaName,
}); });
return request(store.dispatch, store.getState).then(() => { return request(store.dispatch, store.getState).then(() => {

View File

@ -74,6 +74,7 @@ const AceEditorWrapper = ({
'id', 'id',
'dbId', 'dbId',
'sql', 'sql',
'catalog',
'schema', 'schema',
'templateParams', 'templateParams',
'cursorPosition', 'cursorPosition',
@ -161,6 +162,7 @@ const AceEditorWrapper = ({
const { data: annotations } = useAnnotations({ const { data: annotations } = useAnnotations({
dbId: queryEditor.dbId, dbId: queryEditor.dbId,
catalog: queryEditor.catalog,
schema: queryEditor.schema, schema: queryEditor.schema,
sql: currentSql, sql: currentSql,
templateParams: queryEditor.templateParams, templateParams: queryEditor.templateParams,
@ -170,6 +172,7 @@ const AceEditorWrapper = ({
{ {
queryEditorId, queryEditorId,
dbId: queryEditor.dbId, dbId: queryEditor.dbId,
catalog: queryEditor.catalog,
schema: queryEditor.schema, schema: queryEditor.schema,
}, },
!autocomplete, !autocomplete,

View File

@ -189,7 +189,12 @@ test('returns column keywords among selected tables', async () => {
storeWithSqlLab.dispatch( storeWithSqlLab.dispatch(
tableApiUtil.upsertQueryData( tableApiUtil.upsertQueryData(
'tableMetadata', 'tableMetadata',
{ dbId: expectDbId, schema: expectSchema, table: expectTable }, {
dbId: expectDbId,
catalog: null,
schema: expectSchema,
table: expectTable,
},
{ {
name: expectTable, name: expectTable,
columns: [ columns: [
@ -205,7 +210,12 @@ test('returns column keywords among selected tables', async () => {
storeWithSqlLab.dispatch( storeWithSqlLab.dispatch(
tableApiUtil.upsertQueryData( tableApiUtil.upsertQueryData(
'tableMetadata', 'tableMetadata',
{ dbId: expectDbId, schema: expectSchema, table: unexpectedTable }, {
dbId: expectDbId,
catalog: null,
schema: expectSchema,
table: unexpectedTable,
},
{ {
name: unexpectedTable, name: unexpectedTable,
columns: [ columns: [
@ -227,6 +237,7 @@ test('returns column keywords among selected tables', async () => {
useKeywords({ useKeywords({
queryEditorId: expectQueryEditorId, queryEditorId: expectQueryEditorId,
dbId: expectDbId, dbId: expectDbId,
catalog: null,
schema: expectSchema, schema: expectSchema,
}), }),
{ {

View File

@ -42,6 +42,7 @@ import { SqlLabRootState } from 'src/SqlLab/types';
type Params = { type Params = {
queryEditorId: string | number; queryEditorId: string | number;
dbId?: string | number; dbId?: string | number;
catalog?: string | null;
schema?: string; schema?: string;
}; };
@ -58,7 +59,7 @@ const getHelperText = (value: string) =>
const extensionsRegistry = getExtensionsRegistry(); const extensionsRegistry = getExtensionsRegistry();
export function useKeywords( export function useKeywords(
{ queryEditorId, dbId, schema }: Params, { queryEditorId, dbId, catalog, schema }: Params,
skip = false, skip = false,
) { ) {
const useCustomKeywords = extensionsRegistry.get( const useCustomKeywords = extensionsRegistry.get(
@ -68,6 +69,7 @@ export function useKeywords(
const customKeywords = useCustomKeywords?.({ const customKeywords = useCustomKeywords?.({
queryEditorId: String(queryEditorId), queryEditorId: String(queryEditorId),
dbId, dbId,
catalog,
schema, schema,
}); });
const dispatch = useDispatch(); const dispatch = useDispatch();
@ -78,6 +80,7 @@ export function useKeywords(
const { data: schemaOptions } = useSchemasQueryState( const { data: schemaOptions } = useSchemasQueryState(
{ {
dbId, dbId,
catalog: catalog || undefined,
forceRefresh: false, forceRefresh: false,
}, },
{ skip: skipFetch || !dbId }, { skip: skipFetch || !dbId },
@ -85,6 +88,7 @@ export function useKeywords(
const { data: tableData } = useTablesQueryState( const { data: tableData } = useTablesQueryState(
{ {
dbId, dbId,
catalog,
schema, schema,
forceRefresh: false, forceRefresh: false,
}, },
@ -125,6 +129,7 @@ export function useKeywords(
dbId && schema dbId && schema
? { ? {
dbId, dbId,
catalog,
schema, schema,
table, table,
} }
@ -137,7 +142,7 @@ export function useKeywords(
}); });
}); });
return [...columns]; return [...columns];
}, [dbId, schema, apiState, tablesForColumnMetadata]); }, [dbId, catalog, schema, apiState, tablesForColumnMetadata]);
const insertMatch = useEffectEvent((editor: Editor, data: any) => { const insertMatch = useEffectEvent((editor: Editor, data: any) => {
if (data.meta === 'table') { if (data.meta === 'table') {

View File

@ -210,6 +210,38 @@ describe('SaveDatasetModal', () => {
expect(createDatasource).toHaveBeenCalledWith({ expect(createDatasource).toHaveBeenCalledWith({
datasourceName: 'my dataset', datasourceName: 'my dataset',
dbId: 1, dbId: 1,
catalog: null,
schema: 'main',
sql: 'SELECT *',
templateParams: undefined,
});
});
it('sends the catalog when creating the dataset', async () => {
const dummyDispatch = jest.fn().mockResolvedValue({});
useDispatchMock.mockReturnValue(dummyDispatch);
useSelectorMock.mockReturnValue({ ...user });
render(
<SaveDatasetModal
{...mockedProps}
datasource={{ ...mockedProps.datasource, catalog: 'public' }}
/>,
{ useRedux: true },
);
const inputFieldText = screen.getByDisplayValue(/unimportant/i);
fireEvent.change(inputFieldText, { target: { value: 'my dataset' } });
const saveConfirmationBtn = screen.getByRole('button', {
name: /save/i,
});
userEvent.click(saveConfirmationBtn);
expect(createDatasource).toHaveBeenCalledWith({
datasourceName: 'my dataset',
dbId: 1,
catalog: 'public',
schema: 'main', schema: 'main',
sql: 'SELECT *', sql: 'SELECT *',
templateParams: undefined, templateParams: undefined,

View File

@ -77,6 +77,7 @@ export interface ISaveableDatasource {
dbId: number; dbId: number;
sql: string; sql: string;
templateParams?: string | object | null; templateParams?: string | object | null;
catalog?: string | null;
schema?: string | null; schema?: string | null;
database?: Database; database?: Database;
} }
@ -292,6 +293,7 @@ export const SaveDatasetModal = ({
createDatasource({ createDatasource({
sql: datasource.sql, sql: datasource.sql,
dbId: datasource.dbId || datasource?.database?.id, dbId: datasource.dbId || datasource?.database?.id,
catalog: datasource?.catalog,
schema: datasource?.schema, schema: datasource?.schema,
templateParams, templateParams,
datasourceName: datasetName, datasourceName: datasetName,

View File

@ -42,6 +42,7 @@ const mockState = {
{ {
id: mockedProps.queryEditorId, id: mockedProps.queryEditorId,
dbId: 1, dbId: 1,
catalog: null,
schema: 'main', schema: 'main',
sql: 'SELECT * FROM t', sql: 'SELECT * FROM t',
}, },

View File

@ -48,7 +48,7 @@ export type QueryPayload = {
description?: string; description?: string;
id?: string; id?: string;
remoteId?: number; remoteId?: number;
} & Pick<QueryEditor, 'dbId' | 'schema' | 'sql'>; } & Pick<QueryEditor, 'dbId' | 'catalog' | 'schema' | 'sql'>;
const Styles = styled.span` const Styles = styled.span`
span[role='img'] { span[role='img'] {
@ -78,6 +78,7 @@ const SaveQuery = ({
'dbId', 'dbId',
'latestQueryId', 'latestQueryId',
'queryLimit', 'queryLimit',
'catalog',
'schema', 'schema',
'selectedText', 'selectedText',
'sql', 'sql',
@ -115,6 +116,7 @@ const SaveQuery = ({
description, description,
dbId: query.dbId ?? 0, dbId: query.dbId ?? 0,
sql: query.sql, sql: query.sql,
catalog: query.catalog,
schema: query.schema, schema: query.schema,
templateParams: query.templateParams, templateParams: query.templateParams,
remoteId: query?.remoteId || undefined, remoteId: query?.remoteId || undefined,

View File

@ -44,6 +44,10 @@ const mockedProps = {
beforeEach(() => { beforeEach(() => {
fetchMock.get('glob:*/api/v1/database/?*', { result: [] }); fetchMock.get('glob:*/api/v1/database/?*', { result: [] });
fetchMock.get('glob:*/api/v1/database/*/catalogs/?*', {
count: 0,
result: [],
});
fetchMock.get('glob:*/api/v1/database/*/schemas/?*', { fetchMock.get('glob:*/api/v1/database/*/schemas/?*', {
count: 2, count: 2,
result: ['main', 'new_schema'], result: ['main', 'new_schema'],
@ -103,11 +107,14 @@ test('renders a TableElement', async () => {
}); });
test('table should be visible when expanded is true', async () => { test('table should be visible when expanded is true', async () => {
const { container, getByText, getByRole, queryAllByText } = const { container, getByText, getByRole } = await renderAndWait(
await renderAndWait(mockedProps, undefined, { mockedProps,
undefined,
{
...initialState, ...initialState,
sqlLab: { ...initialState.sqlLab, tables: [table] }, sqlLab: { ...initialState.sqlLab, tables: [table] },
}); },
);
const dbSelect = getByRole('combobox', { const dbSelect = getByRole('combobox', {
name: 'Select database or type to search databases', name: 'Select database or type to search databases',
@ -115,14 +122,56 @@ test('table should be visible when expanded is true', async () => {
const schemaSelect = getByRole('combobox', { const schemaSelect = getByRole('combobox', {
name: 'Select schema or type to search schemas', name: 'Select schema or type to search schemas',
}); });
const dropdown = getByText(/Table/i); const dropdown = getByText(/Select table/i);
const abUser = queryAllByText(/ab_user/i); const abUser = getByText(/ab_user/i);
expect(getByText(/Database/i)).toBeInTheDocument(); expect(getByText(/Database/i)).toBeInTheDocument();
expect(dbSelect).toBeInTheDocument(); expect(dbSelect).toBeInTheDocument();
expect(schemaSelect).toBeInTheDocument(); expect(schemaSelect).toBeInTheDocument();
expect(dropdown).toBeInTheDocument(); expect(dropdown).toBeInTheDocument();
expect(abUser).toHaveLength(2); expect(abUser).toBeInTheDocument();
expect(
container.querySelector('.ant-collapse-content-active'),
).toBeInTheDocument();
table.columns.forEach(({ name }) => {
expect(getByText(name)).toBeInTheDocument();
});
});
test('catalog selector should be visible when enabled in the database', async () => {
const { container, getByText, getByRole } = await renderAndWait(
{
...mockedProps,
database: {
...mockedProps.database,
allow_multi_catalog: true,
},
},
undefined,
{
...initialState,
sqlLab: { ...initialState.sqlLab, tables: [table] },
},
);
const dbSelect = getByRole('combobox', {
name: 'Select database or type to search databases',
});
const catalogSelect = getByRole('combobox', {
name: 'Select catalog or type to search catalogs',
});
const schemaSelect = getByRole('combobox', {
name: 'Select schema or type to search schemas',
});
const dropdown = getByText(/Select table/i);
const abUser = getByText(/ab_user/i);
expect(getByText(/Database/i)).toBeInTheDocument();
expect(dbSelect).toBeInTheDocument();
expect(catalogSelect).toBeInTheDocument();
expect(schemaSelect).toBeInTheDocument();
expect(dropdown).toBeInTheDocument();
expect(abUser).toBeInTheDocument();
expect( expect(
container.querySelector('.ant-collapse-content-active'), container.querySelector('.ant-collapse-content-active'),
).toBeInTheDocument(); ).toBeInTheDocument();

View File

@ -34,6 +34,7 @@ import {
removeTables, removeTables,
collapseTable, collapseTable,
expandTable, expandTable,
queryEditorSetCatalog,
queryEditorSetSchema, queryEditorSetSchema,
setDatabases, setDatabases,
addDangerToast, addDangerToast,
@ -115,13 +116,17 @@ const SqlEditorLeftBar = ({
shallowEqual, shallowEqual,
); );
const dispatch = useDispatch(); const dispatch = useDispatch();
const queryEditor = useQueryEditor(queryEditorId, ['dbId', 'schema']); const queryEditor = useQueryEditor(queryEditorId, [
'dbId',
'catalog',
'schema',
]);
const [emptyResultsWithSearch, setEmptyResultsWithSearch] = useState(false); const [emptyResultsWithSearch, setEmptyResultsWithSearch] = useState(false);
const [userSelectedDb, setUserSelected] = useState<DatabaseObject | null>( const [userSelectedDb, setUserSelected] = useState<DatabaseObject | null>(
null, null,
); );
const { schema } = queryEditor; const { catalog, schema } = queryEditor;
useEffect(() => { useEffect(() => {
const bool = querystring.parse(window.location.search).db; const bool = querystring.parse(window.location.search).db;
@ -138,9 +143,9 @@ const SqlEditorLeftBar = ({
} }
}, [database]); }, [database]);
const onEmptyResults = (searchText?: string) => { const onEmptyResults = useCallback((searchText?: string) => {
setEmptyResultsWithSearch(!!searchText); setEmptyResultsWithSearch(!!searchText);
}; }, []);
const onDbChange = ({ id: dbId }: { id: number }) => { const onDbChange = ({ id: dbId }: { id: number }) => {
setEmptyState?.(false); setEmptyState?.(false);
@ -152,7 +157,11 @@ const SqlEditorLeftBar = ({
[tables], [tables],
); );
const onTablesChange = (tableNames: string[], schemaName: string) => { const onTablesChange = (
tableNames: string[],
catalogName: string | null,
schemaName: string,
) => {
if (!schemaName) { if (!schemaName) {
return; return;
} }
@ -169,7 +178,7 @@ const SqlEditorLeftBar = ({
}); });
tablesToAdd.forEach(tableName => { tablesToAdd.forEach(tableName => {
dispatch(addTable(queryEditor, tableName, schemaName)); dispatch(addTable(queryEditor, tableName, catalogName, schemaName));
}); });
dispatch(removeTables(currentTables)); dispatch(removeTables(currentTables));
@ -210,6 +219,15 @@ const SqlEditorLeftBar = ({
const shouldShowReset = window.location.search === '?reset=1'; const shouldShowReset = window.location.search === '?reset=1';
const tableMetaDataHeight = height - 130; // 130 is the height of the selects above const tableMetaDataHeight = height - 130; // 130 is the height of the selects above
const handleCatalogChange = useCallback(
(catalog: string | null) => {
if (queryEditor) {
dispatch(queryEditorSetCatalog(queryEditor, catalog));
}
},
[dispatch, queryEditor],
);
const handleSchemaChange = useCallback( const handleSchemaChange = useCallback(
(schema: string) => { (schema: string) => {
if (queryEditor) { if (queryEditor) {
@ -246,9 +264,11 @@ const SqlEditorLeftBar = ({
getDbList={handleDbList} getDbList={handleDbList}
handleError={handleError} handleError={handleError}
onDbChange={onDbChange} onDbChange={onDbChange}
onCatalogChange={handleCatalogChange}
catalog={catalog}
onSchemaChange={handleSchemaChange} onSchemaChange={handleSchemaChange}
onTableSelectChange={onTablesChange}
schema={schema} schema={schema}
onTableSelectChange={onTablesChange}
tableValue={selectedTableNames} tableValue={selectedTableNames}
sqlLabMode sqlLabMode
/> />

View File

@ -111,6 +111,7 @@ class TabbedSqlEditors extends React.PureComponent<TabbedSqlEditorsProps> {
queryId, queryId,
dbid, dbid,
dbname, dbname,
catalog,
schema, schema,
autorun, autorun,
new: isNewQuery, new: isNewQuery,
@ -149,6 +150,7 @@ class TabbedSqlEditors extends React.PureComponent<TabbedSqlEditorsProps> {
const newQueryEditor = { const newQueryEditor = {
name, name,
dbId: databaseId, dbId: databaseId,
catalog,
schema, schema,
autorun, autorun,
sql, sql,

View File

@ -101,7 +101,7 @@ const StyledCollapsePanel = styled(Collapse.Panel)`
`; `;
const TableElement = ({ table, ...props }: TableElementProps) => { const TableElement = ({ table, ...props }: TableElementProps) => {
const { dbId, schema, name, expanded } = table; const { dbId, catalog, schema, name, expanded } = table;
const theme = useTheme(); const theme = useTheme();
const dispatch = useDispatch(); const dispatch = useDispatch();
const { const {
@ -112,6 +112,7 @@ const TableElement = ({ table, ...props }: TableElementProps) => {
} = useTableMetadataQuery( } = useTableMetadataQuery(
{ {
dbId, dbId,
catalog,
schema, schema,
table: name, table: name,
}, },
@ -125,6 +126,7 @@ const TableElement = ({ table, ...props }: TableElementProps) => {
} = useTableExtendedMetadataQuery( } = useTableExtendedMetadataQuery(
{ {
dbId, dbId,
catalog,
schema, schema,
table: name, table: name,
}, },

View File

@ -36,6 +36,7 @@ export const table = {
dbId: 1, dbId: 1,
selectStar: 'SELECT * FROM ab_user', selectStar: 'SELECT * FROM ab_user',
queryEditorId: 'dfsadfs', queryEditorId: 'dfsadfs',
catalog: null,
schema: 'superset', schema: 'superset',
name: 'ab_user', name: 'ab_user',
id: 'r11Vgt60', id: 'r11Vgt60',
@ -191,6 +192,7 @@ export const defaultQueryEditor = {
selectedText: undefined, selectedText: undefined,
sql: 'SELECT *\nFROM\nWHERE', sql: 'SELECT *\nFROM\nWHERE',
name: 'Untitled Query 1', name: 'Untitled Query 1',
catalog: null,
schema: 'main', schema: 'main',
remoteId: null, remoteId: null,
hideLeftBar: false, hideLeftBar: false,
@ -233,6 +235,7 @@ export const queries = [
queryLimit: 100, queryLimit: 100,
endDttm: 1476910566798, endDttm: 1476910566798,
limit_reached: false, limit_reached: false,
catalog: null,
schema: 'test_schema', schema: 'test_schema',
errorMessage: null, errorMessage: null,
db: 'main', db: 'main',
@ -294,6 +297,7 @@ export const queries = [
rows: 42, rows: 42,
endDttm: 1476910579693, endDttm: 1476910579693,
limit_reached: false, limit_reached: false,
catalog: null,
schema: null, schema: null,
errorMessage: null, errorMessage: null,
db: 'main', db: 'main',
@ -323,6 +327,7 @@ export const queryWithNoQueryLimit = {
rows: 42, rows: 42,
endDttm: 1476910566798, endDttm: 1476910566798,
limit_reached: false, limit_reached: false,
catalog: null,
schema: 'test_schema', schema: 'test_schema',
errorMessage: null, errorMessage: null,
db: 'main', db: 'main',
@ -456,18 +461,21 @@ export const tables = {
options: [ options: [
{ {
value: 'birth_names', value: 'birth_names',
catalog: null,
schema: 'main', schema: 'main',
label: 'birth_names', label: 'birth_names',
title: 'birth_names', title: 'birth_names',
}, },
{ {
value: 'energy_usage', value: 'energy_usage',
catalog: null,
schema: 'main', schema: 'main',
label: 'energy_usage', label: 'energy_usage',
title: 'energy_usage', title: 'energy_usage',
}, },
{ {
value: 'wb_health_population', value: 'wb_health_population',
catalog: null,
schema: 'main', schema: 'main',
label: 'wb_health_population', label: 'wb_health_population',
title: 'wb_health_population', title: 'wb_health_population',
@ -483,6 +491,7 @@ export const stoppedQuery = {
progress: 0, progress: 0,
results: [], results: [],
runAsync: false, runAsync: false,
catalog: null,
schema: 'main', schema: 'main',
sql: 'SELECT ...', sql: 'SELECT ...',
sqlEditorId: 'rJaf5u9WZ', sqlEditorId: 'rJaf5u9WZ',
@ -501,6 +510,7 @@ export const failedQueryWithErrorMessage = {
progress: 0, progress: 0,
results: [], results: [],
runAsync: false, runAsync: false,
catalog: null,
schema: 'main', schema: 'main',
sql: 'SELECT ...', sql: 'SELECT ...',
sqlEditorId: 'rJaf5u9WZ', sqlEditorId: 'rJaf5u9WZ',
@ -526,6 +536,7 @@ export const failedQueryWithErrors = {
progress: 0, progress: 0,
results: [], results: [],
runAsync: false, runAsync: false,
catalog: null,
schema: 'main', schema: 'main',
sql: 'SELECT ...', sql: 'SELECT ...',
sqlEditorId: 'rJaf5u9WZ', sqlEditorId: 'rJaf5u9WZ',
@ -555,6 +566,7 @@ const baseQuery: QueryResponse = {
started: 'started', started: 'started',
queryLimit: 100, queryLimit: 100,
endDttm: 1476910566798, endDttm: 1476910566798,
catalog: null,
schema: 'test_schema', schema: 'test_schema',
errorMessage: null, errorMessage: null,
db: { key: 'main' }, db: { key: 'main' },
@ -689,6 +701,7 @@ export const query = {
dbId: 1, dbId: 1,
sql: 'SELECT * FROM something', sql: 'SELECT * FROM something',
description: 'test description', description: 'test description',
catalog: null,
schema: 'test schema', schema: 'test schema',
resultsKey: 'test', resultsKey: 'test',
}; };
@ -698,6 +711,7 @@ export const queryId = 'clientId2353';
export const testQuery: ISaveableDatasource = { export const testQuery: ISaveableDatasource = {
name: 'unimportant', name: 'unimportant',
dbId: 1, dbId: 1,
catalog: null,
schema: 'main', schema: 'main',
sql: 'SELECT *', sql: 'SELECT *',
columns: [ columns: [
@ -727,6 +741,7 @@ export const mockdatasets = [...new Array(3)].map((_, i) => ({
database_name: `db ${i}`, database_name: `db ${i}`,
explore_url: `/explore/?datasource_type=table&datasource_id=${i}`, explore_url: `/explore/?datasource_type=table&datasource_id=${i}`,
id: i, id: i,
catalog: null,
schema: `schema ${i}`, schema: `schema ${i}`,
table_name: `coolest table ${i}`, table_name: `coolest table ${i}`,
owners: [{ username: 'admin', userId: 1 }], owners: [{ username: 'admin', userId: 1 }],

View File

@ -89,6 +89,7 @@ export default function getInitialState({
autorun: Boolean(activeTab.autorun), autorun: Boolean(activeTab.autorun),
templateParams: activeTab.template_params || undefined, templateParams: activeTab.template_params || undefined,
dbId: activeTab.database_id, dbId: activeTab.database_id,
catalog: activeTab.catalog,
schema: activeTab.schema, schema: activeTab.schema,
queryLimit: activeTab.query_limit, queryLimit: activeTab.query_limit,
hideLeftBar: activeTab.hide_left_bar, hideLeftBar: activeTab.hide_left_bar,
@ -121,6 +122,7 @@ export default function getInitialState({
const table = { const table = {
dbId: tableSchema.database_id, dbId: tableSchema.database_id,
queryEditorId: tableSchema.tab_state_id.toString(), queryEditorId: tableSchema.tab_state_id.toString(),
catalog: tableSchema.catalog,
schema: tableSchema.schema, schema: tableSchema.schema,
name: tableSchema.table, name: tableSchema.table,
expanded: tableSchema.expanded, expanded: tableSchema.expanded,

View File

@ -109,6 +109,7 @@ export default function sqlLabReducer(state = {}, action) {
remoteId: progenitor.remoteId, remoteId: progenitor.remoteId,
name: t('Copy of %s', progenitor.name), name: t('Copy of %s', progenitor.name),
dbId: action.query.dbId ? action.query.dbId : null, dbId: action.query.dbId ? action.query.dbId : null,
catalog: action.query.catalog ? action.query.catalog : null,
schema: action.query.schema ? action.query.schema : null, schema: action.query.schema ? action.query.schema : null,
autorun: true, autorun: true,
sql: action.query.sql, sql: action.query.sql,
@ -180,6 +181,7 @@ export default function sqlLabReducer(state = {}, action) {
if ( if (
xt.dbId === at.dbId && xt.dbId === at.dbId &&
xt.queryEditorId === at.queryEditorId && xt.queryEditorId === at.queryEditorId &&
xt.catalog === at.catalog &&
xt.schema === at.schema && xt.schema === at.schema &&
xt.name === at.name xt.name === at.name
) { ) {
@ -503,6 +505,18 @@ export default function sqlLabReducer(state = {}, action) {
), ),
}; };
}, },
[actions.QUERY_EDITOR_SET_CATALOG]() {
return {
...state,
...alterUnsavedQueryEditorState(
state,
{
catalog: action.catalog,
},
action.queryEditor.id,
),
};
},
[actions.QUERY_EDITOR_SET_SCHEMA]() { [actions.QUERY_EDITOR_SET_SCHEMA]() {
return { return {
...state, ...state,

View File

@ -50,6 +50,7 @@ export interface QueryEditor {
dbId?: number; dbId?: number;
name: string; name: string;
title?: string; // keep it optional for backward compatibility title?: string; // keep it optional for backward compatibility
catalog?: string | null;
schema?: string; schema?: string;
autorun: boolean; autorun: boolean;
sql: string; sql: string;
@ -81,6 +82,7 @@ export type UnsavedQueryEditor = Partial<QueryEditor>;
export interface Table { export interface Table {
id: string; id: string;
dbId: number; dbId: number;
catalog: string | null;
schema: string; schema: string;
name: string; name: string;
queryEditorId: QueryEditor['id']; queryEditorId: QueryEditor['id'];

View File

@ -109,22 +109,24 @@ export function rehydratePersistedState(
state: SqlLabRootState, state: SqlLabRootState,
) { ) {
// Rehydrate server side persisted table metadata // Rehydrate server side persisted table metadata
state.sqlLab.tables.forEach(({ name: table, schema, dbId, persistData }) => { state.sqlLab.tables.forEach(
if (dbId && schema && table && persistData?.columns) { ({ name: table, catalog, schema, dbId, persistData }) => {
dispatch( if (dbId && schema && table && persistData?.columns) {
tableApiUtil.upsertQueryData( dispatch(
'tableMetadata', tableApiUtil.upsertQueryData(
{ dbId, schema, table }, 'tableMetadata',
persistData, { dbId, catalog, schema, table },
), persistData,
); ),
dispatch( );
tableApiUtil.upsertQueryData( dispatch(
'tableExtendedMetadata', tableApiUtil.upsertQueryData(
{ dbId, schema, table }, 'tableExtendedMetadata',
{}, { dbId, catalog, schema, table },
), {},
); ),
} );
}); }
},
);
} }

View File

@ -40,6 +40,7 @@ const createProps = (): DatabaseSelectorProps => ({
formMode: false, formMode: false,
isDatabaseSelectEnabled: true, isDatabaseSelectEnabled: true,
readOnly: false, readOnly: false,
catalog: null,
schema: 'public', schema: 'public',
sqlLabMode: true, sqlLabMode: true,
getDbList: jest.fn(), getDbList: jest.fn(),
@ -158,16 +159,23 @@ const fakeSchemaApiResult = {
result: ['information_schema', 'public'], result: ['information_schema', 'public'],
}; };
const fakeCatalogApiResult = {
count: 0,
result: [],
};
const fakeFunctionNamesApiResult = { const fakeFunctionNamesApiResult = {
function_names: [], function_names: [],
}; };
const databaseApiRoute = 'glob:*/api/v1/database/?*'; const databaseApiRoute = 'glob:*/api/v1/database/?*';
const catalogApiRoute = 'glob:*/api/v1/database/*/catalogs/?*';
const schemaApiRoute = 'glob:*/api/v1/database/*/schemas/?*'; const schemaApiRoute = 'glob:*/api/v1/database/*/schemas/?*';
const tablesApiRoute = 'glob:*/api/v1/database/*/tables/*'; const tablesApiRoute = 'glob:*/api/v1/database/*/tables/*';
function setupFetchMock() { function setupFetchMock() {
fetchMock.get(databaseApiRoute, fakeDatabaseApiResult); fetchMock.get(databaseApiRoute, fakeDatabaseApiResult);
fetchMock.get(catalogApiRoute, fakeCatalogApiResult);
fetchMock.get(schemaApiRoute, fakeSchemaApiResult); fetchMock.get(schemaApiRoute, fakeSchemaApiResult);
fetchMock.get(tablesApiRoute, fakeFunctionNamesApiResult); fetchMock.get(tablesApiRoute, fakeFunctionNamesApiResult);
} }

View File

@ -24,7 +24,12 @@ import Label from 'src/components/Label';
import { FormLabel } from 'src/components/Form'; import { FormLabel } from 'src/components/Form';
import RefreshLabel from 'src/components/RefreshLabel'; import RefreshLabel from 'src/components/RefreshLabel';
import { useToasts } from 'src/components/MessageToasts/withToasts'; import { useToasts } from 'src/components/MessageToasts/withToasts';
import { useSchemas, SchemaOption } from 'src/hooks/apiResources'; import {
useCatalogs,
CatalogOption,
useSchemas,
SchemaOption,
} from 'src/hooks/apiResources';
const DatabaseSelectorWrapper = styled.div` const DatabaseSelectorWrapper = styled.div`
${({ theme }) => ` ${({ theme }) => `
@ -81,6 +86,7 @@ export type DatabaseObject = {
id: number; id: number;
database_name: string; database_name: string;
backend?: string; backend?: string;
allow_multi_catalog?: boolean;
}; };
export interface DatabaseSelectorProps { export interface DatabaseSelectorProps {
@ -92,9 +98,11 @@ export interface DatabaseSelectorProps {
isDatabaseSelectEnabled?: boolean; isDatabaseSelectEnabled?: boolean;
onDbChange?: (db: DatabaseObject) => void; onDbChange?: (db: DatabaseObject) => void;
onEmptyResults?: (searchText?: string) => void; onEmptyResults?: (searchText?: string) => void;
onCatalogChange?: (catalog?: string) => void;
catalog?: string | null;
onSchemaChange?: (schema?: string) => void; onSchemaChange?: (schema?: string) => void;
readOnly?: boolean;
schema?: string; schema?: string;
readOnly?: boolean;
sqlLabMode?: boolean; sqlLabMode?: boolean;
} }
@ -113,6 +121,7 @@ const SelectLabel = ({
</LabelStyle> </LabelStyle>
); );
const EMPTY_CATALOG_OPTIONS: CatalogOption[] = [];
const EMPTY_SCHEMA_OPTIONS: SchemaOption[] = []; const EMPTY_SCHEMA_OPTIONS: SchemaOption[] = [];
export default function DatabaseSelector({ export default function DatabaseSelector({
@ -124,12 +133,20 @@ export default function DatabaseSelector({
isDatabaseSelectEnabled = true, isDatabaseSelectEnabled = true,
onDbChange, onDbChange,
onEmptyResults, onEmptyResults,
onCatalogChange,
catalog,
onSchemaChange, onSchemaChange,
readOnly = false,
schema, schema,
readOnly = false,
sqlLabMode = false, sqlLabMode = false,
}: DatabaseSelectorProps) { }: DatabaseSelectorProps) {
const showCatalogSelector = !!db?.allow_multi_catalog;
const [currentDb, setCurrentDb] = useState<DatabaseValue | undefined>(); const [currentDb, setCurrentDb] = useState<DatabaseValue | undefined>();
const [currentCatalog, setCurrentCatalog] = useState<
CatalogOption | undefined
>(catalog ? { label: catalog, value: catalog, title: catalog } : undefined);
const catalogRef = useRef(catalog);
catalogRef.current = catalog;
const [currentSchema, setCurrentSchema] = useState<SchemaOption | undefined>( const [currentSchema, setCurrentSchema] = useState<SchemaOption | undefined>(
schema ? { label: schema, value: schema, title: schema } : undefined, schema ? { label: schema, value: schema, title: schema } : undefined,
); );
@ -185,6 +202,7 @@ export default function DatabaseSelector({
id: row.id, id: row.id,
database_name: row.database_name, database_name: row.database_name,
backend: row.backend, backend: row.backend,
allow_multi_catalog: row.allow_multi_catalog,
})); }));
return { return {
@ -193,7 +211,7 @@ export default function DatabaseSelector({
}; };
}); });
}, },
[formMode, getDbList, sqlLabMode], [formMode, getDbList, sqlLabMode, onEmptyResults],
); );
useEffect(() => { useEffect(() => {
@ -223,11 +241,12 @@ export default function DatabaseSelector({
} }
const { const {
data, data: schemaData,
isFetching: loadingSchemas, isFetching: loadingSchemas,
refetch, refetch: refetchSchemas,
} = useSchemas({ } = useSchemas({
dbId: currentDb?.value, dbId: currentDb?.value,
catalog: currentCatalog?.value,
onSuccess: (schemas, isFetched) => { onSuccess: (schemas, isFetched) => {
if (schemas.length === 1) { if (schemas.length === 1) {
changeSchema(schemas[0]); changeSchema(schemas[0]);
@ -244,17 +263,55 @@ export default function DatabaseSelector({
onError: () => handleError(t('There was an error loading the schemas')), onError: () => handleError(t('There was an error loading the schemas')),
}); });
const schemaOptions = data || EMPTY_SCHEMA_OPTIONS; const schemaOptions = schemaData || EMPTY_SCHEMA_OPTIONS;
function changeDataBase( function changeCatalog(catalog: CatalogOption | undefined) {
setCurrentCatalog(catalog);
setCurrentSchema(undefined);
if (onCatalogChange && catalog?.value !== catalogRef.current) {
onCatalogChange(catalog?.value);
}
}
const {
data: catalogData,
isFetching: loadingCatalogs,
refetch: refetchCatalogs,
} = useCatalogs({
dbId: currentDb?.value,
onSuccess: (catalogs, isFetched) => {
if (catalogs.length === 1) {
changeCatalog(catalogs[0]);
} else if (
!catalogs.find(
catalogOption => catalogRef.current === catalogOption.value,
)
) {
changeCatalog(undefined);
}
if (isFetched) {
addSuccessToast('List refreshed');
}
},
onError: () => handleError(t('There was an error loading the catalogs')),
});
const catalogOptions = catalogData || EMPTY_CATALOG_OPTIONS;
function changeDatabase(
value: { label: string; value: number }, value: { label: string; value: number },
database: DatabaseValue, database: DatabaseValue,
) { ) {
setCurrentDb(database); setCurrentDb(database);
setCurrentCatalog(undefined);
setCurrentSchema(undefined); setCurrentSchema(undefined);
if (onDbChange) { if (onDbChange) {
onDbChange(database); onDbChange(database);
} }
if (onCatalogChange) {
onCatalogChange(undefined);
}
if (onSchemaChange) { if (onSchemaChange) {
onSchemaChange(undefined); onSchemaChange(undefined);
} }
@ -278,7 +335,7 @@ export default function DatabaseSelector({
header={<FormLabel>{t('Database')}</FormLabel>} header={<FormLabel>{t('Database')}</FormLabel>}
lazyLoading={false} lazyLoading={false}
notFoundContent={emptyState} notFoundContent={emptyState}
onChange={changeDataBase} onChange={changeDatabase}
value={currentDb} value={currentDb}
placeholder={t('Select database or type to search databases')} placeholder={t('Select database or type to search databases')}
disabled={!isDatabaseSelectEnabled || readOnly} disabled={!isDatabaseSelectEnabled || readOnly}
@ -288,10 +345,36 @@ export default function DatabaseSelector({
); );
} }
function renderCatalogSelect() {
const refreshIcon = !readOnly && (
<RefreshLabel
onClick={refetchCatalogs}
tooltipContent={t('Force refresh catalog list')}
/>
);
return renderSelectRow(
<Select
ariaLabel={t('Select catalog or type to search catalogs')}
disabled={!currentDb || readOnly}
header={<FormLabel>{t('Catalog')}</FormLabel>}
labelInValue
loading={loadingCatalogs}
name="select-catalog"
notFoundContent={t('No compatible catalog found')}
placeholder={t('Select catalog or type to search catalogs')}
onChange={item => changeCatalog(item as CatalogOption)}
options={catalogOptions}
showSearch
value={currentCatalog}
/>,
refreshIcon,
);
}
function renderSchemaSelect() { function renderSchemaSelect() {
const refreshIcon = !readOnly && ( const refreshIcon = !readOnly && (
<RefreshLabel <RefreshLabel
onClick={() => refetch()} onClick={refetchSchemas}
tooltipContent={t('Force refresh schema list')} tooltipContent={t('Force refresh schema list')}
/> />
); );
@ -317,6 +400,7 @@ export default function DatabaseSelector({
return ( return (
<DatabaseSelectorWrapper data-test="DatabaseSelector"> <DatabaseSelectorWrapper data-test="DatabaseSelector">
{renderDatabaseSelect()} {renderDatabaseSelect()}
{showCatalogSelector && renderCatalogSelect()}
{renderSchemaSelect()} {renderSchemaSelect()}
</DatabaseSelectorWrapper> </DatabaseSelectorWrapper>
); );

View File

@ -758,6 +758,7 @@ class DatasourceEditor extends React.PureComponent {
datasource_type: datasource.type || datasource.datasource_type, datasource_type: datasource.type || datasource.datasource_type,
database_name: database_name:
datasource.database.database_name || datasource.database.name, datasource.database.database_name || datasource.database.name,
catalog_name: datasource.catalog,
schema_name: datasource.schema, schema_name: datasource.schema,
table_name: datasource.table_name, table_name: datasource.table_name,
normalize_columns: datasource.normalize_columns, normalize_columns: datasource.normalize_columns,
@ -1090,7 +1091,12 @@ class DatasourceEditor extends React.PureComponent {
<div css={{ marginTop: 8 }}> <div css={{ marginTop: 8 }}>
<DatabaseSelector <DatabaseSelector
db={datasource?.database} db={datasource?.database}
catalog={datasource.catalog}
schema={datasource.schema} schema={datasource.schema}
onCatalogChange={catalog =>
this.state.isEditMode &&
this.onDatasourcePropChange('catalog', catalog)
}
onSchemaChange={schema => onSchemaChange={schema =>
this.state.isEditMode && this.state.isEditMode &&
this.onDatasourcePropChange('schema', schema) this.onDatasourcePropChange('schema', schema)
@ -1164,9 +1170,16 @@ class DatasourceEditor extends React.PureComponent {
}} }}
dbId={datasource.database?.id} dbId={datasource.database?.id}
handleError={this.props.addDangerToast} handleError={this.props.addDangerToast}
catalog={datasource.catalog}
schema={datasource.schema} schema={datasource.schema}
sqlLabMode={false} sqlLabMode={false}
tableValue={datasource.table_name} tableValue={datasource.table_name}
onCatalogChange={
this.state.isEditMode
? catalog =>
this.onDatasourcePropChange('catalog', catalog)
: undefined
}
onSchemaChange={ onSchemaChange={
this.state.isEditMode this.state.isEditMode
? schema => ? schema =>

View File

@ -54,6 +54,7 @@ const getTableMockFunction = () =>
}) as any; }) as any;
const databaseApiRoute = 'glob:*/api/v1/database/?*'; const databaseApiRoute = 'glob:*/api/v1/database/?*';
const catalogApiRoute = 'glob:*/api/v1/database/*/catalogs/?*';
const schemaApiRoute = 'glob:*/api/v1/database/*/schemas/?*'; const schemaApiRoute = 'glob:*/api/v1/database/*/schemas/?*';
const tablesApiRoute = 'glob:*/api/v1/database/*/tables/*'; const tablesApiRoute = 'glob:*/api/v1/database/*/tables/*';
@ -74,6 +75,7 @@ afterEach(() => {
}); });
test('renders with default props', async () => { test('renders with default props', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: [] }); fetchMock.get(schemaApiRoute, { result: [] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());
@ -96,6 +98,7 @@ test('renders with default props', async () => {
}); });
test('skips select all options', async () => { test('skips select all options', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: ['test_schema'] }); fetchMock.get(schemaApiRoute, { result: ['test_schema'] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());
@ -115,6 +118,7 @@ test('skips select all options', async () => {
}); });
test('renders table options without Select All option', async () => { test('renders table options without Select All option', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: ['test_schema'] }); fetchMock.get(schemaApiRoute, { result: ['test_schema'] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());
@ -133,6 +137,7 @@ test('renders table options without Select All option', async () => {
}); });
test('renders disabled without schema', async () => { test('renders disabled without schema', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: [] }); fetchMock.get(schemaApiRoute, { result: [] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());
@ -150,6 +155,7 @@ test('renders disabled without schema', async () => {
}); });
test('table select retain value if not in SQL Lab mode', async () => { test('table select retain value if not in SQL Lab mode', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: ['test_schema'] }); fetchMock.get(schemaApiRoute, { result: ['test_schema'] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());
@ -191,6 +197,7 @@ test('table select retain value if not in SQL Lab mode', async () => {
}); });
test('table multi select retain all the values selected', async () => { test('table multi select retain all the values selected', async () => {
fetchMock.get(catalogApiRoute, { result: [] });
fetchMock.get(schemaApiRoute, { result: ['test_schema'] }); fetchMock.get(schemaApiRoute, { result: ['test_schema'] });
fetchMock.get(tablesApiRoute, getTableMockFunction()); fetchMock.get(tablesApiRoute, getTableMockFunction());

View File

@ -97,13 +97,19 @@ interface TableSelectorProps {
handleError: (msg: string) => void; handleError: (msg: string) => void;
isDatabaseSelectEnabled?: boolean; isDatabaseSelectEnabled?: boolean;
onDbChange?: (db: DatabaseObject) => void; onDbChange?: (db: DatabaseObject) => void;
onCatalogChange?: (catalog?: string | null) => void;
onSchemaChange?: (schema?: string) => void; onSchemaChange?: (schema?: string) => void;
readOnly?: boolean; readOnly?: boolean;
catalog?: string | null;
schema?: string; schema?: string;
onEmptyResults?: (searchText?: string) => void; onEmptyResults?: (searchText?: string) => void;
sqlLabMode?: boolean; sqlLabMode?: boolean;
tableValue?: string | string[]; tableValue?: string | string[];
onTableSelectChange?: (value?: string | string[], schema?: string) => void; onTableSelectChange?: (
value?: string | string[],
catalog?: string | null,
schema?: string,
) => void;
tableSelectMode?: 'single' | 'multiple'; tableSelectMode?: 'single' | 'multiple';
customTableOptionLabelRenderer?: (table: Table) => JSX.Element; customTableOptionLabelRenderer?: (table: Table) => JSX.Element;
} }
@ -159,9 +165,11 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
handleError, handleError,
isDatabaseSelectEnabled = true, isDatabaseSelectEnabled = true,
onDbChange, onDbChange,
onCatalogChange,
onSchemaChange, onSchemaChange,
readOnly = false, readOnly = false,
onEmptyResults, onEmptyResults,
catalog,
schema, schema,
sqlLabMode = true, sqlLabMode = true,
tableSelectMode = 'single', tableSelectMode = 'single',
@ -170,6 +178,9 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
customTableOptionLabelRenderer, customTableOptionLabelRenderer,
}) => { }) => {
const { addSuccessToast } = useToasts(); const { addSuccessToast } = useToasts();
const [currentCatalog, setCurrentCatalog] = useState<
string | null | undefined
>(catalog);
const [currentSchema, setCurrentSchema] = useState<string | undefined>( const [currentSchema, setCurrentSchema] = useState<string | undefined>(
schema, schema,
); );
@ -182,6 +193,7 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
refetch, refetch,
} = useTables({ } = useTables({
dbId: database?.id, dbId: database?.id,
catalog: currentCatalog,
schema: currentSchema, schema: currentSchema,
onSuccess: (data, isFetched) => { onSuccess: (data, isFetched) => {
if (isFetched) { if (isFetched) {
@ -218,6 +230,7 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
useEffect(() => { useEffect(() => {
// reset selections // reset selections
if (database === undefined) { if (database === undefined) {
setCurrentCatalog(undefined);
setCurrentSchema(undefined); setCurrentSchema(undefined);
setTableSelectValue(undefined); setTableSelectValue(undefined);
} }
@ -245,6 +258,7 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
Array.isArray(selectedOptions) Array.isArray(selectedOptions)
? selectedOptions.map(option => option?.value) ? selectedOptions.map(option => option?.value)
: selectedOptions?.value, : selectedOptions?.value,
currentCatalog,
currentSchema, currentSchema,
); );
} else { } else {
@ -256,6 +270,22 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
if (onDbChange) { if (onDbChange) {
onDbChange(db); onDbChange(db);
} }
setCurrentCatalog(undefined);
setCurrentSchema(undefined);
const value = tableSelectMode === 'single' ? undefined : [];
setTableSelectValue(value);
};
const internalCatalogChange = (catalog?: string | null) => {
setCurrentCatalog(catalog);
if (onCatalogChange) {
onCatalogChange(catalog);
}
setCurrentSchema(undefined);
const value = tableSelectMode === 'single' ? undefined : [];
setTableSelectValue(value);
}; };
const internalSchemaChange = (schema?: string) => { const internalSchemaChange = (schema?: string) => {
@ -265,7 +295,7 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
} }
const value = tableSelectMode === 'single' ? undefined : []; const value = tableSelectMode === 'single' ? undefined : [];
internalTableChange(value); setTableSelectValue(value);
}; };
const handleFilterOption = useMemo( const handleFilterOption = useMemo(
@ -328,6 +358,8 @@ const TableSelector: FunctionComponent<TableSelectorProps> = ({
handleError={handleError} handleError={handleError}
onDbChange={readOnly ? undefined : internalDbChange} onDbChange={readOnly ? undefined : internalDbChange}
onEmptyResults={onEmptyResults} onEmptyResults={onEmptyResults}
onCatalogChange={readOnly ? undefined : internalCatalogChange}
catalog={currentCatalog}
onSchemaChange={readOnly ? undefined : internalSchemaChange} onSchemaChange={readOnly ? undefined : internalSchemaChange}
schema={currentSchema} schema={currentSchema}
sqlLabMode={sqlLabMode} sqlLabMode={sqlLabMode}

View File

@ -48,6 +48,7 @@ import {
} from 'src/explore/components/DatasourcePanel/types'; } from 'src/explore/components/DatasourcePanel/types';
import { DndItemType } from 'src/explore/components/DndItemType'; import { DndItemType } from 'src/explore/components/DndItemType';
import { ControlComponentProps } from 'src/explore/components/Control'; import { ControlComponentProps } from 'src/explore/components/Control';
import { toQueryString } from 'src/utils/urlUtils';
import DndAdhocFilterOption from './DndAdhocFilterOption'; import DndAdhocFilterOption from './DndAdhocFilterOption';
import { useDefaultTimeFilter } from '../DateFilterControl/utils'; import { useDefaultTimeFilter } from '../DateFilterControl/utils';
import { Clauses, ExpressionTypes } from '../FilterControl/types'; import { Clauses, ExpressionTypes } from '../FilterControl/types';
@ -175,13 +176,20 @@ const DndFilterSelect = (props: DndFilterSelectProps) => {
const dbId = datasource.database?.id; const dbId = datasource.database?.id;
const { const {
datasource_name: name, datasource_name: name,
catalog,
schema, schema,
is_sqllab_view: isSqllabView, is_sqllab_view: isSqllabView,
} = datasource; } = datasource;
if (!isSqllabView && dbId && name && schema) { if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({ SupersetClient.get({
endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`, endpoint: `/api/v1/database/${dbId}/table_metadata/extra/${toQueryString(
{
name,
catalog,
schema,
},
)}`,
}) })
.then(({ json }: { json: Record<string, any> }) => { .then(({ json }: { json: Record<string, any> }) => {
if (json?.partitions) { if (json?.partitions) {

View File

@ -48,6 +48,7 @@ import AdhocFilterOption from 'src/explore/components/controls/FilterControl/Adh
import AdhocFilter from 'src/explore/components/controls/FilterControl/AdhocFilter'; import AdhocFilter from 'src/explore/components/controls/FilterControl/AdhocFilter';
import adhocFilterType from 'src/explore/components/controls/FilterControl/adhocFilterType'; import adhocFilterType from 'src/explore/components/controls/FilterControl/adhocFilterType';
import columnType from 'src/explore/components/controls/FilterControl/columnType'; import columnType from 'src/explore/components/controls/FilterControl/columnType';
import { toQueryString } from 'src/utils/urlUtils';
import { Clauses, ExpressionTypes } from '../types'; import { Clauses, ExpressionTypes } from '../types';
const { warning } = Modal; const { warning } = Modal;
@ -137,13 +138,20 @@ class AdhocFilterControl extends React.Component {
const dbId = datasource.database?.id; const dbId = datasource.database?.id;
const { const {
datasource_name: name, datasource_name: name,
catalog,
schema, schema,
is_sqllab_view: isSqllabView, is_sqllab_view: isSqllabView,
} = datasource; } = datasource;
if (!isSqllabView && dbId && name && schema) { if (!isSqllabView && dbId && name && schema) {
SupersetClient.get({ SupersetClient.get({
endpoint: `/api/v1/database/${dbId}/table_metadata/extra/?name=${name}&schema=${schema}`, endpoint: `/api/v1/database/${dbId}/table_metadata/extra/${toQueryString(
{
name,
catalog,
schema,
},
)}`,
}) })
.then(({ json }) => { .then(({ json }) => {
if (json && json.partitions) { if (json && json.partitions) {

View File

@ -66,6 +66,7 @@ export type OptionSortType = Partial<
export type Datasource = Dataset & { export type Datasource = Dataset & {
database?: DatabaseObject; database?: DatabaseObject;
datasource?: string; datasource?: string;
catalog?: string | null;
schema?: string; schema?: string;
is_sqllab_view?: boolean; is_sqllab_view?: boolean;
extra?: string; extra?: string;

View File

@ -56,6 +56,8 @@ const ExtraOptions = ({
const createAsOpen = !!(db?.allow_ctas || db?.allow_cvas); const createAsOpen = !!(db?.allow_ctas || db?.allow_cvas);
const isFileUploadSupportedByEngine = const isFileUploadSupportedByEngine =
db?.engine_information?.supports_file_upload; db?.engine_information?.supports_file_upload;
const supportsDynamicCatalog =
db?.engine_information?.supports_dynamic_catalog;
// JSON.parse will deep parse engine_params // JSON.parse will deep parse engine_params
// if it's an object, and we want to keep it a string // if it's an object, and we want to keep it a string
@ -191,7 +193,8 @@ const ExtraOptions = ({
<IndeterminateCheckbox <IndeterminateCheckbox
id="allows_virtual_table_explore" id="allows_virtual_table_explore"
indeterminate={false} indeterminate={false}
checked={!!extraJson?.allows_virtual_table_explore} // when `allows_virtual_table_explore` is not present in `extra` it defaults to true
checked={extraJson?.allows_virtual_table_explore !== false}
onChange={onExtraInputChange} onChange={onExtraInputChange}
labelText={t('Allow this database to be explored')} labelText={t('Allow this database to be explored')}
/> />
@ -587,6 +590,24 @@ const ExtraOptions = ({
/> />
</div> </div>
</StyledInputContainer> </StyledInputContainer>
{supportsDynamicCatalog && (
<StyledInputContainer css={no_margin_bottom}>
<div className="input-container">
<IndeterminateCheckbox
id="allow_multi_catalog"
indeterminate={false}
checked={!!extraJson?.allow_multi_catalog}
onChange={onExtraInputChange}
labelText={t('Allow changing catalogs')}
/>
<InfoTooltip
tooltip={t(
'Give access to multiple catalogs in a single database connection.',
)}
/>
</div>
</StyledInputContainer>
)}
</Collapse.Panel> </Collapse.Panel>
</Collapse> </Collapse>
); );

View File

@ -96,6 +96,7 @@ test('Does not render if SSH Tunnel is disabled', () => {
engine_information: { engine_information: {
disable_ssh_tunneling: true, disable_ssh_tunneling: true,
supports_file_upload: false, supports_file_upload: false,
supports_dynamic_catalog: false,
}, },
}} }}
/>, />,

View File

@ -47,6 +47,10 @@ fetchMock.get(
}, },
); );
fetchMock.get('glob:*api/v1/database/*/catalogs/', {
result: [],
});
fetchMock.get('glob:*api/v1/database/1/schemas/', { fetchMock.get('glob:*api/v1/database/1/schemas/', {
result: ['information_schema', 'public'], result: ['information_schema', 'public'],
}); });

View File

@ -109,6 +109,7 @@ export type DatabaseObject = {
engine_information?: { engine_information?: {
supports_file_upload?: boolean; supports_file_upload?: boolean;
disable_ssh_tunneling?: boolean; disable_ssh_tunneling?: boolean;
supports_dynamic_catalog?: boolean;
}; };
// SSH Tunnel information // SSH Tunnel information
@ -202,6 +203,7 @@ export type DatabaseForm = {
engine_information: { engine_information: {
supports_file_upload: boolean; supports_file_upload: boolean;
disable_ssh_tunneling: boolean; disable_ssh_tunneling: boolean;
supports_dynamic_catalog: boolean;
}; };
}; };
@ -223,6 +225,7 @@ export interface ExtraJson {
cost_estimate_enabled?: boolean; // in SQL Lab cost_estimate_enabled?: boolean; // in SQL Lab
disable_data_preview?: boolean; // in SQL Lab disable_data_preview?: boolean; // in SQL Lab
disable_drill_to_detail?: boolean; disable_drill_to_detail?: boolean;
allow_multi_catalog?: boolean;
engine_params?: { engine_params?: {
catalog?: Record<string, string>; catalog?: Record<string, string>;
connect_args?: { connect_args?: {

View File

@ -20,6 +20,7 @@ import React, { useEffect, useState, useRef } from 'react';
import { SupersetClient, logging, t } from '@superset-ui/core'; import { SupersetClient, logging, t } from '@superset-ui/core';
import { DatasetObject } from 'src/features/datasets/AddDataset/types'; import { DatasetObject } from 'src/features/datasets/AddDataset/types';
import { addDangerToast } from 'src/components/MessageToasts/actions'; import { addDangerToast } from 'src/components/MessageToasts/actions';
import { toQueryString } from 'src/utils/urlUtils';
import DatasetPanel from './DatasetPanel'; import DatasetPanel from './DatasetPanel';
import { ITableColumn, IDatabaseTable, isIDatabaseTable } from './types'; import { ITableColumn, IDatabaseTable, isIDatabaseTable } from './types';
@ -51,8 +52,9 @@ export interface IDatasetPanelWrapperProps {
*/ */
dbId?: number; dbId?: number;
/** /**
* The selected schema for the database * The selected catalog/schema for the database
*/ */
catalog?: string | null;
schema?: string | null; schema?: string | null;
setHasColumns?: Function; setHasColumns?: Function;
datasets?: DatasetObject[] | undefined; datasets?: DatasetObject[] | undefined;
@ -61,6 +63,7 @@ export interface IDatasetPanelWrapperProps {
const DatasetPanelWrapper = ({ const DatasetPanelWrapper = ({
tableName, tableName,
dbId, dbId,
catalog,
schema, schema,
setHasColumns, setHasColumns,
datasets, datasets,
@ -74,9 +77,11 @@ const DatasetPanelWrapper = ({
const { dbId, tableName, schema } = props; const { dbId, tableName, schema } = props;
setLoading(true); setLoading(true);
setHasColumns?.(false); setHasColumns?.(false);
const path = schema const path = `/api/v1/database/${dbId}/table_metadata/${toQueryString({
? `/api/v1/database/${dbId}/table_metadata/?name=${tableName}&schema=${schema}` name: tableName,
: `/api/v1/database/${dbId}/table_metadata/?name=${tableName}`; catalog,
schema,
})}`;
try { try {
const response = await SupersetClient.get({ const response = await SupersetClient.get({
endpoint: path, endpoint: path,

View File

@ -90,6 +90,7 @@ function Footer({
if (datasetObject) { if (datasetObject) {
const data = { const data = {
database: datasetObject.db?.id, database: datasetObject.db?.id,
catalog: datasetObject.catalog,
schema: datasetObject.schema, schema: datasetObject.schema,
table_name: datasetObject.table_name, table_name: datasetObject.table_name,
}; };

View File

@ -129,6 +129,14 @@ export default function LeftPanel({
}, },
[setDataset], [setDataset],
); );
const setCatalog = (catalog: string | null) => {
if (catalog) {
setDataset({
type: DatasetActionType.SelectCatalog,
payload: { name: 'catalog', value: catalog },
});
}
};
const setSchema = (schema: string) => { const setSchema = (schema: string) => {
if (schema) { if (schema) {
setDataset({ setDataset({
@ -178,10 +186,12 @@ export default function LeftPanel({
handleError={addDangerToast} handleError={addDangerToast}
emptyState={emptyStateComponent(false)} emptyState={emptyStateComponent(false)}
onDbChange={setDatabase} onDbChange={setDatabase}
onCatalogChange={setCatalog}
onSchemaChange={setSchema} onSchemaChange={setSchema}
onTableSelectChange={setTable} onTableSelectChange={setTable}
sqlLabMode={false} sqlLabMode={false}
customTableOptionLabelRenderer={customTableOptionLabelRenderer} customTableOptionLabelRenderer={customTableOptionLabelRenderer}
{...(dataset?.catalog && { catalog: dataset.catalog })}
{...(dataset?.schema && { schema: dataset.schema })} {...(dataset?.schema && { schema: dataset.schema })}
/> />
</LeftPanelStyle> </LeftPanelStyle>

View File

@ -20,6 +20,7 @@ import { DatabaseObject } from 'src/components/DatabaseSelector';
export enum DatasetActionType { export enum DatasetActionType {
SelectDatabase, SelectDatabase,
SelectCatalog,
SelectSchema, SelectSchema,
SelectTable, SelectTable,
ChangeDataset, ChangeDataset,
@ -27,6 +28,7 @@ export enum DatasetActionType {
export interface DatasetObject { export interface DatasetObject {
db: DatabaseObject & { owners: [number] }; db: DatabaseObject & { owners: [number] };
catalog?: string | null;
schema?: string | null; schema?: string | null;
dataset_name: string; dataset_name: string;
table_name?: string | null; table_name?: string | null;
@ -50,6 +52,7 @@ export type DSReducerActionType =
| { | {
type: type:
| DatasetActionType.ChangeDataset | DatasetActionType.ChangeDataset
| DatasetActionType.SelectCatalog
| DatasetActionType.SelectSchema | DatasetActionType.SelectSchema
| DatasetActionType.SelectTable; | DatasetActionType.SelectTable;
payload: DatasetReducerPayloadType; payload: DatasetReducerPayloadType;

View File

@ -0,0 +1,127 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { useCallback, useEffect, useRef } from 'react';
import useEffectEvent from 'src/hooks/useEffectEvent';
import { api, JsonResponse } from './queryApi';
/** Option shape consumed by catalog <Select> components. */
export type CatalogOption = {
  value: string;
  label: string;
  title: string;
};

/** Arguments accepted by the catalogs RTK Query endpoint and hooks. */
export type FetchCatalogsQueryParams = {
  // Database to list catalogs for; the query is skipped when absent.
  dbId?: string | number;
  // When true, asks the backend to bypass its metadata cache.
  forceRefresh: boolean;
  // Invoked with the fetched options; isRefetched is true for manual refetches.
  onSuccess?: (data: CatalogOption[], isRefetched: boolean) => void;
  // Invoked when the fetch fails.
  onError?: () => void;
};

// Public hook options: forceRefresh is managed internally by useCatalogs.
type Params = Omit<FetchCatalogsQueryParams, 'forceRefresh'>;
// RTK Query endpoint for listing the catalogs available in a database.
const catalogApi = api.injectEndpoints({
  endpoints: builder => ({
    catalogs: builder.query<CatalogOption[], FetchCatalogsQueryParams>({
      // Tagged so the cached list can be invalidated wholesale elsewhere.
      providesTags: [{ type: 'Catalogs', id: 'LIST' }],
      query: ({ dbId, forceRefresh }) => ({
        endpoint: `/api/v1/database/${dbId}/catalogs/`,
        urlParams: {
          // `force` asks the backend to skip its metadata cache.
          force: forceRefresh,
        },
        // Sort for a stable display order, then map raw catalog names
        // into the option shape expected by <Select>.
        transformResponse: ({ json }: JsonResponse) =>
          json.result.sort().map((value: string) => ({
            value,
            label: value,
            title: value,
          })),
      }),
      // Cache per database only, so differing `forceRefresh`/callback args
      // don't create separate cache entries for the same dbId.
      serializeQueryArgs: ({ queryArgs: { dbId } }) => ({
        dbId,
      }),
    }),
  }),
});
// Re-export the generated hooks and utilities for consumers of this module.
export const {
  useLazyCatalogsQuery,
  useCatalogsQuery,
  endpoints: catalogEndpoints,
  util: catalogApiUtil,
} = catalogApi;

// Stable empty array so callbacks receive a referentially-equal default
// instead of a fresh `[]` on every call.
export const EMPTY_CATALOGS = [] as CatalogOption[];
/**
 * Fetches the catalog options for `options.dbId` and exposes the RTK Query
 * result plus a `refetch` callback that forces a backend cache refresh.
 *
 * `onSuccess` receives the options with `isRefetched` false for automatic
 * (cached) fetches and true for manual refetches; `onError` fires on failure.
 */
export function useCatalogs(options: Params) {
  // Skips the first effect run so callbacks only fire for real result updates.
  const isMountedRef = useRef(false);
  const { dbId, onSuccess, onError } = options || {};
  const [trigger] = useLazyCatalogsQuery();
  const result = useCatalogsQuery(
    { dbId, forceRefresh: false },
    {
      // No database selected yet — nothing to fetch.
      skip: !dbId,
    },
  );

  // useEffectEvent keeps these handlers referentially stable so they can be
  // listed in dependency arrays without re-triggering the effects below.
  const handleOnSuccess = useEffectEvent(
    (data: CatalogOption[], isRefetched: boolean) => {
      onSuccess?.(data, isRefetched);
    },
  );
  const handleOnError = useEffectEvent(() => {
    onError?.();
  });

  // Manual refetch that bypasses the backend cache (forceRefresh: true) and
  // reports the outcome through the callbacks with isRefetched=true.
  const refetch = useCallback(() => {
    if (dbId) {
      trigger({ dbId, forceRefresh: true }).then(
        ({ isSuccess, isError, data }) => {
          if (isSuccess) {
            handleOnSuccess(data || EMPTY_CATALOGS, true);
          }
          if (isError) {
            handleOnError();
          }
        },
      );
    }
  }, [dbId, handleOnError, handleOnSuccess, trigger]);

  // Relay non-forced query results to the callbacks once fetching settles.
  // The first run after mount is skipped (isMountedRef) so the callbacks are
  // not invoked for the initial render.
  useEffect(() => {
    if (isMountedRef.current) {
      const { requestId, isSuccess, isError, isFetching, data, originalArgs } =
        result;
      if (!originalArgs?.forceRefresh && requestId && !isFetching) {
        if (isSuccess) {
          handleOnSuccess(data || EMPTY_CATALOGS, false);
        }
        if (isError) {
          handleOnError();
        }
      }
    } else {
      isMountedRef.current = true;
    }
  }, [result, handleOnSuccess, handleOnError]);

  return {
    ...result,
    refetch,
  };
}

View File

@ -26,6 +26,7 @@ export {
// A central catalog of API Resource hooks. // A central catalog of API Resource hooks.
// Add new API hooks here, organized under // Add new API hooks here, organized under
// different files for different resource types. // different files for different resource types.
export * from './catalogs';
export * from './charts'; export * from './charts';
export * from './dashboards'; export * from './dashboards';
export * from './tables'; export * from './tables';

View File

@ -72,6 +72,7 @@ export const supersetClientQuery: BaseQueryFn<
export const api = createApi({ export const api = createApi({
reducerPath: 'queryApi', reducerPath: 'queryApi',
tagTypes: [ tagTypes: [
'Catalogs',
'Schemas', 'Schemas',
'Tables', 'Tables',
'DatabaseFunctions', 'DatabaseFunctions',

View File

@ -20,6 +20,7 @@ import { api, JsonResponse } from './queryApi';
export type FetchValidationQueryParams = { export type FetchValidationQueryParams = {
dbId?: string | number; dbId?: string | number;
catalog?: string | null;
schema?: string; schema?: string;
sql: string; sql: string;
templateParams?: string; templateParams?: string;
@ -39,7 +40,7 @@ const queryValidationApi = api.injectEndpoints({
FetchValidationQueryParams FetchValidationQueryParams
>({ >({
providesTags: ['QueryValidations'], providesTags: ['QueryValidations'],
query: ({ dbId, schema, sql, templateParams }) => { query: ({ dbId, catalog, schema, sql, templateParams }) => {
let template_params = templateParams; let template_params = templateParams;
try { try {
template_params = JSON.parse(templateParams || ''); template_params = JSON.parse(templateParams || '');
@ -47,6 +48,7 @@ const queryValidationApi = api.injectEndpoints({
template_params = undefined; template_params = undefined;
} }
const postPayload = { const postPayload = {
catalog,
schema, schema,
sql, sql,
...(template_params && { template_params }), ...(template_params && { template_params }),

View File

@ -80,7 +80,7 @@ describe('useSchemas hook', () => {
})}`, })}`,
).length, ).length,
).toBe(1); ).toBe(1);
expect(onSuccess).toHaveBeenCalledTimes(1); expect(onSuccess).toHaveBeenCalledTimes(2);
act(() => { act(() => {
result.current.refetch(); result.current.refetch();
}); });
@ -92,7 +92,7 @@ describe('useSchemas hook', () => {
})}`, })}`,
).length, ).length,
).toBe(1); ).toBe(1);
expect(onSuccess).toHaveBeenCalledTimes(2); expect(onSuccess).toHaveBeenCalledTimes(3);
expect(result.current.data).toEqual(expectedResult); expect(result.current.data).toEqual(expectedResult);
}); });
@ -143,17 +143,17 @@ describe('useSchemas hook', () => {
await waitFor(() => expect(result.current.data).toEqual(expectedResult)); await waitFor(() => expect(result.current.data).toEqual(expectedResult));
expect(fetchMock.calls(schemaApiRoute).length).toBe(1); expect(fetchMock.calls(schemaApiRoute).length).toBe(1);
expect(onSuccess).toHaveBeenCalledTimes(1); expect(onSuccess).toHaveBeenCalledTimes(2);
rerender({ dbId: 'db2' }); rerender({ dbId: 'db2' });
await waitFor(() => expect(result.current.data).toEqual(expectedResult2)); await waitFor(() => expect(result.current.data).toEqual(expectedResult2));
expect(fetchMock.calls(schemaApiRoute).length).toBe(2); expect(fetchMock.calls(schemaApiRoute).length).toBe(2);
expect(onSuccess).toHaveBeenCalledTimes(2); expect(onSuccess).toHaveBeenCalledTimes(4);
rerender({ dbId: expectDbId }); rerender({ dbId: expectDbId });
await waitFor(() => expect(result.current.data).toEqual(expectedResult)); await waitFor(() => expect(result.current.data).toEqual(expectedResult));
expect(fetchMock.calls(schemaApiRoute).length).toBe(2); expect(fetchMock.calls(schemaApiRoute).length).toBe(2);
expect(onSuccess).toHaveBeenCalledTimes(3); expect(onSuccess).toHaveBeenCalledTimes(5);
// clean up cache // clean up cache
act(() => { act(() => {

View File

@ -28,6 +28,7 @@ export type SchemaOption = {
export type FetchSchemasQueryParams = { export type FetchSchemasQueryParams = {
dbId?: string | number; dbId?: string | number;
catalog?: string;
forceRefresh: boolean; forceRefresh: boolean;
onSuccess?: (data: SchemaOption[], isRefetched: boolean) => void; onSuccess?: (data: SchemaOption[], isRefetched: boolean) => void;
onError?: () => void; onError?: () => void;
@ -39,14 +40,15 @@ const schemaApi = api.injectEndpoints({
endpoints: builder => ({ endpoints: builder => ({
schemas: builder.query<SchemaOption[], FetchSchemasQueryParams>({ schemas: builder.query<SchemaOption[], FetchSchemasQueryParams>({
providesTags: [{ type: 'Schemas', id: 'LIST' }], providesTags: [{ type: 'Schemas', id: 'LIST' }],
query: ({ dbId, forceRefresh }) => ({ query: ({ dbId, catalog, forceRefresh }) => ({
endpoint: `/api/v1/database/${dbId}/schemas/`, endpoint: `/api/v1/database/${dbId}/schemas/`,
// TODO: Would be nice to add pagination in a follow-up. Needs endpoint changes. // TODO: Would be nice to add pagination in a follow-up. Needs endpoint changes.
urlParams: { urlParams: {
force: forceRefresh, force: forceRefresh,
...(catalog !== undefined && { catalog }),
}, },
transformResponse: ({ json }: JsonResponse) => transformResponse: ({ json }: JsonResponse) =>
json.result.map((value: string) => ({ json.result.sort().map((value: string) => ({
value, value,
label: value, label: value,
title: value, title: value,
@ -70,10 +72,10 @@ export const EMPTY_SCHEMAS = [] as SchemaOption[];
export function useSchemas(options: Params) { export function useSchemas(options: Params) {
const isMountedRef = useRef(false); const isMountedRef = useRef(false);
const { dbId, onSuccess, onError } = options || {}; const { dbId, catalog, onSuccess, onError } = options || {};
const [trigger] = useLazySchemasQuery(); const [trigger] = useLazySchemasQuery();
const result = useSchemasQuery( const result = useSchemasQuery(
{ dbId, forceRefresh: false }, { dbId, catalog: catalog || undefined, forceRefresh: false },
{ {
skip: !dbId, skip: !dbId,
}, },
@ -89,9 +91,9 @@ export function useSchemas(options: Params) {
onError?.(); onError?.();
}); });
const refetch = useCallback(() => { useEffect(() => {
if (dbId) { if (dbId) {
trigger({ dbId, forceRefresh: true }).then( trigger({ dbId, catalog, forceRefresh: false }).then(
({ isSuccess, isError, data }) => { ({ isSuccess, isError, data }) => {
if (isSuccess) { if (isSuccess) {
handleOnSuccess(data || EMPTY_SCHEMAS, true); handleOnSuccess(data || EMPTY_SCHEMAS, true);
@ -102,7 +104,22 @@ export function useSchemas(options: Params) {
}, },
); );
} }
}, [dbId, handleOnError, handleOnSuccess, trigger]); }, [dbId, catalog, handleOnError, handleOnSuccess, trigger]);
const refetch = useCallback(() => {
if (dbId) {
trigger({ dbId, catalog, forceRefresh: true }).then(
({ isSuccess, isError, data }) => {
if (isSuccess) {
handleOnSuccess(data || EMPTY_SCHEMAS, true);
}
if (isError) {
handleOnError();
}
},
);
}
}, [dbId, catalog, handleOnError, handleOnSuccess, trigger]);
useEffect(() => { useEffect(() => {
if (isMountedRef.current) { if (isMountedRef.current) {
@ -119,7 +136,7 @@ export function useSchemas(options: Params) {
} else { } else {
isMountedRef.current = true; isMountedRef.current = true;
} }
}, [result, handleOnSuccess, handleOnError]); }, [catalog, result, handleOnSuccess, handleOnError]);
return { return {
...result, ...result,

View File

@ -33,6 +33,7 @@ const sqlEditorApi = api.injectEndpoints({
version = LatestQueryEditorVersion, version = LatestQueryEditorVersion,
id, id,
dbId, dbId,
catalog,
schema, schema,
queryLimit, queryLimit,
sql, sql,
@ -50,6 +51,7 @@ const sqlEditorApi = api.injectEndpoints({
postPayload: pickBy( postPayload: pickBy(
{ {
database_id: dbId, database_id: dbId,
catalog,
schema, schema,
sql, sql,
label: name, label: name,

View File

@ -27,6 +27,7 @@ export type InitialState = {
label: string; label: string;
active: boolean; active: boolean;
database_id: number; database_id: number;
catalog?: string | null;
schema?: string; schema?: string;
table_schemas: { table_schemas: {
id: number; id: number;
@ -38,6 +39,7 @@ export type InitialState = {
}[]; }[];
dataPreviewQueryId?: string; dataPreviewQueryId?: string;
} & Record<string, any>; } & Record<string, any>;
catalog?: string | null;
schema?: string; schema?: string;
tab_state_id: number; tab_state_id: number;
database_id?: number; database_id?: number;

View File

@ -81,9 +81,11 @@ describe('useTables hook', () => {
test('returns api response mapping json options', async () => { test('returns api response mapping json options', async () => {
const expectDbId = 'db1'; const expectDbId = 'db1';
const expectedSchema = 'schema1'; const expectedSchema = 'schema1';
const catalogApiRoute = `glob:*/api/v1/database/${expectDbId}/catalogs/*`;
const schemaApiRoute = `glob:*/api/v1/database/${expectDbId}/schemas/*`; const schemaApiRoute = `glob:*/api/v1/database/${expectDbId}/schemas/*`;
const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`; const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`;
fetchMock.get(tableApiRoute, fakeApiResult); fetchMock.get(tableApiRoute, fakeApiResult);
fetchMock.get(catalogApiRoute, { count: 0, result: [] });
fetchMock.get(schemaApiRoute, { fetchMock.get(schemaApiRoute, {
result: fakeSchemaApiResult, result: fakeSchemaApiResult,
}); });
@ -130,9 +132,11 @@ describe('useTables hook', () => {
test('skips the deprecated schema option', async () => { test('skips the deprecated schema option', async () => {
const expectDbId = 'db1'; const expectDbId = 'db1';
const unexpectedSchema = 'invalid schema'; const unexpectedSchema = 'invalid schema';
const catalogApiRoute = `glob:*/api/v1/database/${expectDbId}/catalogs/*`;
const schemaApiRoute = `glob:*/api/v1/database/${expectDbId}/schemas/*`; const schemaApiRoute = `glob:*/api/v1/database/${expectDbId}/schemas/*`;
const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`; const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`;
fetchMock.get(tableApiRoute, fakeApiResult); fetchMock.get(tableApiRoute, fakeApiResult);
fetchMock.get(catalogApiRoute, { count: 0, result: [] });
fetchMock.get(schemaApiRoute, { fetchMock.get(schemaApiRoute, {
result: fakeSchemaApiResult, result: fakeSchemaApiResult,
}); });
@ -166,6 +170,10 @@ describe('useTables hook', () => {
const expectedSchema = 'schema2'; const expectedSchema = 'schema2';
const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`; const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`;
fetchMock.get(tableApiRoute, fakeHasMoreApiResult); fetchMock.get(tableApiRoute, fakeHasMoreApiResult);
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/catalogs/*`, {
count: 0,
result: [],
});
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, { fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, {
result: fakeSchemaApiResult, result: fakeSchemaApiResult,
}); });
@ -191,6 +199,10 @@ describe('useTables hook', () => {
const expectedSchema = 'schema1'; const expectedSchema = 'schema1';
const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`; const tableApiRoute = `glob:*/api/v1/database/${expectDbId}/tables/?q=*`;
fetchMock.get(tableApiRoute, fakeApiResult); fetchMock.get(tableApiRoute, fakeApiResult);
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/catalogs/*`, {
count: 0,
result: [],
});
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, { fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, {
result: fakeSchemaApiResult, result: fakeSchemaApiResult,
}); });
@ -220,6 +232,10 @@ describe('useTables hook', () => {
fetchMock.get(tableApiRoute, url => fetchMock.get(tableApiRoute, url =>
url.includes(expectedSchema) ? fakeApiResult : fakeHasMoreApiResult, url.includes(expectedSchema) ? fakeApiResult : fakeHasMoreApiResult,
); );
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/catalogs/*`, {
count: 0,
result: [],
});
fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, { fetchMock.get(`glob:*/api/v1/database/${expectDbId}/schemas/*`, {
result: fakeSchemaApiResult, result: fakeSchemaApiResult,
}); });

View File

@ -18,6 +18,7 @@
*/ */
import { useCallback, useMemo, useEffect, useRef } from 'react'; import { useCallback, useMemo, useEffect, useRef } from 'react';
import useEffectEvent from 'src/hooks/useEffectEvent'; import useEffectEvent from 'src/hooks/useEffectEvent';
import { toQueryString } from 'src/utils/urlUtils';
import { api, JsonResponse } from './queryApi'; import { api, JsonResponse } from './queryApi';
import { useSchemas } from './schemas'; import { useSchemas } from './schemas';
@ -50,6 +51,7 @@ export type Data = {
export type FetchTablesQueryParams = { export type FetchTablesQueryParams = {
dbId?: string | number; dbId?: string | number;
catalog?: string | null;
schema?: string; schema?: string;
forceRefresh?: boolean; forceRefresh?: boolean;
onSuccess?: (data: Data, isRefetched: boolean) => void; onSuccess?: (data: Data, isRefetched: boolean) => void;
@ -58,6 +60,7 @@ export type FetchTablesQueryParams = {
export type FetchTableMetadataQueryParams = { export type FetchTableMetadataQueryParams = {
dbId: string | number; dbId: string | number;
catalog?: string | null;
schema: string; schema: string;
table: string; table: string;
}; };
@ -95,12 +98,13 @@ const tableApi = api.injectEndpoints({
endpoints: builder => ({ endpoints: builder => ({
tables: builder.query<Data, FetchTablesQueryParams>({ tables: builder.query<Data, FetchTablesQueryParams>({
providesTags: ['Tables'], providesTags: ['Tables'],
query: ({ dbId, schema, forceRefresh }) => ({ query: ({ dbId, catalog, schema, forceRefresh }) => ({
endpoint: `/api/v1/database/${dbId ?? 'undefined'}/tables/`, endpoint: `/api/v1/database/${dbId ?? 'undefined'}/tables/`,
// TODO: Would be nice to add pagination in a follow-up. Needs endpoint changes. // TODO: Would be nice to add pagination in a follow-up. Needs endpoint changes.
urlParams: { urlParams: {
force: forceRefresh, force: forceRefresh,
schema_name: schema ? encodeURIComponent(schema) : '', schema_name: schema ? encodeURIComponent(schema) : '',
...(catalog && { catalog_name: catalog }),
}, },
transformResponse: ({ json }: QueryResponse) => ({ transformResponse: ({ json }: QueryResponse) => ({
options: json.result, options: json.result,
@ -113,10 +117,12 @@ const tableApi = api.injectEndpoints({
}), }),
}), }),
tableMetadata: builder.query<TableMetaData, FetchTableMetadataQueryParams>({ tableMetadata: builder.query<TableMetaData, FetchTableMetadataQueryParams>({
query: ({ dbId, schema, table }) => ({ query: ({ dbId, catalog, schema, table }) => ({
endpoint: schema endpoint: `/api/v1/database/${dbId}/table_metadata/${toQueryString({
? `/api/v1/database/${dbId}/table_metadata/?name=${table}&schema=${schema}` name: table,
: `/api/v1/database/${dbId}/table_metadata/?name=${table}`, catalog,
schema,
})}`,
transformResponse: ({ json }: TableMetadataReponse) => json, transformResponse: ({ json }: TableMetadataReponse) => json,
}), }),
}), }),
@ -124,10 +130,10 @@ const tableApi = api.injectEndpoints({
TableExtendedMetadata, TableExtendedMetadata,
FetchTableMetadataQueryParams FetchTableMetadataQueryParams
>({ >({
query: ({ dbId, schema, table }) => ({ query: ({ dbId, catalog, schema, table }) => ({
endpoint: schema endpoint: `/api/v1/database/${dbId}/table_metadata/extra/${toQueryString(
? `/api/v1/database/${dbId}/table_metadata/extra/?name=${table}&schema=${schema}` { name: table, catalog, schema },
: `/api/v1/database/${dbId}/table_metadata/extra/?name=${table}`, )}`,
transformResponse: ({ json }: JsonResponse) => json, transformResponse: ({ json }: JsonResponse) => json,
}), }),
}), }),
@ -144,22 +150,23 @@ export const {
} = tableApi; } = tableApi;
export function useTables(options: Params) { export function useTables(options: Params) {
const { dbId, catalog, schema, onSuccess, onError } = options || {};
const isMountedRef = useRef(false); const isMountedRef = useRef(false);
const { data: schemaOptions, isFetching } = useSchemas({ const { data: schemaOptions, isFetching } = useSchemas({
dbId: options.dbId, dbId,
catalog: catalog || undefined,
}); });
const schemaOptionsMap = useMemo( const schemaOptionsMap = useMemo(
() => new Set(schemaOptions?.map(({ value }) => value)), () => new Set(schemaOptions?.map(({ value }) => value)),
[schemaOptions], [schemaOptions],
); );
const { dbId, schema, onSuccess, onError } = options || {};
const enabled = Boolean( const enabled = Boolean(
dbId && schema && !isFetching && schemaOptionsMap.has(schema), dbId && schema && !isFetching && schemaOptionsMap.has(schema),
); );
const result = useTablesQuery( const result = useTablesQuery(
{ dbId, schema, forceRefresh: false }, { dbId, catalog, schema, forceRefresh: false },
{ {
skip: !enabled, skip: !enabled,
}, },
@ -176,7 +183,7 @@ export function useTables(options: Params) {
const refetch = useCallback(() => { const refetch = useCallback(() => {
if (enabled) { if (enabled) {
trigger({ dbId, schema, forceRefresh: true }).then( trigger({ dbId, catalog, schema, forceRefresh: true }).then(
({ isSuccess, isError, data, error }) => { ({ isSuccess, isError, data, error }) => {
if (isSuccess && data) { if (isSuccess && data) {
handleOnSuccess(data, true); handleOnSuccess(data, true);
@ -187,7 +194,7 @@ export function useTables(options: Params) {
}, },
); );
} }
}, [dbId, schema, enabled, handleOnSuccess, handleOnError, trigger]); }, [dbId, catalog, schema, enabled, handleOnSuccess, handleOnError, trigger]);
useEffect(() => { useEffect(() => {
if (isMountedRef.current) { if (isMountedRef.current) {

View File

@ -48,6 +48,14 @@ export function datasetReducer(
return { return {
...trimmedState, ...trimmedState,
...action.payload, ...action.payload,
catalog: null,
schema: null,
table_name: null,
};
case DatasetActionType.SelectCatalog:
return {
...trimmedState,
[action.payload.name]: action.payload.value,
schema: null, schema: null,
table_name: null, table_name: null,
}; };
@ -112,6 +120,7 @@ export default function AddDataset() {
<DatasetPanel <DatasetPanel
tableName={dataset?.table_name} tableName={dataset?.table_name}
dbId={dataset?.db?.id} dbId={dataset?.db?.id}
catalog={dataset?.catalog}
schema={dataset?.schema} schema={dataset?.schema}
setHasColumns={setHasColumns} setHasColumns={setHasColumns}
datasets={datasets} datasets={datasets}

View File

@ -29,4 +29,5 @@ export default interface Database {
catalog: object; catalog: object;
parameters: any; parameters: any;
disable_drill_to_detail?: boolean; disable_drill_to_detail?: boolean;
allow_multi_catalog?: boolean;
} }

View File

@ -21,5 +21,6 @@ export const getDatasourceAsSaveableDataset = source => ({
name: source?.datasource_name || source?.name || 'Untitled', name: source?.datasource_name || source?.name || 'Untitled',
dbId: source?.database?.id || source?.dbId, dbId: source?.database?.id || source?.dbId,
sql: source?.sql || '', sql: source?.sql || '',
catalog: source?.catalog,
schema: source?.schema, schema: source?.schema,
}); });

View File

@ -17,7 +17,7 @@
* under the License. * under the License.
*/ */
import { isUrlExternal, parseUrl } from './urlUtils'; import { isUrlExternal, parseUrl, toQueryString } from './urlUtils';
test('isUrlExternal', () => { test('isUrlExternal', () => {
expect(isUrlExternal('http://google.com')).toBeTruthy(); expect(isUrlExternal('http://google.com')).toBeTruthy();
@ -52,3 +52,47 @@ test('parseUrl', () => {
expect(parseUrl('/about')).toEqual('/about'); expect(parseUrl('/about')).toEqual('/about');
expect(parseUrl('#anchor')).toEqual('#anchor'); expect(parseUrl('#anchor')).toEqual('#anchor');
}); });
// Unit tests for toQueryString (see src/utils/urlUtils): it should produce a
// `?`-prefixed, URI-encoded query string and silently drop nullish values.
describe('toQueryString', () => {
  it('should return an empty string if the input is an empty object', () => {
    expect(toQueryString({})).toBe('');
  });

  it('should correctly convert a single key-value pair to a query string', () => {
    expect(toQueryString({ key: 'value' })).toBe('?key=value');
  });

  it('should correctly convert multiple key-value pairs to a query string', () => {
    expect(toQueryString({ key1: 'value1', key2: 'value2' })).toBe(
      '?key1=value1&key2=value2',
    );
  });

  it('should encode URI components', () => {
    // encodeURIComponent encodes spaces as %20 (not `+`) and `@` as %40.
    expect(
      toQueryString({ 'a key': 'a value', email: 'test@example.com' }),
    ).toBe('?a%20key=a%20value&email=test%40example.com');
  });

  it('should omit keys with undefined values', () => {
    expect(toQueryString({ key1: 'value1', key2: undefined })).toBe(
      '?key1=value1',
    );
  });

  it('should omit keys with null values', () => {
    expect(toQueryString({ key1: 'value1', key2: null })).toBe('?key1=value1');
  });

  it('should handle numbers and boolean values as parameter values', () => {
    // Non-string values are stringified by encodeURIComponent.
    expect(toQueryString({ number: 123, truth: true, lie: false })).toBe(
      '?number=123&truth=true&lie=false',
    );
  });

  it('should handle special characters in keys and values', () => {
    expect(toQueryString({ 'user@domain': 'me&you' })).toBe(
      '?user%40domain=me%26you',
    );
  });
});

View File

@ -206,3 +206,16 @@ export function parseUrl(url: string) {
} }
return url; return url;
} }
/**
 * Serializes a flat params object into a `?`-prefixed query string.
 *
 * Keys and values are URI-encoded with encodeURIComponent; entries whose
 * value is null or undefined are omitted. Returns '' for an empty result.
 */
export function toQueryString(params: Record<string, any>): string {
  const encodedPairs = Object.entries(params)
    .filter(([, value]) => value !== null && value !== undefined)
    .map(
      ([key, value]) =>
        `${encodeURIComponent(key)}=${encodeURIComponent(value)}`,
    );
  return encodedPairs.length ? `?${encodedPairs.join('&')}` : '';
}

View File

@ -85,6 +85,7 @@ class CacheRestApi(BaseSupersetModelRestApi):
for ds in datasources.get("datasources", []): for ds in datasources.get("datasources", []):
ds_obj = SqlaTable.get_datasource_by_name( ds_obj = SqlaTable.get_datasource_by_name(
datasource_name=ds.get("datasource_name"), datasource_name=ds.get("datasource_name"),
catalog=ds.get("catalog"),
schema=ds.get("schema"), schema=ds.get("schema"),
database_name=ds.get("database_name"), database_name=ds.get("database_name"),
) )

View File

@ -66,6 +66,7 @@ def import_chart(
datasource = SqlaTable.get_datasource_by_name( datasource = SqlaTable.get_datasource_by_name(
datasource_name=params["datasource_name"], datasource_name=params["datasource_name"],
database_name=params["database_name"], database_name=params["database_name"],
catalog=params.get("catalog"),
schema=params["schema"], schema=params["schema"],
) )
slc_to_import.datasource_id = datasource.id # type: ignore slc_to_import.datasource_id = datasource.id # type: ignore

View File

@ -60,8 +60,8 @@ class ValidateSQLCommand(BaseCommand):
if not self._validator or not self._model: if not self._validator or not self._model:
raise ValidatorSQLUnexpectedError() raise ValidatorSQLUnexpectedError()
sql = self._properties["sql"] sql = self._properties["sql"]
schema = self._properties.get("schema")
catalog = self._properties.get("catalog") catalog = self._properties.get("catalog")
schema = self._properties.get("schema")
try: try:
timeout = current_app.config["SQLLAB_VALIDATION_TIMEOUT"] timeout = current_app.config["SQLLAB_VALIDATION_TIMEOUT"]
timeout_msg = f"The query exceeded the {timeout} seconds timeout." timeout_msg = f"The query exceeded the {timeout} seconds timeout."

View File

@ -698,7 +698,11 @@ class BaseDatasource(AuditMixinNullable, ImportExportMixin): # pylint: disable=
@classmethod @classmethod
def get_datasource_by_name( def get_datasource_by_name(
cls, datasource_name: str, schema: str, database_name: str cls,
datasource_name: str,
catalog: str | None,
schema: str,
database_name: str,
) -> BaseDatasource | None: ) -> BaseDatasource | None:
raise NotImplementedError() raise NotImplementedError()
@ -1239,6 +1243,7 @@ class SqlaTable(
def get_datasource_by_name( def get_datasource_by_name(
cls, cls,
datasource_name: str, datasource_name: str,
catalog: str | None,
schema: str | None, schema: str | None,
database_name: str, database_name: str,
) -> SqlaTable | None: ) -> SqlaTable | None:
@ -1248,6 +1253,7 @@ class SqlaTable(
.join(Database) .join(Database)
.filter(cls.table_name == datasource_name) .filter(cls.table_name == datasource_name)
.filter(Database.database_name == database_name) .filter(Database.database_name == database_name)
.filter(cls.catalog == catalog)
) )
# Handling schema being '' or None, which is easier to handle # Handling schema being '' or None, which is easier to handle
# in python than in the SQLA query in a multi-dialect way # in python than in the SQLA query in a multi-dialect way
@ -1752,7 +1758,7 @@ class SqlaTable(
try: try:
df = self.database.get_df( df = self.database.get_df(
sql, sql,
None, self.catalog,
self.schema or None, self.schema or None,
mutator=assign_column_label, mutator=assign_column_label,
) )

View File

@ -216,6 +216,7 @@ class DatabaseSchema(Schema):
allows_virtual_table_explore = fields.Bool() allows_virtual_table_explore = fields.Bool()
disable_data_preview = fields.Bool() disable_data_preview = fields.Bool()
disable_drill_to_detail = fields.Bool() disable_drill_to_detail = fields.Bool()
allow_multi_catalog = fields.Bool()
explore_database_id = fields.Int() explore_database_id = fields.Int()

View File

@ -217,6 +217,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"uuid", "uuid",
"disable_data_preview", "disable_data_preview",
"disable_drill_to_detail", "disable_drill_to_detail",
"allow_multi_catalog",
"engine_information", "engine_information",
] ]
add_columns = [ add_columns = [

View File

@ -156,7 +156,9 @@ extra_description = markdown(
"6. The ``disable_data_preview`` field is a boolean specifying whether or not data " "6. The ``disable_data_preview`` field is a boolean specifying whether or not data "
"preview queries will be run when fetching table metadata in SQL Lab." "preview queries will be run when fetching table metadata in SQL Lab."
"7. The ``disable_drill_to_detail`` field is a boolean specifying whether or not" "7. The ``disable_drill_to_detail`` field is a boolean specifying whether or not"
"drill to detail is disabled for the database.", "drill to detail is disabled for the database."
"8. The ``allow_multi_catalog`` indicates if the database allows changing "
"the default catalog when running queries and creating datasets.",
True, True,
) )
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}} get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
@ -739,6 +741,7 @@ class ValidateSQLRequest(Schema):
sql = fields.String( sql = fields.String(
required=True, metadata={"description": "SQL statement to validate"} required=True, metadata={"description": "SQL statement to validate"}
) )
catalog = fields.String(required=False, allow_none=True)
schema = fields.String(required=False, allow_none=True) schema = fields.String(required=False, allow_none=True)
template_params = fields.Dict(required=False, allow_none=True) template_params = fields.Dict(required=False, allow_none=True)
@ -824,6 +827,7 @@ class ImportV1DatabaseExtraSchema(Schema):
cancel_query_on_windows_unload = fields.Boolean(required=False) cancel_query_on_windows_unload = fields.Boolean(required=False)
disable_data_preview = fields.Boolean(required=False) disable_data_preview = fields.Boolean(required=False)
disable_drill_to_detail = fields.Boolean(required=False) disable_drill_to_detail = fields.Boolean(required=False)
allow_multi_catalog = fields.Boolean(required=False)
version = fields.String(required=False, allow_none=True) version = fields.String(required=False, allow_none=True)
@ -968,6 +972,20 @@ class DatabaseSchemaAccessForFileUploadResponse(Schema):
) )
class EngineInformationSchema(Schema):
    """
    Engine capability flags surfaced alongside a database connection.

    Nested under ``DatabaseConnectionSchema.engine_information`` so clients
    can adapt the UI to what the underlying engine supports.
    """

    # True when users may upload files (e.g. CSVs) into this database.
    supports_file_upload = fields.Boolean(metadata={"description": "Users can upload files to the database"})
    # True when SSH tunneling is unavailable for this engine.
    disable_ssh_tunneling = fields.Boolean(metadata={"description": "SSH tunnel is not available to the database"})
    # True when a single connection can address multiple catalogs.
    supports_dynamic_catalog = fields.Boolean(metadata={"description": "The database supports multiple catalogs in a single connection"})
class DatabaseConnectionSchema(Schema): class DatabaseConnectionSchema(Schema):
""" """
Schema with database connection information. Schema with database connection information.
@ -1001,7 +1019,7 @@ class DatabaseConnectionSchema(Schema):
driver = fields.String( driver = fields.String(
allow_none=True, metadata={"description": "SQLAlchemy driver to use"} allow_none=True, metadata={"description": "SQLAlchemy driver to use"}
) )
engine_information = fields.Dict(keys=fields.String(), values=fields.Raw()) engine_information = fields.Nested(EngineInformationSchema)
expose_in_sqllab = fields.Boolean( expose_in_sqllab = fields.Boolean(
metadata={"description": expose_in_sqllab_description} metadata={"description": expose_in_sqllab_description}
) )

View File

@ -119,6 +119,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"owners.id", "owners.id",
"owners.first_name", "owners.first_name",
"owners.last_name", "owners.last_name",
"catalog",
"schema", "schema",
"sql", "sql",
"table_name", "table_name",
@ -126,6 +127,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
list_select_columns = list_columns + ["changed_on", "changed_by_fk"] list_select_columns = list_columns + ["changed_on", "changed_by_fk"]
order_columns = [ order_columns = [
"table_name", "table_name",
"catalog",
"schema", "schema",
"changed_by.first_name", "changed_by.first_name",
"changed_on_delta_humanized", "changed_on_delta_humanized",
@ -139,6 +141,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"sql", "sql",
"filter_select_enabled", "filter_select_enabled",
"fetch_values_predicate", "fetch_values_predicate",
"catalog",
"schema", "schema",
"description", "description",
"main_dttm_col", "main_dttm_col",
@ -197,6 +200,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
show_columns = show_select_columns + [ show_columns = show_select_columns + [
"columns.type_generic", "columns.type_generic",
"database.backend", "database.backend",
"database.allow_multi_catalog",
"columns.advanced_data_type", "columns.advanced_data_type",
"is_managed_externally", "is_managed_externally",
"uid", "uid",
@ -212,12 +216,13 @@ class DatasetRestApi(BaseSupersetModelRestApi):
add_model_schema = DatasetPostSchema() add_model_schema = DatasetPostSchema()
edit_model_schema = DatasetPutSchema() edit_model_schema = DatasetPutSchema()
duplicate_model_schema = DatasetDuplicateSchema() duplicate_model_schema = DatasetDuplicateSchema()
add_columns = ["database", "schema", "table_name", "sql", "owners"] add_columns = ["database", "catalog", "schema", "table_name", "sql", "owners"]
edit_columns = [ edit_columns = [
"table_name", "table_name",
"sql", "sql",
"filter_select_enabled", "filter_select_enabled",
"fetch_values_predicate", "fetch_values_predicate",
"catalog",
"schema", "schema",
"description", "description",
"main_dttm_col", "main_dttm_col",
@ -251,6 +256,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"id", "id",
"database", "database",
"owners", "owners",
"catalog",
"schema", "schema",
"sql", "sql",
"table_name", "table_name",
@ -258,7 +264,7 @@ class DatasetRestApi(BaseSupersetModelRestApi):
"changed_by", "changed_by",
] ]
allowed_rel_fields = {"database", "owners", "created_by", "changed_by"} allowed_rel_fields = {"database", "owners", "created_by", "changed_by"}
allowed_distinct_fields = {"schema"} allowed_distinct_fields = {"catalog", "schema"}
apispec_parameter_schemas = { apispec_parameter_schemas = {
"get_export_ids_schema": get_export_ids_schema, "get_export_ids_schema": get_export_ids_schema,

View File

@ -93,6 +93,7 @@ class DatasetMetricsPutSchema(Schema):
class DatasetPostSchema(Schema): class DatasetPostSchema(Schema):
database = fields.Integer(required=True) database = fields.Integer(required=True)
catalog = fields.String(allow_none=True, validate=Length(0, 250))
schema = fields.String(allow_none=True, validate=Length(0, 250)) schema = fields.String(allow_none=True, validate=Length(0, 250))
table_name = fields.String(required=True, allow_none=False, validate=Length(1, 250)) table_name = fields.String(required=True, allow_none=False, validate=Length(1, 250))
sql = fields.String(allow_none=True) sql = fields.String(allow_none=True)
@ -109,6 +110,7 @@ class DatasetPutSchema(Schema):
sql = fields.String(allow_none=True) sql = fields.String(allow_none=True)
filter_select_enabled = fields.Boolean(allow_none=True) filter_select_enabled = fields.Boolean(allow_none=True)
fetch_values_predicate = fields.String(allow_none=True, validate=Length(0, 1000)) fetch_values_predicate = fields.String(allow_none=True, validate=Length(0, 1000))
catalog = fields.String(allow_none=True, validate=Length(0, 250))
schema = fields.String(allow_none=True, validate=Length(0, 255)) schema = fields.String(allow_none=True, validate=Length(0, 255))
description = fields.String(allow_none=True) description = fields.String(allow_none=True)
main_dttm_col = fields.String(allow_none=True) main_dttm_col = fields.String(allow_none=True)
@ -272,6 +274,11 @@ class GetOrCreateDatasetSchema(Schema):
database_id = fields.Integer( database_id = fields.Integer(
required=True, metadata={"description": "ID of database table belongs to"} required=True, metadata={"description": "ID of database table belongs to"}
) )
catalog = fields.String(
allow_none=True,
validate=Length(0, 250),
metadata={"description": "The catalog the table belongs to"},
)
schema = fields.String( schema = fields.String(
allow_none=True, allow_none=True,
validate=Length(0, 250), validate=Length(0, 250),

View File

@ -2184,6 +2184,7 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
return { return {
"supports_file_upload": cls.supports_file_upload, "supports_file_upload": cls.supports_file_upload,
"disable_ssh_tunneling": cls.disable_ssh_tunneling, "disable_ssh_tunneling": cls.disable_ssh_tunneling,
"supports_dynamic_catalog": cls.supports_dynamic_catalog,
} }
@classmethod @classmethod

View File

@ -235,6 +235,10 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
# this will prevent any 'trash value' strings from going through # this will prevent any 'trash value' strings from going through
return self.get_extra().get("disable_drill_to_detail", False) is True return self.get_extra().get("disable_drill_to_detail", False) is True
@property
def allow_multi_catalog(self) -> bool:
    """
    Whether users may change the default catalog when running queries
    or creating datasets against this database.

    Read from the database's ``extra`` config blob; defaults to False.
    The ``is True`` comparison mirrors ``disable_drill_to_detail`` above:
    it prevents 'trash value' strings stored in ``extra`` (e.g. "yes")
    from being treated as enabled — only a real boolean True counts.
    """
    return self.get_extra().get("allow_multi_catalog", False) is True
@property @property
def schema_options(self) -> dict[str, Any]: def schema_options(self) -> dict[str, Any]:
"""Additional schema display config for engines with complex schemas""" """Additional schema display config for engines with complex schemas"""
@ -255,6 +259,7 @@ class Database(Model, AuditMixinNullable, ImportExportMixin): # pylint: disable
"parameters": self.parameters, "parameters": self.parameters,
"disable_data_preview": self.disable_data_preview, "disable_data_preview": self.disable_data_preview,
"disable_drill_to_detail": self.disable_drill_to_detail, "disable_drill_to_detail": self.disable_drill_to_detail,
"allow_multi_catalog": self.allow_multi_catalog,
"parameters_schema": self.parameters_schema, "parameters_schema": self.parameters_schema,
"engine_information": self.engine_information, "engine_information": self.engine_information,
} }

View File

@ -514,6 +514,7 @@ class TabState(AuditMixinNullable, ExtraJSONMixin, Model):
"label": self.label, "label": self.label,
"active": self.active, "active": self.active,
"database_id": self.database_id, "database_id": self.database_id,
"catalog": self.catalog,
"schema": self.schema, "schema": self.schema,
"table_schemas": [ts.to_dict() for ts in self.table_schemas], "table_schemas": [ts.to_dict() for ts in self.table_schemas],
"sql": self.sql, "sql": self.sql,

View File

@ -95,6 +95,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
"description", "description",
"id", "id",
"label", "label",
"catalog",
"schema", "schema",
"sql", "sql",
"sql_tables", "sql_tables",
@ -119,6 +120,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
"label", "label",
"last_run_delta_humanized", "last_run_delta_humanized",
"rows", "rows",
"catalog",
"schema", "schema",
"sql", "sql",
"sql_tables", "sql_tables",
@ -130,12 +132,14 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
"db_id", "db_id",
"description", "description",
"label", "label",
"catalog",
"schema", "schema",
"sql", "sql",
"template_parameters", "template_parameters",
] ]
edit_columns = add_columns edit_columns = add_columns
order_columns = [ order_columns = [
"catalog",
"schema", "schema",
"label", "label",
"description", "description",
@ -148,7 +152,15 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
"last_run_delta_humanized", "last_run_delta_humanized",
] ]
search_columns = ["id", "database", "label", "schema", "created_by", "changed_by"] search_columns = [
"id",
"database",
"label",
"catalog",
"schema",
"created_by",
"changed_by",
]
if is_feature_enabled("TAGGING_SYSTEM"): if is_feature_enabled("TAGGING_SYSTEM"):
search_columns += ["tags"] search_columns += ["tags"]
search_filters = { search_filters = {
@ -170,7 +182,7 @@ class SavedQueryRestApi(BaseSupersetModelRestApi):
} }
base_related_field_filters = {"database": [["id", DatabaseFilter, lambda: []]]} base_related_field_filters = {"database": [["id", DatabaseFilter, lambda: []]]}
allowed_rel_fields = {"database", "changed_by", "created_by"} allowed_rel_fields = {"database", "changed_by", "created_by"}
allowed_distinct_fields = {"schema"} allowed_distinct_fields = {"catalog", "schema"}
def pre_add(self, item: SavedQuery) -> None: def pre_add(self, item: SavedQuery) -> None:
item.user = g.user item.user = g.user

View File

@ -53,6 +53,7 @@ class ExecutePayloadSchema(Schema):
client_id = fields.String(allow_none=True) client_id = fields.String(allow_none=True)
queryLimit = fields.Integer(allow_none=True) queryLimit = fields.Integer(allow_none=True)
sql_editor_id = fields.String(allow_none=True) sql_editor_id = fields.String(allow_none=True)
catalog = fields.String(allow_none=True)
schema = fields.String(allow_none=True) schema = fields.String(allow_none=True)
tab = fields.String(allow_none=True) tab = fields.String(allow_none=True)
ctas_method = fields.String(allow_none=True) ctas_method = fields.String(allow_none=True)

View File

@ -44,6 +44,7 @@ SqlResults = dict[str, Any]
@dataclass @dataclass
class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
database_id: int database_id: int
catalog: str | None
schema: str schema: str
sql: str sql: str
template_params: dict[str, Any] template_params: dict[str, Any]
@ -73,6 +74,7 @@ class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
def _init_from_query_params(self, query_params: dict[str, Any]) -> None: def _init_from_query_params(self, query_params: dict[str, Any]) -> None:
self.database_id = cast(int, query_params.get("database_id")) self.database_id = cast(int, query_params.get("database_id"))
self.catalog = cast(str, query_params.get("catalog"))
self.schema = cast(str, query_params.get("schema")) self.schema = cast(str, query_params.get("schema"))
self.sql = cast(str, query_params.get("sql")) self.sql = cast(str, query_params.get("sql"))
self.template_params = self._get_template_params(query_params) self.template_params = self._get_template_params(query_params)
@ -147,6 +149,7 @@ class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
return Query( return Query(
database_id=self.database_id, database_id=self.database_id,
sql=self.sql, sql=self.sql,
catalog=self.catalog,
schema=self.schema, schema=self.schema,
select_as_cta=True, select_as_cta=True,
ctas_method=self.create_table_as_select.ctas_method, # type: ignore ctas_method=self.create_table_as_select.ctas_method, # type: ignore
@ -163,6 +166,7 @@ class SqlJsonExecutionContext: # pylint: disable=too-many-instance-attributes
return Query( return Query(
database_id=self.database_id, database_id=self.database_id,
sql=self.sql, sql=self.sql,
catalog=self.catalog,
schema=self.schema, schema=self.schema,
select_as_cta=False, select_as_cta=False,
start_time=start_time, start_time=start_time,

View File

@ -39,6 +39,7 @@ DATABASE_KEYS = [
"id", "id",
"disable_data_preview", "disable_data_preview",
"disable_drill_to_detail", "disable_drill_to_detail",
"allow_multi_catalog",
] ]

View File

@ -149,7 +149,9 @@ class DatabaseMixin:
"not data preview queries will be run when fetching table metadata in" "not data preview queries will be run when fetching table metadata in"
"SQL Lab." "SQL Lab."
"7. The ``disable_drill_to_detail`` field is a boolean specifying whether or" "7. The ``disable_drill_to_detail`` field is a boolean specifying whether or"
"not drill to detail is disabled for the database.", "not drill to detail is disabled for the database."
"8. The ``allow_multi_catalog`` indicates if the database allows changing "
"the default catalog when running queries and creating datasets.",
True, True,
), ),
"encrypted_extra": utils.markdown( "encrypted_extra": utils.markdown(

View File

@ -26,6 +26,7 @@ from superset.utils.core import DatasourceType
class ExternalMetadataParams(TypedDict): class ExternalMetadataParams(TypedDict):
datasource_type: str datasource_type: str
database_name: str database_name: str
catalog_name: Optional[str]
schema_name: str schema_name: str
table_name: str table_name: str
normalize_columns: Optional[bool] normalize_columns: Optional[bool]
@ -45,6 +46,7 @@ get_external_metadata_schema = {
class ExternalMetadataSchema(Schema): class ExternalMetadataSchema(Schema):
datasource_type = fields.Str(required=True) datasource_type = fields.Str(required=True)
database_name = fields.Str(required=True) database_name = fields.Str(required=True)
catalog_name = fields.Str(allow_none=True)
schema_name = fields.Str(allow_none=True) schema_name = fields.Str(allow_none=True)
table_name = fields.Str(required=True) table_name = fields.Str(required=True)
normalize_columns = fields.Bool(allow_none=True) normalize_columns = fields.Bool(allow_none=True)
@ -60,6 +62,7 @@ class ExternalMetadataSchema(Schema):
return ExternalMetadataParams( return ExternalMetadataParams(
datasource_type=data["datasource_type"], datasource_type=data["datasource_type"],
database_name=data["database_name"], database_name=data["database_name"],
catalog_name=data.get("catalog_name"),
schema_name=data.get("schema_name", ""), schema_name=data.get("schema_name", ""),
table_name=data["table_name"], table_name=data["table_name"],
normalize_columns=data["normalize_columns"], normalize_columns=data["normalize_columns"],

View File

@ -165,6 +165,7 @@ class Datasource(BaseSupersetView):
datasource = SqlaTable.get_datasource_by_name( datasource = SqlaTable.get_datasource_by_name(
database_name=params["database_name"], database_name=params["database_name"],
catalog=params.get("catalog_name"),
schema=params["schema_name"], schema=params["schema_name"],
datasource_name=params["table_name"], datasource_name=params["table_name"],
) )

View File

@ -91,6 +91,7 @@ class TabStateView(BaseSupersetView):
or query_editor.get("title", __("Untitled Query")), or query_editor.get("title", __("Untitled Query")),
active=True, active=True,
database_id=query_editor["dbId"], database_id=query_editor["dbId"],
catalog=query_editor.get("catalog"),
schema=query_editor.get("schema"), schema=query_editor.get("schema"),
sql=query_editor.get("sql", "SELECT ..."), sql=query_editor.get("sql", "SELECT ..."),
query_limit=query_editor.get("queryLimit"), query_limit=query_editor.get("queryLimit"),