refactor: upload data unification, less permissions and less endpoints (#31959)

This commit is contained in:
Daniel Vaz Gaspar 2025-01-28 11:09:55 +00:00 committed by GitHub
parent 09c1987de4
commit 1b375b715c
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 312 additions and 489 deletions

View File

@ -24,6 +24,7 @@ assists people when migrating to a new version.
## Next
- [31959](https://github.com/apache/superset/pull/31959) Removes the following endpoints from data uploads: /api/v1/database/<id>/<file type>_upload and /api/v1/database/<file type>_metadata, in favour of a new unified endpoint (see the PR for details), and simplifies permissions.
- [31844](https://github.com/apache/superset/pull/31844) The `ALERT_REPORTS_EXECUTE_AS` and `THUMBNAILS_EXECUTE_AS` config parameters have been renamed to `ALERT_REPORTS_EXECUTORS` and `THUMBNAILS_EXECUTORS` respectively. A new config flag `CACHE_WARMUP_EXECUTORS` has also been introduced to be able to control which user is used to execute cache warmup tasks. Finally, the config flag `THUMBNAILS_SELENIUM_USER` has been removed. To use a fixed executor for async tasks, use the new `FixedExecutor` class. See the config and docs for more info on setting up different executor profiles.
- [31894](https://github.com/apache/superset/pull/31894) Domain sharding is deprecated in favor of HTTP2. The `SUPERSET_WEBSERVER_DOMAINS` configuration will be removed in the next major version (6.0)
- [31774](https://github.com/apache/superset/pull/31774): Fixes the spelling of the `USE-ANALAGOUS-COLORS` feature flag. Please update any scripts/configuration item to use the new/corrected `USE-ANALOGOUS-COLORS` flag spelling.

View File

@ -25,9 +25,7 @@ import userEvent from '@testing-library/user-event';
import { waitFor } from '@testing-library/react';
import { UploadFile } from 'antd/lib/upload/interface';
fetchMock.post('glob:*api/v1/database/1/csv_upload/', {});
fetchMock.post('glob:*api/v1/database/1/excel_upload/', {});
fetchMock.post('glob:*api/v1/database/1/columnar_upload/', {});
fetchMock.post('glob:*api/v1/database/1/upload/', {});
fetchMock.get(
'glob:*api/v1/database/?q=(filters:!((col:allow_file_upload,opr:eq,value:!t)),page:0,page_size:100)',
@ -643,18 +641,21 @@ test('CSV, form post', async () => {
});
userEvent.click(uploadButton);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/csv_upload/'));
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));
// Get the matching fetch calls made
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/csv_upload/');
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('csv');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.csv');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});
test('Excel, form post', async () => {
@ -700,22 +701,21 @@ test('Excel, form post', async () => {
});
userEvent.click(uploadButton);
await waitFor(() =>
fetchMock.called('glob:*api/v1/database/1/excel_upload/'),
);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));
// Get the matching fetch calls made
const matchingCalls = fetchMock.calls(
'glob:*api/v1/database/1/excel_upload/',
);
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('excel');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.xls');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});
test('Columnar, form post', async () => {
@ -761,22 +761,21 @@ test('Columnar, form post', async () => {
});
userEvent.click(uploadButton);
await waitFor(() =>
fetchMock.called('glob:*api/v1/database/1/columnar_upload/'),
);
await waitFor(() => fetchMock.called('glob:*api/v1/database/1/upload/'));
// Get the matching fetch calls made
const matchingCalls = fetchMock.calls(
'glob:*api/v1/database/1/columnar_upload/',
);
const matchingCalls = fetchMock.calls('glob:*api/v1/database/1/upload/');
expect(matchingCalls).toHaveLength(1);
const [_, options] = matchingCalls[0];
const formData = options?.body as FormData;
expect(formData.get('type')).toBe('columnar');
expect(formData.get('table_name')).toBe('table1');
expect(formData.get('schema')).toBe('public');
expect(formData.get('table_name')).toBe('table1');
const fileData = formData.get('file') as File;
expect(fileData.name).toBe('test.parquet');
// Avoid leaking fetchMock calls
fetchMock.resetHistory();
});
test('CSV, validate file extension returns false', () => {

View File

@ -230,19 +230,8 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
const [previewUploadedFile, setPreviewUploadedFile] = useState<boolean>(true);
const [fileLoading, setFileLoading] = useState<boolean>(false);
const createTypeToEndpointMap = (
databaseId: number,
): { [key: string]: string } => ({
csv: `/api/v1/database/${databaseId}/csv_upload/`,
excel: `/api/v1/database/${databaseId}/excel_upload/`,
columnar: `/api/v1/database/${databaseId}/columnar_upload/`,
});
const typeToFileMetadataEndpoint = {
csv: '/api/v1/database/csv_metadata/',
excel: '/api/v1/database/excel_metadata/',
columnar: '/api/v1/database/columnar_metadata/',
};
// Builds the unified upload endpoint URL for the given database id.
// NOTE(review): the name is stale after the endpoint unification — this no
// longer returns a type→endpoint map, just one URL. Consider renaming to
// something like `buildUploadEndpoint` (callers must be updated in step).
const createTypeToEndpointMap = (databaseId: number) =>
  `/api/v1/database/${databaseId}/upload/`;
const nullValuesOptions = [
{
@ -389,9 +378,10 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
if (type === 'csv') {
formData.append('delimiter', mergedValues.delimiter);
}
formData.append('type', type);
setFileLoading(true);
return SupersetClient.post({
endpoint: typeToFileMetadataEndpoint[type],
endpoint: '/api/v1/database/upload_metadata/',
body: formData,
headers: { Accept: 'application/json' },
})
@ -472,7 +462,8 @@ const UploadDataModal: FunctionComponent<UploadDataModalProps> = ({
}
appendFormData(formData, mergedValues);
setIsLoading(true);
const endpoint = createTypeToEndpointMap(currentDatabaseId)[type];
const endpoint = createTypeToEndpointMap(currentDatabaseId);
formData.append('type', type);
return SupersetClient.post({
endpoint,
body: formData,

View File

@ -168,8 +168,7 @@ const resetUseSelectorMock = () => {
permissions: {},
roles: {
Admin: [
['can_csv_upload', 'Database'], // So we can upload CSV
['can_excel_upload', 'Database'], // So we can upload CSV
['can_upload', 'Database'], // So we can upload data (CSV, Excel, Columnar)
['can_write', 'Database'], // So we can write DBs
['can_write', 'Dataset'], // So we can write Datasets
['can_write', 'Chart'], // So we can write Charts

View File

@ -507,14 +507,14 @@ export const uploadUserPerms = (
allowedExt: Array<string>,
) => {
const canUploadCSV =
findPermission('can_csv_upload', 'Database', roles) &&
findPermission('can_upload', 'Database', roles) &&
checkUploadExtensions(csvExt, allowedExt);
const canUploadColumnar =
checkUploadExtensions(colExt, allowedExt) &&
findPermission('can_columnar_upload', 'Database', roles);
findPermission('can_upload', 'Database', roles);
const canUploadExcel =
checkUploadExtensions(excelExt, allowedExt) &&
findPermission('can_excel_upload', 'Database', roles);
findPermission('can_upload', 'Database', roles);
return {
canUploadCSV,
canUploadColumnar,

View File

@ -36,6 +36,7 @@ from superset.connectors.sqla.models import SqlaTable
from superset.daos.database import DatabaseDAO
from superset.models.core import Database
from superset.sql_parse import Table
from superset.utils.backports import StrEnum
from superset.utils.core import get_user
from superset.utils.decorators import on_error, transaction
from superset.views.database.validators import schema_allows_file_upload
@ -45,6 +46,12 @@ logger = logging.getLogger(__name__)
READ_CHUNK_SIZE = 1000
class UploadFileType(StrEnum):
    """Discriminator for the unified upload endpoints.

    The value posted in the ``type`` form field selects which reader
    (CSV / Excel / Columnar) processes the uploaded file.
    """

    CSV = "csv"
    EXCEL = "excel"
    COLUMNAR = "columnar"
class ReaderOptions(TypedDict, total=False):
already_exists: str
index_label: str

View File

@ -169,9 +169,7 @@ MODEL_API_RW_METHOD_PERMISSION_MAP = {
"delete_object": "write",
"copy_dash": "write",
"get_connection": "write",
"excel_metadata": "excel_upload",
"columnar_metadata": "columnar_upload",
"csv_metadata": "csv_upload",
"upload_metadata": "upload",
"slack_channels": "write",
"put_filters": "write",
"put_colors": "write",

View File

@ -55,7 +55,11 @@ from superset.commands.database.ssh_tunnel.exceptions import (
from superset.commands.database.tables import TablesDatabaseCommand
from superset.commands.database.test_connection import TestConnectionDatabaseCommand
from superset.commands.database.update import UpdateDatabaseCommand
from superset.commands.database.uploaders.base import UploadCommand
from superset.commands.database.uploaders.base import (
BaseDataReader,
UploadCommand,
UploadFileType,
)
from superset.commands.database.uploaders.columnar_reader import ColumnarReader
from superset.commands.database.uploaders.csv_reader import CSVReader
from superset.commands.database.uploaders.excel_reader import ExcelReader
@ -72,10 +76,6 @@ from superset.databases.decorators import check_table_access
from superset.databases.filters import DatabaseFilter, DatabaseUploadEnabledFilter
from superset.databases.schemas import (
CatalogsResponseSchema,
ColumnarMetadataUploadFilePostSchema,
ColumnarUploadPostSchema,
CSVMetadataUploadFilePostSchema,
CSVUploadPostSchema,
database_catalogs_query_schema,
database_schemas_query_schema,
database_tables_query_schema,
@ -88,8 +88,6 @@ from superset.databases.schemas import (
DatabaseTablesResponse,
DatabaseTestConnectionSchema,
DatabaseValidateParametersSchema,
ExcelMetadataUploadFilePostSchema,
ExcelUploadPostSchema,
get_export_ids_schema,
OAuth2ProviderResponseSchema,
openapi_spec_methods_override,
@ -99,6 +97,8 @@ from superset.databases.schemas import (
TableExtraMetadataResponseSchema,
TableMetadataResponseSchema,
UploadFileMetadata,
UploadFileMetadataPostSchema,
UploadPostSchema,
ValidateSQLRequest,
ValidateSQLResponse,
)
@ -162,12 +162,8 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
"delete_ssh_tunnel",
"schemas_access_for_file_upload",
"get_connection",
"csv_upload",
"csv_metadata",
"excel_upload",
"excel_metadata",
"columnar_upload",
"columnar_metadata",
"upload_metadata",
"upload",
"oauth2",
}
@ -282,8 +278,6 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
openapi_spec_tag = "Database"
openapi_spec_component_schemas = (
CatalogsResponseSchema,
ColumnarUploadPostSchema,
CSVUploadPostSchema,
DatabaseConnectionSchema,
DatabaseFunctionNamesResponse,
DatabaseSchemaAccessForFileUploadResponse,
@ -291,15 +285,13 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
DatabaseTablesResponse,
DatabaseTestConnectionSchema,
DatabaseValidateParametersSchema,
ExcelUploadPostSchema,
TableExtraMetadataResponseSchema,
TableMetadataResponseSchema,
SelectStarResponseSchema,
SchemasResponseSchema,
CSVMetadataUploadFilePostSchema,
ExcelMetadataUploadFilePostSchema,
ColumnarMetadataUploadFilePostSchema,
UploadFileMetadataPostSchema,
UploadFileMetadata,
UploadPostSchema,
ValidateSQLRequest,
ValidateSQLResponse,
)
@ -1621,30 +1613,31 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
command.run()
return self.response(200, message="OK")
@expose("/csv_metadata/", methods=("POST",))
@expose("/upload_metadata/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=(
lambda self, *args, **kwargs: f"{self.__class__.__name__}" ".csv_metadata"
lambda self, *args, **kwargs: f"{self.__class__.__name__}"
".upload_metadata"
),
log_to_statsd=False,
)
@requires_form_data
def csv_metadata(self) -> Response:
"""Upload a CSV file and returns file metadata.
def upload_metadata(self) -> Response:
"""Upload a file and returns file metadata.
---
post:
summary: Upload a CSV file and returns file metadata
summary: Upload a file and returns file metadata
requestBody:
required: true
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/CSVMetadataUploadFilePostSchema'
$ref: '#/components/schemas/UploadFileMetadataPostSchema'
responses:
200:
description: Columnar upload response
description: Upload response
content:
application/json:
schema:
@ -1664,25 +1657,32 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = CSVMetadataUploadFilePostSchema().load(request_form)
parameters = UploadFileMetadataPostSchema().load(request_form)
except ValidationError as error:
return self.response_400(message=error.messages)
metadata = CSVReader(parameters).file_metadata(parameters["file"])
if parameters["type"] == UploadFileType.CSV.value:
metadata = CSVReader(parameters).file_metadata(parameters["file"])
elif parameters["type"] == UploadFileType.EXCEL.value:
metadata = ExcelReader(parameters).file_metadata(parameters["file"])
elif parameters["type"] == UploadFileType.COLUMNAR.value:
metadata = ColumnarReader(parameters).file_metadata(parameters["file"])
else:
self.response_400(message="Unexpected Invalid file type")
return self.response(200, result=UploadFileMetadata().dump(metadata))
@expose("/<int:pk>/csv_upload/", methods=("POST",))
@expose("/<int:pk>/upload/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.csv_upload",
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.upload",
log_to_statsd=False,
)
@requires_form_data
def csv_upload(self, pk: int) -> Response:
"""Upload a CSV file into a database.
def upload(self, pk: int) -> Response:
"""Upload a file into a database.
---
post:
summary: Upload a CSV file to a database table
summary: Upload a file to a database table
parameters:
- in: path
schema:
@ -1693,7 +1693,7 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/CSVUploadPostSchema'
$ref: '#/components/schemas/UploadPostSchema'
responses:
201:
description: Upload response
@ -1718,232 +1718,22 @@ class DatabaseRestApi(BaseSupersetModelRestApi):
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = CSVUploadPostSchema().load(request_form)
parameters = UploadPostSchema().load(request_form)
reader: BaseDataReader
if parameters["type"] == UploadFileType.CSV.value:
reader = CSVReader(parameters)
elif parameters["type"] == UploadFileType.EXCEL.value:
reader = ExcelReader(parameters)
elif parameters["type"] == UploadFileType.COLUMNAR.value:
reader = ColumnarReader(parameters)
else:
return self.response_400(message="Unexpected Invalid file type")
UploadCommand(
pk,
parameters["table_name"],
parameters["file"],
parameters.get("schema"),
CSVReader(parameters),
).run()
except ValidationError as error:
return self.response_400(message=error.messages)
return self.response(201, message="OK")
@expose("/excel_metadata/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=(
lambda self, *args, **kwargs: f"{self.__class__.__name__}" ".excel_metadata"
),
log_to_statsd=False,
)
@requires_form_data
def excel_metadata(self) -> Response:
"""Upload an Excel file and returns file metadata.
---
post:
summary: Upload an Excel file and returns file metadata
requestBody:
required: true
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/ExcelMetadataUploadFilePostSchema'
responses:
200:
description: Columnar upload response
content:
application/json:
schema:
type: object
properties:
result:
$ref: '#/components/schemas/UploadFileMetadata'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = ExcelMetadataUploadFilePostSchema().load(request_form)
except ValidationError as error:
return self.response_400(message=error.messages)
metadata = ExcelReader().file_metadata(parameters["file"])
return self.response(200, result=UploadFileMetadata().dump(metadata))
@expose("/<int:pk>/excel_upload/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.excel_upload",
log_to_statsd=False,
)
@requires_form_data
def excel_upload(self, pk: int) -> Response:
"""Upload an Excel file into a database.
---
post:
summary: Upload an Excel file to a database table
parameters:
- in: path
schema:
type: integer
name: pk
requestBody:
required: true
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/ExcelUploadPostSchema'
responses:
201:
description: Excel upload response
content:
application/json:
schema:
type: object
properties:
message:
type: string
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = ExcelUploadPostSchema().load(request_form)
UploadCommand(
pk,
parameters["table_name"],
parameters["file"],
parameters.get("schema"),
ExcelReader(parameters),
).run()
except ValidationError as error:
return self.response_400(message=error.messages)
return self.response(201, message="OK")
@expose("/columnar_metadata/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}"
".columnar_metadata",
log_to_statsd=False,
)
@requires_form_data
def columnar_metadata(self) -> Response:
"""Upload a Columnar file and returns file metadata.
---
post:
summary: Upload a Columnar file and returns file metadata
requestBody:
required: true
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/ColumnarMetadataUploadFilePostSchema'
responses:
200:
description: Columnar upload response
content:
application/json:
schema:
type: object
properties:
result:
$ref: '#/components/schemas/UploadFileMetadata'
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
500:
$ref: '#/components/responses/500'
"""
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = ColumnarMetadataUploadFilePostSchema().load(request_form)
except ValidationError as error:
return self.response_400(message=error.messages)
metadata = ColumnarReader().file_metadata(parameters["file"])
return self.response(200, result=UploadFileMetadata().dump(metadata))
@expose("/<int:pk>/columnar_upload/", methods=("POST",))
@protect()
@statsd_metrics
@event_logger.log_this_with_context(
action=lambda self,
*args,
**kwargs: f"{self.__class__.__name__}.columnar_upload",
log_to_statsd=False,
)
@requires_form_data
def columnar_upload(self, pk: int) -> Response:
"""Upload a Columnar file into a database.
---
post:
summary: Upload a Columnar file to a database table
parameters:
- in: path
schema:
type: integer
name: pk
requestBody:
required: true
content:
multipart/form-data:
schema:
$ref: '#/components/schemas/ColumnarUploadPostSchema'
responses:
201:
description: Columnar upload response
content:
application/json:
schema:
type: object
properties:
message:
type: string
400:
$ref: '#/components/responses/400'
401:
$ref: '#/components/responses/401'
404:
$ref: '#/components/responses/404'
422:
$ref: '#/components/responses/422'
500:
$ref: '#/components/responses/500'
"""
try:
request_form = request.form.to_dict()
request_form["file"] = request.files.get("file")
parameters = ColumnarUploadPostSchema().load(request_form)
UploadCommand(
pk,
parameters["table_name"],
parameters["file"],
parameters.get("schema"),
ColumnarReader(parameters),
reader,
).run()
except ValidationError as error:
return self.response_400(message=error.messages)

View File

@ -45,6 +45,7 @@ from superset.commands.database.ssh_tunnel.exceptions import (
SSHTunnelInvalidCredentials,
SSHTunnelMissingCredentials,
)
from superset.commands.database.uploaders.base import UploadFileType
from superset.constants import PASSWORD_MASK
from superset.databases.types import ( # pylint:disable=unused-import
EncryptedDict, # noqa: F401
@ -1081,20 +1082,22 @@ class DelimitedListField(fields.List):
) from exc
class BaseUploadFilePostSchema(Schema):
_extension_config_key = ""
class BaseUploadFilePostSchemaMixin(Schema):
@validates("file")
def validate_file_extension(self, file: FileStorage) -> None:
allowed_extensions = current_app.config["ALLOWED_EXTENSIONS"].intersection(
current_app.config[self._extension_config_key]
)
allowed_extensions = current_app.config["ALLOWED_EXTENSIONS"]
file_suffix = Path(file.filename).suffix
if not file_suffix or file_suffix[1:] not in allowed_extensions:
raise ValidationError([_("File extension is not allowed.")])
class BaseUploadPostSchema(BaseUploadFilePostSchema):
class UploadPostSchema(BaseUploadFilePostSchemaMixin):
type = fields.Enum(
UploadFileType,
required=True,
by_value=True,
metadata={"description": "File type to upload"},
)
already_exists = fields.String(
load_default="fail",
validate=OneOf(choices=("fail", "replace", "append")),
@ -1123,43 +1126,26 @@ class BaseUploadPostSchema(BaseUploadFilePostSchema):
metadata={"description": "The name of the table to be created/appended"},
)
class ColumnarUploadPostSchema(BaseUploadPostSchema):
"""
Schema for Columnar Upload
"""
_extension_config_key = "COLUMNAR_EXTENSIONS"
# ------------
# CSV Schema
# ------------
file = fields.Raw(
required=True,
metadata={
"description": "The Columnar file to upload",
"type": "string",
"format": "binary",
},
)
class CSVUploadPostSchema(BaseUploadPostSchema):
"""
Schema for CSV Upload
"""
_extension_config_key = "CSV_EXTENSIONS"
file = fields.Raw(
required=True,
metadata={
"description": "The CSV file to upload",
"description": "The file to upload",
"type": "string",
"format": "text/csv",
},
)
delimiter = fields.String(metadata={"description": "The delimiter of the CSV file"})
delimiter = fields.String(
metadata={
"description": "[CSV only] The character used to separate values in the CSV"
" file (e.g., a comma, semicolon, or tab)."
}
)
column_data_types = fields.String(
metadata={
"description": "A dictionary with column names and "
"description": "[CSV only] A dictionary with column names and "
"their data types if you need to change "
"the defaults. Example: {'user_id':'int'}. "
"Check Python Pandas library for supported data types"
@ -1167,57 +1153,69 @@ class CSVUploadPostSchema(BaseUploadPostSchema):
)
day_first = fields.Boolean(
metadata={
"description": "DD/MM format dates, international and European format"
"description": "[CSV only] DD/MM format dates, international and European"
" format"
}
)
skip_blank_lines = fields.Boolean(
metadata={"description": "Skip blank lines in the CSV file."}
metadata={"description": "[CSV only] Skip blank lines in the CSV file."}
)
skip_initial_space = fields.Boolean(
metadata={"description": "Skip spaces after delimiter."}
metadata={"description": "[CSV only] Skip spaces after delimiter."}
)
column_dates = DelimitedListField(
fields.String(),
metadata={
"description": "A list of column names that should be "
"description": "[CSV and Excel only] A list of column names that should be "
"parsed as dates. Example: date,timestamp"
},
)
decimal_character = fields.String(
metadata={
"description": "Character to recognize as decimal point. Default is '.'"
"description": "[CSV and Excel only] Character to recognize as decimal"
" point. Default is '.'"
}
)
header_row = fields.Integer(
metadata={
"description": "Row containing the headers to use as column names"
"(0 is first line of data). Leave empty if there is no header row."
"description": "[CSV and Excel only] Row containing the headers to use as"
" column names (0 is first line of data). Leave empty if"
" there is no header row."
}
)
index_column = fields.String(
metadata={
"description": "Column to use as the row labels of the dataframe. "
"Leave empty if no index column"
"description": "[CSV and Excel only] Column to use as the row labels of the"
" dataframe. Leave empty if no index column"
}
)
null_values = DelimitedListField(
fields.String(),
metadata={
"description": "A list of strings that should be treated as null. "
"Examples: '' for empty strings, 'None', 'N/A',"
"Warning: Hive database supports only a single value"
"description": "[CSV and Excel only] A list of strings that should be "
"treated as null. Examples: '' for empty strings, 'None',"
" 'N/A', Warning: Hive database supports only a single value"
},
)
rows_to_read = fields.Integer(
metadata={
"description": "Number of rows to read from the file. "
"description": "[CSV and Excel only] Number of rows to read from the file. "
"If None, reads all rows."
},
allow_none=True,
validate=Range(min=1),
)
skip_rows = fields.Integer(
metadata={"description": "Number of rows to skip at start of file."}
metadata={
"description": "[CSV and Excel only] Number of rows to skip at start"
" of file."
}
)
sheet_name = fields.String(
metadata={
"description": "[Excel only]] Strings used for sheet names "
"(default is the first sheet)."
}
)
@post_load
@ -1234,79 +1232,17 @@ class CSVUploadPostSchema(BaseUploadPostSchema):
return data
class ExcelUploadPostSchema(BaseUploadPostSchema):
class UploadFileMetadataPostSchema(BaseUploadFilePostSchemaMixin):
"""
Schema for Excel Upload
Schema for Upload file metadata.
"""
_extension_config_key = "EXCEL_EXTENSIONS"
file = fields.Raw(
type = fields.Enum(
UploadFileType,
required=True,
metadata={
"description": "The Excel file to upload",
"type": "string",
"format": "binary",
},
by_value=True,
metadata={"description": "File type to upload"},
)
sheet_name = fields.String(
metadata={
"description": "Strings used for sheet names "
"(default is the first sheet)."
}
)
column_dates = DelimitedListField(
fields.String(),
metadata={
"description": "A list of column names that should be "
"parsed as dates. Example: date,timestamp"
},
)
decimal_character = fields.String(
metadata={
"description": "Character to recognize as decimal point. Default is '.'"
}
)
header_row = fields.Integer(
metadata={
"description": "Row containing the headers to use as column names"
"(0 is first line of data). Leave empty if there is no header row."
}
)
index_column = fields.String(
metadata={
"description": "Column to use as the row labels of the dataframe. "
"Leave empty if no index column"
}
)
null_values = DelimitedListField(
fields.String(),
metadata={
"description": "A list of strings that should be treated as null. "
"Examples: '' for empty strings, 'None', 'N/A',"
"Warning: Hive database supports only a single value"
},
)
rows_to_read = fields.Integer(
metadata={
"description": "Number of rows to read from the file. "
"If None, reads all rows."
},
allow_none=True,
validate=Range(min=1),
)
skip_rows = fields.Integer(
metadata={"description": "Number of rows to skip at start of file."}
)
class CSVMetadataUploadFilePostSchema(BaseUploadFilePostSchema):
"""
Schema for CSV metadata.
"""
_extension_config_key = "CSV_EXTENSIONS"
file = fields.Raw(
required=True,
metadata={
@ -1315,30 +1251,12 @@ class CSVMetadataUploadFilePostSchema(BaseUploadFilePostSchema):
"format": "binary",
},
)
delimiter = fields.String(metadata={"description": "The delimiter of the CSV file"})
header_row = fields.Integer(
delimiter = fields.String(
metadata={
"description": "Row containing the headers to use as column names"
"(0 is first line of data). Leave empty if there is no header row."
"description": "The character used to separate values in the CSV file"
" (e.g., a comma, semicolon, or tab)."
}
)
class ExcelMetadataUploadFilePostSchema(BaseUploadFilePostSchema):
"""
Schema for CSV metadata.
"""
_extension_config_key = "EXCEL_EXTENSIONS"
file = fields.Raw(
required=True,
metadata={
"description": "The file to upload",
"type": "string",
"format": "binary",
},
)
header_row = fields.Integer(
metadata={
"description": "Row containing the headers to use as column names"
@ -1347,23 +1265,6 @@ class ExcelMetadataUploadFilePostSchema(BaseUploadFilePostSchema):
)
class ColumnarMetadataUploadFilePostSchema(BaseUploadFilePostSchema):
"""
Schema for CSV metadata.
"""
_extension_config_key = "COLUMNAR_EXTENSIONS"
file = fields.Raw(
required=True,
metadata={
"description": "The file to upload",
"type": "string",
"format": "binary",
},
)
class UploadFileMetadataItemSchema(Schema):
sheet_name = fields.String(metadata={"description": "The name of the sheet"})
column_names = fields.List(

View File

@ -0,0 +1,84 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""converge_upload_permissions
Revision ID: 74ad1125881c
Revises: d482d51c15ca
Create Date: 2025-01-22 14:34:25.610084
"""
# revision identifiers, used by Alembic.
revision = "74ad1125881c"
down_revision = "d482d51c15ca"
from alembic import op # noqa: E402
from sqlalchemy.exc import SQLAlchemyError # noqa: E402
from sqlalchemy.orm import Session # noqa: E402
from superset.migrations.shared.security_converge import ( # noqa: E402
add_pvms,
get_reversed_new_pvms,
get_reversed_pvm_map,
migrate_roles,
Pvm,
)
# Permission-view-menu entries introduced by this migration: the three
# per-format upload permissions collapse into a single "can_upload".
NEW_PVMS = {"Database": ("can_upload",)}
# Old permission -> tuple of replacement permission(s). Consumed forward by
# do_upgrade() and reversed by do_downgrade() via get_reversed_pvm_map().
PVM_MAP = {
    Pvm("Database", "can_csv_upload"): (Pvm("Database", "can_upload"),),
    Pvm("Database", "can_excel_upload"): (Pvm("Database", "can_upload"),),
    Pvm("Database", "can_columnar_upload"): (Pvm("Database", "can_upload"),),
}
def do_upgrade(session: Session) -> None:
    """Create the new ``can_upload`` PVM, then remap roles holding any of
    the old per-format upload permissions onto it (order matters: the new
    PVM must exist before roles are migrated)."""
    add_pvms(session, NEW_PVMS)
    migrate_roles(session, PVM_MAP)
def do_downgrade(session: Session) -> None:
    """Re-create the old per-format upload PVMs and remap roles back from
    ``can_upload`` using the reversed permission map."""
    add_pvms(session, get_reversed_new_pvms(PVM_MAP))
    migrate_roles(session, get_reversed_pvm_map(PVM_MAP))
def upgrade():
    """Alembic entry point: converge the upload permissions into
    ``can_upload`` and commit, rolling back and re-raising on failure."""
    session = Session(bind=op.get_bind())
    do_upgrade(session)
    try:
        session.commit()
    except SQLAlchemyError as err:
        session.rollback()
        raise Exception(
            f"An error occurred while upgrading permissions: {err}"
        ) from err
def downgrade():
    """Alembic entry point: restore the pre-convergence upload permissions.

    Unlike ``upgrade`` this is best-effort: a commit failure is printed and
    rolled back rather than re-raised.
    # NOTE(review): confirm this upgrade/downgrade asymmetry is intended.
    """
    session = Session(bind=op.get_bind())
    do_downgrade(session)
    try:
        session.commit()
    except SQLAlchemyError as ex:
        # Deliberate swallow (matches original behavior): report and roll
        # back so a broken downgrade does not abort the migration run.
        print(f"An error occurred while downgrading permissions: {ex}")
        session.rollback()
        # Removed a dead trailing `pass` that followed the rollback.

View File

@ -270,8 +270,7 @@ class SupersetSecurityManager( # pylint: disable=too-many-public-methods
}
ALPHA_ONLY_PMVS = {
("can_csv_upload", "Database"),
("can_excel_upload", "Database"),
("can_upload", "Database"),
}
ADMIN_ONLY_PERMISSIONS = {

View File

@ -1433,11 +1433,9 @@ class TestDatabaseApi(SupersetTestCase):
assert rv.status_code == 200
assert set(data["permissions"]) == {
"can_read",
"can_columnar_upload",
"can_csv_upload",
"can_excel_upload",
"can_write",
"can_export",
"can_upload",
}
def test_get_invalid_database_table_metadata(self):

View File

@ -1336,7 +1336,7 @@ class TestRolePermission(SupersetTestCase):
self.assert_cannot_menu("Upload a CSV", perm_set)
self.assert_cannot_menu("ReportSchedule", perm_set)
self.assert_cannot_menu("Alerts & Report", perm_set)
assert ("can_csv_upload", "Database") not in perm_set
assert ("can_upload", "Database") not in perm_set
def assert_can_gamma(self, perm_set):
self.assert_can_read("Dataset", perm_set)
@ -1365,7 +1365,7 @@ class TestRolePermission(SupersetTestCase):
self.assert_can_all("CssTemplate", perm_set)
self.assert_can_all("Dataset", perm_set)
self.assert_can_read("Database", perm_set)
assert ("can_csv_upload", "Database") in perm_set
assert ("can_upload", "Database") in perm_set
self.assert_can_menu("Manage", perm_set)
self.assert_can_menu("Annotation Layers", perm_set)
self.assert_can_menu("CSS Templates", perm_set)

View File

@ -842,6 +842,7 @@ def test_oauth2_error(
[
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -855,6 +856,7 @@ def test_oauth2_error(
),
(
{
"type": "csv",
"already_exists": "fail",
"delimiter": ",",
"file": ANY,
@ -864,6 +866,7 @@ def test_oauth2_error(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table2",
"delimiter": ";",
@ -879,6 +882,7 @@ def test_oauth2_error(
),
(
{
"type": "csv",
"already_exists": "replace",
"column_dates": ["col1", "col2"],
"delimiter": ";",
@ -889,6 +893,7 @@ def test_oauth2_error(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table2",
"delimiter": ";",
@ -911,6 +916,7 @@ def test_oauth2_error(
),
(
{
"type": "csv",
"already_exists": "replace",
"columns_read": ["col1", "col2"],
"null_values": ["None", "N/A", "''"],
@ -945,7 +951,7 @@ def test_csv_upload(
reader_mock = mocker.patch.object(CSVReader, "__init__")
reader_mock.return_value = None
response = client.post(
"/api/v1/database/1/csv_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -960,6 +966,7 @@ def test_csv_upload(
[
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"delimiter": ",",
"already_exists": "fail",
@ -968,6 +975,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "",
"delimiter": ",",
@ -976,11 +984,17 @@ def test_csv_upload(
{"message": {"table_name": ["Length must be between 1 and 10000."]}},
),
(
{"table_name": "table1", "delimiter": ",", "already_exists": "fail"},
{
"type": "csv",
"table_name": "table1",
"delimiter": ",",
"already_exists": "fail",
},
{"message": {"file": ["Field may not be null."]}},
),
(
{
"type": "csv",
"file": "xpto",
"table_name": "table1",
"delimiter": ",",
@ -990,6 +1004,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -999,6 +1014,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1009,6 +1025,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1019,6 +1036,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1029,6 +1047,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1039,6 +1058,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1049,6 +1069,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1059,6 +1080,7 @@ def test_csv_upload(
),
(
{
"type": "csv",
"file": (create_csv_file(), "out.csv"),
"table_name": "table1",
"delimiter": ",",
@ -1082,7 +1104,7 @@ def test_csv_upload_validation(
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/csv_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -1116,8 +1138,9 @@ def test_csv_upload_file_extension_invalid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/csv_upload/",
"/api/v1/database/1/upload/",
data={
"type": "csv",
"file": create_csv_file(filename=filename),
"table_name": "table1",
"delimiter": ",",
@ -1152,8 +1175,9 @@ def test_csv_upload_file_extension_valid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/csv_upload/",
"/api/v1/database/1/upload/",
data={
"type": "csv",
"file": create_csv_file(filename=filename),
"table_name": "table1",
"delimiter": ",",
@ -1168,6 +1192,7 @@ def test_csv_upload_file_extension_valid(
[
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table1",
},
@ -1180,6 +1205,7 @@ def test_csv_upload_file_extension_valid(
),
(
{
"type": "excel",
"already_exists": "fail",
"file": ANY,
"table_name": "table1",
@ -1188,6 +1214,7 @@ def test_csv_upload_file_extension_valid(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table2",
"sheet_name": "Sheet1",
@ -1203,6 +1230,7 @@ def test_csv_upload_file_extension_valid(
),
(
{
"type": "excel",
"already_exists": "replace",
"column_dates": ["col1", "col2"],
"sheet_name": "Sheet1",
@ -1213,6 +1241,7 @@ def test_csv_upload_file_extension_valid(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table2",
"sheet_name": "Sheet1",
@ -1231,6 +1260,7 @@ def test_csv_upload_file_extension_valid(
),
(
{
"type": "excel",
"already_exists": "replace",
"columns_read": ["col1", "col2"],
"null_values": ["None", "N/A", "''"],
@ -1261,7 +1291,7 @@ def test_excel_upload(
reader_mock = mocker.patch.object(ExcelReader, "__init__")
reader_mock.return_value = None
response = client.post(
"/api/v1/database/1/excel_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -1276,6 +1306,7 @@ def test_excel_upload(
[
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"sheet_name": "Sheet1",
"already_exists": "fail",
@ -1284,6 +1315,7 @@ def test_excel_upload(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "",
"sheet_name": "Sheet1",
@ -1292,11 +1324,12 @@ def test_excel_upload(
{"message": {"table_name": ["Length must be between 1 and 10000."]}},
),
(
{"table_name": "table1", "already_exists": "fail"},
{"type": "excel", "table_name": "table1", "already_exists": "fail"},
{"message": {"file": ["Field may not be null."]}},
),
(
{
"type": "excel",
"file": "xpto",
"table_name": "table1",
"already_exists": "fail",
@ -1305,6 +1338,7 @@ def test_excel_upload(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table1",
"already_exists": "xpto",
@ -1313,6 +1347,7 @@ def test_excel_upload(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table1",
"already_exists": "fail",
@ -1322,6 +1357,7 @@ def test_excel_upload(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table1",
"already_exists": "fail",
@ -1331,6 +1367,7 @@ def test_excel_upload(
),
(
{
"type": "excel",
"file": (create_excel_file(), "out.xls"),
"table_name": "table1",
"already_exists": "fail",
@ -1353,7 +1390,7 @@ def test_excel_upload_validation(
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/excel_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -1387,8 +1424,9 @@ def test_excel_upload_file_extension_invalid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/excel_upload/",
"/api/v1/database/1/upload/",
data={
"type": "excel",
"file": create_excel_file(filename=filename),
"table_name": "table1",
},
@ -1403,6 +1441,7 @@ def test_excel_upload_file_extension_invalid(
[
(
{
"type": "columnar",
"file": (create_columnar_file(), "out.parquet"),
"table_name": "table1",
},
@ -1415,6 +1454,7 @@ def test_excel_upload_file_extension_invalid(
),
(
{
"type": "columnar",
"already_exists": "fail",
"file": ANY,
"table_name": "table1",
@ -1423,6 +1463,7 @@ def test_excel_upload_file_extension_invalid(
),
(
{
"type": "columnar",
"file": (create_columnar_file(), "out.parquet"),
"table_name": "table2",
"already_exists": "replace",
@ -1439,6 +1480,7 @@ def test_excel_upload_file_extension_invalid(
),
(
{
"type": "columnar",
"already_exists": "replace",
"columns_read": ["col1", "col2"],
"file": ANY,
@ -1467,7 +1509,7 @@ def test_columnar_upload(
reader_mock = mocker.patch.object(ColumnarReader, "__init__")
reader_mock.return_value = None
response = client.post(
"/api/v1/database/1/columnar_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -1482,6 +1524,7 @@ def test_columnar_upload(
[
(
{
"type": "columnar",
"file": (create_columnar_file(), "out.parquet"),
"already_exists": "fail",
},
@ -1489,6 +1532,7 @@ def test_columnar_upload(
),
(
{
"type": "columnar",
"file": (create_columnar_file(), "out.parquet"),
"table_name": "",
"already_exists": "fail",
@ -1496,11 +1540,12 @@ def test_columnar_upload(
{"message": {"table_name": ["Length must be between 1 and 10000."]}},
),
(
{"table_name": "table1", "already_exists": "fail"},
{"type": "columnar", "table_name": "table1", "already_exists": "fail"},
{"message": {"file": ["Field may not be null."]}},
),
(
{
"type": "columnar",
"file": "xpto",
"table_name": "table1",
"already_exists": "fail",
@ -1509,6 +1554,7 @@ def test_columnar_upload(
),
(
{
"type": "columnar",
"file": (create_columnar_file(), "out.parquet"),
"table_name": "table1",
"already_exists": "xpto",
@ -1530,7 +1576,7 @@ def test_columnar_upload_validation(
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/columnar_upload/",
"/api/v1/database/1/upload/",
data=payload,
content_type="multipart/form-data",
)
@ -1559,8 +1605,9 @@ def test_columnar_upload_file_extension_valid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/columnar_upload/",
"/api/v1/database/1/upload/",
data={
"type": "columnar",
"file": (create_columnar_file(), filename),
"table_name": "table1",
},
@ -1595,8 +1642,9 @@ def test_columnar_upload_file_extension_invalid(
"""
_ = mocker.patch.object(UploadCommand, "run")
response = client.post(
"/api/v1/database/1/columnar_upload/",
"/api/v1/database/1/upload/",
data={
"type": "columnar",
"file": create_columnar_file(filename=filename),
"table_name": "table1",
},
@ -1611,8 +1659,8 @@ def test_csv_metadata(
) -> None:
_ = mocker.patch.object(CSVReader, "file_metadata")
response = client.post(
"/api/v1/database/csv_metadata/",
data={"file": create_csv_file()},
"/api/v1/database/upload_metadata/",
data={"type": "csv", "file": create_csv_file()},
content_type="multipart/form-data",
)
assert response.status_code == 200
@ -1623,8 +1671,8 @@ def test_csv_metadata_bad_extension(
) -> None:
_ = mocker.patch.object(CSVReader, "file_metadata")
response = client.post(
"/api/v1/database/csv_metadata/",
data={"file": create_csv_file(filename="test.out")},
"/api/v1/database/upload_metadata/",
data={"type": "csv", "file": create_csv_file(filename="test.out")},
content_type="multipart/form-data",
)
assert response.status_code == 400
@ -1636,21 +1684,29 @@ def test_csv_metadata_validation(
) -> None:
_ = mocker.patch.object(CSVReader, "file_metadata")
response = client.post(
"/api/v1/database/csv_metadata/",
data={},
"/api/v1/database/upload_metadata/",
data={"type": "csv"},
content_type="multipart/form-data",
)
assert response.status_code == 400
assert response.json == {"message": {"file": ["Field may not be null."]}}
response = client.post(
"/api/v1/database/upload_metadata/",
data={"file": create_csv_file(filename="test.csv")},
content_type="multipart/form-data",
)
assert response.status_code == 400
assert response.json == {"message": {"type": ["Missing data for required field."]}}
def test_excel_metadata(
mocker: MockerFixture, client: Any, full_api_access: None
) -> None:
_ = mocker.patch.object(ExcelReader, "file_metadata")
response = client.post(
"/api/v1/database/excel_metadata/",
data={"file": create_excel_file()},
"/api/v1/database/upload_metadata/",
data={"type": "excel", "file": create_excel_file()},
content_type="multipart/form-data",
)
assert response.status_code == 200
@ -1661,8 +1717,8 @@ def test_excel_metadata_bad_extension(
) -> None:
_ = mocker.patch.object(ExcelReader, "file_metadata")
response = client.post(
"/api/v1/database/excel_metadata/",
data={"file": create_excel_file(filename="test.out")},
"/api/v1/database/upload_metadata/",
data={"type": "excel", "file": create_excel_file(filename="test.out")},
content_type="multipart/form-data",
)
assert response.status_code == 400
@ -1674,8 +1730,8 @@ def test_excel_metadata_validation(
) -> None:
_ = mocker.patch.object(ExcelReader, "file_metadata")
response = client.post(
"/api/v1/database/excel_metadata/",
data={},
"/api/v1/database/upload_metadata/",
data={"type": "excel"},
content_type="multipart/form-data",
)
assert response.status_code == 400
@ -1687,8 +1743,8 @@ def test_columnar_metadata(
) -> None:
_ = mocker.patch.object(ColumnarReader, "file_metadata")
response = client.post(
"/api/v1/database/columnar_metadata/",
data={"file": create_columnar_file()},
"/api/v1/database/upload_metadata/",
data={"type": "columnar", "file": create_columnar_file()},
content_type="multipart/form-data",
)
assert response.status_code == 200
@ -1699,8 +1755,8 @@ def test_columnar_metadata_bad_extension(
) -> None:
_ = mocker.patch.object(ColumnarReader, "file_metadata")
response = client.post(
"/api/v1/database/columnar_metadata/",
data={"file": create_columnar_file(filename="test.out")},
"/api/v1/database/upload_metadata/",
data={"type": "columnar", "file": create_columnar_file(filename="test.out")},
content_type="multipart/form-data",
)
assert response.status_code == 400
@ -1712,8 +1768,8 @@ def test_columnar_metadata_validation(
) -> None:
_ = mocker.patch.object(ColumnarReader, "file_metadata")
response = client.post(
"/api/v1/database/columnar_metadata/",
data={},
"/api/v1/database/upload_metadata/",
data={"type": "columnar"},
content_type="multipart/form-data",
)
assert response.status_code == 400