fix: new import/export CLI (#13921)

* fix: CLI for import/export

* Add tests

* Remove debug
This commit is contained in:
Beto Dealmeida 2021-04-23 10:50:49 -07:00 committed by GitHub
parent 58534b36c5
commit ffcacc3393
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 1450 additions and 139 deletions

View File

@ -122,11 +122,7 @@ idna==2.10
# email-validator # email-validator
# yarl # yarl
importlib-metadata==2.1.1 importlib-metadata==2.1.1
# via # via -r requirements/base.in
# -r requirements/base.in
# jsonschema
# kombu
# markdown
isodate==0.6.0 isodate==0.6.0
# via apache-superset # via apache-superset
itsdangerous==1.1.0 itsdangerous==1.1.0
@ -270,6 +266,7 @@ sqlalchemy==1.3.20
# via # via
# alembic # alembic
# apache-superset # apache-superset
# flask-appbuilder
# flask-sqlalchemy # flask-sqlalchemy
# marshmallow-sqlalchemy # marshmallow-sqlalchemy
# sqlalchemy-utils # sqlalchemy-utils
@ -279,7 +276,6 @@ typing-extensions==3.7.4.3
# via # via
# aiohttp # aiohttp
# apache-superset # apache-superset
# yarl
urllib3==1.25.11 urllib3==1.25.11
# via selenium # via selenium
vine==1.3.0 vine==1.3.0

View File

@ -6,38 +6,301 @@
# pip-compile-multi # pip-compile-multi
# #
-r base.txt -r base.txt
-e file:. # via -r requirements/base.in -e file:.
boto3==1.16.10 # via tabulator # via -r requirements/base.in
botocore==1.19.10 # via boto3, s3transfer # via slackclient
cached-property==1.5.2 # via tableschema # via flask-migrate
certifi==2020.6.20 # via requests # via kombu
deprecated==1.2.11 # via pygithub # via flask-appbuilder
et-xmlfile==1.0.1 # via openpyxl # via aiohttp
flask-cors==3.0.9 # via -r requirements/development.in # via
future==0.18.2 # via pyhive # aiohttp
ijson==3.1.2.post0 # via tabulator # jsonschema
jdcal==1.4.1 # via openpyxl # via flask-babel
jmespath==0.10.0 # via boto3, botocore # via apache-superset
jsonlines==1.2.0 # via tabulator # via celery
linear-tsv==1.1.0 # via tabulator # via apache-superset
mysqlclient==1.4.2.post1 # via -r requirements/development.in boto3==1.16.10
openpyxl==3.0.5 # via tabulator # via tabulator
pillow==7.2.0 # via -r requirements/development.in botocore==1.19.10
psycopg2-binary==2.8.5 # via -r requirements/development.in # via
pydruid==0.6.1 # via -r requirements/development.in # boto3
pygithub==1.54.1 # via -r requirements/development.in # s3transfer
pyhive[hive]==0.6.3 # via -r requirements/development.in # via flask-compress
requests==2.24.0 # via pydruid, pygithub, tableschema, tabulator cached-property==1.5.2
rfc3986==1.4.0 # via tableschema # via tableschema
s3transfer==0.3.3 # via boto3 # via apache-superset
sasl==0.2.1 # via pyhive, thrift-sasl # via apache-superset
tableschema==1.20.0 # via -r requirements/development.in certifi==2020.6.20
tabulator==1.52.5 # via tableschema # via requests
thrift-sasl==0.4.2 # via pyhive # via cryptography
thrift==0.13.0 # via -r requirements/development.in, pyhive, thrift-sasl # via
unicodecsv==0.14.1 # via tableschema, tabulator # aiohttp
wrapt==1.12.1 # via deprecated # requests
xlrd==1.2.0 # via tabulator # tabulator
# via
# apache-superset
# flask
# flask-appbuilder
# tableschema
# tabulator
# via
# apache-superset
# flask-appbuilder
# via apache-superset
# via holidays
# via apache-superset
# via apache-superset
# via apache-superset
# via retry
# via python3-openid
deprecated==1.2.11
# via pygithub
# via email-validator
# via flask-appbuilder
et-xmlfile==1.0.1
# via openpyxl
# via apache-superset
# via flask-appbuilder
# via apache-superset
# via apache-superset
flask-cors==3.0.9
# via -r requirements/development.in
# via flask-appbuilder
# via flask-appbuilder
# via apache-superset
# via flask-appbuilder
# via
# flask-appbuilder
# flask-migrate
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-cors
# flask-jwt-extended
# flask-login
# flask-migrate
# flask-openid
# flask-sqlalchemy
# flask-wtf
future==0.18.2
# via pyhive
# via geopy
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via
# email-validator
# requests
# yarl
ijson==3.1.2.post0
# via tabulator
# via -r requirements/base.in
# via
# apache-superset
# tableschema
# via
# flask
# flask-wtf
jdcal==1.4.1
# via openpyxl
# via
# flask
# flask-babel
jmespath==0.10.0
# via
# boto3
# botocore
jsonlines==1.2.0
# via tabulator
# via
# flask-appbuilder
# tableschema
# via celery
# via holidays
linear-tsv==1.1.0
# via tabulator
# via alembic
# via apache-superset
# via
# jinja2
# mako
# wtforms
# via flask-appbuilder
# via flask-appbuilder
# via
# flask-appbuilder
# marshmallow-enum
# marshmallow-sqlalchemy
# via apache-superset
# via
# aiohttp
# yarl
mysqlclient==1.4.2.post1
# via -r requirements/development.in
# via croniter
# via
# pandas
# pyarrow
openpyxl==3.0.5
# via tabulator
# via bleach
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
pillow==7.2.0
# via -r requirements/development.in
# via apache-superset
# via flask-appbuilder
psycopg2-binary==2.8.5
# via -r requirements/development.in
# via retry
# via apache-superset
# via cffi
pydruid==0.6.1
# via -r requirements/development.in
pygithub==1.54.1
# via -r requirements/development.in
pyhive[hive]==0.6.3
# via -r requirements/development.in
# via
# apache-superset
# flask-appbuilder
# flask-jwt-extended
# pygithub
# via convertdate
# via
# apache-superset
# packaging
# via
# -r requirements/base.in
# jsonschema
# via
# alembic
# apache-superset
# botocore
# croniter
# flask-appbuilder
# holidays
# pandas
# pyhive
# tableschema
# via apache-superset
# via alembic
# via apache-superset
# via flask-openid
# via
# babel
# celery
# convertdate
# flask-babel
# pandas
# via
# apache-superset
# apispec
# via apache-superset
requests==2.24.0
# via
# pydruid
# pygithub
# tableschema
# tabulator
# via apache-superset
rfc3986==1.4.0
# via tableschema
s3transfer==0.3.3
# via boto3
sasl==0.2.1
# via
# pyhive
# thrift-sasl
# via apache-superset
# via apache-superset
# via
# bleach
# cryptography
# flask-cors
# flask-jwt-extended
# flask-talisman
# holidays
# isodate
# jsonlines
# jsonschema
# linear-tsv
# packaging
# pathlib2
# polyline
# prison
# pyrsistent
# python-dateutil
# sasl
# sqlalchemy-utils
# tableschema
# tabulator
# thrift
# thrift-sasl
# wtforms-json
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# sqlalchemy-utils
# tabulator
# via apache-superset
tableschema==1.20.0
# via -r requirements/development.in
tabulator==1.52.5
# via tableschema
thrift-sasl==0.4.2
# via pyhive
thrift==0.13.0
# via
# -r requirements/development.in
# pyhive
# thrift-sasl
# via
# aiohttp
# apache-superset
unicodecsv==0.14.1
# via
# tableschema
# tabulator
# via
# botocore
# requests
# selenium
# via
# amqp
# celery
# via bleach
# via
# flask
# flask-jwt-extended
wrapt==1.12.1
# via deprecated
# via apache-superset
# via
# flask-wtf
# wtforms-json
xlrd==1.2.0
# via tabulator
# via aiohttp
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@ -6,12 +6,207 @@
# pip-compile-multi # pip-compile-multi
# #
-r base.txt -r base.txt
-e file:. # via -r requirements/base.in -e file:.
gevent==20.9.0 # via -r requirements/docker.in # via -r requirements/base.in
greenlet==0.4.17 # via gevent # via slackclient
psycopg2-binary==2.8.6 # via -r requirements/docker.in # via flask-migrate
zope.event==4.5.0 # via gevent # via kombu
zope.interface==5.1.2 # via gevent # via flask-appbuilder
# via aiohttp
# via
# aiohttp
# jsonschema
# via flask-babel
# via apache-superset
# via celery
# via apache-superset
# via flask-compress
# via apache-superset
# via apache-superset
# via cryptography
# via aiohttp
# via
# apache-superset
# flask
# flask-appbuilder
# via
# apache-superset
# flask-appbuilder
# via apache-superset
# via holidays
# via apache-superset
# via apache-superset
# via apache-superset
# via retry
# via python3-openid
# via email-validator
# via flask-appbuilder
# via apache-superset
# via flask-appbuilder
# via apache-superset
# via apache-superset
# via flask-appbuilder
# via flask-appbuilder
# via apache-superset
# via flask-appbuilder
# via
# flask-appbuilder
# flask-migrate
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-jwt-extended
# flask-login
# flask-migrate
# flask-openid
# flask-sqlalchemy
# flask-wtf
# via geopy
# via apache-superset
gevent==20.9.0
# via -r requirements/docker.in
greenlet==0.4.17
# via gevent
# via apache-superset
# via apache-superset
# via apache-superset
# via
# email-validator
# yarl
# via -r requirements/base.in
# via apache-superset
# via
# flask
# flask-wtf
# via
# flask
# flask-babel
# via flask-appbuilder
# via celery
# via holidays
# via alembic
# via apache-superset
# via
# jinja2
# mako
# wtforms
# via flask-appbuilder
# via flask-appbuilder
# via
# flask-appbuilder
# marshmallow-enum
# marshmallow-sqlalchemy
# via apache-superset
# via
# aiohttp
# yarl
# via croniter
# via
# pandas
# pyarrow
# via bleach
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via flask-appbuilder
psycopg2-binary==2.8.6
# via -r requirements/docker.in
# via retry
# via apache-superset
# via cffi
# via
# apache-superset
# flask-appbuilder
# flask-jwt-extended
# via convertdate
# via
# apache-superset
# packaging
# via
# -r requirements/base.in
# jsonschema
# via
# alembic
# apache-superset
# croniter
# flask-appbuilder
# holidays
# pandas
# via apache-superset
# via alembic
# via apache-superset
# via flask-openid
# via
# babel
# celery
# convertdate
# flask-babel
# pandas
# via
# apache-superset
# apispec
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via
# bleach
# cryptography
# flask-jwt-extended
# flask-talisman
# holidays
# isodate
# jsonschema
# packaging
# pathlib2
# polyline
# prison
# pyrsistent
# python-dateutil
# sqlalchemy-utils
# wtforms-json
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# sqlalchemy-utils
# via apache-superset
# via
# aiohttp
# apache-superset
# via selenium
# via
# amqp
# celery
# via bleach
# via
# flask
# flask-jwt-extended
# via apache-superset
# via
# flask-wtf
# wtforms-json
# via aiohttp
# via importlib-metadata
zope.event==4.5.0
# via gevent
zope.interface==5.1.2
# via gevent
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@ -5,28 +5,58 @@
# #
# pip-compile-multi # pip-compile-multi
# #
appdirs==1.4.4 # via virtualenv appdirs==1.4.4
cfgv==3.2.0 # via pre-commit # via virtualenv
click==7.1.2 # via pip-compile-multi, pip-tools cfgv==3.2.0
distlib==0.3.1 # via virtualenv # via pre-commit
filelock==3.0.12 # via tox, virtualenv click==7.1.2
identify==1.5.9 # via pre-commit # via
importlib-metadata==2.1.1 # via pluggy, pre-commit, tox, virtualenv # pip-compile-multi
nodeenv==1.5.0 # via pre-commit # pip-tools
packaging==20.4 # via tox distlib==0.3.1
pip-compile-multi==2.1.0 # via -r requirements/integration.in # via virtualenv
pip-tools==5.3.1 # via pip-compile-multi filelock==3.0.12
pluggy==0.13.1 # via tox # via
pre-commit==2.8.2 # via -r requirements/integration.in # tox
py==1.9.0 # via tox # virtualenv
pyparsing==2.4.7 # via packaging identify==1.5.9
pyyaml==5.4.1 # via pre-commit # via pre-commit
six==1.15.0 # via packaging, pip-tools, tox, virtualenv nodeenv==1.5.0
toml==0.10.2 # via pre-commit, tox # via pre-commit
toposort==1.5 # via pip-compile-multi packaging==20.4
tox==3.20.1 # via -r requirements/integration.in # via tox
virtualenv==20.1.0 # via pre-commit, tox pip-compile-multi==2.1.0
zipp==3.4.0 # via importlib-metadata # via -r requirements/integration.in
pip-tools==5.3.1
# via pip-compile-multi
pluggy==0.13.1
# via tox
pre-commit==2.8.2
# via -r requirements/integration.in
py==1.9.0
# via tox
pyparsing==2.4.7
# via packaging
pyyaml==5.4.1
# via pre-commit
six==1.15.0
# via
# packaging
# pip-tools
# tox
# virtualenv
toml==0.10.2
# via
# pre-commit
# tox
toposort==1.5
# via pip-compile-multi
tox==3.20.1
# via -r requirements/integration.in
virtualenv==20.1.0
# via
# pre-commit
# tox
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# pip # pip

View File

@ -6,7 +6,270 @@
# pip-compile-multi # pip-compile-multi
# #
-r development.txt -r development.txt
-e file:. # via -r requirements/base.in -e file:.
# via -r requirements/base.in
# via slackclient
# via flask-migrate
# via kombu
# via flask-appbuilder
# via aiohttp
# via
# aiohttp
# jsonschema
# via flask-babel
# via apache-superset
# via celery
# via apache-superset
# via tabulator
# via
# boto3
# s3transfer
# via flask-compress
# via tableschema
# via apache-superset
# via apache-superset
# via requests
# via cryptography
# via
# aiohttp
# requests
# tabulator
# via
# apache-superset
# flask
# flask-appbuilder
# tableschema
# tabulator
# via
# apache-superset
# flask-appbuilder
# via apache-superset
# via holidays
# via apache-superset
# via apache-superset
# via apache-superset
# via retry
# via python3-openid
# via pygithub
# via email-validator
# via flask-appbuilder
# via openpyxl
# via apache-superset
# via flask-appbuilder
# via apache-superset
# via apache-superset
# via -r requirements/development.in
# via flask-appbuilder
# via flask-appbuilder
# via apache-superset
# via flask-appbuilder
# via
# flask-appbuilder
# flask-migrate
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-cors
# flask-jwt-extended
# flask-login
# flask-migrate
# flask-openid
# flask-sqlalchemy
# flask-wtf
# via pyhive
# via geopy
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via
# email-validator
# requests
# yarl
# via tabulator
# via -r requirements/base.in
# via
# apache-superset
# tableschema
# via
# flask
# flask-wtf
# via openpyxl
# via
# flask
# flask-babel
# via
# boto3
# botocore
# via tabulator
# via
# flask-appbuilder
# tableschema
# via celery
# via holidays
# via tabulator
# via alembic
# via apache-superset
# via
# jinja2
# mako
# wtforms
# via flask-appbuilder
# via flask-appbuilder
# via
# flask-appbuilder
# marshmallow-enum
# marshmallow-sqlalchemy
# via apache-superset
# via
# aiohttp
# yarl
# via -r requirements/development.in
# via croniter
# via
# pandas
# pyarrow
# via tabulator
# via bleach
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via -r requirements/development.in
# via apache-superset
# via flask-appbuilder
# via -r requirements/development.in
# via retry
# via apache-superset
# via cffi
# via -r requirements/development.in
# via -r requirements/development.in
# via -r requirements/development.in
# via
# apache-superset
# flask-appbuilder
# flask-jwt-extended
# pygithub
# via convertdate
# via
# apache-superset
# packaging
# via
# -r requirements/base.in
# jsonschema
# via
# alembic
# apache-superset
# botocore
# croniter
# flask-appbuilder
# holidays
# pandas
# pyhive
# tableschema
# via apache-superset
# via alembic
# via apache-superset
# via flask-openid
# via
# babel
# celery
# convertdate
# flask-babel
# pandas
# via
# apache-superset
# apispec
# via apache-superset
# via
# pydruid
# pygithub
# tableschema
# tabulator
# via apache-superset
# via tableschema
# via boto3
# via
# pyhive
# thrift-sasl
# via apache-superset
# via apache-superset
# via
# bleach
# cryptography
# flask-cors
# flask-jwt-extended
# flask-talisman
# holidays
# isodate
# jsonlines
# jsonschema
# linear-tsv
# packaging
# pathlib2
# polyline
# prison
# pyrsistent
# python-dateutil
# sasl
# sqlalchemy-utils
# tableschema
# tabulator
# thrift
# thrift-sasl
# wtforms-json
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# sqlalchemy-utils
# tabulator
# via apache-superset
# via -r requirements/development.in
# via tableschema
# via pyhive
# via
# -r requirements/development.in
# pyhive
# thrift-sasl
# via
# aiohttp
# apache-superset
# via
# tableschema
# tabulator
# via
# botocore
# requests
# selenium
# via
# amqp
# celery
# via bleach
# via
# flask
# flask-jwt-extended
# via deprecated
# via apache-superset
# via
# flask-wtf
# wtforms-json
# via tabulator
# via aiohttp
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# setuptools # setuptools

View File

@ -26,6 +26,7 @@ ipython==7.16.1
openapi-spec-validator openapi-spec-validator
openpyxl openpyxl
parameterized parameterized
pyfakefs
pyhive[presto]>=0.6.3 pyhive[presto]>=0.6.3
pylint pylint
pytest pytest

View File

@ -1,4 +1,4 @@
# SHA1:9d449781bc4ef88cd346b9dd5db55240472d5f0c # SHA1:1b285a0aa0e721283892b052553751d44f5dd81f
# #
# This file is autogenerated by pip-compile-multi # This file is autogenerated by pip-compile-multi
# To update, run: # To update, run:
@ -7,39 +7,392 @@
# #
-r development.txt -r development.txt
-r integration.txt -r integration.txt
-e file:. # via -r requirements/base.in -e file:.
appnope==0.1.0 # via ipython # via -r requirements/base.in
astroid==2.4.2 # via pylint # via slackclient
backcall==0.2.0 # via ipython # via flask-migrate
coverage==5.3 # via pytest-cov # via kombu
docker==4.3.1 # via -r requirements/testing.in # via flask-appbuilder
flask-testing==0.8.0 # via -r requirements/testing.in # via virtualenv
freezegun==1.0.0 # via -r requirements/testing.in appnope==0.1.0
iniconfig==1.1.1 # via pytest # via ipython
ipdb==0.13.4 # via -r requirements/testing.in astroid==2.4.2
ipython-genutils==0.2.0 # via traitlets # via pylint
ipython==7.16.1 # via -r requirements/testing.in, ipdb # via aiohttp
isort==5.6.4 # via pylint # via
jedi==0.17.2 # via ipython # aiohttp
lazy-object-proxy==1.4.3 # via astroid # jsonschema
mccabe==0.6.1 # via pylint # pytest
openapi-spec-validator==0.2.9 # via -r requirements/testing.in # via flask-babel
parameterized==0.7.4 # via -r requirements/testing.in backcall==0.2.0
parso==0.7.1 # via jedi # via ipython
pexpect==4.8.0 # via ipython # via apache-superset
pickleshare==0.7.5 # via ipython # via celery
prompt-toolkit==3.0.8 # via ipython # via apache-superset
ptyprocess==0.6.0 # via pexpect # via tabulator
pygments==2.7.2 # via ipython # via
pyhive[hive,presto]==0.6.3 # via -r requirements/development.in, -r requirements/testing.in # boto3
pylint==2.6.0 # via -r requirements/testing.in # s3transfer
pytest-cov==2.10.1 # via -r requirements/testing.in # via flask-compress
pytest==6.1.2 # via -r requirements/testing.in, pytest-cov # via tableschema
statsd==3.3.0 # via -r requirements/testing.in # via apache-superset
traitlets==5.0.5 # via ipython # via apache-superset
typed-ast==1.4.1 # via astroid # via requests
wcwidth==0.2.5 # via prompt-toolkit # via cryptography
websocket-client==0.57.0 # via docker # via pre-commit
# via
# aiohttp
# requests
# tabulator
# via
# apache-superset
# flask
# flask-appbuilder
# pip-compile-multi
# pip-tools
# tableschema
# tabulator
# via
# apache-superset
# flask-appbuilder
# via apache-superset
# via holidays
coverage==5.3
# via pytest-cov
# via apache-superset
# via apache-superset
# via apache-superset
# via
# ipython
# retry
# via python3-openid
# via pygithub
# via virtualenv
# via email-validator
docker==4.3.1
# via -r requirements/testing.in
# via flask-appbuilder
# via openpyxl
# via
# tox
# virtualenv
# via apache-superset
# via flask-appbuilder
# via apache-superset
# via apache-superset
# via -r requirements/development.in
# via flask-appbuilder
# via flask-appbuilder
# via apache-superset
# via flask-appbuilder
# via
# flask-appbuilder
# flask-migrate
# via apache-superset
flask-testing==0.8.0
# via -r requirements/testing.in
# via
# apache-superset
# flask-appbuilder
# via
# apache-superset
# flask-appbuilder
# flask-babel
# flask-caching
# flask-compress
# flask-cors
# flask-jwt-extended
# flask-login
# flask-migrate
# flask-openid
# flask-sqlalchemy
# flask-testing
# flask-wtf
freezegun==1.0.0
# via -r requirements/testing.in
# via pyhive
# via geopy
# via apache-superset
# via apache-superset
# via apache-superset
# via apache-superset
# via pre-commit
# via
# email-validator
# requests
# yarl
# via tabulator
# via -r requirements/base.in
iniconfig==1.1.1
# via pytest
ipdb==0.13.4
# via -r requirements/testing.in
ipython-genutils==0.2.0
# via traitlets
ipython==7.16.1
# via
# -r requirements/testing.in
# ipdb
# via
# apache-superset
# tableschema
isort==5.6.4
# via pylint
# via
# flask
# flask-wtf
# via openpyxl
jedi==0.17.2
# via ipython
# via
# flask
# flask-babel
# via
# boto3
# botocore
# via tabulator
# via
# flask-appbuilder
# openapi-spec-validator
# tableschema
# via celery
# via holidays
lazy-object-proxy==1.4.3
# via astroid
# via tabulator
# via alembic
# via apache-superset
# via
# jinja2
# mako
# wtforms
# via flask-appbuilder
# via flask-appbuilder
# via
# flask-appbuilder
# marshmallow-enum
# marshmallow-sqlalchemy
mccabe==0.6.1
# via pylint
# via apache-superset
# via
# aiohttp
# yarl
# via -r requirements/development.in
# via croniter
# via pre-commit
# via
# pandas
# pyarrow
openapi-spec-validator==0.2.9
# via -r requirements/testing.in
# via
# -r requirements/testing.in
# tabulator
# via
# bleach
# pytest
# tox
# via apache-superset
parameterized==0.7.4
# via -r requirements/testing.in
# via apache-superset
parso==0.7.1
# via jedi
# via apache-superset
pexpect==4.8.0
# via ipython
# via apache-superset
pickleshare==0.7.5
# via ipython
# via -r requirements/development.in
# via -r requirements/integration.in
# via pip-compile-multi
# via
# pytest
# tox
# via apache-superset
# via -r requirements/integration.in
# via flask-appbuilder
prompt-toolkit==3.0.8
# via ipython
# via -r requirements/development.in
ptyprocess==0.6.0
# via pexpect
# via
# pytest
# retry
# tox
# via apache-superset
# via cffi
# via -r requirements/development.in
pyfakefs==4.4.0
# via -r requirements/testing.in
# via -r requirements/development.in
pygments==2.7.2
# via ipython
pyhive[hive,presto]==0.6.3
# via
# -r requirements/development.in
# -r requirements/testing.in
# via
# apache-superset
# flask-appbuilder
# flask-jwt-extended
# pygithub
pylint==2.6.0
# via -r requirements/testing.in
# via convertdate
# via
# apache-superset
# packaging
# via
# -r requirements/base.in
# jsonschema
pytest-cov==2.10.1
# via -r requirements/testing.in
pytest==6.1.2
# via
# -r requirements/testing.in
# pytest-cov
# via
# alembic
# apache-superset
# botocore
# croniter
# flask-appbuilder
# freezegun
# holidays
# pandas
# pyhive
# tableschema
# via apache-superset
# via alembic
# via apache-superset
# via flask-openid
# via
# babel
# celery
# convertdate
# flask-babel
# pandas
# via
# apache-superset
# apispec
# openapi-spec-validator
# pre-commit
# via apache-superset
# via
# docker
# pydruid
# pygithub
# pyhive
# tableschema
# tabulator
# via apache-superset
# via tableschema
# via boto3
# via
# pyhive
# thrift-sasl
# via apache-superset
# via apache-superset
# via
# astroid
# bleach
# cryptography
# docker
# flask-cors
# flask-jwt-extended
# flask-talisman
# holidays
# isodate
# jsonlines
# jsonschema
# linear-tsv
# openapi-spec-validator
# packaging
# pathlib2
# pip-tools
# polyline
# prison
# pyrsistent
# python-dateutil
# sasl
# sqlalchemy-utils
# tableschema
# tabulator
# thrift
# thrift-sasl
# tox
# virtualenv
# websocket-client
# wtforms-json
# via apache-superset
# via
# apache-superset
# flask-appbuilder
# via
# alembic
# apache-superset
# flask-appbuilder
# flask-sqlalchemy
# marshmallow-sqlalchemy
# sqlalchemy-utils
# tabulator
# via apache-superset
statsd==3.3.0
# via -r requirements/testing.in
# via -r requirements/development.in
# via tableschema
# via pyhive
# via
# -r requirements/development.in
# pyhive
# thrift-sasl
# via
# pre-commit
# pylint
# pytest
# tox
# via pip-compile-multi
# via -r requirements/integration.in
traitlets==5.0.5
# via ipython
# via
# aiohttp
# apache-superset
# via
# tableschema
# tabulator
# via
# botocore
# requests
# selenium
# via
# amqp
# celery
# via
# pre-commit
# tox
wcwidth==0.2.5
# via prompt-toolkit
# via bleach
websocket-client==0.57.0
# via docker
# via
# flask
# flask-jwt-extended
# via
# astroid
# deprecated
# via apache-superset
# via
# flask-wtf
# wtforms-json
# via tabulator
# via aiohttp
# via importlib-metadata
# The following packages are considered to be unsafe in a requirements file: # The following packages are considered to be unsafe in a requirements file:
# pip # pip

View File

@ -21,7 +21,7 @@ import sys
from datetime import datetime, timedelta from datetime import datetime, timedelta
from subprocess import Popen from subprocess import Popen
from typing import Any, Dict, List, Optional, Type, Union from typing import Any, Dict, List, Optional, Type, Union
from zipfile import ZipFile from zipfile import is_zipfile, ZipFile
import click import click
import yaml import yaml
@ -250,12 +250,9 @@ if feature_flags.get("VERSIONED_EXPORT"):
@superset.command() @superset.command()
@with_appcontext @with_appcontext
@click.option( @click.option(
"--dashboard-file", "--dashboard-file", "-f", help="Specify the the file to export to",
"-f",
default="dashboard_export_YYYYMMDDTHHMMSS",
help="Specify the the file to export to",
) )
def export_dashboards(dashboard_file: Optional[str]) -> None: def export_dashboards(dashboard_file: Optional[str] = None) -> None:
"""Export dashboards to ZIP file""" """Export dashboards to ZIP file"""
from superset.dashboards.commands.export import ExportDashboardsCommand from superset.dashboards.commands.export import ExportDashboardsCommand
from superset.models.dashboard import Dashboard from superset.models.dashboard import Dashboard
@ -284,12 +281,9 @@ if feature_flags.get("VERSIONED_EXPORT"):
@superset.command() @superset.command()
@with_appcontext @with_appcontext
@click.option( @click.option(
"--datasource-file", "--datasource-file", "-f", help="Specify the the file to export to",
"-f",
default="dataset_export_YYYYMMDDTHHMMSS",
help="Specify the the file to export to",
) )
def export_datasources(datasource_file: Optional[str]) -> None: def export_datasources(datasource_file: Optional[str] = None) -> None:
"""Export datasources to ZIP file""" """Export datasources to ZIP file"""
from superset.connectors.sqla.models import SqlaTable from superset.connectors.sqla.models import SqlaTable
from superset.datasets.commands.export import ExportDatasetsCommand from superset.datasets.commands.export import ExportDatasetsCommand
@ -325,15 +319,20 @@ if feature_flags.get("VERSIONED_EXPORT"):
) )
def import_dashboards(path: str, username: Optional[str]) -> None: def import_dashboards(path: str, username: Optional[str]) -> None:
"""Import dashboards from ZIP file""" """Import dashboards from ZIP file"""
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.dashboards.commands.importers.dispatcher import ( from superset.dashboards.commands.importers.dispatcher import (
ImportDashboardsCommand, ImportDashboardsCommand,
) )
if username is not None: if username is not None:
g.user = security_manager.find_user(username=username) g.user = security_manager.find_user(username=username)
contents = {path: open(path).read()} if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
contents = {path: open(path).read()}
try: try:
ImportDashboardsCommand(contents).run() ImportDashboardsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except except Exception: # pylint: disable=broad-except
logger.exception( logger.exception(
"There was an error when importing the dashboards(s), please check " "There was an error when importing the dashboards(s), please check "
@ -343,36 +342,22 @@ if feature_flags.get("VERSIONED_EXPORT"):
@superset.command() @superset.command()
@with_appcontext @with_appcontext
@click.option( @click.option(
"--path", "--path", "-p", help="Path to a single ZIP file",
"-p",
help="Path to a single YAML file or path containing multiple YAML "
"files to import (*.yaml or *.yml)",
)
@click.option(
"--sync",
"-s",
"sync",
default="",
help="comma seperated list of element types to synchronize "
'e.g. "metrics,columns" deletes metrics and columns in the DB '
"that are not specified in the YAML file",
)
@click.option(
"--recursive",
"-r",
is_flag=True,
default=False,
help="recursively search the path for yaml files",
) )
def import_datasources(path: str) -> None: def import_datasources(path: str) -> None:
"""Import datasources from ZIP file""" """Import datasources from ZIP file"""
from superset.commands.importers.v1.utils import get_contents_from_bundle
from superset.datasets.commands.importers.dispatcher import ( from superset.datasets.commands.importers.dispatcher import (
ImportDatasetsCommand, ImportDatasetsCommand,
) )
contents = {path: open(path).read()} if is_zipfile(path):
with ZipFile(path) as bundle:
contents = get_contents_from_bundle(bundle)
else:
contents = {path: open(path).read()}
try: try:
ImportDatasetsCommand(contents).run() ImportDatasetsCommand(contents, overwrite=True).run()
except Exception: # pylint: disable=broad-except except Exception: # pylint: disable=broad-except
logger.exception( logger.exception(
"There was an error when importing the dataset(s), please check the " "There was an error when importing the dataset(s), please check the "
@ -482,7 +467,7 @@ else:
help="Specify the user name to assign dashboards to", help="Specify the user name to assign dashboards to",
) )
def import_dashboards(path: str, recursive: bool, username: str) -> None: def import_dashboards(path: str, recursive: bool, username: str) -> None:
"""Import dashboards from ZIP file""" """Import dashboards from JSON file"""
from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand from superset.dashboards.commands.importers.v0 import ImportDashboardsCommand
path_object = Path(path) path_object = Path(path)

View File

@ -124,6 +124,8 @@ class ImportDashboardsCommand(ImportModelsCommand):
config = update_id_refs(config, chart_ids) config = update_id_refs(config, chart_ids)
dashboard = import_dashboard(session, config, overwrite=overwrite) dashboard = import_dashboard(session, config, overwrite=overwrite)
for uuid in find_chart_uuids(config["position"]): for uuid in find_chart_uuids(config["position"]):
if uuid not in chart_ids:
break
chart_id = chart_ids[uuid] chart_id = chart_ids[uuid]
if (dashboard.id, chart_id) not in existing_relationships: if (dashboard.id, chart_id) not in existing_relationships:
dashboard_chart_ids.append((dashboard.id, chart_id)) dashboard_chart_ids.append((dashboard.id, chart_id))

View File

@ -51,7 +51,9 @@ def update_id_refs(config: Dict[str, Any], chart_ids: Dict[str, int]) -> Dict[st
# build map old_id => new_id # build map old_id => new_id
old_ids = build_uuid_to_id_map(fixed["position"]) old_ids = build_uuid_to_id_map(fixed["position"])
id_map = {old_id: chart_ids[uuid] for uuid, old_id in old_ids.items()} id_map = {
old_id: chart_ids[uuid] for uuid, old_id in old_ids.items() if uuid in chart_ids
}
# fix metadata # fix metadata
metadata = fixed.get("metadata", {}) metadata = fixed.get("metadata", {})
@ -97,6 +99,7 @@ def update_id_refs(config: Dict[str, Any], chart_ids: Dict[str, int]) -> Dict[st
isinstance(child, dict) isinstance(child, dict)
and child["type"] == "CHART" and child["type"] == "CHART"
and "uuid" in child["meta"] and "uuid" in child["meta"]
and child["meta"]["uuid"] in chart_ids
): ):
child["meta"]["chartId"] = chart_ids[child["meta"]["uuid"]] child["meta"]["chartId"] = chart_ids[child["meta"]["uuid"]]

View File

@ -116,7 +116,13 @@ def import_dataset(
session.flush() session.flush()
example_database = get_example_database() example_database = get_example_database()
table_exists = example_database.has_table_by_name(dataset.table_name) try:
table_exists = example_database.has_table_by_name(dataset.table_name)
except Exception as ex:
# MySQL doesn't play nice with GSheets table names
logger.warning("Couldn't check if table %s exists, stopping import")
raise ex
if data_uri and (not table_exists or force_data): if data_uri and (not table_exists or force_data):
load_data(data_uri, dataset, example_database, session) load_data(data_uri, dataset, example_database, session)

208
tests/cli_tests.py Normal file
View File

@ -0,0 +1,208 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import importlib
import json
from pathlib import Path
from unittest import mock
from zipfile import is_zipfile, ZipFile
import pytest
import yaml
from freezegun import freeze_time
import superset.cli
from superset import app
from tests.fixtures.birth_names_dashboard import load_birth_names_dashboard_with_slices
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_dashboards_original(app_context, fs):
    """
    Test that a JSON file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # Re-import the CLI module so export_dashboards is defined according
    # to the currently active feature flags.
    importlib.reload(superset.cli)

    cli_runner = app.test_cli_runner()
    result = cli_runner.invoke(
        superset.cli.export_dashboards, ("-f", "dashboards.json")
    )
    assert result.exit_code == 0

    export_path = Path("dashboards.json")
    assert export_path.exists()

    # The exported file must parse as JSON.
    json.loads(export_path.read_text())
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_export_datasources_original(app_context, fs):
    """
    Test that a YAML file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # reload to define export_datasources correctly based on the
    # feature flags
    importlib.reload(superset.cli)

    runner = app.test_cli_runner()
    response = runner.invoke(
        superset.cli.export_datasources, ("-f", "datasources.yaml")
    )

    assert response.exit_code == 0
    assert Path("datasources.yaml").exists()

    # check that file is valid YAML
    with open("datasources.yaml") as fp:
        contents = fp.read()
    yaml.safe_load(contents)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_dashboards_versioned_export(app_context, fs):
    """
    Test that a ZIP file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # Reload so the CLI picks up the versioned (ZIP) exporter, since
    # VERSIONED_EXPORT is patched on above.
    importlib.reload(superset.cli)

    # Freeze the clock: the archive name embeds a timestamp.
    with freeze_time("2021-01-01T00:00:00Z"):
        result = app.test_cli_runner().invoke(superset.cli.export_dashboards, ())
    assert result.exit_code == 0

    archive = "dashboard_export_20210101T000000.zip"
    assert Path(archive).exists()
    assert is_zipfile(archive)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
def test_export_datasources_versioned_export(app_context, fs):
    """
    Test that a ZIP file is exported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # Reload so the CLI picks up the versioned (ZIP) exporter, since
    # VERSIONED_EXPORT is patched on above.
    importlib.reload(superset.cli)

    # Freeze the clock: the archive name embeds a timestamp.
    with freeze_time("2021-01-01T00:00:00Z"):
        result = app.test_cli_runner().invoke(superset.cli.export_datasources, ())
    assert result.exit_code == 0

    archive = "dataset_export_20210101T000000.zip"
    assert Path(archive).exists()
    assert is_zipfile(archive)
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.dashboards.commands.importers.dispatcher.ImportDashboardsCommand")
def test_import_dashboards_versioned_export(import_dashboards_command, app_context, fs):
    """
    Test that both ZIP and JSON can be imported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # Reload so the CLI wires up the versioned importer, since
    # VERSIONED_EXPORT is patched on above.
    importlib.reload(superset.cli)

    # A plain JSON export is passed straight through to the command.
    Path("dashboards.json").write_text('{"hello": "world"}')
    result = app.test_cli_runner().invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.json")
    )
    assert result.exit_code == 0
    import_dashboards_command.assert_called_with(
        {"dashboards.json": '{"hello": "world"}'}, overwrite=True
    )

    # A ZIP bundle is unpacked before being handed to the command.
    with ZipFile("dashboards.zip", "w") as bundle, bundle.open(
        "dashboards/dashboard.yaml", "w"
    ) as member:
        member.write(b"hello: world")
    result = app.test_cli_runner().invoke(
        superset.cli.import_dashboards, ("-p", "dashboards.zip")
    )
    assert result.exit_code == 0
    import_dashboards_command.assert_called_with(
        {"dashboard.yaml": "hello: world"}, overwrite=True
    )
@mock.patch.dict(
    "superset.config.DEFAULT_FEATURE_FLAGS", {"VERSIONED_EXPORT": True}, clear=True
)
@mock.patch("superset.datasets.commands.importers.dispatcher.ImportDatasetsCommand")
def test_import_datasets_versioned_export(import_datasets_command, app_context, fs):
    """
    Test that both ZIP and YAML can be imported.
    """
    # pylint: disable=reimported, redefined-outer-name
    import superset.cli  # noqa: F811

    # Reload so the CLI wires up the versioned importer, since
    # VERSIONED_EXPORT is patched on above.
    importlib.reload(superset.cli)

    # A plain YAML export is passed straight through to the command.
    Path("datasets.yaml").write_text("hello: world")
    result = app.test_cli_runner().invoke(
        superset.cli.import_datasources, ("-p", "datasets.yaml")
    )
    assert result.exit_code == 0
    import_datasets_command.assert_called_with(
        {"datasets.yaml": "hello: world"}, overwrite=True
    )

    # A ZIP bundle is unpacked before being handed to the command.
    with ZipFile("datasets.zip", "w") as bundle, bundle.open(
        "datasets/dataset.yaml", "w"
    ) as member:
        member.write(b"hello: world")
    result = app.test_cli_runner().invoke(
        superset.cli.import_datasources, ("-p", "datasets.zip")
    )
    assert result.exit_code == 0
    import_datasets_command.assert_called_with(
        {"dataset.yaml": "hello: world"}, overwrite=True
    )

View File

@ -30,6 +30,12 @@ CTAS_SCHEMA_NAME = "sqllab_test_db"
ADMIN_SCHEMA_NAME = "admin_database" ADMIN_SCHEMA_NAME = "admin_database"
@pytest.fixture
def app_context():
    """Push an application context for the duration of a test."""
    with app.app_context():
        yield
@pytest.fixture(autouse=True, scope="session") @pytest.fixture(autouse=True, scope="session")
def setup_sample_data() -> Any: def setup_sample_data() -> Any:
with app.app_context(): with app.app_context():