chore: deprecate tox in favor of act (#29382)
This commit is contained in: parent 68499a1199, commit fd9d3301f6
@@ -91,13 +91,14 @@ jobs:
             const body = action === 'noop'
               ? `@${user} No ephemeral environment action detected. Please use '/testenv up' or '/testenv down'. [View workflow run](${workflowUrl}).`
               : `@${user} Processing your ephemeral environment request [here](${workflowUrl}).`;

-            await github.rest.issues.createComment({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              issue_number: issueNumber,
-              body,
-            });
+            if (action !== 'noop') {
+              await github.rest.issues.createComment({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                issue_number: issueNumber,
+                body,
+              });
+            }

   ephemeral-docker-build:
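To exercise this comment-driven flow end to end, a minimal sketch using the GitHub CLI; the PR number is hypothetical, and `/testenv up` is one of the commands the workflow parses:

```bash
# hypothetical PR number; posts the comment that triggers the workflow above
gh pr comment 12345 --body "/testenv up"
```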
@@ -455,17 +455,6 @@ pre-commit install

 A series of checks will now run when you make a git commit.

-Alternatively, it is possible to run pre-commit via tox:
-
-```bash
-tox -e pre-commit
-```
-
-Or by running pre-commit manually:
-
-```bash
-pre-commit run --all-files
-```
-
 ## Linting
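With the tox shortcut gone, the hooks can still be run by hand; this is the same command the removed lines documented:

```bash
pre-commit run --all-files
```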
@@ -474,8 +463,7 @@ pre-commit run --all-files
 We use [Pylint](https://pylint.org/) for linting which can be invoked via:

 ```bash
-# for python
-tox -e pylint
+pylint
 ```

 In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
@@ -502,39 +490,108 @@ If using the eslint extension with vscode, put the following in your workspace `settings.json` file:
 ]
 ```


+## GitHub Actions and `act`
+
+:::tip
+`act` compatibility of Superset's GHAs is not fully tested. Running `act` locally may or may not
+work for different actions, and may require fine-tuning and local secret handling.
+For those more intricate GHAs that are tricky to run locally, we recommend iterating
+directly on GHA's infrastructure, by pushing to a branch and monitoring the GHA logs.
+For more targeted iteration, see the `gh workflow run --ref {BRANCH}` subcommand of the GitHub CLI.
+:::

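As a companion to the tip above, a sketch of the targeted-iteration approach with the GitHub CLI; the workflow file name is hypothetical, pick a real one from `.github/workflows/`:

```bash
# hypothetical workflow file; requires an authenticated GitHub CLI,
# and the workflow must declare a workflow_dispatch trigger
gh workflow run some-workflow.yml --ref my-feature-branch
```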
+For automation and CI/CD, Superset makes extensive use of GitHub Actions (GHA). You
+can find all of the workflows and other assets under the `.github/` folder. This includes:
+- running the backend unit test suites (`tests/`)
+- running the frontend test suites (`superset-frontend/src/**.*.test.*`)
+- running our Cypress end-to-end tests (`superset-frontend/cypress-base/`)
+- linting the codebase, including all Python, TypeScript, JavaScript, YAML and beyond
+- checking for all sorts of other rules and conventions
+
+When you open a pull request (PR), the appropriate GitHub Actions (GHA) workflows will
+automatically run depending on the changes in your branch. It's perfectly reasonable
+(and required!) to rely on this automation. However, the downside is that it's mostly an
+all-or-nothing approach and doesn't provide much control to target specific tests or
+iterate quickly.
+
+At times, it may be more convenient to run GHA workflows locally. For that purpose
+we use [act](https://github.com/nektos/act), a tool that allows you to run GHA
+workflows locally. It simulates the GitHub Actions environment, enabling developers to
+test and debug workflows on their local machines before pushing changes to the repository. More
+on how to use it in the next section.
+
+:::note
+In both GHA and `act`, we can run a more complex matrix for our tests, executing against different
+database engines (PostgreSQL, MySQL, SQLite) and different versions of Python.
+This enables us to ensure compatibility and stability across various environments.
+:::
+
+### Using `act`
+
+First, install `act`: https://nektosact.com/
+
+To list the workflows, simply:
+
+```bash
+act --list
+```
+
+To run a specific workflow:
+
+```bash
+act pull_request --job {workflow_name} --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
+```
+
+In the example above, notice that:
+- we target a specific workflow, using `--job`
+- we pass a secret using `--secret`, as many jobs require read access (public) to the repo
+- we simulate a `pull_request` event by specifying it as the first arg;
+  similarly, we could simulate a `push` event or something else
+- we specify `--container-architecture`, which tends to emulate GHA more reliably

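To make the event semantics concrete, a sketch that simulates a `push` event instead; the job name is hypothetical, pick a real one from `act --list`:

```bash
# hypothetical job name; flags as in the pull_request example above
act push --job pre-commit --secret GITHUB_TOKEN=$GITHUB_TOKEN --container-architecture linux/amd64
```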
+:::note
+`act` is a rich tool that offers all sorts of features, allowing you to simulate different
+events (pull_request, push, ...), semantics around passing secrets where required, and much
+more. For more information, refer to [act's documentation](https://nektosact.com/).
+:::
+
+:::note
+Some jobs require secrets to interact with external systems and accounts that you may
+not have in your possession. In those cases you may have to rely on remote CI, or further
+parameterize the job to target a different environment/sandbox (or your own) alongside the
+related secrets.
+:::

 ---

 ## Testing

 ### Python Testing

-All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html)
-a standardized testing framework.
-All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
+#### Unit Tests
+
+For unit tests located in `tests/unit_tests/`, it's usually easy to simply run them locally using:

 ```bash
-tox -e <environment>
+pytest tests/unit_tests/*
 ```

-For example,
+#### Integration Tests
+
+For more complex pytest-defined integration tests (not to be confused with our end-to-end Cypress tests), many tests will require having a working test environment. Some tests require a database, Celery, and potentially other services or libraries installed.
+
+### Running Tests with `act`
+
+To run integration tests locally using `act`, ensure you have followed the setup instructions from the [GitHub Actions and `act`](#github-actions-and-act) section. You can run specific workflows or jobs that include integration tests. For example:

 ```bash
-tox -e py38
+act --job test-python-38 --secret GITHUB_TOKEN=$GITHUB_TOKEN --event pull_request --container-architecture linux/amd64
 ```

-Alternatively, you can run all tests in a single file via,
+#### Running locally using a test script

-```bash
-tox -e <environment> -- tests/test_file.py
-```
-
-or for a specific test via,
-
-```bash
-tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
-```
-
-Note that the test environment uses a temporary directory for defining the
-SQLite databases which will be cleared each time before the group of test
-commands are invoked.
+There is also a utility script included in the Superset codebase to run Python integration tests. The [readme can be found here](https://github.com/apache/superset/tree/master/scripts/tests).

-There is also a utility script included in the Superset codebase to run python integration tests. The [readme can be
-found here](https://github.com/apache/superset/tree/master/scripts/tests)
|
|
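For targeted runs, the removed tox selectors translate directly to pytest; the file and test names below are the same placeholders used above:

```bash
# a single file
pytest tests/test_file.py

# a single test
pytest tests/test_file.py::TestClassName::test_method_name
```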
@@ -545,7 +602,7 @@ To run all integration tests, for example, run this script from the root directory:
 scripts/tests/run.sh
 ```

-You can run unit tests found in './tests/unit_tests' for example with pytest. It is a simple way to run an isolated test that doesn't need any database setup
+You can run unit tests found in `./tests/unit_tests` with pytest. It is a simple way to run an isolated test that doesn't need any database setup:

 ```bash
 pytest ./link_to_test.py
@@ -568,7 +625,7 @@ npm run test -- path/to/file.js

 ### Integration Testing

-We use [Cypress](https://www.cypress.io/) for integration tests. Tests can be run by `tox -e cypress`. To open Cypress and explore tests first setup and run test server:
+We use [Cypress](https://www.cypress.io/) for integration tests. To open Cypress and explore tests, first set up and run the test server:

 ```bash
 export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
@@ -170,31 +170,10 @@ npm run dev-server

 ### Python Testing

-All python tests are carried out in [tox](https://tox.readthedocs.io/en/latest/index.html)
-a standardized testing framework.
-All python tests can be run with any of the tox [environments](https://tox.readthedocs.io/en/latest/example/basic.html#a-simple-tox-ini-default-environments), via,
+`pytest`, backed by docker-compose, is how we recommend running tests locally.
+
-```bash
-tox -e <environment>
-```
-
-For example,
-
-```bash
-tox -e py38
-```
-
-Alternatively, you can run all tests in a single file via,
-
-```bash
-tox -e <environment> -- tests/test_file.py
-```
-
-or for a specific test via,
-
-```bash
-tox -e <environment> -- tests/test_file.py::TestClassName::test_method_name
-```
+For a more complex test matrix (against different database backends, Python versions, ...) you
+can rely on our GitHub Actions by simply opening a draft pull request.

 Note that the test environment uses a temporary directory for defining the
 SQLite databases which will be cleared each time before the group of test
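A sketch of what that looks like in practice, assuming the repo's docker-compose stack provides the backing services; the service names here are hypothetical, check `docker-compose.yml` for the real ones:

```bash
# bring up just the backing services (hypothetical service names)
docker-compose up -d db redis
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
pytest tests/integration_tests/
```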
@@ -246,13 +225,7 @@ npm run test -- path/to/file.js

 ### e2e Integration Testing

 We use [Cypress](https://www.cypress.io/) for end-to-end integration
-tests. One easy option to get started quickly is to leverage `tox` to
-run the whole suite in an isolated environment.
-
-```bash
-tox -e cypress
-```
-
+tests. For e2e testing, we recommend that you use a `docker-compose`-backed setup.

 Alternatively, you can go lower level and set things up in your
 development environment by following these steps:
@@ -598,17 +571,31 @@ pybabel compile -d superset/translations

 ### Python

-We use [Pylint](https://pylint.org/) for linting which can be invoked via:
+We use [Pylint](https://pylint.org/) and [ruff](https://github.com/astral-sh/ruff)
+for linting, which can be invoked via:

 ```bash
-# for python
-tox -e pylint
-```
+# Run pylint
+pylint superset/
+
+# auto-reformat using ruff
+ruff format
+
+# lint check with ruff
+ruff check
+
+# lint fix with ruff
+ruff check --fix
+```

-In terms of best practices please avoid blanket disabling of Pylint messages globally (via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions. Disabling should occur inline as it prevents masking issues and provides context as to why said message is disabled.
-
-Additionally, the Python code is auto-formatted using [Black](https://github.com/python/black) which
-is configured as a pre-commit hook. There are also numerous [editor integrations](https://black.readthedocs.io/en/stable/integrations/editors.html)
+In terms of best practices, please avoid blanket disabling of Pylint messages globally
+(via `.pylintrc`) or top-level within the file header, albeit there being a few exceptions.
+Disabling should occur inline, as it prevents masking issues and provides context as to why
+said message is disabled.
+
+All this is configured to run in pre-commit hooks, which we encourage you to set up
+with `pre-commit install`.

 ### TypeScript

pyproject.toml (164 changed lines)
@@ -198,7 +198,6 @@ development = [
     "ruff",
     "sqloxide",
     "statsd",
-    "tox",
 ]

 [project.urls]
@@ -235,169 +234,6 @@ disallow_untyped_calls = false
 disallow_untyped_defs = false
 disable_error_code = "annotation-unchecked"

-[tool.tox]
-legacy_tox_ini = """
-# Remember to start celery workers to run celery tests, e.g.
-# celery --app=superset.tasks.celery_app:app worker -Ofair -c 2
-[testenv]
-basepython = python3.10
-ignore_basepython_conflict = true
-commands =
-    superset db upgrade
-    superset init
-    superset load-test-users
-    # use -s to be able to use break pointers.
-    # no args or tests/* can be passed as an argument to run all tests
-    pytest -s {posargs}
-deps =
-    -rrequirements/development.txt
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-    mysql: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
-    postgres: SUPERSET__SQLALCHEMY_DATABASE_URI = postgresql+psycopg2://superset:superset@localhost/test
-    sqlite: SUPERSET__SQLALCHEMY_DATABASE_URI = sqlite:////{envtmpdir}/superset.db
-    sqlite: SUPERSET__SQLALCHEMY_EXAMPLES_URI = sqlite:////{envtmpdir}/examples.db
-    mysql-presto: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
-    # docker run -p 8080:8080 --name presto starburstdata/presto
-    mysql-presto: SUPERSET__SQLALCHEMY_EXAMPLES_URI = presto://localhost:8080/memory/default
-    # based on https://github.com/big-data-europe/docker-hadoop
-    # clone the repo & run docker compose up -d to test locally
-    mysql-hive: SUPERSET__SQLALCHEMY_DATABASE_URI = mysql://mysqluser:mysqluserpassword@localhost/superset?charset=utf8
-    mysql-hive: SUPERSET__SQLALCHEMY_EXAMPLES_URI = hive://localhost:10000/default
-    # make sure that directory is accessible by docker
-    hive: UPLOAD_FOLDER = /tmp/.superset/app/static/uploads/
-usedevelop = true
-allowlist_externals =
-    npm
-    pkill
-
-[testenv:cypress]
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-commands =
-    npm install -g npm@'>=6.5.0'
-    pip install -e {toxinidir}/
-    {toxinidir}/superset-frontend/cypress_build.sh
-commands_post =
-    pkill -if "python {envbindir}/flask"
-
-[testenv:cypress-dashboard]
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-commands =
-    npm install -g npm@'>=6.5.0'
-    pip install -e {toxinidir}/
-    {toxinidir}/superset-frontend/cypress_build.sh dashboard
-commands_post =
-    pkill -if "python {envbindir}/flask"
-
-[testenv:cypress-explore]
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-commands =
-    npm install -g npm@'>=6.5.0'
-    pip install -e {toxinidir}/
-    {toxinidir}/superset-frontend/cypress_build.sh explore
-commands_post =
-    pkill -if "python {envbindir}/flask"
-
-[testenv:cypress-sqllab]
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-commands =
-    npm install -g npm@'>=6.5.0'
-    pip install -e {toxinidir}/
-    {toxinidir}/superset-frontend/cypress_build.sh sqllab
-commands_post =
-    pkill -if "python {envbindir}/flask"
-
-[testenv:cypress-sqllab-backend-persist]
-setenv =
-    PYTHONPATH = {toxinidir}
-    SUPERSET_TESTENV = true
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config
-    SUPERSET_HOME = {envtmpdir}
-commands =
-    npm install -g npm@'>=6.5.0'
-    pip install -e {toxinidir}/
-    {toxinidir}/superset-frontend/cypress_build.sh sqllab
-commands_post =
-    pkill -if "python {envbindir}/flask"
-
-[testenv:eslint]
-changedir = {toxinidir}/superset-frontend
-commands =
-    npm run lint
-deps =
-
-[testenv:fossa]
-commands =
-    {toxinidir}/scripts/fossa.sh
-deps =
-passenv = *
-
-[testenv:javascript]
-commands =
-    npm install -g npm@'>=6.5.0'
-    {toxinidir}/superset-frontend/js_build.sh
-deps =
-
-[testenv:license-check]
-commands =
-    {toxinidir}/scripts/check_license.sh
-passenv = *
-whitelist_externals =
-    {toxinidir}/scripts/check_license.sh
-deps =
-
-[testenv:pre-commit]
-commands =
-    pre-commit run --all-files
-deps =
-    -rrequirements/development.txt
-skip_install = true
-
-[testenv:pylint]
-commands =
-    pylint superset
-deps =
-    -rrequirements/development.txt
-
-[testenv:thumbnails]
-setenv =
-    SUPERSET_CONFIG = tests.integration_tests.superset_test_config_thumbnails
-deps =
-    -rrequirements/development.txt
-
-[tox]
-envlist =
-    cypress-dashboard
-    cypress-explore
-    cypress-sqllab
-    cypress-sqllab-backend-persist
-    eslint
-    fossa
-    javascript
-    license-check
-    pre-commit
-    pylint
-skipsdist = true
-"""
 [tool.ruff]
 # Exclude a variety of commonly ignored directories.
 exclude = [
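For anyone who relied on the deleted `[testenv]` block, a sketch of reproducing its `postgres` environment with plain `pytest`; the values are lifted from the removed lines above and assume a local Postgres matching the URI:

```bash
# values copied from the deleted [tool.tox] block; assumes Postgres at this URI
export PYTHONPATH=$(pwd)
export SUPERSET_TESTENV=true
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config
export SUPERSET__SQLALCHEMY_DATABASE_URI=postgresql+psycopg2://superset:superset@localhost/test
superset db upgrade
superset init
superset load-test-users
# -s keeps stdin attached so breakpoints work; no args runs all tests
pytest -s tests/
```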

@@ -152,9 +152,7 @@ geopy==2.4.1
 google-auth==2.36.0
   # via shillelagh
 greenlet==3.0.3
-  # via
-  #   shillelagh
-  #   sqlalchemy
+  # via shillelagh
 gunicorn==23.0.0
   # via apache-superset
 hashids==1.3.1
@@ -12,18 +12,10 @@
   # -r requirements/development.in
 astroid==3.1.0
   # via pylint
-boto3==1.34.112
-  # via apache-superset
-botocore==1.34.112
-  # via
-  #   boto3
-  #   s3transfer
 build==1.2.1
   # via pip-tools
 cfgv==3.4.0
   # via pre-commit
-chardet==5.2.0
-  # via tox
 cmdstanpy==1.1.0
   # via prophet
 contourpy==1.0.7
@@ -41,9 +33,7 @@ distlib==0.3.8
 docker==7.0.0
   # via apache-superset
 filelock==3.12.2
-  # via
-  #   tox
-  #   virtualenv
+  # via virtualenv
 flask-cors==4.0.0
   # via apache-superset
 flask-testing==0.8.1
@@ -97,10 +87,6 @@ iniconfig==2.0.0
   # via pytest
 isort==5.12.0
   # via pylint
-jmespath==1.0.1
-  # via
-  #   boto3
-  #   botocore
 jsonschema-spec==0.1.6
   # via openapi-spec-validator
 kiwisolver==1.4.7
@@ -138,9 +124,7 @@ pip-tools==7.4.1
 playwright==1.42.0
   # via apache-superset
 pluggy==1.5.0
-  # via
-  #   pytest
-  #   tox
+  # via pytest
 pre-commit==4.0.1
   # via apache-superset
 progress==1.6
@@ -176,8 +160,6 @@ pyinstrument==4.4.0
   # via apache-superset
 pylint==3.1.0
   # via apache-superset
-pyproject-api==1.8.0
-  # via tox
 pyproject-hooks==1.2.0
   # via
   #   build
@@ -199,8 +181,6 @@ rfc3339-validator==0.1.4
   # via openapi-schema-validator
 ruff==0.8.0
   # via apache-superset
-s3transfer==0.10.1
-  # via boto3
 sqlalchemy-bigquery==1.12.0
   # via apache-superset
 sqloxide==0.1.51
@@ -213,15 +193,11 @@ tomli==2.1.0
   #   coverage
   #   pip-tools
   #   pylint
-  #   pyproject-api
   #   pytest
-  #   tox
 tomlkit==0.13.2
   # via pylint
 toposort==1.10
   # via pip-compile-multi
-tox==4.6.4
-  # via apache-superset
 tqdm==4.67.1
   # via
   #   cmdstanpy
@@ -231,9 +207,7 @@ trino==0.330.0
 tzlocal==5.2
   # via trino
 virtualenv==20.23.1
-  # via
-  #   pre-commit
-  #   tox
+  # via pre-commit
 wheel==0.45.1
   # via pip-tools
 zope-event==5.0
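The `# via pip-compile-multi` annotations above hint at how these files are regenerated after a change like this; a sketch, with the caveat that the repo may wrap this in a helper script:

```bash
# regenerate the pinned requirements/*.txt files from their *.in sources
pip install pip-compile-multi
pip-compile-multi
```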
@@ -95,15 +95,21 @@ def print_files(files: List[str]) -> None:
     print("\n".join([f"- {s}" for s in files]))


+def is_int(s: str) -> bool:
+    return bool(re.match(r"^-?\d+$", s))
+
+
 def main(event_type: str, sha: str, repo: str) -> None:
     """Main function to check for file changes based on event context."""
     print("SHA:", sha)
     print("EVENT_TYPE", event_type)
+    files = None
     if event_type == "pull_request":
         pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
-        files = fetch_changed_files_pr(repo, pr_number)
-        print("PR files:")
-        print_files(files)
+        if is_int(pr_number):
+            files = fetch_changed_files_pr(repo, pr_number)
+            print("PR files:")
+            print_files(files)

     elif event_type == "push":
         files = fetch_changed_files_push(repo, sha)
@@ -119,7 +125,7 @@ def main(event_type: str, sha: str, repo: str) -> None:
     changes_detected = {}
     for group, regex_patterns in PATTERNS.items():
         patterns_compiled = [re.compile(p) for p in regex_patterns]
-        changes_detected[group] = event_type == "workflow_dispatch" or detect_changes(
+        changes_detected[group] = files is None or detect_changes(
             files, patterns_compiled
         )
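To see why the `is_int` guard matters, consider the shapes `GITHUB_REF` can take; a quick shell illustration with hypothetical ref values (`$(NF-1)` mirrors the script's `split("/")[-2]`):

```bash
# on a pull_request event the second-to-last segment is the PR number
echo "refs/pull/12345/merge" | awk -F/ '{print $(NF-1)}'   # prints: 12345
# on other events it is not numeric, which the new is_int() guard filters out
echo "refs/heads/my-branch" | awk -F/ '{print $(NF-1)}'    # prints: heads
```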
@@ -66,9 +66,8 @@ def upload_to_s3(filename: str, upload_prefix: str, table: Table) -> str:
     :returns: The S3 location of the table
     """

-    # pylint: disable=import-outside-toplevel
-    import boto3
-    from boto3.s3.transfer import TransferConfig
+    import boto3  # pylint: disable=all
+    from boto3.s3.transfer import TransferConfig  # pylint: disable=all

     bucket_path = current_app.config["CSV_TO_HIVE_UPLOAD_S3_BUCKET"]
@@ -474,7 +473,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         return None

     @classmethod
-    def _partition_query(  # pylint: disable=too-many-arguments
+    def _partition_query(  # pylint: disable=all
         cls,
         table: Table,
         indexes: list[dict[str, Any]],
@@ -489,7 +488,7 @@ class HiveEngineSpec(PrestoEngineSpec):
         return f"SHOW PARTITIONS {full_table_name}"

     @classmethod
-    def select_star(  # pylint: disable=too-many-arguments
+    def select_star(  # pylint: disable=all
         cls,
         database: Database,
         table: Table,
@@ -256,6 +256,9 @@ def test_s3_upload_prefix(schema: str, upload_prefix: str) -> None:
     )


+@unittest.skipUnless(
+    SupersetTestCase.is_module_installed("boto3"), "boto3 not installed"
+)
 def test_upload_to_s3_no_bucket_path():
     with app.app_context():
         with pytest.raises(
@@ -265,6 +268,9 @@ def test_upload_to_s3_no_bucket_path():
         upload_to_s3("filename", "prefix", Table("table"))


+@unittest.skipUnless(
+    SupersetTestCase.is_module_installed("boto3"), "boto3 not installed"
+)
 @mock.patch("boto3.client")
 def test_upload_to_s3_client_error(client):
     config = app.config.copy()
@@ -282,6 +288,9 @@ def test_upload_to_s3_client_error(client):
     app.config = config


+@unittest.skipUnless(
+    SupersetTestCase.is_module_installed("boto3"), "boto3 not installed"
+)
 @mock.patch("boto3.client")
 def test_upload_to_s3_success(client):
     config = app.config.copy()
@@ -29,10 +29,6 @@ from superset.advanced_data_type.plugins.internet_address import internet_address
 from superset.advanced_data_type.plugins.internet_port import internet_port as port


-# To run the unit tests below, use the following command in the root Superset folder:
-# tox -e py38 -- tests/unit_tests/advanced_data_type/types_tests.py
-
-
 def test_ip_func_valid_ip():
     """Test to see if the cidr_func behaves as expected when a valid IP is passed in"""
     cidr_request: AdvancedDataTypeRequest = {
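With the tox comment gone, the plain pytest equivalent for this file is:

```bash
pytest tests/unit_tests/advanced_data_type/types_tests.py
```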