build: try to speed up Github workflows (#12090)
This commit is contained in:
parent
bcb8db621c
commit
f48284909d
|
|
@ -0,0 +1,20 @@
|
|||
name: Cancel Duplicates
|
||||
on:
|
||||
workflow_run:
|
||||
workflows: ["CI"]
|
||||
types: ["requested"]
|
||||
|
||||
jobs:
|
||||
cancel-duplicate-workflow-runs:
|
||||
name: "Cancel duplicate workflow runs"
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: apache-superset/cancel-workflow-runs@953e057
|
||||
name: "Cancel duplicate workflow runs"
|
||||
with:
|
||||
cancelMode: duplicates
|
||||
cancelFutureDuplicates: true
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
sourceRunId: ${{ github.event.workflow_run.id }}
|
||||
notifyPRCancel: true
|
||||
skipEventTypes: '["push", "pull_request", "pull_request_target"]'
|
||||
|
|
@ -1,45 +0,0 @@
|
|||
name: License
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [ master ]
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v1
|
||||
with:
|
||||
java-version: 8
|
||||
- name: Generate fossa report
|
||||
env:
|
||||
FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
|
||||
run: |
|
||||
set -eo pipefail
|
||||
if [[ "${{github.event_name}}" != "pull_request" ]]; then
|
||||
./scripts/fossa.sh
|
||||
exit 0
|
||||
fi
|
||||
|
||||
URL="https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files"
|
||||
FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename')
|
||||
|
||||
cat<<EOF
|
||||
CHANGED FILES:
|
||||
$FILES
|
||||
|
||||
EOF
|
||||
|
||||
if [[ "${FILES}" =~ (.*package*\.json|requirements\/[a-z_-]+\.txt|setup\.py) ]]; then
|
||||
echo "Detected dependency changes... running fossa check"
|
||||
|
||||
./scripts/fossa.sh
|
||||
else
|
||||
echo "No dependency changes... skipping fossa check"
|
||||
fi
|
||||
shell: bash
|
||||
- name: Run license check
|
||||
run: ./scripts/check_license.sh
|
||||
|
|
@ -0,0 +1,91 @@
|
|||
name: Miscellaneous
|
||||
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
license_check:
|
||||
name: License Check
|
||||
runs-on: ubuntu-18.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Java
|
||||
uses: actions/setup-java@v1
|
||||
with:
|
||||
java-version: 8
|
||||
- name: Generate fossa report
|
||||
env:
|
||||
FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
|
||||
run: |
|
||||
set -eo pipefail
|
||||
if [[ "${{github.event_name}}" != "pull_request" ]]; then
|
||||
./scripts/fossa.sh
|
||||
exit 0
|
||||
fi
|
||||
|
||||
URL="https://api.github.com/repos/${{ github.repository }}/pulls/${{ github.event.pull_request.number }}/files"
|
||||
FILES=$(curl -s -X GET -G $URL | jq -r '.[] | .filename')
|
||||
|
||||
cat<<EOF
|
||||
CHANGED FILES:
|
||||
$FILES
|
||||
|
||||
EOF
|
||||
|
||||
if [[ "${FILES}" =~ (.*package*\.json|requirements\/[a-z_-]+\.txt|setup\.py) ]]; then
|
||||
echo "Detected dependency changes... running fossa check"
|
||||
|
||||
./scripts/fossa.sh
|
||||
else
|
||||
echo "No dependency changes... skipping fossa check"
|
||||
fi
|
||||
shell: bash
|
||||
- name: Run license check
|
||||
run: ./scripts/check_license.sh
|
||||
|
||||
prefer_typescript:
|
||||
if: github.ref == 'refs/heads/master' && github.event_name == 'pull_request'
|
||||
name: Prefer Typescript
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Get changed files
|
||||
id: changed
|
||||
uses: ./.github/actions/file-changes-action
|
||||
with:
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Determine if a .js or .jsx file was added
|
||||
id: check
|
||||
run: |
|
||||
js_files_added() {
|
||||
jq -r '
|
||||
map(
|
||||
select(
|
||||
endswith(".js") or endswith(".jsx")
|
||||
)
|
||||
) | join("\n")
|
||||
' ${HOME}/files_added.json
|
||||
}
|
||||
echo ::set-output name=js_files_added::$(js_files_added)
|
||||
|
||||
- if: steps.check.outputs.js_files_added
|
||||
name: Add Comment to PR
|
||||
uses: ./.github/actions/comment-on-pr
|
||||
continue-on-error: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
msg: |
|
||||
### WARNING: Prefer TypeScript
|
||||
|
||||
Looks like your PR contains new `.js` or `.jsx` files:
|
||||
|
||||
```
|
||||
${{steps.check.outputs.js_files_added}}
|
||||
```
|
||||
|
||||
As decided in [SIP-36](https://github.com/apache/superset/issues/9101), all new frontend code should be written in TypeScript. Please convert above files to TypeScript then re-request review.
|
||||
|
|
@ -1,49 +0,0 @@
|
|||
name: Prefer TypeScript
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Get changed files
|
||||
id: changed
|
||||
uses: ./.github/actions/file-changes-action
|
||||
with:
|
||||
githubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Determine if a .js or .jsx file was added
|
||||
id: check
|
||||
run: |
|
||||
js_files_added() {
|
||||
jq -r '
|
||||
map(
|
||||
select(
|
||||
endswith(".js") or endswith(".jsx")
|
||||
)
|
||||
) | join("\n")
|
||||
' ${HOME}/files_added.json
|
||||
}
|
||||
echo ::set-output name=js_files_added::$(js_files_added)
|
||||
|
||||
- if: steps.check.outputs.js_files_added
|
||||
name: Add Comment to PR
|
||||
uses: ./.github/actions/comment-on-pr
|
||||
continue-on-error: true
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
msg: |
|
||||
### WARNING: Prefer TypeScript
|
||||
|
||||
Looks like your PR contains new `.js` or `.jsx` files:
|
||||
|
||||
```
|
||||
${{steps.check.outputs.js_files_added}}
|
||||
```
|
||||
|
||||
As decided in [SIP-36](https://github.com/apache/superset/issues/9101), all new frontend code should be written in TypeScript. Please convert above files to TypeScript then re-request review.
|
||||
|
|
@ -1,6 +1,12 @@
|
|||
name: E2E
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/**/docs/**"
|
||||
paths-ignore:
|
||||
- "docs/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
Cypress:
|
||||
|
|
|
|||
|
|
@ -1,6 +1,11 @@
|
|||
name: Frontend
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/**/docs/**"
|
||||
- "dependabot/**/cypress-base/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
build:
|
||||
|
|
|
|||
|
|
@ -1,7 +1,11 @@
|
|||
# Python Misc unit tests
|
||||
name: Python Misc
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/npm_and_yarn/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
|
|
|
|||
|
|
@ -1,51 +0,0 @@
|
|||
# Python MySQL unit tests
|
||||
name: Python MySQL
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test-mysql:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: |
|
||||
mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
|
||||
services:
|
||||
mysql:
|
||||
image: mysql:5.7
|
||||
env:
|
||||
MYSQL_ROOT_PASSWORD: root
|
||||
ports:
|
||||
- 13306:3306
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
options: --entrypoint redis-server
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
setup-mysql
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (MySQL)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
|
@ -1,52 +0,0 @@
|
|||
# Python Postgres unit tests
|
||||
name: Python Postgres
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test-postgres:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7, 3.8]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:10-alpine
|
||||
env:
|
||||
POSTGRES_USER: superset
|
||||
POSTGRES_PASSWORD: superset
|
||||
ports:
|
||||
# Use custom ports for services to avoid accidentally connecting to
|
||||
# GitHub action runner's default installations
|
||||
- 15432:5432
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
setup-postgres
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (PostgreSQL)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
|
@ -1,7 +1,11 @@
|
|||
# Python Presto/Hive unit tests
|
||||
name: Python Presto/Hive
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/npm_and_yarn/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test-postgres-presto:
|
||||
|
|
|
|||
|
|
@ -1,44 +0,0 @@
|
|||
# Python Sqlite unit tests
|
||||
name: Python Sqlite
|
||||
|
||||
on: [push, pull_request]
|
||||
|
||||
jobs:
|
||||
test-sqlite:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: |
|
||||
sqlite:///${{ github.workspace }}/.temp/unittest.db
|
||||
services:
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
mkdir ${{ github.workspace }}/.temp
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (SQLite)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
|
@ -0,0 +1,141 @@
|
|||
# Python MySQL unit tests
|
||||
name: Python MySQL
|
||||
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/npm_and_yarn/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
test-mysql:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: |
|
||||
mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
|
||||
services:
|
||||
mysql:
|
||||
image: mysql:5.7
|
||||
env:
|
||||
MYSQL_ROOT_PASSWORD: root
|
||||
ports:
|
||||
- 13306:3306
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
options: --entrypoint redis-server
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
setup-mysql
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (MySQL)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
||||
test-postgres:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7, 3.8]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:10-alpine
|
||||
env:
|
||||
POSTGRES_USER: superset
|
||||
POSTGRES_PASSWORD: superset
|
||||
ports:
|
||||
# Use custom ports for services to avoid accidentally connecting to
|
||||
# GitHub action runner's default installations
|
||||
- 15432:5432
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
setup-postgres
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (PostgreSQL)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
||||
test-sqlite:
|
||||
runs-on: ubuntu-18.04
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.7]
|
||||
env:
|
||||
PYTHONPATH: ${{ github.workspace }}
|
||||
SUPERSET_CONFIG: tests.superset_test_config
|
||||
REDIS_PORT: 16379
|
||||
SUPERSET__SQLALCHEMY_DATABASE_URI: |
|
||||
sqlite:///${{ github.workspace }}/.temp/unittest.db
|
||||
services:
|
||||
redis:
|
||||
image: redis:5-alpine
|
||||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Setup Python
|
||||
uses: actions/setup-python@v2
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Install dependencies
|
||||
uses: ./.github/actions/cached-dependencies
|
||||
with:
|
||||
run: |
|
||||
apt-get-install
|
||||
pip-upgrade
|
||||
pip install -r requirements/testing.txt
|
||||
mkdir ${{ github.workspace }}/.temp
|
||||
- name: Run celery
|
||||
run: celery worker --app=superset.tasks.celery_app:app -Ofair -c 2 &
|
||||
- name: Python unit tests (SQLite)
|
||||
run: |
|
||||
./scripts/python_tests.sh
|
||||
- name: Upload code coverage
|
||||
run: |
|
||||
bash <(curl -s https://codecov.io/bash) -cF python
|
||||
|
|
@ -1,6 +1,10 @@
|
|||
name: Translations
|
||||
|
||||
on: [push, pull_request]
|
||||
on:
|
||||
push:
|
||||
branches-ignore:
|
||||
- "dependabot/npm_and_yarn/**"
|
||||
pull_request:
|
||||
|
||||
jobs:
|
||||
frontend-check:
|
||||
|
|
|
|||
|
|
@ -0,0 +1,222 @@
|
|||
#!/usr/bin/env python3
|
||||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
"""
|
||||
Manually cancel previous GitHub Action workflow runs in queue.
|
||||
|
||||
Example:
|
||||
# Set up
|
||||
export GITHUB_TOKEN=394ba3b48494ab8f930fbc93
|
||||
export GITHUB_REPOSITORY=apache/incubator-superset
|
||||
|
||||
# cancel previous jobs for a PR
|
||||
./cancel_github_workflows.py 1042
|
||||
|
||||
# cancel previous jobs for a branch
|
||||
./cancel_github_workflows.py my-branch
|
||||
|
||||
# cancel all jobs
|
||||
./cancel_github_workflows.py 1024 --include-last
|
||||
"""
|
||||
import os
|
||||
from typing import Iterable, List, Optional, Union
|
||||
|
||||
import click
|
||||
import requests
|
||||
from click.exceptions import ClickException
|
||||
from dateutil import parser
|
||||
from typing_extensions import Literal
|
||||
|
||||
github_token = os.environ.get("GITHUB_TOKEN")
|
||||
github_repo = os.environ.get("GITHUB_REPOSITORY", "apache/incubator-superset")
|
||||
|
||||
|
||||
def request(method: Literal["GET", "POST", "DELETE", "PUT"], endpoint: str, **kwargs):
    """Send an authenticated request to the GitHub REST API.

    Builds the full API URL from *endpoint*, attaches the bearer token from
    the GITHUB_TOKEN env variable, and decodes the JSON response.

    Raises:
        ClickException: when GitHub returns an error payload (GitHub error
            responses carry a top-level "message" field).
    """
    base_url = "https://api.github.com/"
    url = base_url + endpoint.lstrip("/")
    headers = {"Authorization": f"Bearer {github_token}"}
    payload = requests.request(method, url, headers=headers, **kwargs).json()
    # A "message" key signals an API error (bad token, missing resource, ...).
    if "message" in payload:
        raise ClickException(f"{endpoint} >> {payload['message']} <<")
    return payload
||||
|
||||
|
||||
def list_runs(repo: str, params=None):
    """Return the workflow-runs listing for *repo*, filtered by *params*."""
    endpoint = f"/repos/{repo}/actions/runs"
    return request("GET", endpoint, params=params)
||||
|
||||
|
||||
def cancel_run(repo: str, run_id: Union[str, int]):
    """Ask GitHub to cancel a single workflow run in *repo*."""
    endpoint = f"/repos/{repo}/actions/runs/{run_id}/cancel"
    return request("POST", endpoint)
||||
|
||||
|
||||
def get_pull_request(repo: str, pull_number: Union[str, int]):
    """Fetch the pull-request object for *pull_number* in *repo*."""
    endpoint = f"/repos/{repo}/pulls/{pull_number}"
    return request("GET", endpoint)
||||
|
||||
|
||||
def get_runs_by_branch(
    repo: str,
    branch: str,
    user: Optional[str] = None,
    statuses: Iterable[str] = ("queued", "in_progress"),
    events: Iterable[str] = ("pull_request", "push"),
):
    """Get workflow runs associated with the given branch.

    Queries the API once per (event, status) pair and keeps only runs whose
    head branch matches *branch* and — when *user* is given — whose head
    repository is owned by *user* (i.e. runs from that contributor's fork).
    """
    matching = []
    for event in events:
        for status in statuses:
            query = {"event": event, "status": status, "per_page": 100}
            page = list_runs(repo, query)
            for run in page["workflow_runs"]:
                if run["head_branch"] != branch:
                    continue
                owner = run["head_repository"]["owner"]["login"]
                if user is not None and user != owner:
                    continue
                matching.append(run)
    return matching
||||
|
||||
|
||||
def print_commit(commit):
    """Print out commit message for verification.

    Renders a git-log-style header (HEAD sha, author, local-time date)
    followed by the commit message, so the operator can confirm which
    commits' workflow runs are about to be cancelled.
    """
    # NOTE(review): joins message lines with " \n" — presumably meant to indent
    # continuation lines under the header; confirm intended whitespace.
    indented_message = " \n".join(commit["message"].split("\n"))
    # Convert the commit timestamp to the operator's local timezone for display.
    date_str = (
        parser.parse(commit["timestamp"])
        .astimezone(tz=None)
        .strftime("%a, %d %b %Y %H:%M:%S")
    )
    print(
        f"""HEAD {commit["id"]}
Author: {commit["author"]["name"]} <{commit["author"]["email"]}>
Date: {date_str}

{indented_message}
"""
    )
||||
|
||||
|
||||
@click.command()
@click.option(
    "--repo",
    default=github_repo,
    help="The github repository name. For example, apache/incubator-superset.",
)
@click.option(
    "--event",
    type=click.Choice(["pull_request", "push", "issue"]),
    default=["pull_request", "push"],
    show_default=True,
    multiple=True,
)
@click.option(
    "--include-last/--skip-last",
    default=False,
    show_default=True,
    help="Whether to also cancel the latest run.",
)
@click.option(
    "--include-running/--skip-running",
    default=True,
    show_default=True,
    help="Whether to also cancel running workflows.",
)
@click.argument("branch_or_pull")
def cancel_github_workflows(
    branch_or_pull: str,
    repo,
    event: List[str],
    include_last: bool,
    include_running: bool,
):
    """Cancel running or queued GitHub workflows by branch or pull request ID.

    BRANCH_OR_PULL is either a PR number (all digits) or a branch name,
    optionally prefixed with the fork owner as "user:branch".
    """
    if not github_token:
        raise ClickException("Please provide GITHUB_TOKEN as an env variable")

    statuses = ("queued", "in_progress") if include_running else ("queued",)
    pr = None

    if branch_or_pull.isdigit():
        pr = get_pull_request(repo, pull_number=branch_or_pull)
        target_type = "pull request"
        title = f"#{pr['number']} - {pr['title']}"
    else:
        target_type = "branch"
        title = branch_or_pull

    print(
        f"\nCancel {'active' if include_running else 'previous'} "
        f"workflow runs for {target_type}\n\n {title}\n"
    )

    if pr:
        # PR given: resolve the exact head branch and fork owner from the API.
        runs = get_runs_by_branch(
            repo,
            statuses=statuses,
            events=event,
            branch=pr["head"]["ref"],
            user=pr["user"]["login"],
        )
    else:
        user = None
        branch = branch_or_pull
        if ":" in branch:
            # Split only on the first colon so branch names that themselves
            # contain ":" still parse (maxsplit=2 would raise ValueError).
            user, branch = branch.split(":", 1)
        # Pass the parsed branch (not the raw "user:branch" input) so the
        # head-branch filter can actually match.
        runs = get_runs_by_branch(
            repo, statuses=statuses, events=event, branch=branch, user=user
        )

    runs = sorted(runs, key=lambda x: x["created_at"])
    if not runs:
        print(f"No {' or '.join(statuses)} workflow runs found.\n")
        return

    if not include_last:
        # Only keep one item for each workflow: walk newest-first, keep the
        # first occurrence of each workflow_id, and cancel only the rest.
        seen = set()
        dups = []
        for item in reversed(runs):
            if item["workflow_id"] in seen:
                dups.append(item)
            else:
                seen.add(item["workflow_id"])
        if not dups:
            print(
                "Only the latest runs are in queue. "
                "Use --include-last to force cancelling them.\n"
            )
            return
        runs = dups[::-1]

    last_sha = None

    print(f"\nCancelling {len(runs)} jobs...\n")
    for entry in runs:
        head_commit = entry["head_commit"]
        # Print each commit header once, even if several runs share it.
        if head_commit["id"] != last_sha:
            last_sha = head_commit["id"]
            print_commit(head_commit)
        try:
            # "\r" lets the final status line overwrite the in-progress line.
            print(f"[{entry['status']}] {entry['name']}", end="\r")
            cancel_run(repo, entry["id"])
            print(f"[Cancelled] {entry['name']} ")
        except ClickException as error:
            print(f"[Error: {error.message}] {entry['name']} ")
    print("")
||||
|
||||
|
||||
if __name__ == "__main__":
    # pylint: disable=no-value-for-parameter
    # Click supplies the argument/option values from sys.argv, so the
    # apparently-missing parameters are intentional.
    cancel_github_workflows()
|
||||
|
|
@ -30,7 +30,7 @@ combine_as_imports = true
|
|||
include_trailing_comma = true
|
||||
line_length = 88
|
||||
known_first_party = superset
|
||||
known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,contextlib2,cron_descriptor,croniter,cryptography,dateutil,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,msgpack,numpy,pandas,parameterized,parsedatetime,pathlib2,pgsanity,pkg_resources,polyline,prison,pyarrow,pyhive,pyparsing,pytest,pytz,redis,retry,selenium,setuptools,simplejson,slack,sqlalchemy,sqlalchemy_utils,sqlparse,typing_extensions,werkzeug,wtforms,wtforms_json,yaml
|
||||
known_third_party =alembic,apispec,backoff,bleach,cachelib,celery,click,colorama,contextlib2,cron_descriptor,croniter,cryptography,dateutil,flask,flask_appbuilder,flask_babel,flask_caching,flask_compress,flask_login,flask_migrate,flask_sqlalchemy,flask_talisman,flask_testing,flask_wtf,freezegun,geohash,geopy,holidays,humanize,isodate,jinja2,jwt,markdown,markupsafe,marshmallow,msgpack,numpy,pandas,parameterized,parsedatetime,pathlib2,pgsanity,pkg_resources,polyline,prison,pyarrow,pyhive,pyparsing,pytest,pytz,redis,requests,retry,selenium,setuptools,simplejson,slack,sqlalchemy,sqlalchemy_utils,sqlparse,typing_extensions,werkzeug,wtforms,wtforms_json,yaml
|
||||
multi_line_output = 3
|
||||
order_by_type = false
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue