chore: only use cypress.io when triggered manually (#29077)
This commit is contained in:
parent
17d7e7e5e1
commit
b5d9ac0690
|
|
@ -129,35 +129,10 @@ cypress-install() {
|
|||
cache-save cypress
|
||||
}
|
||||
|
||||
# Run Cypress and upload coverage reports
|
||||
cypress-run() {
|
||||
cypress-run-all() {
|
||||
local USE_DASHBOARD=$1
|
||||
cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
|
||||
|
||||
local page=$1
|
||||
local group=${2:-Default}
|
||||
local cypress="./node_modules/.bin/cypress run"
|
||||
local browser=${CYPRESS_BROWSER:-chrome}
|
||||
|
||||
export TERM="xterm"
|
||||
export ELECTRON_DISABLE_GPU=true # Attempt to disable GPU for Electron-based Cypress
|
||||
|
||||
say "::group::Run Cypress for [$page]"
|
||||
if [[ -z $CYPRESS_KEY ]]; then
|
||||
xvfb-run --auto-servernum --server-args='-screen 0, 1024x768x24' $cypress --spec "cypress/e2e/$page" --browser "$browser"
|
||||
else
|
||||
export CYPRESS_RECORD_KEY=$(echo $CYPRESS_KEY | base64 --decode)
|
||||
# additional flags for Cypress dashboard recording
|
||||
xvfb-run --auto-servernum --server-args='-screen 0, 1024x768x24' $cypress --spec "cypress/e2e/$page" --browser "$browser" \
|
||||
--record --group "$group" --tag "${GITHUB_REPOSITORY},${GITHUB_EVENT_NAME}" \
|
||||
--parallel --ci-build-id "${GITHUB_SHA:0:8}-${NONCE}"
|
||||
|
||||
fi
|
||||
|
||||
# don't add quotes to $record because we do want word splitting
|
||||
say "::endgroup::"
|
||||
}
|
||||
|
||||
cypress-run-all() {
|
||||
# Start Flask and run it in background
|
||||
# --no-debugger means disable the interactive debugger on the 500 page
|
||||
# so errors can print to stderr.
|
||||
|
|
@ -168,27 +143,17 @@ cypress-run-all() {
|
|||
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
|
||||
local flaskProcessId=$!
|
||||
|
||||
cypress-run "*/**/*"
|
||||
USE_DASHBOARD_FLAG=''
|
||||
if [ "$USE_DASHBOARD" = "true" ]; then
|
||||
USE_DASHBOARD_FLAG='--use-dashboard'
|
||||
fi
|
||||
|
||||
python ../../scripts/cypress_run.py --parallelism $PARALLELISM --parallelism-id $PARALLEL_ID $USE_DASHBOARD_FLAG
|
||||
|
||||
# After job is done, print out Flask log for debugging
|
||||
say "::group::Flask log for default run"
|
||||
echo "::group::Flask log for default run"
|
||||
cat "$flasklog"
|
||||
say "::endgroup::"
|
||||
|
||||
# Rerun SQL Lab tests with backend persist disabled
|
||||
export SUPERSET_CONFIG=tests.integration_tests.superset_test_config_sqllab_backend_persist_off
|
||||
|
||||
# Restart Flask with new configs
|
||||
kill $flaskProcessId
|
||||
nohup flask run --no-debugger -p $port >"$flasklog" 2>&1 </dev/null &
|
||||
local flaskProcessId=$!
|
||||
|
||||
cypress-run "sqllab/*" "Backend persist"
|
||||
|
||||
say "::group::Flask log for backend persist"
|
||||
cat "$flasklog"
|
||||
say "::endgroup::"
|
||||
|
||||
echo "::endgroup::"
|
||||
# make sure the program exits
|
||||
kill $flaskProcessId
|
||||
}
|
||||
|
|
|
|||
|
|
@ -7,6 +7,20 @@ on:
|
|||
- "[0-9].[0-9]"
|
||||
pull_request:
|
||||
types: [synchronize, opened, reopened, ready_for_review]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
use_dashboard:
|
||||
description: 'Use Cypress Dashboard (true/false) [paid service - trigger manually when needed]'
|
||||
required: false
|
||||
default: 'false'
|
||||
ref:
|
||||
description: 'The branch or tag to checkout'
|
||||
required: false
|
||||
default: ''
|
||||
pr_id:
|
||||
description: 'The pull request ID to checkout'
|
||||
required: false
|
||||
default: ''
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.run_id }}
|
||||
|
|
@ -20,12 +34,12 @@ jobs:
|
|||
pull-requests: read
|
||||
strategy:
|
||||
# when one test fails, DO NOT cancel the other
|
||||
# containers, because this will kill Cypress processes
|
||||
# parallel_id, because this will kill Cypress processes
|
||||
# leaving the Dashboard hanging ...
|
||||
# https://github.com/cypress-io/github-action/issues/48
|
||||
fail-fast: false
|
||||
matrix:
|
||||
containers: [1, 2, 3]
|
||||
parallel_id: [0, 1, 2, 3, 4, 5]
|
||||
browser: ["chrome"]
|
||||
env:
|
||||
SUPERSET_ENV: development
|
||||
|
|
@ -34,6 +48,7 @@ jobs:
|
|||
PYTHONPATH: ${{ github.workspace }}
|
||||
REDIS_PORT: 16379
|
||||
GITHUB_TOKEN: ${{ github.token }}
|
||||
USE_DASHBOARD: ${{ github.event.inputs.use_dashboard || 'false' }}
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15-alpine
|
||||
|
|
@ -47,19 +62,30 @@ jobs:
|
|||
ports:
|
||||
- 16379:6379
|
||||
steps:
|
||||
- name: "Checkout (pull) ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v4
|
||||
if: github.event_name == 'push'
|
||||
# -------------------------------------------------------
|
||||
# Conditional checkout based on context
|
||||
- name: Checkout for push or pull_request event
|
||||
if: github.event_name == 'push' || github.event_name == 'pull_request'
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.ref || github.ref }}
|
||||
persist-credentials: false
|
||||
submodules: recursive
|
||||
- name: "Checkout (pull_request) ${{ github.ref }} ( ${{ github.sha }} )"
|
||||
uses: actions/checkout@v4
|
||||
if: github.event_name == 'pull_request' || github.event_name == 'pull_request_target'
|
||||
- name: Checkout using ref (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.ref != ''
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: "refs/pull/${{ github.event.number }}/merge"
|
||||
persist-credentials: false
|
||||
ref: ${{ github.event.inputs.ref }}
|
||||
submodules: recursive
|
||||
- name: Checkout using PR ID (workflow_dispatch)
|
||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.pr_id != ''
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
persist-credentials: false
|
||||
ref: refs/pull/${{ github.event.inputs.pr_id }}/merge
|
||||
submodules: recursive
|
||||
# -------------------------------------------------------
|
||||
- name: Check for file changes
|
||||
id: check
|
||||
uses: ./.github/actions/change-detector/
|
||||
|
|
@ -103,12 +129,14 @@ jobs:
|
|||
uses: ./.github/actions/cached-dependencies
|
||||
env:
|
||||
CYPRESS_BROWSER: ${{ matrix.browser }}
|
||||
PARALLEL_ID: ${{ matrix.parallel_id }}
|
||||
PARALLELISM: 6
|
||||
CYPRESS_KEY: YjljODE2MzAtODcwOC00NTA3LWE4NmMtMTU3YmFmMjIzOTRhCg==
|
||||
with:
|
||||
run: cypress-run-all
|
||||
run: cypress-run-all ${{ env.USE_DASHBOARD }}
|
||||
- name: Upload Artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
if: steps.check.outputs.python || steps.check.outputs.frontend
|
||||
if: github.event_name == 'workflow_dispatch' && (steps.check.outputs.python || steps.check.outputs.frontend)
|
||||
with:
|
||||
name: screenshots
|
||||
path: ${{ github.workspace }}/superset-frontend/cypress-base/cypress/screenshots
|
||||
|
|
|
|||
|
|
@ -98,6 +98,7 @@ def print_files(files: List[str]) -> None:
|
|||
def main(event_type: str, sha: str, repo: str) -> None:
|
||||
"""Main function to check for file changes based on event context."""
|
||||
print("SHA:", sha)
|
||||
print("EVENT_TYPE", event_type)
|
||||
if event_type == "pull_request":
|
||||
pr_number = os.getenv("GITHUB_REF", "").split("/")[-2]
|
||||
files = fetch_changed_files_pr(repo, pr_number)
|
||||
|
|
@ -108,13 +109,19 @@ def main(event_type: str, sha: str, repo: str) -> None:
|
|||
files = fetch_changed_files_push(repo, sha)
|
||||
print("Files touched since previous commit:")
|
||||
print_files(files)
|
||||
|
||||
elif event_type == "workflow_dispatch":
|
||||
print("Workflow dispatched, assuming all changed")
|
||||
|
||||
else:
|
||||
raise ValueError("Unsupported event type")
|
||||
|
||||
changes_detected = {}
|
||||
for group, regex_patterns in PATTERNS.items():
|
||||
patterns_compiled = [re.compile(p) for p in regex_patterns]
|
||||
changes_detected[group] = detect_changes(files, patterns_compiled)
|
||||
changes_detected[group] = event_type == "workflow_dispatch" or detect_changes(
|
||||
files, patterns_compiled
|
||||
)
|
||||
|
||||
# Output results
|
||||
output_path = os.getenv("GITHUB_OUTPUT") or "/tmp/GITHUB_OUTPUT.txt"
|
||||
|
|
|
|||
|
|
@ -0,0 +1,144 @@
|
|||
# Licensed to the Apache Software Foundation (ASF) under one
|
||||
# or more contributor license agreements. See the NOTICE file
|
||||
# distributed with this work for additional information
|
||||
# regarding copyright ownership. The ASF licenses this file
|
||||
# to you under the Apache License, Version 2.0 (the
|
||||
# "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing,
|
||||
# software distributed under the License is distributed on an
|
||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
# KIND, either express or implied. See the License for the
|
||||
# specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
import argparse
|
||||
import hashlib
|
||||
import os
|
||||
import subprocess
|
||||
from datetime import datetime
|
||||
|
||||
# Prefix that runs Cypress headlessly under a virtual X display.
XVFB_PRE_CMD = "xvfb-run --auto-servernum --server-args='-screen 0, 1024x768x24' "
REPO = os.getenv("GITHUB_REPOSITORY") or "apache/superset"
# BUGFIX: this previously read GITHUB_REPOSITORY by mistake, so the tag sent
# to the Cypress dashboard contained the repo slug twice instead of the
# triggering event name (e.g. "push" / "pull_request").
GITHUB_EVENT_NAME = os.getenv("GITHUB_EVENT_NAME") or "push"
|
||||
|
||||
|
||||
def compute_hash(file_path: str) -> str:
    """Return the hex MD5 digest of *file_path* (used only for bucketing,
    not for security)."""
    digest = hashlib.md5(file_path.encode())
    return digest.hexdigest()
|
||||
|
||||
|
||||
def compute_group_index(hash_value: str, num_groups: int) -> int:
    """Map a hex digest string onto a group index in ``[0, num_groups)``."""
    as_int = int(hash_value, 16)
    return as_int % num_groups
|
||||
|
||||
|
||||
def generate_build_id() -> str:
    """Build a quasi-unique CI build id: 8-char SHA prefix + timestamp.

    The timestamp is rounded DOWN to the nearest 20-minute bucket, so retries
    triggered within the same window produce the same id and can join the
    same parallel Cypress run.
    """
    sha_prefix = (os.getenv("GITHUB_SHA") or "DUMMY")[:8]
    current = datetime.now()
    bucket = current.replace(
        minute=current.minute - current.minute % 20,
        second=0,
        microsecond=0,
    )
    return sha_prefix + bucket.strftime("%Y%m%d%H%M")
|
||||
|
||||
|
||||
def get_cypress_cmd(
    spec_list: list[str], _filter: str, group: str, use_dashboard: bool
) -> str:
    """Assemble the shell command that runs Cypress.

    Args:
        spec_list: candidate spec file paths, relative to cypress-base.
        _filter: optional substring; when truthy, only matching specs run.
        group: Cypress Dashboard group name.
        use_dashboard: record to the (paid) cypress.io dashboard when True;
            otherwise run locally against this runner's pre-split spec list.

    Returns:
        The full command string, prefixed with ``xvfb-run``.
    """
    cypress_cmd = "./node_modules/.bin/cypress run"

    os.environ["TERM"] = "xterm"
    # Attempt to disable GPU for Electron-based Cypress.
    os.environ["ELECTRON_DISABLE_GPU"] = "true"
    build_id = generate_build_id()
    browser = os.getenv("CYPRESS_BROWSER", "chrome")

    if use_dashboard:
        # Run using the cypress.io recording service; CYPRESS_KEY holds the
        # record key base64-encoded.
        # BUGFIX/security: decode in-process with base64.b64decode instead of
        # piping the secret through a shell (`echo $key | base64 --decode`
        # via subprocess with shell=True), and tolerate an unset CYPRESS_KEY.
        cypress_key = os.getenv("CYPRESS_KEY") or ""
        os.environ["CYPRESS_RECORD_KEY"] = (
            base64.b64decode(cypress_key).decode("utf-8").strip()
        )
        spec: str = "*/**/*"
        cmd = (
            f"{XVFB_PRE_CMD} "
            f'{cypress_cmd} --spec "{spec}" --browser {browser} '
            f"--record --group {group} --tag {REPO},{GITHUB_EVENT_NAME} "
            f"--parallel --ci-build-id {build_id}"
        )
    else:
        # Run locally, but only this runner's share of the split specs.
        os.environ.pop("CYPRESS_KEY", None)
        specs = sorted(spec_list)
        if _filter:
            specs = [s for s in specs if _filter in s]
        spec_list_str = ",".join(specs)
        cmd = (
            f"{XVFB_PRE_CMD} "
            f"{cypress_cmd} --browser {browser} "
            f'--spec "{spec_list_str}" '
        )
    return cmd
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point: pick this runner's spec group and launch Cypress.

    Walks ``cypress-base/cypress/e2e`` for ``*test.ts`` / ``*test.js`` files,
    distributes them round-robin across ``--parallelism`` groups, then builds
    and (unless ``--dry-run``) executes the Cypress command for the group
    selected by ``--parallelism-id``.
    """
    parser = argparse.ArgumentParser(
        description="Generate Cypress commands based on test file hash"
    )
    parser.add_argument(
        "--use-dashboard",
        action="store_true",
        help="Use Cypress Dashboard for parallelization",
    )
    parser.add_argument(
        "--parallelism", type=int, default=10, help="Number of parallel groups"
    )
    parser.add_argument(
        "--parallelism-id", type=int, required=True, help="ID of the parallelism group"
    )
    parser.add_argument(
        "--filter", type=str, required=False, default=None, help="filter to test"
    )
    parser.add_argument("--group", type=str, default="Default", help="Group name")
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print the command instead of executing it",
    )
    args = parser.parse_args()

    # ROBUSTNESS: an out-of-range id previously raised a bare KeyError on the
    # groups lookup below; fail early with a clear usage error instead.
    if not 0 <= args.parallelism_id < args.parallelism:
        parser.error(f"--parallelism-id must be in [0, {args.parallelism})")

    script_dir = os.path.dirname(os.path.abspath(__file__))
    cypress_base_path = "superset-frontend/cypress-base/"
    cypress_base_full_path = os.path.join(script_dir, "../", cypress_base_path)
    cypress_tests_path = os.path.join(cypress_base_full_path, "cypress/e2e")

    # Collect spec paths relative to cypress-base (the form Cypress expects).
    test_files = []
    for root, _, files in os.walk(cypress_tests_path):
        for file in files:
            if file.endswith("test.ts") or file.endswith("test.js"):
                test_files.append(
                    os.path.join(root, file).replace(cypress_base_full_path, "")
                )

    # Initialize groups and distribute the SORTED file list round-robin, so
    # every parallel runner independently computes the identical split.
    groups: dict[int, list[str]] = {i: [] for i in range(args.parallelism)}
    for index, test_file in enumerate(sorted(test_files)):
        groups[index % args.parallelism].append(test_file)

    spec_list = groups[args.parallelism_id]
    cmd = get_cypress_cmd(spec_list, args.filter, args.group, args.use_dashboard)
    print(f"RUN: {cmd}")
    if not args.dry_run:
        # shell=True is required: cmd is a single shell string (xvfb-run ...).
        subprocess.run(cmd, shell=True, check=True, stdout=None, stderr=None)


if __name__ == "__main__":
    main()
|
||||
|
|
@ -33,10 +33,10 @@ from superset.extensions import (
|
|||
)
|
||||
from superset.security import SupersetSecurityManager # noqa: F401
|
||||
|
||||
# All of the fields located here should be considered legacy. The correct way
|
||||
# to declare "global" dependencies is to define it in extensions.py,
|
||||
# then initialize it in app.create_app(). These fields will be removed
|
||||
# in subsequent PRs as things are migrated towards the factory pattern
|
||||
# All of the fields located here should be considered legacy. The correct way
|
||||
# to declare "global" dependencies is to define it in extensions.py,
|
||||
# then initialize it in app.create_app(). These fields will be removed
|
||||
# in subsequent PRs as things are migrated towards the factory pattern
|
||||
app: Flask = current_app
|
||||
cache = cache_manager.cache
|
||||
conf = LocalProxy(lambda: current_app.config)
|
||||
|
|
|
|||
Loading…
Reference in New Issue