[Build] Add Github workflows (#9517)

As a replacement for Travis CI.
This commit is contained in:
Jianchao Yang 2020-04-14 13:38:09 -07:00 committed by GitHub
parent aef06ded6b
commit 7b4b0d1c61
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 1387 additions and 535 deletions

133
.github/workflows/bashlib.sh vendored Normal file
View File

@ -0,0 +1,133 @@
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Print a message, but stay silent in parallel mode (INPUT_PARALLEL=true),
# where interleaved output from concurrent commands would be unreadable.
say() {
  if [[ "${INPUT_PARALLEL^^}" == 'TRUE' ]]; then
    return 0
  fi
  echo "$1"
}
# Default command executed when the `run` input of the
# cached-dependencies action is empty: only install Python dependencies.
default-setup-command() {
  pip-install
}
# Install Python dependencies: runtime requirements, dev requirements,
# and the Superset package itself (editable) with Postgres/MySQL drivers.
pip-install() {
  cd "$GITHUB_WORKSPACE"
  # Don't use pip cache as it doesn't seem to help much.
  # cache-restore pip
  say "::group::Install Python packages"
  pip install -r requirements.txt
  pip install -r requirements-dev.txt
  pip install -e ".[postgres,mysql]"
  say "::endgroup::"
  # cache-save pip
}
# prepare (lint and build) frontend code
npm-install() {
  # Install frontend npm packages from the lockfile, using the npm cache.
  cd "$GITHUB_WORKSPACE/superset-frontend"
  cache-restore npm
  say "::group::Install npm packages"
  # Log tool versions to make CI-only failures easier to debug.
  echo "npm: $(npm --version)"
  echo "node: $(node --version)"
  npm ci
  say "::endgroup::"
  cache-save npm
}
# Build the static frontend bundle into superset/static/assets.
build-assets() {
  cd "$GITHUB_WORKSPACE/superset-frontend"
  say "::group::Build static assets"
  # --no-progress keeps CI logs compact.
  npm run build -- --no-progress
  say "::endgroup::"
}
# Build static assets, short-circuiting via the assets cache unless the
# caller passes `--no-cache`.
npm-build() {
  if [[ "$1" == '--no-cache' ]]; then
    build-assets
    return
  fi
  cache-restore assets
  if [[ -f "$GITHUB_WORKSPACE/superset/static/assets/manifest.json" ]]; then
    echo 'Skip frontend build because static assets already exist.'
    return
  fi
  build-assets
  cache-save assets
}
# Install the Cypress e2e test-runner dependencies (cached).
cypress-install() {
  cd "$GITHUB_WORKSPACE/superset-frontend/cypress-base"
  cache-restore cypress
  say "::group::Install Cypress"
  npm ci
  say "::endgroup::"
  cache-save cypress
}
# Initialize the Superset metadata database and load example data,
# test users, and default roles/permissions.
testdata() {
  cd "$GITHUB_WORKSPACE"
  say "::group::Load test data"
  # must specify PYTHONPATH to make `tests.superset_test_config` importable
  export PYTHONPATH="$GITHUB_WORKSPACE"
  superset db upgrade
  superset load_test_users
  superset load_examples --load-test-data
  superset init
  say "::endgroup::"
}
# Create the extra schemas expected by SQL Lab and admin-database tests,
# connecting to the CI Postgres service container on custom port 15432.
setup-postgres() {
  say "::group::Initialize database"
  psql "postgresql://superset:superset@127.0.0.1:15432/superset" <<-EOF
DROP SCHEMA IF EXISTS sqllab_test_db;
CREATE SCHEMA sqllab_test_db;
DROP SCHEMA IF EXISTS admin_database;
CREATE SCHEMA admin_database;
EOF
  say "::endgroup::"
}
# Recreate the databases and the `superset` user expected by the test
# suite, connecting as root to the CI MySQL service on custom port 13306.
setup-mysql() {
  say "::group::Initialize database"
  mysql -h 127.0.0.1 -P 13306 -u root --password=root <<-EOF
DROP DATABASE IF EXISTS superset;
CREATE DATABASE superset DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
DROP DATABASE IF EXISTS sqllab_test_db;
CREATE DATABASE sqllab_test_db DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
DROP DATABASE IF EXISTS admin_database;
CREATE DATABASE admin_database DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci;
CREATE USER 'superset'@'%' IDENTIFIED BY 'superset';
GRANT ALL ON *.* TO 'superset'@'%';
FLUSH PRIVILEGES;
EOF
  say "::endgroup::"
}

52
.github/workflows/caches.js vendored Normal file
View File

@ -0,0 +1,52 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
// always use absolute directory
const workspaceDirectory = process.env.GITHUB_WORKSPACE;
const homeDirectory = process.env.HOME;
// Multi-layer cache definition
module.exports = {
pip: {
path: [`${homeDirectory}/.cache/pip`],
hashFiles: [`${workspaceDirectory}/requirements*.txt`],
},
npm: {
path: [`${homeDirectory}/.npm`],
hashFiles: ['superset-frontend/package-lock.json'],
},
assets: {
path: [
`${workspaceDirectory}/superset/static/assets`,
],
hashFiles: [
`${workspaceDirectory}/superset-frontend/src/**/*`,
`${workspaceDirectory}/superset-frontend/*.json`,
`${workspaceDirectory}/superset-frontend/*.js`,
],
// dont use restore keys as it may give an invalid older build
restoreKeys: ''
},
cypress: {
path: [`${homeDirectory}/.cache/Cypress`],
hashFiles: [
`${workspaceDirectory}/superset-frontend/cypress-base/package-lock.json`,
],
},
};

22
.github/workflows/license-check.yml vendored Normal file
View File

@ -0,0 +1,22 @@
# Run FOSSA license scanning and the ASF license-header check on every
# push to master and on all pull requests.
name: License
on:
  push:
    branches: [ master ]
  pull_request:
jobs:
  check:
    runs-on: ubuntu-18.04
    steps:
      - uses: actions/checkout@v2
      # FOSSA CLI requires a JVM.
      - name: Setup Java
        uses: actions/setup-java@v1
        with:
          java-version: 8
      - name: Generate fossa report
        env:
          FOSSA_API_KEY: ${{ secrets.FOSSA_API_KEY }}
        run: ./scripts/fossa.sh
      - name: Run license check
        run: ./scripts/check_license.sh

91
.github/workflows/superset-e2e.yml vendored Normal file
View File

@ -0,0 +1,91 @@
# Cypress end-to-end tests: builds the frontend, loads test data into a
# Postgres service container, starts Flask, and runs the Cypress suites.
name: E2E
on:
  push:
    branches: [ master ]
  pull_request:
jobs:
  cypress:
    name: Cypress
    runs-on: ubuntu-18.04
    strategy:
      fail-fast: false
      matrix:
        browser: ['chrome']
    env:
      FLASK_ENV: development
      SUPERSET_CONFIG: tests.superset_test_config
      SUPERSET__SQLALCHEMY_DATABASE_URI:
        postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
      PYTHONPATH: ${{ github.workspace }}
      REDIS_PORT: 16379
      CI: github-actions
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      CYPRESS_RECORD_KEY: ${{ secrets.CYPRESS_RECORD_KEY }}
    services:
      # Custom host ports (15432/16379) avoid clashing with any default
      # installations on the runner.
      postgres:
        image: postgres:10-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          - 15432:5432
      redis:
        image: redis:5-alpine
        ports:
          - 16379:6379
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: '3.6'
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
        with:
          # Running commands in parallel helps the initial installation when
          # nothing is cached yet; the helpers come from bashlib.sh.
          parallel: true
          run: |
            npm-install && npm-build
            pip-install && setup-postgres && testdata
            cypress-install
      - name: Cypress run all
        env:
          CYPRESS_GROUP: Default
          CYPRESS_PATH: 'cypress/integration/*/*'
        run: |
          # Start Flask and run Cypress
          # --no-debugger means disable the interactive debugger on the 500 page
          # so errors can print to stderr.
          flask run --no-debugger --with-threads -p 8081 &
          sleep 3 # wait for the Flask app to start
          cd ${{ github.workspace }}/superset-frontend/cypress-base/
          npm run cypress -- run \
            --browser ${{ matrix.browser }} --spec "${{ env.CYPRESS_PATH }}" \
            --record --group "${{ env.CYPRESS_GROUP }}" \
            --ci-build-id ${{ github.event_name }}-${{ github.run_id }}
      - name: Cypress run SQL Lab (with backend persist)
        env:
          SUPERSET_CONFIG: tests.superset_test_config_sqllab_backend_persist
          CYPRESS_GROUP: Backend persist
          CYPRESS_PATH: 'cypress/integration/sqllab/*'
        run: |
          # Start Flask with alternative config and run Cypress
          killall python # exit the running Flask app
          flask run --no-debugger --with-threads -p 8081 &
          sleep 3 # wait for the Flask app to start
          cd ${{ github.workspace }}/superset-frontend/cypress-base/
          npm run cypress -- run \
            --browser ${{ matrix.browser }} --spec "${{ env.CYPRESS_PATH }}" \
            --record --group "${{ env.CYPRESS_GROUP }}" \
            --ci-build-id ${{ github.event_name }}-${{ github.run_id }}

36
.github/workflows/superset-frontend.yml vendored Normal file
View File

@ -0,0 +1,36 @@
# Frontend CI: lint, unit tests, and coverage upload. Only triggered when
# files under superset-frontend/ change.
name: Frontend
on:
  push:
    branches: [ master ]
    paths:
      - superset-frontend/**
  pull_request:
    paths:
      - superset-frontend/**
jobs:
  frontend-build:
    name: build
    runs-on: ubuntu-18.04
    env:
      CI: github-actions
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
        with:
          # `npm-install` is a helper defined in .github/workflows/bashlib.sh
          run: npm-install
      - name: eslint
        working-directory: ./superset-frontend
        run: |
          npm run lint
      - name: unit tests
        working-directory: ./superset-frontend
        run: |
          npm run test -- --coverage
      - name: Upload code coverage
        working-directory: ./superset-frontend
        run: |
          bash <(curl -s https://codecov.io/bash) -cF unittest,javascript

174
.github/workflows/superset-python.yml vendored Normal file
View File

@ -0,0 +1,174 @@
# Python CI: lint plus unit tests against PostgreSQL, MySQL, and SQLite.
name: Python
on:
  # only build on direct push to `master` branch
  push:
    branches: [ master ]
    paths:
      - ./**/*.py
      - superset/**
      - tests/**
      # keep in sync with pull_request.paths below; setup.py was missing
      # here, so a master push touching only setup.py skipped CI
      - setup.py
      - requirements*.txt
  # but also build on pull requests to any branch
  # (the so-called feature branch)
  pull_request:
    paths:
      - ./**/*.py
      - superset/**
      - tests/**
      - setup.py
      - requirements*.txt
jobs:
  lint:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        python-version: [3.6]
    env:
      PYTHON_LINT_TARGET: setup.py superset tests
      CI: github-actions
    steps:
      - name: Checkout code
        uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
      - name: black
        run: black --check $(echo $PYTHON_LINT_TARGET)
      - name: mypy
        run: mypy $(echo $PYTHON_LINT_TARGET)
      - name: isort
        run: isort --check-only --recursive $(echo $PYTHON_LINT_TARGET)
      - name: pylint
        # `-j 0` runs Pylint in parallel
        run: pylint -j 0 superset
  test-postgres:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        # run unit tests in multiple versions just for fun
        # (3.8 is not supported yet, some dependencies need an update)
        python-version: [3.6, 3.7]
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.superset_test_config
      REDIS_PORT: 16379
    services:
      postgres:
        image: postgres:10-alpine
        env:
          POSTGRES_USER: superset
          POSTGRES_PASSWORD: superset
        ports:
          # Use custom ports for services to avoid accidentally connecting to
          # GitHub action runner's default installations
          - 15432:5432
      redis:
        image: redis:5-alpine
        ports:
          - 16379:6379
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
        with:
          run: |
            pip-install
            setup-postgres
      - name: Python unit tests (PostgreSQL)
        env:
          SUPERSET__SQLALCHEMY_DATABASE_URI:
            postgresql+psycopg2://superset:superset@127.0.0.1:15432/superset
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        run: |
          bash <(curl -s https://codecov.io/bash) -cF unittest,python,postgres
  test-mysql:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        python-version: [3.6]
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.superset_test_config
      REDIS_PORT: 16379
    services:
      mysql:
        image: mysql:5.7
        env:
          MYSQL_ROOT_PASSWORD: root
        ports:
          - 13306:3306
      redis:
        image: redis:5-alpine
        options: --entrypoint redis-server
        ports:
          - 16379:6379
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
        with:
          run: |
            pip-install
            setup-mysql
      - name: Python unit tests (MySQL)
        env:
          SUPERSET__SQLALCHEMY_DATABASE_URI: |
            mysql+mysqldb://superset:superset@127.0.0.1:13306/superset?charset=utf8mb4&binary_prefix=true
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        run: |
          bash <(curl -s https://codecov.io/bash) -cF unittest,python,mysql
  test-sqlite:
    runs-on: ubuntu-18.04
    strategy:
      matrix:
        python-version: [3.6]
    env:
      PYTHONPATH: ${{ github.workspace }}
      SUPERSET_CONFIG: tests.superset_test_config
      REDIS_PORT: 16379
    services:
      redis:
        image: redis:5-alpine
        ports:
          - 16379:6379
    steps:
      - uses: actions/checkout@v2
      - name: Setup Python
        uses: actions/setup-python@v1
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        uses: apache-superset/cached-dependencies@ddf7d7f
        with:
          run: |
            pip-install
            mkdir ${{ github.workspace }}/.temp
      - name: Python unit tests (SQLite)
        env:
          SUPERSET__SQLALCHEMY_DATABASE_URI: |
            sqlite:///${{ github.workspace }}/.temp/unittest.db
        run: |
          ./scripts/python_tests.sh
      - name: Upload code coverage
        run: |
          bash <(curl -s https://codecov.io/bash) -cF unittest,python,sqlite

View File

View File

@ -19,8 +19,9 @@
# This is the recommended way to install FOSSA's cli per the docs:
# https://docs.fossa.com/docs/travisci#section-add-fossa-steps-to-travisyml
curl -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install.sh | sudo bash
curl -s -H 'Cache-Control: no-cache' https://raw.githubusercontent.com/fossas/fossa-cli/master/install.sh | sudo bash
# This key is a push-only API key, also recommended for public projects
# https://docs.fossa.com/docs/api-reference#section-push-only-api-token
FOSSA_API_KEY="f72e93645bdfeab94bd227c7bbdda4ef" fossa
export FOSSA_API_KEY="${FOSSA_API_KEY:-f72e93645bdfeab94bd227c7bbdda4ef}"
fossa analyze

27
scripts/python_tests.sh Executable file
View File

@ -0,0 +1,27 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Run the Python unit-test suite against the configured database.
# Fail fast on any error.
set -e
# Allow CI to override the config module; default to the test config.
export SUPERSET_CONFIG=${SUPERSET_CONFIG:-tests.superset_test_config}
echo "Superset config module: $SUPERSET_CONFIG"
superset db upgrade
superset init
# Load examples first (and stop on failure) since later tests depend on them.
nosetests --stop tests/load_examples_test.py
nosetests --stop --exclude=load_examples_test tests

View File

@ -0,0 +1 @@
cypress/screenshots/

View File

@ -1,13 +1,14 @@
{
"baseUrl": "http://localhost:8081",
"chromeWebSecurity": false,
"defaultCommandTimeout": 20000,
"requestTimeout": 20000,
"ignoreTestFiles": ["**/!(*.test.js)"],
"projectId": "fbf96q",
"defaultCommandTimeout": 5000,
"requestTimeout": 10000,
"ignoreTestFiles": [
"**/!(*.test.js)"
],
"video": false,
"videoUploadOnPasses": false,
"viewportWidth": 1280,
"viewportHeight": 800,
"requestTimeout": 10000
"viewportHeight": 1024,
"projectId": "dk2opw"
}

View File

@ -23,29 +23,15 @@ export default () =>
beforeEach(() => {
cy.server();
cy.login();
cy.visit(WORLD_HEALTH_DASHBOARD);
cy.get('#app').then(data => {
const bootstrapData = JSON.parse(data[0].dataset.bootstrap);
const dashboard = bootstrapData.dashboard_data;
const dashboardId = dashboard.id;
const boxplotChartId = dashboard.slices.find(
slice => slice.form_data.viz_type === 'box_plot',
).slice_id;
const formData = `{"slice_id":${boxplotChartId}}`;
const boxplotRequest = `/superset/explore_json/?form_data=${formData}&dashboard_id=${dashboardId}`;
cy.route('POST', boxplotRequest).as('boxplotRequest');
});
cy.get('.dashboard-header')
.contains('Edit dashboard')
.click();
});
it('remove, and add chart flow', () => {
// wait box_plot data and find box plot
cy.wait('@boxplotRequest');
cy.get('.grid-container .box_plot').should('be.exist');
// wait for box plot to appear
cy.get('.grid-container .box_plot');
cy.get('.fa.fa-trash')
.last()
@ -53,7 +39,6 @@ export default () =>
cy.wrap($el)
.invoke('show')
.click();
// box plot should be gone
cy.get('.grid-container .box_plot').should('not.exist');
});
@ -75,7 +60,7 @@ export default () =>
.trigger('mousedown', { which: 1 })
.trigger('dragstart', { dataTransfer })
.trigger('drag', {});
cy.get('.grid-content .dragdroppable')
cy.get('.grid-content div.grid-row.background--transparent')
.last()
.trigger('dragover', { dataTransfer })
.trigger('drop', { dataTransfer })

View File

@ -49,12 +49,15 @@ export default () =>
it('should load dashboard', () => {
// wait and verify one-by-one
cy.wait(aliases).then(requests => {
requests.forEach(async xhr => {
expect(xhr.status).to.eq(200);
const responseBody = await readResponseBlob(xhr.response.body);
expect(responseBody).to.have.property('error', null);
cy.get(`#slice-container-${xhr.response.body.form_data.slice_id}`);
});
return Promise.all(
requests.map(async xhr => {
expect(xhr.status).to.eq(200);
const responseBody = await readResponseBlob(xhr.response.body);
expect(responseBody).to.have.property('error', null);
const sliceId = responseBody.form_data.slice_id;
cy.get(`#chart-id-${sliceId}`).should('be.visible');
}),
);
});
});
});

View File

@ -52,7 +52,6 @@ export default () =>
it('should save as new dashboard', () => {
cy.wait('@copyRequest').then(xhr => {
expect(xhr.status).to.eq(200);
readResponseBlob(xhr.response.body).then(json => {
expect(json.id).to.be.gt(dashboardId);
});
@ -61,11 +60,7 @@ export default () =>
it('should save/overwrite dashboard', () => {
// should have box_plot chart
const formData = `{"slice_id":${boxplotChartId}}`;
const boxplotRequest = `/superset/explore_json/?form_data=${formData}&dashboard_id=${dashboardId}`;
cy.route('POST', boxplotRequest).as('boxplotRequest');
cy.wait('@boxplotRequest');
cy.get('.grid-container .box_plot').should('be.exist');
cy.get('.grid-container .box_plot', { timeout: 5000 }); // wait for 5 secs
// remove box_plot chart from dashboard
cy.get('.dashboard-header')

View File

@ -117,8 +117,9 @@ export default () =>
.last()
.find('.editable-title input')
.click();
cy.wait('@boxplotRequest');
cy.get('.grid-container .box_plot').should('be.exist');
// should exist a visible box_plot element
cy.get('.grid-container .box_plot');
});
it('should send new queries when tab becomes visible', () => {
@ -166,6 +167,7 @@ export default () =>
.last()
.find('.editable-title input')
.click();
cy.wait('@boxplotRequest').then(xhr => {
const requestFormData = xhr.request.body;
const requestParams = JSON.parse(requestFormData.get('form_data'));
@ -190,11 +192,12 @@ export default () =>
// trigger 1 new query
cy.wait('@treemapRequest');
// no other requests occurred
// make sure query API not requested multiple times
cy.on('fail', err => {
expect(err.message).to.include('Timed out retrying');
expect(err.message).to.include('timed out waiting');
return false;
});
cy.wait('@boxplotRequest', { timeout: 1000 }).then(() => {
throw new Error('Unexpected API call.');
});

View File

@ -56,8 +56,8 @@ export default () =>
it('should work', () => {
verify(BIG_NUMBER_FORM_DATA);
cy.get('.chart-container .header_line');
cy.get('.chart-container .subheader_line');
cy.get('.chart-container .header-line');
cy.get('.chart-container .subheader-line');
cy.get('.chart-container svg path.vx-linepath');
});
@ -66,8 +66,8 @@ export default () =>
...BIG_NUMBER_FORM_DATA,
compare_lag: null,
});
cy.get('.chart-container .header_line');
cy.get('.chart-container .subheader_line');
cy.get('.chart-container .header-line');
cy.get('.chart-container .subheader-line').should('not.exist');
cy.get('.chart-container svg path.vx-linepath');
});
@ -76,10 +76,8 @@ export default () =>
...BIG_NUMBER_FORM_DATA,
show_trend_line: false,
});
cy.get('.chart-container .header_line');
cy.get('.chart-container .subheader_line');
cy.get('.chart-container').then(containers => {
expect(containers[0].querySelector('svg')).to.equal(null);
});
cy.get('.chart-container .header-line');
cy.get('.chart-container .subheader-line');
cy.get('.chart-container svg').should('not.exist');
});
});

View File

@ -83,13 +83,11 @@ Cypress.Commands.add('verifyResponseCodes', async xhr => {
Cypress.Commands.add('verifySliceContainer', chartSelector => {
// After a wait response check for valid slice container
cy.get('.slice_container').within(() => {
cy.get('.slice_container').within(async () => {
if (chartSelector) {
cy.get(chartSelector).then(charts => {
const firstChart = charts[0];
expect(firstChart.clientWidth).greaterThan(0);
expect(firstChart.clientHeight).greaterThan(0);
});
const chart = await cy.get(chartSelector);
expect(chart[0].clientWidth).greaterThan(0);
expect(chart[0].clientHeight).greaterThan(0);
}
});
});

File diff suppressed because it is too large Load Diff

View File

@ -9,7 +9,9 @@
"author": "Apache",
"license": "Apache-2.0",
"dependencies": {
"cypress": "^3.6.1",
"shortid": "^2.2.15"
},
"devDependencies": {
"cypress": "^4.3.0"
}
}

View File

@ -17,6 +17,7 @@
* under the License.
*/
const zlib = require('zlib');
// eslint-disable-next-line import/no-extraneous-dependencies
const parsedArgs = require('yargs').argv;

View File

@ -51,7 +51,9 @@ CACHE_CONFIG = {"CACHE_TYPE": "simple"}
class CeleryConfig(object):
BROKER_URL = "redis://localhost"
BROKER_URL = "redis://{}:{}".format(
os.environ.get("REDIS_HOST", "localhost"), os.environ.get("REDIS_PORT", "6379")
)
CELERY_IMPORTS = ("superset.sql_lab",)
CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
CONCURRENCY = 1

View File

@ -19,46 +19,6 @@
import os
from copy import copy
from superset.config import *
AUTH_USER_REGISTRATION_ROLE = "alpha"
SQLALCHEMY_DATABASE_URI = "sqlite:///" + os.path.join(DATA_DIR, "unittests.db")
DEBUG = True
SUPERSET_WEBSERVER_PORT = 8081
# Allowing SQLALCHEMY_DATABASE_URI to be defined as an env var for
# continuous integration
if "SUPERSET__SQLALCHEMY_DATABASE_URI" in os.environ:
SQLALCHEMY_DATABASE_URI = os.environ["SUPERSET__SQLALCHEMY_DATABASE_URI"]
SQL_MAX_ROW = 666
SQLLAB_CTAS_NO_LIMIT = True  # SQL_MAX_ROW will not take effect for the CTA queries
FEATURE_FLAGS = {"foo": "bar"}
def GET_FEATURE_FLAGS_FUNC(ff):
ff_copy = copy(ff)
ff_copy["super"] = "set"
return ff_copy
TESTING = True
SECRET_KEY = "thisismyscretkey"
WTF_CSRF_ENABLED = False
PUBLIC_ROLE_LIKE_GAMMA = True
AUTH_ROLE_PUBLIC = "Public"
EMAIL_NOTIFICATIONS = False
CACHE_CONFIG = {"CACHE_TYPE": "simple"}
class CeleryConfig(object):
BROKER_URL = "redis://localhost"
CELERY_IMPORTS = ("superset.sql_lab",)
CELERY_ANNOTATIONS = {"sql_lab.add": {"rate_limit": "10/s"}}
CONCURRENCY = 1
CELERY_CONFIG = CeleryConfig
from .superset_test_config import *
DEFAULT_FEATURE_FLAGS = {"SQLLAB_BACKEND_PERSISTENCE": True}