feat: improve docker-compose services boot sequence (#31747)

This commit is contained in:
Maxime Beauchemin 2025-01-09 16:24:53 -08:00 committed by GitHub
parent 5f18e849c1
commit 7bd53a84d5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 205 additions and 88 deletions

View File

@ -134,4 +134,4 @@ jobs:
if: steps.check.outputs.docker
shell: bash
run: |
docker compose -f docker-compose-image-tag.yml up --exit-code-from superset-init
docker compose -f docker-compose-image-tag.yml up superset-init --exit-code-from superset-init

View File

@ -22,9 +22,6 @@
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-image: &superset-image apachesuperset.docker.scarf.sh/apache/superset:${TAG:-latest-dev}
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes:
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@ -64,8 +61,12 @@ services:
restart: unless-stopped
ports:
- 8088:8088
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
volumes: *superset-volumes
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-init:
image: *superset-image
@ -76,11 +77,18 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on: *superset-depends-on
depends_on:
db:
condition: service_started
redis:
condition: service_started
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-worker:
image: *superset-image
@ -92,7 +100,9 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: "root"
volumes: *superset-volumes
healthcheck:
@ -101,6 +111,8 @@ services:
"CMD-SHELL",
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
]
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-worker-beat:
image: *superset-image
@ -112,11 +124,15 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
volumes:
superset_home:

View File

@ -21,9 +21,6 @@
# create your own docker environment file (docker/.env) with your own
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes:
&superset-volumes # /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@ -70,8 +67,12 @@ services:
restart: unless-stopped
ports:
- 8088:8088
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
volumes: *superset-volumes
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-init:
container_name: superset_init
@ -83,11 +84,18 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on: *superset-depends-on
depends_on:
db:
condition: service_started
redis:
condition: service_started
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-worker:
build:
@ -100,7 +108,9 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: "root"
volumes: *superset-volumes
healthcheck:
@ -109,6 +119,8 @@ services:
"CMD-SHELL",
"celery -A superset.tasks.celery_app:app inspect ping -d celery@$$HOSTNAME",
]
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-worker-beat:
build:
@ -121,11 +133,15 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: "root"
volumes: *superset-volumes
healthcheck:
disable: true
environment:
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
volumes:
superset_home:

View File

@ -22,9 +22,6 @@
# unique random secure passwords and SECRET_KEY.
# -----------------------------------------------------------------------
x-superset-user: &superset-user root
x-superset-depends-on: &superset-depends-on
- db
- redis
x-superset-volumes: &superset-volumes
# /app/pythonpath_docker will be appended to the PYTHONPATH in the final container
- ./docker:/app/docker
@ -95,10 +92,13 @@ services:
extra_hosts:
- "host.docker.internal:host-gateway"
user: *superset-user
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
volumes: *superset-volumes
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-websocket:
container_name: superset_websocket
@ -143,11 +143,17 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on: *superset-depends-on
depends_on:
db:
condition: service_started
redis:
condition: service_started
user: *superset-user
volumes: *superset-volumes
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOAD_EXAMPLES: "${SUPERSET_LOAD_EXAMPLES:-yes}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
healthcheck:
disable: true
@ -174,7 +180,6 @@ services:
required: true
- path: docker/.env-local # optional override
required: false
depends_on: *superset-depends-on
volumes: *superset-volumes
superset-worker:
@ -189,8 +194,12 @@ services:
required: false
environment:
CELERYD_CONCURRENCY: 2
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: *superset-user
volumes: *superset-volumes
extra_hosts:
@ -212,11 +221,15 @@ services:
- path: docker/.env-local # optional override
required: false
restart: unless-stopped
depends_on: *superset-depends-on
depends_on:
- superset-worker
user: *superset-user
volumes: *superset-volumes
healthcheck:
disable: true
environment:
CYPRESS_CONFIG: "${CYPRESS_CONFIG:-}"
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
superset-tests-worker:
build:
@ -237,8 +250,11 @@ services:
REDIS_RESULTS_DB: 3
REDIS_HOST: localhost
CELERYD_CONCURRENCY: 8
SUPERSET_LOG_LEVEL: "${SUPERSET_LOG_LEVEL:-info}"
network_mode: host
depends_on: *superset-depends-on
depends_on:
superset-init:
condition: service_completed_successfully
user: *superset-user
volumes: *superset-volumes
healthcheck:

View File

@ -66,3 +66,4 @@ SUPERSET_SECRET_KEY=TEST_NON_DEV_SECRET
ENABLE_PLAYWRIGHT=false
PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
BUILD_SUPERSET_FRONTEND_IN_DOCKER=true
SUPERSET_LOG_LEVEL=info

View File

@ -76,7 +76,7 @@ if [ "$SUPERSET_LOAD_EXAMPLES" = "yes" ]; then
superset load_test_users
superset load_examples --load-test-data
else
superset load_examples --force
superset load_examples
fi
echo_step "4" "Complete" "Loading examples"
fi

View File

@ -104,6 +104,9 @@ WEBDRIVER_BASEURL = "http://superset:8088/" # When using docker compose baseurl
WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
SQLLAB_CTAS_NO_LIMIT = True
log_level_text = os.getenv("SUPERSET_LOG_LEVEL", "INFO")
LOG_LEVEL = getattr(logging, log_level_text.upper(), logging.INFO)
#
# Optionally import superset_config_docker.py (which will have been included on
# the PYTHONPATH) in order to allow for local settings to be overridden

View File

@ -72,6 +72,7 @@ documentation.
configured to be secure.
:::
### Supported environment variables
Affecting the Docker build process:
@ -79,6 +80,10 @@ Affecting the Docker build process:
- **INCLUDE_FIREFOX (default=false):** whether to include the Firefox headless browser in the build
- **INCLUDE_CHROMIUM (default=false):** whether to include the Chromium headless browser in the build
- **BUILD_TRANSLATIONS (default=false):** whether to compile the translations from the .po files available
- **SUPERSET_LOAD_EXAMPLES (default=yes):** whether to load the examples into the database upon startup;
  you can save some precious startup time with `SUPERSET_LOAD_EXAMPLES=no docker compose up`
- **SUPERSET_LOG_LEVEL (default=info)**: can be set to debug, info, warning, error, or critical
  to control logging verbosity
For more env vars that affect your configuration, see this
[superset_config.py](https://github.com/apache/superset/blob/master/docker/pythonpath_dev/superset_config.py)

View File

@ -32,10 +32,10 @@ def load_examples_run(
force: bool = False,
) -> None:
if only_metadata:
print("Loading examples metadata")
logger.info("Loading examples metadata")
else:
examples_db = database_utils.get_example_database()
print(f"Loading examples metadata and related data into {examples_db}")
logger.info(f"Loading examples metadata and related data into {examples_db}")
# pylint: disable=import-outside-toplevel
import superset.examples.data_loading as examples
@ -43,45 +43,45 @@ def load_examples_run(
examples.load_css_templates()
if load_test_data:
print("Loading energy related dataset")
logger.info("Loading energy related dataset")
examples.load_energy(only_metadata, force)
print("Loading [World Bank's Health Nutrition and Population Stats]")
logger.info("Loading [World Bank's Health Nutrition and Population Stats]")
examples.load_world_bank_health_n_pop(only_metadata, force)
print("Loading [Birth names]")
logger.info("Loading [Birth names]")
examples.load_birth_names(only_metadata, force)
if load_test_data:
print("Loading [Tabbed dashboard]")
logger.info("Loading [Tabbed dashboard]")
examples.load_tabbed_dashboard(only_metadata)
print("Loading [Supported Charts Dashboard]")
logger.info("Loading [Supported Charts Dashboard]")
examples.load_supported_charts_dashboard()
else:
print("Loading [Random long/lat data]")
logger.info("Loading [Random long/lat data]")
examples.load_long_lat_data(only_metadata, force)
print("Loading [Country Map data]")
logger.info("Loading [Country Map data]")
examples.load_country_map_data(only_metadata, force)
print("Loading [San Francisco population polygons]")
logger.info("Loading [San Francisco population polygons]")
examples.load_sf_population_polygons(only_metadata, force)
print("Loading [Flights data]")
logger.info("Loading [Flights data]")
examples.load_flights(only_metadata, force)
print("Loading [BART lines]")
logger.info("Loading [BART lines]")
examples.load_bart_lines(only_metadata, force)
print("Loading [Misc Charts] dashboard")
logger.info("Loading [Misc Charts] dashboard")
examples.load_misc_dashboard()
print("Loading DECK.gl demo")
logger.info("Loading DECK.gl demo")
examples.load_deck_dash()
if load_big_data:
print("Loading big synthetic data for tests")
logger.info("Loading big synthetic data for tests")
examples.load_big_data()
# load examples that are stored as YAML config files

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import pandas as pd
import polyline
@ -26,6 +27,8 @@ from superset.utils import json
from ..utils.database import get_example_database
from .helpers import get_example_url, get_table_connector_registry
logger = logging.getLogger(__name__)
def load_bart_lines(only_metadata: bool = False, force: bool = False) -> None:
tbl_name = "bart_lines"
@ -56,7 +59,7 @@ def load_bart_lines(only_metadata: bool = False, force: bool = False) -> None:
index=False,
)
print(f"Creating table {tbl_name} reference")
logger.debug(f"Creating table {tbl_name} reference")
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
if not tbl:

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import random
import string
@ -21,6 +22,8 @@ import sqlalchemy.sql.sqltypes
from superset.utils.mock_data import add_data, ColumnInfo
logger = logging.getLogger(__name__)
COLUMN_TYPES = [
sqlalchemy.sql.sqltypes.INTEGER(),
sqlalchemy.sql.sqltypes.VARCHAR(length=255),
@ -34,7 +37,7 @@ COLUMN_TYPES = [
def load_big_data() -> None:
print("Creating table `wide_table` with 100 columns")
logger.debug("Creating table `wide_table` with 100 columns")
columns: list[ColumnInfo] = []
for i in range(100):
column: ColumnInfo = {
@ -48,7 +51,7 @@ def load_big_data() -> None:
columns.append(column)
add_data(columns=columns, num_rows=1000, table_name="wide_table")
print("Creating 1000 small tables")
logger.debug("Creating 1000 small tables")
columns = [
{
"name": "id",
@ -70,6 +73,6 @@ def load_big_data() -> None:
for i in range(1000):
add_data(columns=columns, num_rows=10, table_name=f"small_table_{i}")
print("Creating table with long name")
logger.debug("Creating table with long name")
name = "".join(random.choices(string.ascii_letters + string.digits, k=60)) # noqa: S311
add_data(columns=columns, num_rows=10, table_name=name)

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import textwrap
from typing import Union
@ -40,6 +41,8 @@ from .helpers import (
update_slice_ids,
)
logger = logging.getLogger(__name__)
def gen_filter(
subject: str, comparator: str, operator: str = "=="
@ -83,8 +86,8 @@ def load_data(tbl_name: str, database: Database, sample: bool = False) -> None:
method="multi",
index=False,
)
print("Done loading table!")
print("-" * 80)
logger.debug("Done loading table!")
logger.debug("-" * 80)
def load_birth_names(
@ -104,7 +107,7 @@ def load_birth_names(
table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name, schema=schema).first()
if not obj:
print(f"Creating table [{tbl_name}] reference")
logger.debug(f"Creating table [{tbl_name}] reference")
obj = table(table_name=tbl_name, schema=schema)
db.session.add(obj)
@ -196,7 +199,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
"datasource_type": DatasourceType.TABLE,
}
print("Creating some slices")
logger.debug("Creating some slices")
slices = [
Slice(
**slice_kwargs,
@ -563,7 +566,7 @@ def create_slices(tbl: SqlaTable) -> tuple[list[Slice], list[Slice]]:
def create_dashboard(slices: list[Slice]) -> Dashboard:
print("Creating a dashboard")
logger.debug("Creating a dashboard")
dash = db.session.query(Dashboard).filter_by(slug="births").first()
if not dash:
dash = Dashboard()

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import datetime
import logging
import pandas as pd
from sqlalchemy import BigInteger, Date, inspect, String
@ -35,6 +36,8 @@ from .helpers import (
misc_dash_slices,
)
logger = logging.getLogger(__name__)
def load_country_map_data(only_metadata: bool = False, force: bool = False) -> None:
"""Loading data for map with country map"""
@ -73,10 +76,10 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N
},
index=False,
)
print("Done loading table!")
print("-" * 80)
logger.debug("Done loading table!")
logger.debug("-" * 80)
print("Creating table reference")
logger.debug("Creating table reference")
table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name).first()
if not obj:
@ -108,7 +111,7 @@ def load_country_map_data(only_metadata: bool = False, force: bool = False) -> N
"select_country": "france",
}
print("Creating a slice")
logger.debug("Creating a slice")
slc = Slice(
slice_name="Birth in France by department in 2016",
viz_type="country_map",

View File

@ -14,15 +14,18 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import textwrap
from superset import db
from superset.models.core import CssTemplate
logger = logging.getLogger(__name__)
def load_css_templates() -> None:
"""Loads 2 css templates to demonstrate the feature"""
print("Creating default CSS templates")
logger.debug("Creating default CSS templates")
obj = db.session.query(CssTemplate).filter_by(template_name="Flat").first()
if not obj:

View File

@ -15,6 +15,8 @@
# specific language governing permissions and limitations
# under the License.
import logging
from superset import db
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
@ -28,6 +30,8 @@ from .helpers import (
update_slice_ids,
)
logger = logging.getLogger(__name__)
COLOR_RED = {"r": 205, "g": 0, "b": 3, "a": 0.82}
POSITION_JSON = """\
{
@ -180,7 +184,7 @@ POSITION_JSON = """\
def load_deck_dash() -> None: # pylint: disable=too-many-statements
print("Loading deck.gl dashboard")
logger.debug("Loading deck.gl dashboard")
slices = []
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name="long_lat").first()
@ -210,7 +214,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"viz_type": "deck_scatter",
}
print("Creating Scatterplot slice")
logger.debug("Creating Scatterplot slice")
slc = Slice(
slice_name="Deck.gl Scatterplot",
viz_type="deck_scatter",
@ -245,7 +249,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"time_grain_sqla": None,
"groupby": [],
}
print("Creating Screen Grid slice")
logger.debug("Creating Screen Grid slice")
slc = Slice(
slice_name="Deck.gl Screen grid",
viz_type="deck_screengrid",
@ -281,7 +285,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"time_grain_sqla": None,
"groupby": [],
}
print("Creating Hex slice")
logger.debug("Creating Hex slice")
slc = Slice(
slice_name="Deck.gl Hexagons",
viz_type="deck_hex",
@ -318,7 +322,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"time_grain_sqla": None,
"groupby": [],
}
print("Creating Grid slice")
logger.debug("Creating Grid slice")
slc = Slice(
slice_name="Deck.gl Grid",
viz_type="deck_grid",
@ -409,7 +413,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"legend_position": "tr",
}
print("Creating Polygon slice")
logger.debug("Creating Polygon slice")
slc = Slice(
slice_name="Deck.gl Polygons",
viz_type="deck_polygon",
@ -459,7 +463,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"stroke_width": 1,
}
print("Creating Arc slice")
logger.debug("Creating Arc slice")
slc = Slice(
slice_name="Deck.gl Arcs",
viz_type="deck_arc",
@ -511,7 +515,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
"js_onclick_href": "",
}
print("Creating Path slice")
logger.debug("Creating Path slice")
slc = Slice(
slice_name="Deck.gl Path",
viz_type="deck_path",
@ -526,7 +530,7 @@ def load_deck_dash() -> None: # pylint: disable=too-many-statements
slices.append(slc)
slug = "deck"
print("Creating a dashboard")
logger.debug("Creating a dashboard")
title = "deck.gl Demo"
dash = db.session.query(Dashboard).filter_by(slug=slug).first()

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import textwrap
import pandas as pd
@ -35,6 +36,8 @@ from .helpers import (
misc_dash_slices,
)
logger = logging.getLogger(__name__)
def load_energy(
only_metadata: bool = False, force: bool = False, sample: bool = False
@ -62,7 +65,7 @@ def load_energy(
method="multi",
)
print("Creating table [wb_health_population] reference")
logger.debug("Creating table [wb_health_population] reference")
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
if not tbl:

View File

@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import pandas as pd
from sqlalchemy import DateTime, inspect
@ -23,6 +25,8 @@ from superset.sql_parse import Table
from .helpers import get_example_url, get_table_connector_registry
logger = logging.getLogger(__name__)
def load_flights(only_metadata: bool = False, force: bool = False) -> None:
"""Loading random time series data from a zip file in the repo"""
@ -67,4 +71,4 @@ def load_flights(only_metadata: bool = False, force: bool = False) -> None:
tbl.database = database
tbl.filter_select_enabled = True
tbl.fetch_metadata()
print("Done loading table!")
logger.debug("Done loading table!")

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
import datetime
import logging
import random
import geohash
@ -35,6 +36,8 @@ from .helpers import (
misc_dash_slices,
)
logger = logging.getLogger(__name__)
def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None:
"""Loading lat/long data from a csv file in the repo"""
@ -85,10 +88,10 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None
},
index=False,
)
print("Done loading table!")
print("-" * 80)
logger.debug("Done loading table!")
logger.debug("-" * 80)
print("Creating table reference")
logger.debug("Creating table reference")
table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name).first()
if not obj:
@ -112,7 +115,7 @@ def load_long_lat_data(only_metadata: bool = False, force: bool = False) -> None
"row_limit": 500000,
}
print("Creating a slice")
logger.debug("Creating a slice")
slc = Slice(
slice_name="Mapbox Long/Lat",
viz_type="mapbox",

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import textwrap
from superset import db
@ -22,13 +23,15 @@ from superset.utils import json
from .helpers import update_slice_ids
logger = logging.getLogger(__name__)
DASH_SLUG = "misc_charts"
def load_misc_dashboard() -> None:
"""Loading a dashboard featuring misc charts"""
print("Creating the dashboard")
logger.debug("Creating the dashboard")
db.session.expunge_all()
dash = db.session.query(Dashboard).filter_by(slug=DASH_SLUG).first()

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from typing import Optional
import pandas as pd
@ -33,6 +34,8 @@ from .helpers import (
misc_dash_slices,
)
logger = logging.getLogger(__name__)
def load_multiformat_time_series( # pylint: disable=too-many-locals
only_metadata: bool = False, force: bool = False
@ -75,10 +78,10 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals
},
index=False,
)
print("Done loading table!")
print("-" * 80)
logger.debug("Done loading table!")
logger.debug("-" * 80)
print(f"Creating table [{tbl_name}] reference")
logger.debug(f"Creating table [{tbl_name}] reference")
table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name).first()
if not obj:
@ -105,7 +108,7 @@ def load_multiformat_time_series( # pylint: disable=too-many-locals
obj.fetch_metadata()
tbl = obj
print("Creating Heatmap charts")
logger.debug("Creating Heatmap charts")
for i, col in enumerate(tbl.columns):
slice_data = {
"metrics": ["count"],

View File

@ -15,6 +15,8 @@
# specific language governing permissions and limitations
# under the License.
import logging
import pandas as pd
from sqlalchemy import inspect, String, Text
@ -25,6 +27,8 @@ from superset.utils import json
from .helpers import get_example_url, get_table_connector_registry
logger = logging.getLogger(__name__)
def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) -> None:
tbl_name = "paris_iris_mapping"
@ -53,7 +57,7 @@ def load_paris_iris_geojson(only_metadata: bool = False, force: bool = False) ->
index=False,
)
print(f"Creating table {tbl_name} reference")
logger.debug(f"Creating table {tbl_name} reference")
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
if not tbl:

View File

@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import pandas as pd
from sqlalchemy import DateTime, inspect, String
@ -30,6 +32,8 @@ from .helpers import (
merge_slice,
)
logger = logging.getLogger(__name__)
def load_random_time_series_data(
only_metadata: bool = False, force: bool = False
@ -59,10 +63,10 @@ def load_random_time_series_data(
dtype={"ds": DateTime if database.backend != "presto" else String(255)},
index=False,
)
print("Done loading table!")
print("-" * 80)
logger.debug("Done loading table!")
logger.debug("-" * 80)
print(f"Creating table [{tbl_name}] reference")
logger.debug(f"Creating table [{tbl_name}] reference")
table = get_table_connector_registry()
obj = db.session.query(table).filter_by(table_name=tbl_name).first()
if not obj:
@ -85,7 +89,7 @@ def load_random_time_series_data(
"subdomain_granularity": "day",
}
print("Creating a slice")
logger.debug("Creating a slice")
slc = Slice(
slice_name="Calendar Heatmap",
viz_type="cal_heatmap",

View File

@ -15,6 +15,8 @@
# specific language governing permissions and limitations
# under the License.
import logging
import pandas as pd
from sqlalchemy import BigInteger, Float, inspect, Text
@ -25,6 +27,8 @@ from superset.utils import json
from .helpers import get_example_url, get_table_connector_registry
logger = logging.getLogger(__name__)
def load_sf_population_polygons(
only_metadata: bool = False, force: bool = False
@ -55,7 +59,7 @@ def load_sf_population_polygons(
index=False,
)
print(f"Creating table {tbl_name} reference")
logger.debug(f"Creating table {tbl_name} reference")
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
if not tbl:

View File

@ -15,6 +15,7 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
import logging
import textwrap
from sqlalchemy import inspect
@ -36,6 +37,7 @@ from .helpers import (
)
DASH_SLUG = "supported_charts_dash"
logger = logging.getLogger(__name__)
def create_slices(tbl: SqlaTable) -> list[Slice]:
@ -445,7 +447,7 @@ def load_supported_charts_dashboard() -> None:
)
create_slices(obj)
print("Creating the dashboard")
logger.debug("Creating the dashboard")
db.session.expunge_all()
dash = db.session.query(Dashboard).filter_by(slug=DASH_SLUG).first()

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import textwrap
from superset import db
@ -22,11 +23,13 @@ from superset.utils import json
from .helpers import update_slice_ids
logger = logging.getLogger(__name__)
def load_tabbed_dashboard(_: bool = False) -> None:
"""Creating a tabbed dashboard"""
print("Creating a dashboard with nested tabs")
logger.debug("Creating a dashboard with nested tabs")
slug = "tabbed_dash"
dash = db.session.query(Dashboard).filter_by(slug=slug).first()

View File

@ -14,6 +14,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import os
import pandas as pd
@ -38,6 +39,8 @@ from superset.sql_parse import Table
from superset.utils import core as utils, json
from superset.utils.core import DatasourceType
logger = logging.getLogger(__name__)
def load_world_bank_health_n_pop( # pylint: disable=too-many-locals
only_metadata: bool = False,
@ -79,7 +82,7 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals
index=False,
)
print("Creating table [wb_health_population] reference")
logger.debug("Creating table [wb_health_population] reference")
table = get_table_connector_registry()
tbl = db.session.query(table).filter_by(table_name=tbl_name).first()
if not tbl:
@ -115,7 +118,7 @@ def load_world_bank_health_n_pop( # pylint: disable=too-many-locals
for slc in slices:
merge_slice(slc)
print("Creating a World's Health Bank dashboard")
logger.debug("Creating a World's Health Bank dashboard")
dash_name = "World Bank's Data"
slug = "world_health"
dash = db.session.query(Dashboard).filter_by(slug=slug).first()

View File

@ -335,13 +335,13 @@ class ImportExportMixin(UUIDMixin):
is_new_obj = True
# Create new DB object
obj = cls(**dict_rep)
logger.info("Importing new %s %s", obj.__tablename__, str(obj))
logger.debug("Importing new %s %s", obj.__tablename__, str(obj))
if cls.export_parent and parent:
setattr(obj, cls.export_parent, parent)
db.session.add(obj)
else:
is_new_obj = False
logger.info("Updating %s %s", obj.__tablename__, str(obj))
logger.debug("Updating %s %s", obj.__tablename__, str(obj))
# Update columns
for k, v in dict_rep.items():
setattr(obj, k, v)
@ -372,7 +372,7 @@ class ImportExportMixin(UUIDMixin):
db.session.query(child_class).filter(and_(*delete_filters))
).difference(set(added))
for o in to_delete:
logger.info("Deleting %s %s", child, str(obj))
logger.debug("Deleting %s %s", child, str(obj))
db.session.delete(o)
return obj

View File

@ -22,7 +22,12 @@ from flask import Flask
from flask_babel import lazy_gettext as _
from sqlalchemy import text, TypeDecorator
from sqlalchemy.engine import Connection, Dialect, Row
from sqlalchemy_utils import EncryptedType
from sqlalchemy_utils import EncryptedType as SqlaEncryptedType
class EncryptedType(SqlaEncryptedType):
cache_ok = True
ENC_ADAPTER_TAG_ATTR_NAME = "__created_by_enc_field_adapter__"
logger = logging.getLogger(__name__)