feat(report): capture dashboard reports in specific states (#20552)

This commit is contained in:
Jesse Yang 2022-07-26 17:01:56 -07:00 committed by GitHub
parent 1b577d15c1
commit 4d192e6e4d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
37 changed files with 598 additions and 368 deletions

View File

@ -31,8 +31,10 @@ logger = logging.getLogger(__name__)
class CreateDashboardPermalinkCommand(BaseDashboardPermalinkCommand):
"""
Get or create a permalink key for the given dashboard in certain state.
Will reuse the key for the same user and dashboard state.
Get or create a permalink key for the dashboard.
The same dashboard_id and state for the same user will return the
same permalink.
"""
def __init__(

View File

@ -31,7 +31,7 @@ from sqlalchemy import Column, Float, Integer, String, Text
from sqlalchemy.ext.declarative import declarative_base
from superset import db
from superset.models.reports import ReportState
from superset.reports.models import ReportState
Base = declarative_base()

View File

@ -0,0 +1,53 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""rename report_schedule.extra to extra_json
So we can reuse the ExtraJSONMixin
Revision ID: ffa79af61a56
Revises: 409c7b420ab0
Create Date: 2022-07-11 11:26:00.010714
"""
# revision identifiers, used by Alembic.
revision = "ffa79af61a56"
down_revision = "409c7b420ab0"
from alembic import op
from sqlalchemy.types import Text
def upgrade():
    """Rename ``report_schedule.extra`` to ``extra_json`` so the table can
    reuse ``ExtraJSONMixin``."""
    # MySQL requires the existing type and nullability to be restated
    # when a column is renamed.
    column_info = {
        "new_column_name": "extra_json",
        "existing_type": Text,
        "existing_nullable": True,
    }
    op.alter_column("report_schedule", "extra", **column_info)
def downgrade():
    """Revert the rename: ``report_schedule.extra_json`` back to ``extra``."""
    # Existing type/nullability info is required for MySQL column renames.
    column_info = {
        "new_column_name": "extra",
        "existing_type": Text,
        "existing_nullable": True,
    }
    op.alter_column("report_schedule", "extra_json", **column_info)

View File

@ -438,13 +438,15 @@ class Dashboard(Model, AuditMixinNullable, ImportExportMixin):
)
@classmethod
def get(cls, id_or_slug: Union[str, int]) -> Dashboard:
    """Look up a single dashboard by numeric id or slug.

    Returns ``None`` (via ``one_or_none``) when no dashboard matches.
    """
    return (
        db.session()
        .query(Dashboard)
        .filter(id_or_slug_filter(id_or_slug))
        .one_or_none()
    )
def id_or_slug_filter(id_or_slug: Union[int, str]) -> BinaryExpression:
    """Build a SQLAlchemy filter matching a dashboard by id or slug.

    Integers (and all-digit strings, as arrive from URL path params)
    match on ``Dashboard.id``; any other string matches on
    ``Dashboard.slug``.
    """
    if isinstance(id_or_slug, int):
        return Dashboard.id == id_or_slug
    # A string may still carry a numeric id (e.g. from a URL segment).
    return (
        Dashboard.id == int(id_or_slug)
        if id_or_slug.isdigit()
        else Dashboard.slug == id_or_slug
    )

View File

@ -55,7 +55,7 @@ from flask_babel import lazy_gettext as _
from jinja2.exceptions import TemplateError
from sqlalchemy import and_, Column, or_, UniqueConstraint
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import Mapper, Session
from sqlalchemy.orm import Mapper, Session, validates
from sqlalchemy.orm.exc import MultipleResultsFound
from sqlalchemy.sql.elements import ColumnElement, literal_column, TextClause
from sqlalchemy.sql.expression import Label, Select, TextAsFrom
@ -567,14 +567,15 @@ class ExtraJSONMixin:
@property
def extra(self) -> Dict[str, Any]:
try:
return json.loads(self.extra_json) if self.extra_json else {}
return json.loads(self.extra_json or "{}") or {}
except (TypeError, JSONDecodeError) as exc:
logger.error(
"Unable to load an extra json: %r. Leaving empty.", exc, exc_info=True
)
return {}
def set_extra_json(self, extras: Dict[str, Any]) -> None:
@extra.setter
def extra(self, extras: Dict[str, Any]) -> None:
self.extra_json = json.dumps(extras)
def set_extra_json_key(self, key: str, value: Any) -> None:
@ -582,6 +583,16 @@ class ExtraJSONMixin:
extra[key] = value
self.extra_json = json.dumps(extra)
@validates("extra_json")
def ensure_extra_json_is_not_none(  # pylint: disable=no-self-use
    self,
    _: str,
    value: Optional[Dict[str, Any]],
) -> Any:
    """Coerce a ``None`` value into an empty JSON object string so the
    ``extra_json`` column never stores NULL."""
    return "{}" if value is None else value
class CertificationMixin:
"""Mixin to add extra certification fields"""

View File

@ -30,7 +30,6 @@ from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.dashboards.filters import DashboardAccessFilter
from superset.databases.filters import DatabaseFilter
from superset.extensions import event_logger
from superset.models.reports import ReportSchedule
from superset.reports.commands.bulk_delete import BulkDeleteReportScheduleCommand
from superset.reports.commands.create import CreateReportScheduleCommand
from superset.reports.commands.delete import DeleteReportScheduleCommand
@ -45,6 +44,7 @@ from superset.reports.commands.exceptions import (
)
from superset.reports.commands.update import UpdateReportScheduleCommand
from superset.reports.filters import ReportScheduleAllTextFilter
from superset.reports.models import ReportSchedule
from superset.reports.schemas import (
get_delete_ids_schema,
openapi_spec_methods_override,
@ -94,6 +94,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi):
"database.database_name",
"database.id",
"description",
"extra",
"force_screenshot",
"grace_period",
"last_eval_dttm",
@ -135,6 +136,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi):
"crontab_humanized",
"dashboard_id",
"description",
"extra",
"id",
"last_eval_dttm",
"last_state",
@ -156,6 +158,7 @@ class ReportScheduleRestApi(BaseSupersetModelRestApi):
"dashboard",
"database",
"description",
"extra",
"force_screenshot",
"grace_period",
"log_retention",

View File

@ -27,7 +27,6 @@ from flask_babel import lazy_gettext as _
from superset import app, jinja_context, security_manager
from superset.commands.base import BaseCommand
from superset.models.reports import ReportSchedule, ReportScheduleValidatorType
from superset.reports.commands.exceptions import (
AlertQueryError,
AlertQueryInvalidTypeError,
@ -36,6 +35,7 @@ from superset.reports.commands.exceptions import (
AlertQueryTimeout,
AlertValidatorConfigError,
)
from superset.reports.models import ReportSchedule, ReportScheduleValidatorType
from superset.utils.core import override_user
from superset.utils.retries import retry_call

View File

@ -22,14 +22,15 @@ from marshmallow import ValidationError
from superset.charts.dao import ChartDAO
from superset.commands.base import BaseCommand
from superset.dashboards.dao import DashboardDAO
from superset.models.reports import ReportCreationMethod
from superset.reports.commands.exceptions import (
ChartNotFoundValidationError,
ChartNotSavedValidationError,
DashboardNotFoundValidationError,
DashboardNotSavedValidationError,
ReportScheduleChartOrDashboardValidationError,
ReportScheduleEitherChartOrDashboardError,
ReportScheduleOnlyChartOrDashboardError,
)
from superset.reports.models import ReportCreationMethod
logger = logging.getLogger(__name__)
@ -62,7 +63,8 @@ class BaseReportScheduleCommand(BaseCommand):
return
if chart_id and dashboard_id:
exceptions.append(ReportScheduleChartOrDashboardValidationError())
exceptions.append(ReportScheduleOnlyChartOrDashboardError())
if chart_id:
chart = ChartDAO.find_by_id(chart_id)
if not chart:
@ -74,4 +76,4 @@ class BaseReportScheduleCommand(BaseCommand):
exceptions.append(DashboardNotFoundValidationError())
self._properties["dashboard"] = dashboard
elif not update:
exceptions.append(ReportScheduleChartOrDashboardValidationError())
exceptions.append(ReportScheduleEitherChartOrDashboardError())

View File

@ -21,13 +21,13 @@ from superset import security_manager
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.exceptions import SupersetSecurityException
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
ReportScheduleBulkDeleteFailedError,
ReportScheduleForbiddenError,
ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO
from superset.reports.models import ReportSchedule
logger = logging.getLogger(__name__)

View File

@ -18,13 +18,12 @@ import json
import logging
from typing import Any, Dict, List, Optional
from flask_appbuilder.models.sqla import Model
from flask_babel import gettext as _
from marshmallow import ValidationError
from superset.commands.base import CreateMixin
from superset.dao.exceptions import DAOCreateFailedError
from superset.databases.dao import DatabaseDAO
from superset.models.reports import ReportCreationMethod, ReportScheduleType
from superset.reports.commands.base import BaseReportScheduleCommand
from superset.reports.commands.exceptions import (
DatabaseNotFoundValidationError,
@ -36,6 +35,12 @@ from superset.reports.commands.exceptions import (
ReportScheduleRequiredTypeValidationError,
)
from superset.reports.dao import ReportScheduleDAO
from superset.reports.models import (
ReportCreationMethod,
ReportSchedule,
ReportScheduleType,
)
from superset.reports.types import ReportScheduleExtra
logger = logging.getLogger(__name__)
@ -44,7 +49,7 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
def __init__(self, data: Dict[str, Any]):
self._properties = data.copy()
def run(self) -> Model:
def run(self) -> ReportSchedule:
self.validate()
try:
report_schedule = ReportScheduleDAO.create(self._properties)
@ -117,20 +122,26 @@ class CreateReportScheduleCommand(CreateMixin, BaseReportScheduleCommand):
raise exception
def _validate_report_extra(self, exceptions: List[ValidationError]) -> None:
    """Validate the ``extra`` payload of a dashboard report schedule.

    Checks that every tab id referenced by the saved dashboard state
    (``activeTabs`` plus the optional ``anchor``) exists in the
    dashboard's ``position_json``. Any invalid ids are reported by
    appending a ``ValidationError`` to *exceptions*.
    """
    extra: Optional[ReportScheduleExtra] = self._properties.get("extra")
    dashboard = self._properties.get("dashboard")
    if extra is None or dashboard is None:
        return

    dashboard_state = extra.get("dashboard")
    if not dashboard_state:
        return

    # position_json may be NULL for dashboards that were never laid out.
    position_data = json.loads(dashboard.position_json or "{}")
    active_tabs = dashboard_state.get("activeTabs") or []
    anchor = dashboard_state.get("anchor")
    invalid_tab_ids = set(active_tabs) - set(position_data.keys())
    if anchor and anchor not in position_data:
        invalid_tab_ids.add(anchor)
    if invalid_tab_ids:
        exceptions.append(
            ValidationError(
                # gettext interpolation uses %(name)s placeholders; the
                # previous "%s(tab_ids)" form rendered the message garbled.
                _("Invalid tab ids: %(tab_ids)s", tab_ids=str(invalid_tab_ids)),
                "extra",
            )
        )

View File

@ -23,13 +23,13 @@ from superset import security_manager
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.exceptions import SupersetSecurityException
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import (
ReportScheduleDeleteFailedError,
ReportScheduleForbiddenError,
ReportScheduleNotFoundError,
)
from superset.reports.dao import ReportScheduleDAO
from superset.reports.models import ReportSchedule
logger = logging.getLogger(__name__)

View File

@ -24,7 +24,7 @@ from superset.commands.exceptions import (
ForbiddenError,
ValidationError,
)
from superset.models.reports import ReportScheduleType
from superset.reports.models import ReportScheduleType
class DatabaseNotFoundValidationError(ValidationError):
@ -72,7 +72,7 @@ class ReportScheduleRequiredTypeValidationError(ValidationError):
super().__init__(_("Type is required"), field_name="type")
class ReportScheduleChartOrDashboardValidationError(ValidationError):
class ReportScheduleOnlyChartOrDashboardError(ValidationError):
"""
Marshmallow validation error for a report schedule that accepts exclusively a chart or a dashboard, not both
"""
@ -81,6 +81,17 @@ class ReportScheduleChartOrDashboardValidationError(ValidationError):
super().__init__(_("Choose a chart or dashboard not both"), field_name="chart")
class ReportScheduleEitherChartOrDashboardError(ValidationError):
    """
    Marshmallow validation error raised when a report schedule is missing
    both a dashboard id and a chart id.
    """

    def __init__(self) -> None:
        message = _("Must choose either a chart or a dashboard")
        super().__init__(message, field_name="chart")
class ChartNotSavedValidationError(ValidationError):
"""
Marshmallow validation error for charts that haven't been saved yet

View File

@ -17,7 +17,7 @@
import json
import logging
from datetime import datetime, timedelta
from typing import Any, List, Optional
from typing import Any, List, Optional, Union
from uuid import UUID
import pandas as pd
@ -29,16 +29,10 @@ from superset import app, security_manager
from superset.commands.base import BaseCommand
from superset.commands.exceptions import CommandException
from superset.common.chart_data import ChartDataResultFormat, ChartDataResultType
from superset.extensions import feature_flag_manager, machine_auth_provider_factory
from superset.models.reports import (
ReportDataFormat,
ReportExecutionLog,
ReportRecipients,
ReportRecipientType,
ReportSchedule,
ReportScheduleType,
ReportState,
from superset.dashboards.permalink.commands.create import (
CreateDashboardPermalinkCommand,
)
from superset.extensions import feature_flag_manager, machine_auth_provider_factory
from superset.reports.commands.alert import AlertCommand
from superset.reports.commands.exceptions import (
ReportScheduleAlertGracePeriodError,
@ -61,16 +55,21 @@ from superset.reports.dao import (
REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER,
ReportScheduleDAO,
)
from superset.reports.models import (
ReportDataFormat,
ReportExecutionLog,
ReportRecipients,
ReportRecipientType,
ReportSchedule,
ReportScheduleType,
ReportState,
)
from superset.reports.notifications import create_notification
from superset.reports.notifications.base import NotificationContent
from superset.reports.notifications.exceptions import NotificationError
from superset.utils.celery import session_scope
from superset.utils.csv import get_chart_csv_data, get_chart_dataframe
from superset.utils.screenshots import (
BaseScreenshot,
ChartScreenshot,
DashboardScreenshot,
)
from superset.utils.screenshots import ChartScreenshot, DashboardScreenshot
from superset.utils.urls import get_url_path
from superset.utils.webdriver import DashboardStandaloneMode
@ -174,6 +173,16 @@ class BaseReportState:
force=force,
**kwargs,
)
# If we need to render the dashboard in a specific state, use a stateful permalink
dashboard_state = self._report_schedule.extra.get("dashboard")
if dashboard_state:
permalink_key = CreateDashboardPermalinkCommand(
dashboard_id=self._report_schedule.dashboard_id,
state=dashboard_state,
).run()
return get_url_path("Superset.dashboard_permalink", key=permalink_key)
return get_url_path(
"Superset.dashboard",
user_friendly=user_friendly,
@ -197,53 +206,34 @@ class BaseReportState:
Get chart or dashboard screenshots
:raises: ReportScheduleScreenshotFailedError
"""
image_data = []
screenshots: List[BaseScreenshot] = []
if self._report_schedule.chart:
url = self._get_url()
logger.info("Screenshotting chart at %s", url)
screenshots = [
ChartScreenshot(
url,
self._report_schedule.chart.digest,
window_size=app.config["WEBDRIVER_WINDOW"]["slice"],
thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"],
)
]
else:
tabs: Optional[List[str]] = json.loads(self._report_schedule.extra).get(
"dashboard_tab_ids", None
)
dashboard_base_url = self._get_url()
if tabs is None:
urls = [dashboard_base_url]
else:
urls = [f"{dashboard_base_url}#{tab_id}" for tab_id in tabs]
screenshots = [
DashboardScreenshot(
url,
self._report_schedule.dashboard.digest,
window_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
)
for url in urls
]
url = self._get_url()
user = self._get_user()
for screenshot in screenshots:
try:
image = screenshot.get_screenshot(user=user)
except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while taking a screenshot.")
raise ReportScheduleScreenshotTimeout() from ex
except Exception as ex:
raise ReportScheduleScreenshotFailedError(
f"Failed taking a screenshot {str(ex)}"
) from ex
if image is not None:
image_data.append(image)
if not image_data:
if self._report_schedule.chart:
screenshot: Union[ChartScreenshot, DashboardScreenshot] = ChartScreenshot(
url,
self._report_schedule.chart.digest,
window_size=app.config["WEBDRIVER_WINDOW"]["slice"],
thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"],
)
else:
screenshot = DashboardScreenshot(
url,
self._report_schedule.dashboard.digest,
window_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
)
try:
image = screenshot.get_screenshot(user=user)
except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while taking a screenshot.")
raise ReportScheduleScreenshotTimeout() from ex
except Exception as ex:
raise ReportScheduleScreenshotFailedError(
f"Failed taking a screenshot {str(ex)}"
) from ex
if not image:
raise ReportScheduleScreenshotFailedError()
return image_data
return [image]
def _get_csv_data(self) -> bytes:
url = self._get_url(result_format=ChartDataResultFormat.CSV)

View File

@ -19,9 +19,9 @@ from datetime import datetime, timedelta
from superset.commands.base import BaseCommand
from superset.dao.exceptions import DAODeleteFailedError
from superset.models.reports import ReportSchedule
from superset.reports.commands.exceptions import ReportSchedulePruneLogError
from superset.reports.dao import ReportScheduleDAO
from superset.reports.models import ReportSchedule
from superset.utils.celery import session_scope
logger = logging.getLogger(__name__)

View File

@ -26,7 +26,6 @@ from superset.commands.base import UpdateMixin
from superset.dao.exceptions import DAOUpdateFailedError
from superset.databases.dao import DatabaseDAO
from superset.exceptions import SupersetSecurityException
from superset.models.reports import ReportSchedule, ReportScheduleType, ReportState
from superset.reports.commands.base import BaseReportScheduleCommand
from superset.reports.commands.exceptions import (
DatabaseNotFoundValidationError,
@ -37,6 +36,7 @@ from superset.reports.commands.exceptions import (
ReportScheduleUpdateFailedError,
)
from superset.reports.dao import ReportScheduleDAO
from superset.reports.models import ReportSchedule, ReportScheduleType, ReportState
logger = logging.getLogger(__name__)

View File

@ -26,7 +26,7 @@ from sqlalchemy.orm import Session
from superset.dao.base import BaseDAO
from superset.dao.exceptions import DAOCreateFailedError, DAODeleteFailedError
from superset.extensions import db
from superset.models.reports import (
from superset.reports.models import (
ReportExecutionLog,
ReportRecipients,
ReportSchedule,
@ -154,7 +154,7 @@ class ReportScheduleDAO(BaseDAO):
return found_id is None or found_id == expect_id
@classmethod
def create(cls, properties: Dict[str, Any], commit: bool = True) -> Model:
def create(cls, properties: Dict[str, Any], commit: bool = True) -> ReportSchedule:
"""
create a report schedule and nested recipients
:raises: DAOCreateFailedError
@ -186,7 +186,7 @@ class ReportScheduleDAO(BaseDAO):
@classmethod
def update(
cls, model: Model, properties: Dict[str, Any], commit: bool = True
) -> Model:
) -> ReportSchedule:
"""
update a report schedule and nested recipients
:raises: DAOCreateFailedError

View File

@ -20,7 +20,7 @@ from flask_babel import lazy_gettext as _
from sqlalchemy import or_
from sqlalchemy.orm.query import Query
from superset.models.reports import ReportSchedule
from superset.reports.models import ReportSchedule
from superset.views.base import BaseFilter

View File

@ -25,8 +25,8 @@ from flask_appbuilder.models.sqla.interface import SQLAInterface
from superset import is_feature_enabled
from superset.constants import MODEL_API_RW_METHOD_PERMISSION_MAP, RouteMethod
from superset.models.reports import ReportExecutionLog
from superset.reports.logs.schemas import openapi_spec_methods_override
from superset.reports.models import ReportExecutionLog
from superset.views.base_api import BaseSupersetModelRestApi
logger = logging.getLogger(__name__)

View File

@ -16,8 +16,6 @@
# under the License.
"""A collection of ORM sqlalchemy models for Superset"""
import enum
import json
from typing import Any, Dict, Optional
from cron_descriptor import get_description
from flask_appbuilder import Model
@ -33,15 +31,16 @@ from sqlalchemy import (
Table,
Text,
)
from sqlalchemy.orm import backref, relationship, validates
from sqlalchemy.orm import backref, relationship
from sqlalchemy.schema import UniqueConstraint
from sqlalchemy_utils import UUIDType
from superset.extensions import security_manager
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.helpers import AuditMixinNullable
from superset.models.helpers import AuditMixinNullable, ExtraJSONMixin
from superset.models.slice import Slice
from superset.reports.types import ReportScheduleExtra
metadata = Model.metadata # pylint: disable=no-member
@ -95,7 +94,7 @@ report_schedule_user = Table(
)
class ReportSchedule(Model, AuditMixinNullable):
class ReportSchedule(Model, AuditMixinNullable, ExtraJSONMixin):
"""
Report Schedules, supports alerts and reports
@ -147,12 +146,11 @@ class ReportSchedule(Model, AuditMixinNullable):
# (Alerts/Reports) Unlock a possible stalled working state
working_timeout = Column(Integer, default=60 * 60 * 1)
# Store the selected dashboard tabs etc.
extra = Column(Text, default="{}")
# (Reports) When generating a screenshot, bypass the cache?
force_screenshot = Column(Boolean, default=False)
extra: ReportScheduleExtra # type: ignore
def __repr__(self) -> str:
return str(self.name)
@ -160,13 +158,6 @@ class ReportSchedule(Model, AuditMixinNullable):
def crontab_humanized(self) -> str:
return get_description(self.crontab)
@validates("extra")
# pylint: disable=unused-argument,no-self-use
def validate_extra(self, key: str, value: Dict[Any, Any]) -> Optional[str]:
if value is not None:
return json.dumps(value)
return None
class ReportRecipients(Model, AuditMixinNullable):
"""

View File

@ -15,7 +15,7 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from superset.models.reports import ReportRecipients
from superset.reports.models import ReportRecipients
from superset.reports.notifications.base import BaseNotification, NotificationContent
from superset.reports.notifications.email import EmailNotification
from superset.reports.notifications.slack import SlackNotification

View File

@ -20,7 +20,7 @@ from typing import Any, List, Optional, Type
import pandas as pd
from superset.models.reports import ReportRecipients, ReportRecipientType
from superset.reports.models import ReportRecipients, ReportRecipientType
@dataclass

View File

@ -26,7 +26,7 @@ import bleach
from flask_babel import gettext as __
from superset import app
from superset.models.reports import ReportRecipientType
from superset.reports.models import ReportRecipientType
from superset.reports.notifications.base import BaseNotification
from superset.reports.notifications.exceptions import NotificationError
from superset.utils.core import send_email_smtp

View File

@ -26,7 +26,7 @@ from slack import WebClient
from slack.errors import SlackApiError, SlackClientError
from superset import app
from superset.models.reports import ReportRecipientType
from superset.reports.models import ReportRecipientType
from superset.reports.notifications.base import BaseNotification
from superset.reports.notifications.exceptions import NotificationError
from superset.utils.decorators import statsd_gauge

View File

@ -23,7 +23,7 @@ from marshmallow.validate import Length, Range, ValidationError
from marshmallow_enum import EnumField
from pytz import all_timezones
from superset.models.reports import (
from superset.reports.models import (
ReportCreationMethod,
ReportDataFormat,
ReportRecipientType,
@ -297,4 +297,5 @@ class ReportSchedulePutSchema(Schema):
default=ReportDataFormat.VISUALIZATION,
validate=validate.OneOf(choices=tuple(key.value for key in ReportDataFormat)),
)
extra = fields.Dict(default=None)
force_screenshot = fields.Boolean(default=False)

23
superset/reports/types.py Normal file
View File

@ -0,0 +1,23 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import TypedDict
from superset.dashboards.permalink.types import DashboardPermalinkState
class ReportScheduleExtra(TypedDict):
dashboard: DashboardPermalinkState

View File

@ -71,7 +71,7 @@ from superset.exceptions import (
SupersetSecurityException,
)
from superset.models.helpers import ImportExportMixin
from superset.models.reports import ReportRecipientType
from superset.reports.models import ReportRecipientType
from superset.superset_typing import FlaskResponse
from superset.translations.utils import get_language_pack
from superset.utils import core as utils

View File

@ -30,7 +30,7 @@ from superset.connectors.sqla.models import SqlaTable
from superset.extensions import cache_manager, db
from superset.models.core import Database, FavStar, FavStarClassName
from superset.models.dashboard import Dashboard
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.reports.models import ReportSchedule, ReportScheduleType
from superset.models.slice import Slice
from superset.utils.core import get_example_default_schema

View File

@ -34,7 +34,7 @@ from sqlalchemy import and_
from superset import db, security_manager
from superset.models.dashboard import Dashboard
from superset.models.core import FavStar, FavStarClassName
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.reports.models import ReportSchedule, ReportScheduleType
from superset.models.slice import Slice
from superset.utils.core import backend
from superset.views.base import generate_download_headers

View File

@ -43,7 +43,7 @@ from superset.db_engine_specs.gsheets import GSheetsEngineSpec
from superset.db_engine_specs.hana import HanaEngineSpec
from superset.errors import SupersetError
from superset.models.core import Database, ConfigurationMethod
from superset.models.reports import ReportSchedule, ReportScheduleType
from superset.reports.models import ReportSchedule, ReportScheduleType
from superset.utils.database import get_example_database, get_main_database
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.birth_names_dashboard import (

View File

@ -20,16 +20,16 @@ import pytest
from superset import db
from superset.models.dashboard import Dashboard
from tests.integration_tests.dashboard_utils import create_dashboard
from tests.integration_tests.test_app import app
from superset.utils.core import shortid
from tests.integration_tests.dashboards.superset_factory_util import create_dashboard
@pytest.fixture(scope="session")
def tabbed_dashboard():
@pytest.fixture
def tabbed_dashboard(app_context):
position_json = {
"DASHBOARD_VERSION_KEY": "v2",
"GRID_ID": {
"children": ["TABS-IpViLohnyP"],
"children": ["TABS-L1A", "TABS-L1B"],
"id": "GRID_ID",
"parents": ["ROOT_ID"],
"type": "GRID",
@ -40,38 +40,102 @@ def tabbed_dashboard():
"type": "HEADER",
},
"ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
"TAB-j53G4gtKGF": {
"TAB-L1AA": {
"children": [],
"id": "TAB-j53G4gtKGF",
"id": "TAB-L1AA",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab 1",
"text": "Tab L1AA",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"],
"parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"],
"type": "TAB",
},
"TAB-nerWR09Ju": {
"TAB-L1AB": {
"children": [],
"id": "TAB-nerWR09Ju",
"id": "TAB-L1AB",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab L1AB",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-L1A"],
"type": "TAB",
},
"TABS-L1A": {
"children": ["TAB-L1AA", "TAB-L1AB"],
"id": "TABS-L1A",
"meta": {},
"parents": ["ROOT_ID", "GRID_ID"],
"type": "TABS",
},
"TAB-L1BA": {
"children": [],
"id": "TAB-L1BA",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab L1B",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"],
"type": "TAB",
},
"TAB-L1BB": {
"children": ["TABS-L2A"],
"id": "TAB-L1BB",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab 2",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-IpViLohnyP"],
"parents": ["ROOT_ID", "GRID_ID", "TABS-L1B"],
"type": "TAB",
},
"TABS-IpViLohnyP": {
"children": ["TAB-j53G4gtKGF", "TAB-nerWR09Ju"],
"id": "TABS-IpViLohnyP",
"TABS-L1B": {
"children": ["TAB-L1BA", "TAB-L1BB"],
"id": "TABS-L1B",
"meta": {},
"parents": ["ROOT_ID", "GRID_ID"],
"type": "TABS",
},
"TAB-L2AA": {
"children": [],
"id": "TAB-L2AA",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab L2AA",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"],
"type": "TAB",
},
"TAB-L2AB": {
"children": [],
"id": "TAB-L2AB",
"meta": {
"defaultText": "Tab title",
"placeholder": "Tab title",
"text": "Tab L2AB",
},
"parents": ["ROOT_ID", "GRID_ID", "TABS-L2A"],
"type": "TAB",
},
"TABS-L2A": {
"children": ["TAB-L2AA", "TAB-L2AB"],
"id": "TABS-L2A",
"meta": {},
"parents": ["ROOT_ID", "GRID_ID", "TABS-L1BB"],
"type": "TABS",
},
}
with app.app_context():
dash = create_dashboard(
"tabbed-dash-test", "Tabbed Dash Test", json.dumps(position_json), []
)
dash = create_dashboard(
slug=f"tabbed-dash-{shortid()}",
dashboard_title="Test tabbed dash",
position_json=json.dumps(position_json),
slices=[],
)
db.session.add(dash)
db.session.commit()
yield dash
db.session.query(Dashboard).filter_by(id=dash.id).delete()
db.session.commit()

View File

@ -29,7 +29,7 @@ from superset import db
from superset.models.core import Database
from superset.models.slice import Slice
from superset.models.dashboard import Dashboard
from superset.models.reports import (
from superset.reports.models import (
ReportSchedule,
ReportCreationMethod,
ReportRecipients,
@ -45,7 +45,6 @@ from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
load_birth_names_data,
)
from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
from tests.integration_tests.reports.utils import insert_report_schedule
REPORTS_COUNT = 10
@ -281,6 +280,7 @@ class TestReportSchedulesApi(SupersetTestCase):
"crontab_humanized",
"dashboard_id",
"description",
"extra",
"id",
"last_eval_dttm",
"last_state",
@ -1528,84 +1528,3 @@ class TestReportSchedulesApi(SupersetTestCase):
assert rv.status_code == 405
rv = self.client.delete(uri)
assert rv.status_code == 405
    @pytest.mark.usefixtures("create_report_schedules")
    @pytest.mark.usefixtures("tabbed_dashboard")
    def test_when_invalid_tab_ids_are_given_it_raises_bad_request(self):
        """
        Reject a report schedule whose ``extra.dashboard_tab_ids`` contains
        tab ids that do not exist on the target dashboard: the API must
        answer HTTP 422 and name the offending ids in the error message.
        """
        self.login(username="admin")
        # The "tabbed_dashboard" fixture creates a dashboard with this slug.
        dashboard = (
            db.session.query(Dashboard)
            .filter(Dashboard.slug == "tabbed-dash-test")
            .first()
        )
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "crontab": "0 9 * * *",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                },
            ],
            "grace_period": 14400,
            "working_timeout": 3600,
            "chart": None,
            "dashboard": dashboard.id,
            "database": example_db.id,
            # NOTE(review): only the first id is reported invalid below —
            # presumably "TABS-..." ids are exempt from validation; confirm.
            "extra": {"dashboard_tab_ids": ["INVALID-TAB-ID-1", "TABS-IpViLohnyP"]},
        }
        response = self.post_assert_metric(
            "api/v1/report/", report_schedule_data, "post"
        )
        # 422: payload is well-formed but semantically invalid.
        assert response.status_code == 422
        assert response.json == {
            "message": {"extra": ["Invalid tab IDs selected: ['INVALID-TAB-ID-1']"]}
        }
    @pytest.mark.usefixtures("create_report_schedules")
    @pytest.mark.usefixtures("tabbed_dashboard")
    def test_when_tab_ids_are_given_it_gets_added_to_extra(self):
        """
        A report schedule created with ``extra.dashboard_tab_ids`` persists
        that value verbatim in the stored schedule's ``extra`` JSON column.
        """
        self.login(username="admin")
        # The "tabbed_dashboard" fixture creates a dashboard with this slug.
        dashboard = (
            db.session.query(Dashboard)
            .filter(Dashboard.slug == "tabbed-dash-test")
            .first()
        )
        example_db = get_example_database()
        report_schedule_data = {
            "type": ReportScheduleType.ALERT,
            "name": "new3",
            "description": "description",
            "crontab": "0 9 * * *",
            "creation_method": ReportCreationMethod.ALERTS_REPORTS,
            "recipients": [
                {
                    "type": ReportRecipientType.EMAIL,
                    "recipient_config_json": {"target": "target@superset.org"},
                },
            ],
            "grace_period": 14400,
            "working_timeout": 3600,
            "chart": None,
            "dashboard": dashboard.id,
            "database": example_db.id,
            "extra": {"dashboard_tab_ids": ["TABS-IpViLohnyP"]},
        }
        response = self.post_assert_metric(
            "api/v1/report/", report_schedule_data, "post"
        )
        assert response.status_code == 201
        # "extra" is stored serialized; compare the decoded value.
        assert json.loads(
            db.session.query(ReportSchedule)
            .filter(ReportSchedule.id == response.json["id"])
            .first()
            .extra
        ) == {"dashboard_tab_ids": ["TABS-IpViLohnyP"]}

View File

@ -0,0 +1,91 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from superset import db
from superset.models.dashboard import Dashboard
from superset.reports.commands.create import CreateReportScheduleCommand
from superset.reports.commands.exceptions import ReportScheduleInvalidError
from superset.reports.models import (
ReportCreationMethod,
ReportRecipientType,
ReportScheduleType,
)
from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
# Baseline payload for CreateReportScheduleCommand in the tests below;
# individual tests override "name", "dashboard" and "extra".
DASHBOARD_REPORT_SCHEDULE_DEFAULTS = {
    "type": ReportScheduleType.REPORT,
    "description": "description",
    "crontab": "0 9 * * *",
    "creation_method": ReportCreationMethod.ALERTS_REPORTS,
    "recipients": [
        {
            "type": ReportRecipientType.EMAIL,
            "recipient_config_json": {"target": "target@example.com"},
        },
    ],
    "grace_period": 14400,
    "working_timeout": 3600,
}
@pytest.mark.usefixtures("login_as_admin")
def test_accept_valid_tab_ids(tabbed_dashboard: Dashboard) -> None:
    """A schedule referencing tabs that exist on the dashboard is accepted
    and the dashboard state is stored unchanged in ``extra``."""
    dashboard_state = {"activeTabs": ["TAB-L1AA", "TAB-L2AB"]}
    payload = dict(
        DASHBOARD_REPORT_SCHEDULE_DEFAULTS,
        name="tabbed dashboard report (valid tabs id)",
        dashboard=tabbed_dashboard.id,
        extra={"dashboard": dashboard_state},
    )
    created_schedule = CreateReportScheduleCommand(payload).run()
    assert created_schedule.extra == {"dashboard": dashboard_state}
    # Remove the schedule so the fixture dashboard can be torn down.
    db.session.delete(created_schedule)
    db.session.commit()
@pytest.mark.usefixtures("login_as_admin")
def test_raise_exception_for_invalid_tab_ids(tabbed_dashboard: Dashboard) -> None:
    """
    Creation fails when the dashboard state in ``extra`` references tab ids
    that are not part of the dashboard — both in ``activeTabs`` and in
    ``anchor``.
    """
    with pytest.raises(ReportScheduleInvalidError) as exc_info:
        CreateReportScheduleCommand(
            {
                **DASHBOARD_REPORT_SCHEDULE_DEFAULTS,
                "name": "tabbed dashboard report (invalid tab ids)",
                "dashboard": tabbed_dashboard.id,
                "extra": {"dashboard": {"activeTabs": ["TAB-INVALID_ID"]}},
            }
        ).run()
    assert "Invalid tab ids" in str(exc_info.value.normalized_messages())
    # The anchor tab is validated too, even when all activeTabs are valid.
    with pytest.raises(ReportScheduleInvalidError) as exc_info:
        CreateReportScheduleCommand(
            {
                **DASHBOARD_REPORT_SCHEDULE_DEFAULTS,
                "name": "tabbed dashboard report (invalid tab ids in anchor)",
                "dashboard": tabbed_dashboard.id,
                "extra": {
                    "dashboard": {
                        "activeTabs": ["TAB-L1AA"],
                        "anchor": "TAB-INVALID_ID",
                    }
                },
            }
        ).run()
    assert "Invalid tab ids" in str(exc_info.value.normalized_messages())

View File

@ -0,0 +1,68 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from datetime import datetime
from unittest.mock import MagicMock, patch
from uuid import uuid4
from flask import current_app
from superset.dashboards.permalink.commands.create import (
CreateDashboardPermalinkCommand,
)
from superset.models.dashboard import Dashboard
from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand
from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
from tests.integration_tests.reports.utils import create_dashboard_report
@patch("superset.reports.notifications.email.send_email_smtp")
@patch(
    "superset.reports.commands.execute.DashboardScreenshot",
)
@patch(
    "superset.dashboards.permalink.commands.create.CreateDashboardPermalinkCommand.run"
)
def test_report_for_dashboard_with_tabs(
    create_dashboard_permalink_mock: MagicMock,
    dashboard_screenshot_mock: MagicMock,
    send_email_smtp_mock: MagicMock,
    tabbed_dashboard: Dashboard,
) -> None:
    """
    Executing a dashboard report with saved tab state screenshots the
    permalink URL derived from that state and emails exactly one image.
    """
    # @patch decorators inject bottom-up: the permalink mock is first.
    create_dashboard_permalink_mock.return_value = "permalink"
    # NOTE(review): this sets get_screenshot on the patched *class*; it
    # works because instance attributes of a MagicMock resolve through the
    # class mock — confirm the execute command calls
    # DashboardScreenshot(...).get_screenshot().
    dashboard_screenshot_mock.get_screenshot.return_value = b"test-image"
    # Actually deliver the (mocked) email instead of dry-running.
    current_app.config["ALERT_REPORTS_NOTIFICATION_DRY_RUN"] = False
    with create_dashboard_report(
        dashboard=tabbed_dashboard,
        extra={"active_tabs": ["TAB-L1B", "TAB-L2BB"]},
        name="test report tabbed dashboard",
    ) as report_schedule:
        dashboard: Dashboard = report_schedule.dashboard
        AsyncExecuteReportScheduleCommand(
            str(uuid4()), report_schedule.id, datetime.utcnow()
        ).run()
        # Recompute the permalink key the command is expected to have used
        # (the mocked command returns the same "permalink" value).
        dashboard_state = report_schedule.extra.get("dashboard", {})
        permalink_key = CreateDashboardPermalinkCommand(
            dashboard.id, dashboard_state
        ).run()
        # Exactly one screenshot, taken from the permalink URL.
        assert dashboard_screenshot_mock.call_count == 1
        (url, digest) = dashboard_screenshot_mock.call_args.args
        assert url.endswith(f"/superset/dashboard/p/{permalink_key}/")
        assert digest == dashboard.digest
        assert send_email_smtp_mock.call_count == 1
        assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 1

View File

@ -27,19 +27,9 @@ from flask_sqlalchemy import BaseQuery
from freezegun import freeze_time
from sqlalchemy.sql import func
from superset import db, security_manager
from superset import db
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.reports import (
ReportDataFormat,
ReportExecutionLog,
ReportRecipients,
ReportRecipientType,
ReportSchedule,
ReportScheduleType,
ReportScheduleValidatorType,
ReportState,
)
from superset.models.slice import Slice
from superset.reports.commands.exceptions import (
AlertQueryError,
@ -57,29 +47,37 @@ from superset.reports.commands.exceptions import (
)
from superset.reports.commands.execute import AsyncExecuteReportScheduleCommand
from superset.reports.commands.log_prune import AsyncPruneReportScheduleLogCommand
from superset.reports.models import (
ReportDataFormat,
ReportExecutionLog,
ReportSchedule,
ReportScheduleType,
ReportScheduleValidatorType,
ReportState,
)
from superset.utils.database import get_example_database
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
load_birth_names_data,
)
from tests.integration_tests.fixtures.tabbed_dashboard import tabbed_dashboard
from tests.integration_tests.fixtures.world_bank_dashboard import (
load_world_bank_dashboard_with_slices_module_scope,
load_world_bank_data,
)
from tests.integration_tests.reports.utils import insert_report_schedule
from tests.integration_tests.reports.utils import (
cleanup_report_schedule,
create_report_notification,
CSV_FILE,
OWNER_EMAIL,
SCREENSHOT_FILE,
TEST_ID,
)
from tests.integration_tests.test_app import app
from tests.integration_tests.utils import read_fixture
pytestmark = pytest.mark.usefixtures(
"load_world_bank_dashboard_with_slices_module_scope"
)
TEST_ID = str(uuid4())
CSV_FILE = read_fixture("trends.csv")
SCREENSHOT_FILE = read_fixture("sample.png")
OWNER_EMAIL = "admin@fab.org"
def get_target_from_report_schedule(report_schedule: ReportSchedule) -> List[str]:
return [
@ -129,78 +127,6 @@ def assert_log(state: str, error_message: Optional[str] = None):
assert log.value_row_json is None
def create_report_notification(
    email_target: Optional[str] = None,
    slack_channel: Optional[str] = None,
    chart: Optional[Slice] = None,
    dashboard: Optional[Dashboard] = None,
    database: Optional[Database] = None,
    sql: Optional[str] = None,
    report_type: Optional[str] = None,
    validator_type: Optional[str] = None,
    validator_config_json: Optional[str] = None,
    grace_period: Optional[int] = None,
    report_format: Optional[ReportDataFormat] = None,
    name: Optional[str] = None,
    extra: Optional[Dict[str, Any]] = None,
    force_screenshot: bool = False,
) -> ReportSchedule:
    """
    Insert a report schedule with a single recipient for tests.

    Exactly one of ``email_target`` / ``slack_channel`` is expected; the
    recipient type follows whichever is set (Slack wins if both are given).
    All other arguments are forwarded to ``insert_report_schedule``.
    """
    report_type = report_type or ReportScheduleType.REPORT
    target = email_target or slack_channel
    config_json = {"target": target}
    # The schedule is owned by the well-known admin test user.
    owner = (
        db.session.query(security_manager.user_model)
        .filter_by(email=OWNER_EMAIL)
        .one_or_none()
    )
    if slack_channel:
        recipient = ReportRecipients(
            type=ReportRecipientType.SLACK,
            recipient_config_json=json.dumps(config_json),
        )
    else:
        recipient = ReportRecipients(
            type=ReportRecipientType.EMAIL,
            recipient_config_json=json.dumps(config_json),
        )
    if name is None:
        # Default name hints at whether a CSV format was requested.
        name = "report_with_csv" if report_format else "report"
    report_schedule = insert_report_schedule(
        type=report_type,
        name=name,
        crontab="0 9 * * *",
        description="Daily report",
        sql=sql,
        chart=chart,
        dashboard=dashboard,
        database=database,
        recipients=[recipient],
        owners=[owner],
        validator_type=validator_type,
        validator_config_json=validator_config_json,
        grace_period=grace_period,
        report_format=report_format or ReportDataFormat.VISUALIZATION,
        extra=extra,
        force_screenshot=force_screenshot,
    )
    return report_schedule
def cleanup_report_schedule(report_schedule: ReportSchedule) -> None:
    """Delete a report schedule together with its dependent rows."""
    # Remove children first so the schedule row can be deleted cleanly.
    db.session.query(ReportExecutionLog).filter(
        ReportExecutionLog.report_schedule == report_schedule
    ).delete()
    db.session.query(ReportRecipients).filter(
        ReportRecipients.report_schedule == report_schedule
    ).delete()
    db.session.delete(report_schedule)
    db.session.commit()
@contextmanager
def create_test_table_context(database: Database):
database.get_sqla_engine().execute(
@ -308,23 +234,6 @@ def create_report_email_dashboard_force_screenshot():
cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_email_tabbed_dashboard(tabbed_dashboard):
    # Email report over a tabbed dashboard with two tabs pre-selected.
    # NOTE(review): these tab ids do not appear in the tabbed_dashboard
    # fixture layout — presumably fine because the schedule is inserted
    # directly, bypassing API validation; confirm.
    with app.app_context():
        report_schedule = create_report_notification(
            email_target="target@email.com",
            dashboard=tabbed_dashboard,
            extra={
                "dashboard_tab_ids": [
                    "TAB-j53G4gtKGF",
                    "TAB-nerWR09Ju",
                ]
            },
        )
        yield report_schedule
        cleanup_report_schedule(report_schedule)
@pytest.fixture()
def create_report_slack_chart():
with app.app_context():
@ -1900,41 +1809,9 @@ def test_grace_period_error_flap(
)
@patch("superset.reports.dao.ReportScheduleDAO.bulk_delete_logs")
def test_prune_log_soft_time_out(bulk_delete_logs, create_report_email_dashboard):
    """
    A SoftTimeLimitExceeded raised while pruning execution logs must
    propagate out of the prune command rather than being swallowed.
    """
    # Removed unused function-level import of datetime/timedelta — neither
    # name was referenced in this test.
    from celery.exceptions import SoftTimeLimitExceeded

    bulk_delete_logs.side_effect = SoftTimeLimitExceeded()
    with pytest.raises(SoftTimeLimitExceeded) as excinfo:
        AsyncPruneReportScheduleLogCommand().run()
    assert str(excinfo.value) == "SoftTimeLimitExceeded()"
@pytest.mark.usefixtures(
    "create_report_email_tabbed_dashboard",
)
@patch("superset.reports.notifications.email.send_email_smtp")
@patch(
    "superset.reports.commands.execute.DashboardScreenshot",
)
def test_when_tabs_are_selected_it_takes_screenshots_for_every_tabs(
    dashboard_screenshot_mock,
    send_email_smtp_mock,
    create_report_email_tabbed_dashboard,
):
    """
    A dashboard report with selected tab ids takes one screenshot per tab
    (anchored via the URL fragment) and attaches every image to the email.
    """
    # NOTE(review): sets get_screenshot on the patched *class* mock —
    # confirm the command calls DashboardScreenshot(...).get_screenshot().
    dashboard_screenshot_mock.get_screenshot.return_value = b"test-image"
    dashboard = create_report_email_tabbed_dashboard.dashboard
    AsyncExecuteReportScheduleCommand(
        TEST_ID, create_report_email_tabbed_dashboard.id, datetime.utcnow()
    ).run()
    # "extra" is stored serialized; the fixture selected two tab ids.
    tabs = json.loads(create_report_email_tabbed_dashboard.extra)["dashboard_tab_ids"]
    assert dashboard_screenshot_mock.call_count == 2
    for index, tab in enumerate(tabs):
        # Each screenshot targets the dashboard URL anchored on one tab id.
        assert dashboard_screenshot_mock.call_args_list[index].args == (
            f"http://0.0.0.0:8080/superset/dashboard/{dashboard.id}/?standalone=3&force=false#{tab}",
            f"{dashboard.digest}",
        )
    assert send_email_smtp_mock.called is True
    assert len(send_email_smtp_mock.call_args.kwargs["images"]) == 2

View File

@ -17,12 +17,11 @@
from typing import List
from unittest.mock import patch
import pytest
from freezegun import freeze_time
from freezegun.api import FakeDatetime # type: ignore
from superset.extensions import db
from superset.models.reports import ReportScheduleType
from superset.reports.models import ReportScheduleType
from superset.tasks.scheduler import scheduler
from tests.integration_tests.reports.utils import insert_report_schedule
from tests.integration_tests.test_app import app

View File

@ -15,21 +15,33 @@
# specific language governing permissions and limitations
# under the License.
import json
from contextlib import contextmanager
from typing import Any, Dict, List, Optional
from uuid import uuid4
from flask_appbuilder.security.sqla.models import User
from superset import db
from superset import db, security_manager
from superset.models.core import Database
from superset.models.dashboard import Dashboard
from superset.models.reports import (
from superset.models.slice import Slice
from superset.reports.models import (
ReportDataFormat,
ReportExecutionLog,
ReportRecipients,
ReportRecipientType,
ReportSchedule,
ReportScheduleType,
ReportState,
)
from superset.models.slice import Slice
from tests.integration_tests.test_app import app
from tests.integration_tests.utils import read_fixture
# Shared constants for the report integration tests.
TEST_ID = str(uuid4())  # execution id reused across test runs in a session
CSV_FILE = read_fixture("trends.csv")  # sample CSV report payload
SCREENSHOT_FILE = read_fixture("sample.png")  # sample screenshot payload
OWNER_EMAIL = "admin@fab.org"  # email of the admin user used as report owner
def insert_report_schedule(
@ -83,3 +95,100 @@ def insert_report_schedule(
db.session.add(report_schedule)
db.session.commit()
return report_schedule
def create_report_notification(
    email_target: Optional[str] = None,
    slack_channel: Optional[str] = None,
    chart: Optional[Slice] = None,
    dashboard: Optional[Dashboard] = None,
    database: Optional[Database] = None,
    sql: Optional[str] = None,
    report_type: ReportScheduleType = ReportScheduleType.REPORT,
    validator_type: Optional[str] = None,
    validator_config_json: Optional[str] = None,
    grace_period: Optional[int] = None,
    report_format: Optional[ReportDataFormat] = None,
    name: Optional[str] = None,
    extra: Optional[Dict[str, Any]] = None,
    force_screenshot: bool = False,
) -> ReportSchedule:
    """
    Insert a report schedule with a single recipient for tests.

    Exactly one of ``email_target`` / ``slack_channel`` is expected; the
    recipient type follows whichever is set (Slack wins if both are given).
    All other arguments are forwarded to ``insert_report_schedule``.
    """
    # The schedule is owned by the well-known admin test user.
    owner = (
        db.session.query(security_manager.user_model)
        .filter_by(email=OWNER_EMAIL)
        .one_or_none()
    )
    if slack_channel:
        recipient = ReportRecipients(
            type=ReportRecipientType.SLACK,
            recipient_config_json=json.dumps(
                {
                    "target": slack_channel,
                }
            ),
        )
    else:
        recipient = ReportRecipients(
            type=ReportRecipientType.EMAIL,
            recipient_config_json=json.dumps({"target": email_target}),
        )
    if name is None:
        # Default name hints at whether a CSV format was requested.
        name = "report_with_csv" if report_format else "report"
    report_schedule = insert_report_schedule(
        report_type,
        name=name,
        crontab="0 9 * * *",
        description="Daily report",
        sql=sql,
        chart=chart,
        dashboard=dashboard,
        database=database,
        recipients=[recipient],
        owners=[owner],
        validator_type=validator_type,
        validator_config_json=validator_config_json,
        grace_period=grace_period,
        report_format=report_format or ReportDataFormat.VISUALIZATION,
        extra=extra,
        force_screenshot=force_screenshot,
    )
    return report_schedule
def cleanup_report_schedule(report_schedule: ReportSchedule) -> None:
    """Delete a report schedule together with its logs and recipients."""
    # Child rows go first so the schedule row can be removed cleanly.
    for child_model in (ReportExecutionLog, ReportRecipients):
        db.session.query(child_model).filter(
            child_model.report_schedule == report_schedule
        ).delete()
    db.session.delete(report_schedule)
    db.session.commit()
@contextmanager
def create_dashboard_report(dashboard, extra, **kwargs):
    """
    Context manager yielding a dashboard report schedule that is always
    cleaned up on exit, whether or not the caller's block raised.

    :param dashboard: dashboard the report schedule is attached to
    :param extra: value stored under the ``"dashboard"`` key of the
        schedule's ``extra`` field (e.g. active-tab / anchor state)
    :param kwargs: forwarded to ``create_report_notification``
    """
    report_schedule = create_report_notification(
        email_target="target@example.com",
        dashboard=dashboard,
        extra={
            "dashboard": extra,
        },
        **kwargs,
    )
    try:
        yield report_schedule
    finally:
        # try/finally replaces the previous catch-store-reraise dance: it
        # still cleans up on caller exceptions but preserves the original
        # traceback and avoids a broad `except Exception`.
        cleanup_report_schedule(report_schedule)

View File

@ -29,5 +29,8 @@ app = create_app()
def login(
client: "FlaskClient[Any]", username: str = "admin", password: str = "general"
):
resp = client.post("/login/", data=dict(username=username, password=password))
resp = client.post(
"/login/",
data=dict(username=username, password=password),
).get_data(as_text=True)
assert "User confirmation needed" not in resp