This commit is contained in:
Maxime 2015-09-09 17:37:59 +00:00
parent 9858304468
commit 67c5f637d1
7 changed files with 138 additions and 139 deletions

View File

@ -2,10 +2,7 @@ import logging
from flask import Flask
from flask.ext.appbuilder import SQLA, AppBuilder, IndexView
"""
Logging configuration
"""
# Logging configuration
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
@ -13,6 +10,7 @@ app = Flask(__name__)
app.config.from_object('panoramix.config')
db = SQLA(app)
class MyIndexView(IndexView):
index_template = 'index.html'
@ -21,5 +19,4 @@ appbuilder = AppBuilder(
indexview=MyIndexView)
get_session = appbuilder.get_session
from panoramix import views

View File

@ -1,5 +1,7 @@
import os
from flask_appbuilder.security.manager import AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH
from flask_appbuilder.security.manager import AUTH_DB
# from flask_appbuilder.security.manager import (
# AUTH_OID, AUTH_REMOTE_USER, AUTH_DB, AUTH_LDAP, AUTH_OAUTH)
basedir = os.path.abspath(os.path.dirname(__file__))
from dateutil import tz
@ -10,32 +12,32 @@ in your PYTHONPATH.
There's a ``from local_config import *`` at the end of this file.
"""
#---------------------------------------------------------
# ---------------------------------------------------------
# Panoramix specific config
#---------------------------------------------------------
# ---------------------------------------------------------
ROW_LIMIT = 5000
WEBSERVER_THREADS = 8
PANORAMIX_WEBSERVER_PORT = 8088
#---------------------------------------------------------
# ---------------------------------------------------------
# Your App secret key
SECRET_KEY = '\2\1thisismyscretkey\1\2\e\y\y\h'
# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = 'sqlite:///tmp/panoramix.db'
#SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
#SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
# SQLALCHEMY_DATABASE_URI = 'mysql://myapp@localhost/myapp'
# SQLALCHEMY_DATABASE_URI = 'postgresql://root:password@localhost/myapp'
# Flask-WTF flag for CSRF
CSRF_ENABLED = True
#Whether to run the web server in debug mode or not
# Whether to run the web server in debug mode or not
DEBUG = True
#------------------------------
# ------------------------------
# GLOBALS FOR APP Builder
#------------------------------
# ------------------------------
# Uncomment to setup Your App name
APP_NAME = "Panoramix"
@ -48,9 +50,9 @@ APP_ICON = "/static/chaudron_white.png"
# other tz can be overridden by providing a local_config
DRUID_TZ = tz.tzutc()
#----------------------------------------------------
# ----------------------------------------------------
# AUTHENTICATION CONFIG
#----------------------------------------------------
# ----------------------------------------------------
# The authentication type
# AUTH_OID : Is for OpenID
# AUTH_DB : Is for database (username/password)
@ -59,37 +61,37 @@ DRUID_TZ = tz.tzutc()
AUTH_TYPE = AUTH_DB
# Uncomment to setup Full admin role name
#AUTH_ROLE_ADMIN = 'Admin'
# AUTH_ROLE_ADMIN = 'Admin'
# Uncomment to setup Public role name, no authentication needed
#AUTH_ROLE_PUBLIC = 'Public'
# AUTH_ROLE_PUBLIC = 'Public'
# Will allow user self registration
#AUTH_USER_REGISTRATION = True
# AUTH_USER_REGISTRATION = True
# The default user self registration role
#AUTH_USER_REGISTRATION_ROLE = "Public"
# AUTH_USER_REGISTRATION_ROLE = "Public"
# When using LDAP Auth, setup the ldap server
#AUTH_LDAP_SERVER = "ldap://ldapserver.new"
# AUTH_LDAP_SERVER = "ldap://ldapserver.new"
# Uncomment to setup OpenID providers example for OpenID authentication
#OPENID_PROVIDERS = [
# OPENID_PROVIDERS = [
# { 'name': 'Yahoo', 'url': 'https://me.yahoo.com' },
# { 'name': 'AOL', 'url': 'http://openid.aol.com/<username>' },
# { 'name': 'Flickr', 'url': 'http://www.flickr.com/<username>' },
# { 'name': 'MyOpenID', 'url': 'https://www.myopenid.com' }]
#---------------------------------------------------
# ---------------------------------------------------
# Babel config for translations
#---------------------------------------------------
# ---------------------------------------------------
# Setup default language
BABEL_DEFAULT_LOCALE = 'en'
# Your application default translation path
BABEL_DEFAULT_FOLDER = 'translations'
# The allowed translation for you app
LANGUAGES = {
'en': {'flag':'us', 'name':'English'},
'fr': {'flag':'fr', 'name':'French'},
'en': {'flag': 'us', 'name': 'English'},
'fr': {'flag': 'fr', 'name': 'French'},
}
"""
'pt': {'flag':'pt', 'name':'Portuguese'},
@ -99,9 +101,9 @@ LANGUAGES = {
'zh': {'flag':'cn', 'name':'Chinese'},
'ru': {'flag':'ru', 'name':'Russian'}
"""
#---------------------------------------------------
# ---------------------------------------------------
# Image and file configuration
#---------------------------------------------------
# ---------------------------------------------------
# The file upload folder, when using models with files
UPLOAD_FOLDER = basedir + '/app/static/uploads/'
@ -111,24 +113,27 @@ IMG_UPLOAD_FOLDER = basedir + '/app/static/uploads/'
# The image upload url, when using models with images
IMG_UPLOAD_URL = '/static/uploads/'
# Setup image size default is (300, 200, True)
#IMG_SIZE = (300, 200, True)
# IMG_SIZE = (300, 200, True)
# ---------------------------------------------------
# Theme configuration
# these are located on static/appbuilder/css/themes
# you can create your own and easily use them placing them on the same dir structure to override
#APP_THEME = "bootstrap-theme.css" # default bootstrap
#APP_THEME = "cerulean.css"
#APP_THEME = "amelia.css"
#APP_THEME = "cosmo.css"
#APP_THEME = "cyborg.css"
#APP_THEME = "flatly.css"
#APP_THEME = "journal.css"
#APP_THEME = "readable.css"
#APP_THEME = "simplex.css"
#APP_THEME = "slate.css"
#APP_THEME = "spacelab.css"
#APP_THEME = "united.css"
#APP_THEME = "yeti.css"
# you can create your own and easily use them placing them on the
# same dir structure to override
# ---------------------------------------------------
# APP_THEME = "bootstrap-theme.css" # default bootstrap
# APP_THEME = "cerulean.css"
# APP_THEME = "amelia.css"
# APP_THEME = "cosmo.css"
# APP_THEME = "cyborg.css"
# APP_THEME = "flatly.css"
# APP_THEME = "journal.css"
# APP_THEME = "readable.css"
# APP_THEME = "simplex.css"
# APP_THEME = "slate.css"
# APP_THEME = "spacelab.css"
# APP_THEME = "united.css"
# APP_THEME = "yeti.css"
try:
from panoramix_config import *

View File

@ -1,12 +1,10 @@
from wtforms import Field, Form, SelectMultipleField, SelectField, TextField
from flask_appbuilder.fieldwidgets import Select2Widget, Select2ManyWidget
class OmgWtForm(Form):
field_order = tuple()
css_classes = dict()
@property
def fields(self):
fields = []
@ -64,21 +62,21 @@ def form_factory(datasource, viz, form_args=None):
select2 = [
'viz_type', 'metrics', 'groupby',
'row_limit', 'rolling_type', 'series',
'entity', 'x', 'y', 'size',]
'entity', 'x', 'y', 'size']
field_css_classes['since'] += ['select2_free_since']
field_css_classes['until'] += ['select2_free_until']
field_css_classes['granularity'] += ['select2_free_granularity']
for field in select2:
field_css_classes[field] += ['select2']
class QueryForm(OmgWtForm):
field_order = viz.form_fields
css_classes = field_css_classes
for i in range(10):
setattr(QueryForm, 'flt_col_' + str(i), SelectField(
'Filter 1', choices=[(s, s) for s in datasource.filterable_column_names]))
'Filter 1',
choices=[(s, s) for s in datasource.filterable_column_names]))
setattr(QueryForm, 'flt_op_' + str(i), SelectField(
'Filter 1', choices=[(m, m) for m in ['in', 'not in']]))
setattr(QueryForm, 'flt_eq_' + str(i), TextField("Super"))

View File

@ -1,7 +1,5 @@
import pandas
from collections import defaultdict
import copy
import json
from pandas.io.json import dumps
@ -9,6 +7,7 @@ class BaseHighchart(object):
stockchart = False
tooltip_formatter = ""
target_div = 'chart'
@property
def javascript_cmd(self):
js = dumps(self.chart)
@ -18,7 +17,7 @@ class BaseHighchart(object):
)
if self.stockchart:
return "new Highcharts.StockChart(%s);" % js
return "new Highcharts.Chart(%s);" %js
return "new Highcharts.Chart(%s);" % js
class Highchart(BaseHighchart):
@ -127,7 +126,6 @@ class Highchart(BaseHighchart):
"""
return tf
def serialize_series(self):
df = self.df
chart = self.chart
@ -140,7 +138,8 @@ class Highchart(BaseHighchart):
continue
sec = name in self.secondary_y
d = {
"name": name if not sec or self.mark_right else name + " (right)",
"name":
name if not sec or self.mark_right else name + " (right)",
"yAxis": int(sec),
"data": zip(df.index, data.tolist())
}
@ -150,8 +149,6 @@ class Highchart(BaseHighchart):
d['compare'] = self.compare # either `value` or `percent`
if self.chart_type in ("area", "column", "bar") and self.stacked:
d["stacking"] = 'normal'
#if kwargs.get("style"):
# d["dashStyle"] = pd2hc_linestyle(kwargs["style"].get(name, "-"))
chart["series"].append(d)
def serialize_xaxis(self):
@ -219,7 +216,6 @@ class HighchartBubble(BaseHighchart):
chart['chart']["height"] = height
def series(self):
#df = self.df[['name', 'x', 'y', 'z']]
df = self.df
series = defaultdict(list)
for row in df.to_dict(orient='records'):

View File

@ -1,22 +1,21 @@
from flask.ext.appbuilder import Model
from datetime import timedelta
from flask.ext.appbuilder.models.mixins import AuditMixin
from flask import request, redirect, flash, Response
from sqlalchemy import Column, Integer, String, ForeignKey, Text, Boolean, DateTime
from sqlalchemy import create_engine, MetaData, desc
from sqlalchemy import Table as sqlaTable
from sqlalchemy.orm import relationship
from dateutil.parser import parse
from flask import flash
from flask.ext.appbuilder import Model
from flask.ext.appbuilder.models.mixins import AuditMixin
from pandas import read_sql_query
from pydruid import client
from pydruid.utils.filters import Dimension, Filter
from pandas import read_sql_query
from sqlalchemy import (
Column, Integer, String, ForeignKey, Text, Boolean, DateTime)
from sqlalchemy import Table as sqlaTable
from sqlalchemy import create_engine, MetaData, desc, select, and_
from sqlalchemy.orm import relationship
from sqlalchemy.sql import table, literal_column
from sqlalchemy import select, and_, text, String
from copy import deepcopy, copy
from collections import namedtuple
from datetime import datetime
import logging
import json
import sqlparse
import requests
@ -41,13 +40,13 @@ class Queryable(object):
def filterable_column_names(self):
return sorted([c.column_name for c in self.columns if c.filterable])
class Database(Model, AuditMixin):
__tablename__ = 'dbs'
id = Column(Integer, primary_key=True)
database_name = Column(String(255), unique=True)
sqlalchemy_uri = Column(String(1024))
def __repr__(self):
return self.database_name
@ -115,7 +114,9 @@ class Table(Model, Queryable, AuditMixin):
to_dttm_iso = to_dttm.isoformat()
if metrics:
main_metric_expr = [m.expression for m in self.metrics if m.metric_name == metrics[0]][0]
main_metric_expr = [
m.expression for m in self.metrics
if m.metric_name == metrics[0]][0]
else:
main_metric_expr = "COUNT(*)"
@ -150,29 +151,30 @@ class Table(Model, Queryable, AuditMixin):
on_clause = " AND ".join(["{g} = __{g}".format(g=g) for g in groupby])
limiting_join = ""
if timeseries_limit and groupby:
inner_select = ", ".join(["{g} as __{g}".format(g=g) for g in inner_groupby_exprs])
inner_select = ", ".join([
"{g} as __{g}".format(g=g) for g in inner_groupby_exprs])
inner_groupby_exprs = ", ".join(inner_groupby_exprs)
limiting_join = (
"JOIN ( \n"
" SELECT {inner_select} \n"
" FROM {self.table_name} \n"
" WHERE \n"
" {where_clause}\n"
" GROUP BY {inner_groupby_exprs}\n"
" ORDER BY {main_metric_expr} DESC\n"
" LIMIT {timeseries_limit}\n"
") z ON {on_clause}\n"
"JOIN ( \n"
" SELECT {inner_select} \n"
" FROM {self.table_name} \n"
" WHERE \n"
" {where_clause}\n"
" GROUP BY {inner_groupby_exprs}\n"
" ORDER BY {main_metric_expr} DESC\n"
" LIMIT {timeseries_limit}\n"
") z ON {on_clause}\n"
).format(**locals())
sql = (
"SELECT\n"
" {select_exprs}\n"
"FROM {self.table_name}\n"
"{limiting_join}"
"WHERE\n"
" {where_clause}\n"
"GROUP BY\n"
" {groupby_exprs}\n"
"SELECT\n"
" {select_exprs}\n"
"FROM {self.table_name}\n"
"{limiting_join}"
"WHERE\n"
" {where_clause}\n"
"GROUP BY\n"
" {groupby_exprs}\n"
).format(**locals())
df = read_sql_query(
sql=sql,
@ -200,8 +202,9 @@ class Table(Model, Queryable, AuditMixin):
for m in self.metrics if m.metric_name in metrics]
if metrics:
main_metric_expr = literal_column(
[m.expression for m in self.metrics if m.metric_name == metrics[0]][0])
main_metric_expr = literal_column([
m.expression for m in self.metrics
if m.metric_name == metrics[0]][0])
else:
main_metric_expr = literal_column("COUNT(*)")
@ -211,7 +214,8 @@ class Table(Model, Queryable, AuditMixin):
if groupby:
select_exprs = [literal_column(s) for s in groupby]
groupby_exprs = [literal_column(s) for s in groupby]
inner_groupby_exprs = [literal_column(s).label('__' + s) for s in groupby]
inner_groupby_exprs = [
literal_column(s).label('__' + s) for s in groupby]
if granularity != "all":
select_exprs += [timestamp]
groupby_exprs += [timestamp]
@ -245,7 +249,8 @@ class Table(Model, Queryable, AuditMixin):
subq = subq.limit(timeseries_limit)
on_clause = []
for gb in groupby:
on_clause.append(literal_column(gb)==literal_column("__" + gb))
on_clause.append(
literal_column(gb) == literal_column("__" + gb))
from_clause = from_clause.join(subq.alias(), and_(*on_clause))
@ -261,7 +266,6 @@ class Table(Model, Queryable, AuditMixin):
return QueryResult(
df=df, duration=datetime.now() - qry_start_dttm, query=sql)
def fetch_metadata(self):
try:
table = self.database.get_table(self.table_name)
@ -284,8 +288,8 @@ class Table(Model, Queryable, AuditMixin):
dbcol = (
db.session
.query(TC)
.filter(TC.table==self)
.filter(TC.column_name==col.name)
.filter(TC.table == self)
.filter(TC.column_name == col.name)
.first()
)
db.session.flush()
@ -344,8 +348,8 @@ class Table(Model, Queryable, AuditMixin):
for metric in metrics:
m = (
db.session.query(M)
.filter(M.metric_name==metric.metric_name)
.filter(M.table==self)
.filter(M.metric_name == metric.metric_name)
.filter(M.table == self)
.first()
)
metric.table = self
@ -356,15 +360,13 @@ class Table(Model, Queryable, AuditMixin):
self.main_datetime_column = any_date_col
class SqlMetric(Model, AuditMixin):
__tablename__ = 'sql_metrics'
id = Column(Integer, primary_key=True)
metric_name = Column(String(512))
verbose_name = Column(String(1024))
metric_type = Column(String(32))
table_id = Column(Integer,ForeignKey('tables.id'))
table_id = Column(Integer, ForeignKey('tables.id'))
table = relationship(
'Table', backref='metrics', foreign_keys=[table_id])
expression = Column(Text)
@ -395,6 +397,7 @@ class TableColumn(Model, AuditMixin):
def isnum(self):
return self.type in ('LONG', 'DOUBLE', 'FLOAT')
class Cluster(Model, AuditMixin):
__tablename__ = 'clusters'
id = Column(Integer, primary_key=True)
@ -440,9 +443,10 @@ class Datasource(Model, AuditMixin, Queryable):
default_endpoint = Column(Text)
user_id = Column(Integer, ForeignKey('ab_user.id'))
owner = relationship('User', backref='datasources', foreign_keys=[user_id])
cluster_name = Column(String(255),
ForeignKey('clusters.cluster_name'))
cluster = relationship('Cluster', backref='datasources', foreign_keys=[cluster_name])
cluster_name = Column(
String(255), ForeignKey('clusters.cluster_name'))
cluster = relationship(
'Cluster', backref='datasources', foreign_keys=[cluster_name])
@property
def metrics_combo(self):
@ -517,7 +521,6 @@ class Datasource(Model, AuditMixin, Queryable):
col_obj.type = cols[col]['type']
col_obj.datasource = datasource
col_obj.generate_metrics()
#session.commit()
def query(
self, groupby, metrics,
@ -529,7 +532,9 @@ class Datasource(Model, AuditMixin, Queryable):
timeseries_limit=None,
row_limit=None):
qry_start_dttm = datetime.now()
from_dttm = from_dttm.replace(tzinfo=config.DRUID_TZ) # add tzinfo to native datetime with config
# add tzinfo to native datetime with config
from_dttm = from_dttm.replace(tzinfo=config.DRUID_TZ)
to_dttm = to_dttm.replace(tzinfo=config.DRUID_TZ)
query_str = ""
@ -545,25 +550,25 @@ class Datasource(Model, AuditMixin, Queryable):
dimensions=groupby,
aggregations=aggregations,
granularity=granularity,
intervals= from_dttm.isoformat() + '/' + to_dttm.isoformat(),
intervals=from_dttm.isoformat() + '/' + to_dttm.isoformat(),
)
filters = None
for col, op, eq in filter:
cond = None
if op == '==':
cond = Dimension(col)==eq
cond = Dimension(col) == eq
elif op == '!=':
cond = ~(Dimension(col)==eq)
cond = ~(Dimension(col) == eq)
elif op in ('in', 'not in'):
fields = []
splitted = eq.split(',')
if len(splitted) > 1:
for s in eq.split(','):
s = s.strip()
fields.append(Filter.build_filter(Dimension(col)==s))
fields.append(Filter.build_filter(Dimension(col) == s))
cond = Filter(type="or", fields=fields)
else:
cond = Dimension(col)==eq
cond = Dimension(col) == eq
if op == 'not in':
cond = ~cond
if filters:
@ -596,7 +601,7 @@ class Datasource(Model, AuditMixin, Queryable):
query_str += json.dumps(client.query_dict, indent=2) + "\n"
query_str += "//\nPhase 2 (built based on phase one's results)\n"
df = client.export_pandas()
if not df is None and not df.empty:
if df is not None and not df.empty:
dims = qry['dimensions']
filters = []
for index, row in df.iterrows():
@ -637,7 +642,6 @@ class Datasource(Model, AuditMixin, Queryable):
duration=datetime.now() - qry_start_dttm)
#class Metric(Model, AuditMixin):
class Metric(Model):
__tablename__ = 'metrics'
id = Column(Integer, primary_key=True)
@ -655,7 +659,7 @@ class Metric(Model):
def json_obj(self):
try:
obj = json.loads(self.json)
except Exception as e:
except:
obj = {}
return obj
@ -695,11 +699,14 @@ class Column(Model, AuditMixin):
json=json.dumps({'type': 'count', 'name': 'count'})
))
# Somehow we need to reassign this for UDAFs
corrected_type = 'DOUBLE' if self.type in ('DOUBLE', 'FLOAT') else self.type
if self.type in ('DOUBLE', 'FLOAT'):
corrected_type = 'DOUBLE'
else:
corrected_type = self.type
if self.sum and self.isnum:
mt = corrected_type.lower() + 'Sum'
name='sum__' + self.column_name
name = 'sum__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='sum',
@ -709,7 +716,7 @@ class Column(Model, AuditMixin):
))
if self.min and self.isnum:
mt = corrected_type.lower() + 'Min'
name='min__' + self.column_name
name = 'min__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='min',
@ -719,7 +726,7 @@ class Column(Model, AuditMixin):
))
if self.max and self.isnum:
mt = corrected_type.lower() + 'Max'
name='max__' + self.column_name
name = 'max__' + self.column_name
metrics.append(Metric(
metric_name=name,
metric_type='max',
@ -729,7 +736,7 @@ class Column(Model, AuditMixin):
))
if self.count_distinct:
mt = 'count_distinct'
name='count_distinct__' + self.column_name
name = 'count_distinct__' + self.column_name
metrics.append(Metric(
metric_name=name,
verbose_name='COUNT(DISTINCT {})'.format(self.column_name),
@ -743,9 +750,9 @@ class Column(Model, AuditMixin):
for metric in metrics:
m = (
session.query(M)
.filter(M.metric_name==metric.metric_name)
.filter(M.datasource_name==self.datasource_name)
.filter(Cluster.cluster_name==self.datasource.cluster_name)
.filter(M.metric_name == metric.metric_name)
.filter(M.datasource_name == self.datasource_name)
.filter(Cluster.cluster_name == self.datasource.cluster_name)
.first()
)
metric.datasource_name = self.datasource_name

View File

@ -12,14 +12,18 @@ from flask.ext.appbuilder.actions import action
from panoramix import appbuilder, db, models, viz, utils, app
def validate_json(form, field):
try:
json.loads(field.data)
except Exception as e:
logging.exception(e)
raise ValidationError("Json isn't valid")
class DeleteMixin(object):
@action("muldelete", "Delete", "Delete all Really?", "fa-trash", single=False)
@action(
"muldelete", "Delete", "Delete all Really?", "fa-trash", single=False)
def muldelete(self, items):
self.datamodel.delete_all(items)
self.update_redirect()
@ -53,17 +57,15 @@ class ColumnInlineView(CompactCRUDMixin, ModelView):
def post_update(self, col):
col.generate_metrics()
def post_update(self, col):
col.generate_metrics()
appbuilder.add_view_no_menu(ColumnInlineView)
class SqlMetricInlineView(CompactCRUDMixin, ModelView):
datamodel = SQLAInterface(models.SqlMetric)
list_columns = ['metric_name', 'verbose_name', 'metric_type' ]
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
'expression', 'table',]
'expression', 'table']
add_columns = edit_columns
page_size = 100
appbuilder.add_view_no_menu(SqlMetricInlineView)
@ -71,7 +73,7 @@ appbuilder.add_view_no_menu(SqlMetricInlineView)
class MetricInlineView(CompactCRUDMixin, ModelView):
datamodel = SQLAInterface(models.Metric)
list_columns = ['metric_name', 'verbose_name', 'metric_type' ]
list_columns = ['metric_name', 'verbose_name', 'metric_type']
edit_columns = [
'metric_name', 'description', 'verbose_name', 'metric_type',
'datasource', 'json']
@ -120,7 +122,8 @@ class TableView(ModelView, DeleteMixin):
datamodel = SQLAInterface(models.Table)
list_columns = ['table_link', 'database']
add_columns = ['table_name', 'database', 'default_endpoint']
edit_columns = ['table_name', 'database', 'main_datetime_column', 'default_endpoint']
edit_columns = [
'table_name', 'database', 'main_datetime_column', 'default_endpoint']
related_views = [TableColumnInlineView, SqlMetricInlineView]
def post_add(self, table):
@ -193,9 +196,6 @@ class Panoramix(BaseView):
json.dumps(obj.get_query(), indent=4),
status=200,
mimetype="application/json")
if not hasattr(obj, 'df') or obj.df is None or obj.df.empty:
pass
#return obj.render_no_data()
return obj.render()
@has_access
@ -263,5 +263,4 @@ appbuilder.add_link(
category_icon='fa-cogs',
icon="fa-cog")
#models.Metric.__table__.drop(db.engine)
db.create_all()

View File

@ -45,7 +45,6 @@ class BaseViz(object):
logging.exception(e)
self.error_msg = str(e)
def form_class(self):
return form_factory(self.datasource, self, request.args)
@ -68,7 +67,6 @@ class BaseViz(object):
"""
Building a query object
"""
ds = self.datasource
args = self.form_data
groupby = args.getlist("groupby") or []
metrics = args.getlist("metrics") or ['count']
@ -129,11 +127,12 @@ class TableViz(BaseViz):
return super(TableViz, self).render(error_msg=self.error_msg)
df = self.df
row_limit = request.args.get("row_limit")
if df is None or df.empty:
return super(TableViz, self).render(error_msg="No data.")
else:
if self.form_data.get("granularity") == "all" and 'timestamp' in df:
if (
self.form_data.get("granularity") == "all" and
'timestamp' in df):
del df['timestamp']
for m in self.metrics:
df[m + '__perc'] = np.rint((df[m] / np.max(df[m])) * 100)
@ -185,8 +184,6 @@ class BubbleViz(HighchartsViz):
return d
def render(self):
metrics = self.metrics
if not self.error_msg:
df = self.df.fillna(0)
df['x'] = df[[self.x_metric]]
@ -268,7 +265,7 @@ class TimeSeriesCompareValueViz(TimeSeriesViz):
class TimeSeriesAreaViz(TimeSeriesViz):
verbose_name = "Time Series - Stacked Area Chart"
stacked=True
stacked = True
chart_type = "area"