text (string column, lengths 81 to 112k characters)
Compute the partition at each `level` from the dataframe. def levels_for(self, time_op, groups, df): """ Compute the partition at each `level` from the dataframe. """ levels = {} for i in range(0, len(groups) + 1): agg_df = df.groupby(groups[:i]) if i else df ...
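A minimal standalone sketch of the prefix-grouping idea above, assuming a plain sum aggregation (the original dispatches on `time_op`); `levels_for_sum` and the sample column names are hypothetical:

import pandas as pd

def levels_for_sum(groups, df):
    # level 0 is the grand total; level i aggregates by the first i group columns
    levels = {}
    for i in range(len(groups) + 1):
        if i:
            levels[i] = df.groupby(groups[:i]).sum(numeric_only=True)
        else:
            levels[i] = df.sum(numeric_only=True)
    return levels

df = pd.DataFrame({'region': ['EU', 'EU', 'US'], 'country': ['FR', 'DE', 'US'], 'value': [1, 2, 3]})
levels = levels_for_sum(['region', 'country'], df)
# levels[0] -> grand total, levels[1] -> per region, levels[2] -> per (region, country)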
Nest values at each level on the back-end with access and setting, instead of summing from the bottom. def nest_values(self, levels, level=0, metric=None, dims=()): """ Nest values at each level on the back-end with access and setting, instead of summing from the bottom. """ ...
Data representation of the datasource sent to the frontend def short_data(self): """Data representation of the datasource sent to the frontend""" return { 'edit_url': self.url, 'id': self.id, 'uid': self.uid, 'schema': self.schema, 'name': sel...
Data representation of the datasource sent to the frontend def data(self): """Data representation of the datasource sent to the frontend""" order_by_choices = [] # self.column_names return sorted column_names for s in self.column_names: s = str(s or '') order_by_...
Update ORM one-to-many list from object list Used for syncing metrics and columns using the same code def get_fk_many_from_list( self, object_list, fkmany, fkmany_class, key_attr): """Update ORM one-to-many list from object list Used for syncing metrics and columns using the same ...
Update datasource from a data structure The UI's table editor crafts a complex data structure that contains most of the datasource's properties as well as an array of metrics and columns objects. This method receives the object from the UI and syncs the datasource to match it. S...
Returns a pandas dataframe based on the query object def get_query_result(self, query_object): """Returns a pandas dataframe based on the query object""" # Here, we assume that all the queries will use the same datasource, which is # a valid assumption for the current setting. In the long term, w...
Converting metrics to numeric when pandas.read_sql cannot def df_metrics_to_num(self, df, query_object): """Converting metrics to numeric when pandas.read_sql cannot""" metrics = [metric for metric in query_object.metrics] for col, dtype in df.dtypes.items(): if dtype.type == np.obj...
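A hedged sketch of the coercion described above: only the object-dtype check is visible in the original, so the use of pandas.to_numeric with errors='coerce' is an assumption for illustration:

import numpy as np
import pandas as pd

def df_metrics_to_num(df, metrics):
    # Coerce metric columns that came back as Python objects into numbers
    # (errors='coerce' is an assumption; it turns unparsable values into NaN)
    for col, dtype in df.dtypes.items():
        if col in metrics and dtype.type == np.object_:
            df[col] = pd.to_numeric(df[col], errors='coerce')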
Returns a payload of metadata and data def get_single_payload(self, query_obj): """Returns a payload of metadata and data""" payload = self.get_df_payload(query_obj) df = payload.get('df') status = payload.get('status') if status != utils.QueryStatus.FAILED: if df is...
Handles caching around the df payload retrieval def get_df_payload(self, query_obj, **kwargs): """Handles caching around the df payload retrieval""" cache_key = query_obj.cache_key( datasource=self.datasource.uid, **kwargs) if query_obj else None logging.info('Cache key: {}'.format(ca...
Data used to render slice in templates def data(self): """Data used to render slice in templates""" d = {} self.token = '' try: d = self.viz.data self.token = d.get('token') except Exception as e: logging.exception(e) d['error'] = ...
Creates :py:class:viz.BaseViz object from the url_params_multidict. :return: object of the 'viz_type' type that is taken from the url_params_multidict or self.params. :rtype: :py:class:viz.BaseViz def get_viz(self, force=False): """Creates :py:class:viz.BaseViz object from the url_...
Inserts or overrides slc in the database. remote_id and import_time fields in params_dict are set to track the slice origin and ensure correct overrides for multiple imports. Slice.perm is used to find the datasources and connect them. :param Slice slc_to_import: Slice object to import...
Imports the dashboard from the object to the database. Once the dashboard is imported, the json_metadata field is extended and stores remote_id and import_time. These help to decide whether the dashboard has to be overridden or just copied over. Slices that belong to this dashboard will be wired t...
Get the effective user, especially during impersonation. :param url: SQL Alchemy URL object :param user_name: Default username :return: The effective username def get_effective_user(self, url, user_name=None): """ Get the effective user, especially during impersonation. ...
Generates a ``select *`` statement in the proper dialect def select_star( self, table_name, schema=None, limit=100, show_cols=False, indent=True, latest_partition=False, cols=None): """Generates a ``select *`` statement in the proper dialect""" eng = self.get_sqla_engine( ...
Parameters need to be passed as keyword arguments. def all_table_names_in_database(self, cache=False, cache_timeout=None, force=False): """Parameters need to be passed as keyword arguments.""" if not self.allow_multi_schema_metadata_fetch: return [] ...
Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. :param schema: schema name :type schema: str :param cache: whether cache is enabled for the function :type cache: bool :param cac...
Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. :param schema: schema name :type schema: str :param cache: whether cache is enabled for the function :type cache: bool :param cac...
Parameters need to be passed as keyword arguments. For unused parameters, they are referenced in cache_util.memoized_func decorator. :param cache: whether cache is enabled for the function :type cache: bool :param cache_timeout: timeout in seconds for the cache :type ca...
Allows looking up a grain by either label or duration, for backward compatibility def grains_dict(self): """Allows looking up a grain by either label or duration, for backward compatibility""" d = {grain.duration: grain for grain in self.grains()} d.update({grain.label: grain for ...
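For illustration, a self-contained sketch of the double-keyed lookup built above; the Grain namedtuple and its fields here are hypothetical stand-ins for the engine's grain objects:

from collections import namedtuple

Grain = namedtuple('Grain', ['label', 'duration'])
grains = [Grain('Day', 'P1D'), Grain('Hour', 'PT1H')]

d = {grain.duration: grain for grain in grains}
d.update({grain.label: grain for grain in grains})

assert d['P1D'] is d['Day']   # either key resolves to the same grain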
Decorator to log user actions def log_this(cls, f): """Decorator to log user actions""" @functools.wraps(f) def wrapper(*args, **kwargs): user_id = None if g.user: user_id = g.user.get_id() d = request.form.to_dict() or {} # reque...
A decorator to label an endpoint as an API. Catches uncaught exceptions and returns the response in JSON format def api(f): """ A decorator to label an endpoint as an API. Catches uncaught exceptions and returns the response in JSON format """ def wraps(self, *args, **kwargs): try...
A decorator to catch superset exceptions. Use it after the @api decorator above so the superset exception handler is triggered before the handler for generic exceptions. def handle_api_exception(f): """ A decorator to catch superset exceptions. Use it after the @api decorator above so the superset exception ha...
Meant to be used in `pre_update` hooks on models to enforce ownership. Admins have all access, and other users need to be referenced in either the created_by field that comes with the ``AuditMixin``, or in a field named ``owners`` which is expected to be a one-to-many with the User model. It is meant to ...
Customize how fields are bound by stripping all whitespace. :param form: The form :param unbound_field: The unbound field :param options: The field options :returns: The bound field def bind_field( self, form: DynamicForm, unbound_field: UnboundField, options: Dict[Any,...
Common data always sent to the client def common_bootsrap_payload(self): """Common data always sent to the client""" messages = get_flashed_messages(with_categories=True) locale = str(get_locale()) return { 'flash_messages': messages, 'conf': {k: conf.get(k) for ...
Delete function logic, override to implement different logic deletes the record with primary_key = pk :param pk: record primary key to delete def _delete(self, pk): """ Delete function logic, override to implement different logic deletes the record...
Returns a set of tuples with the perm name and view menu name def get_all_permissions(self): """Returns a set of tuples with the perm name and view menu name""" perms = set() for role in self.get_user_roles(): for perm_view in role.permissions: t = (perm_view.permiss...
Returns the details of view_menus for a perm name def get_view_menus(self, permission_name): """Returns the details of view_menus for a perm name""" vm = set() for perm_name, vm_name in self.get_all_permissions(): if perm_name == permission_name: vm.add(vm_name) ...
Destroy a driver def destroy_webdriver(driver): """ Destroy a driver """ # This is some very flaky code in selenium. Hence the retries # and catch-all exceptions try: retry_call(driver.close, tries=2) except Exception: pass try: driver.quit() except Exceptio...
Given a schedule, deliver the dashboard as an email report def deliver_dashboard(schedule): """ Given a schedule, deliver the dashboard as an email report """ dashboard = schedule.dashboard dashboard_url = _get_url_path( 'Superset.dashboard', dashboard_id=dashboard.id, ) ...
Given a schedule, deliver the slice as an email report def deliver_slice(schedule): """ Given a schedule, deliver the slice as an email report """ if schedule.email_format == SliceEmailReportFormat.data: email = _get_slice_data(schedule) elif schedule.email_format == SliceEmailReportForma...
Find all active schedules and schedule celery tasks for each of them with a specific ETA (determined by parsing the cron schedule for the schedule) def schedule_window(report_type, start_at, stop_at, resolution): """ Find all active schedules and schedule celery tasks for each of them with a specif...
Celery beat job meant to be invoked hourly def schedule_hourly(): """ Celery beat job meant to be invoked hourly """ if not config.get('ENABLE_SCHEDULED_EMAIL_REPORTS'): logging.info('Scheduled email reports not enabled in config') return resolution = config.get('EMAIL_REPORTS_CRON_RESOLU...
De-duplicates a list of strings by suffixing a counter. Always returns the same number of entries as provided, and always returns unique values. Case-sensitive comparison by default. >>> print(','.join(dedup(['foo', 'bar', 'bar', 'bar', 'Bar']))) foo,bar,bar__1,bar__2,Bar >>> print(','.join(dedup(['...
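A minimal sketch consistent with the doctest above (the `__` separator and the case-sensitive default are taken from the expected output; the exact signature is otherwise an assumption):

def dedup(l, suffix='__', case_sensitive=True):
    # Append a counter to repeated values so every entry stays unique
    # (pre-existing values that already end in the suffix are not handled here)
    new_l = []
    seen = {}
    for item in l:
        key = item if case_sensitive else item.lower()
        if key in seen:
            seen[key] += 1
            item = item + suffix + str(seen[key])
        else:
            seen[key] = 0
        new_l.append(item)
    return new_l

print(','.join(dedup(['foo', 'bar', 'bar', 'bar', 'Bar'])))  # foo,bar,bar__1,bar__2,Bar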
Given a numpy dtype, returns a generic database type def db_type(cls, dtype): """Given a numpy dtype, returns a generic database type""" if isinstance(dtype, ExtensionDtype): return cls.type_map.get(dtype.kind) elif hasattr(dtype, 'char'): return cls.type_map.get(dtype.c...
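An illustrative, self-contained version of the dtype lookup; the type_map contents below are hypothetical examples of "generic database types", not the class's actual map:

import numpy as np

type_map = {
    'i': 'BIGINT', 'l': 'BIGINT', 'q': 'BIGINT',
    'f': 'FLOAT', 'd': 'FLOAT',
    'O': 'VARCHAR', 'U': 'VARCHAR', 'S': 'VARCHAR',
    'M': 'DATETIME', 'b': 'BOOLEAN', '?': 'BOOLEAN',
}

def db_type(dtype):
    # numpy dtypes expose a one-character code via .char
    if hasattr(dtype, 'char'):
        return type_map.get(dtype.char)

db_type(np.dtype('float64'))  # 'FLOAT'
db_type(np.dtype('object'))   # 'VARCHAR'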
Provides metadata about columns for data visualization. :return: dict, with the fields name, type, is_date, is_dim and agg. def columns(self): """Provides metadata about columns for data visualization. :return: dict, with the fields name, type, is_date, is_dim and agg. """ if ...
Getting the time component of the query def get_timestamp_expression(self, time_grain): """Getting the time component of the query""" label = utils.DTTM_ALIAS db = self.table.database pdf = self.python_date_format is_epoch = pdf in ('epoch_s', 'epoch_ms') if not self.ex...
Convert datetime object to a SQL expression string. If database_expression is empty, the internal dttm will be parsed as the string with the pattern that the user inputted (python_date_format). If database_expression is not empty, the internal dttm will be parsed as the sql senten...
Takes a sql alchemy column object and adds label info if supported by engine. :param sqla_col: sql alchemy column instance :param label: alias/label that column is expected to have :return: either a sql alchemy column or label instance if supported by engine def make_sqla_column_compatible(self...
Runs query against sqla to retrieve some sample values for the given column. def values_for_column(self, column_name, limit=10000): """Runs query against sqla to retrieve some sample values for the given column. """ cols = {col.column_name: col for col in self.columns} t...
Apply config's SQL_QUERY_MUTATOR Typically adds comments to the query with context def mutate_query_from_config(self, sql): """Apply config's SQL_QUERY_MUTATOR Typically adds comments to the query with context""" SQL_QUERY_MUTATOR = config.get('SQL_QUERY_MUTATOR') if SQL_QUERY...
Turn an adhoc metric into a sqlalchemy column. :param dict metric: Adhoc metric definition :param dict cols: Columns for the current table :returns: The metric defined as a sqlalchemy column :rtype: sqlalchemy.sql.column def adhoc_metric_to_sqla(self, metric, cols): """ ...
Querying any sqla table from this common interface def get_sqla_query( # sqla self, groupby, metrics, granularity, from_dttm, to_dttm, filter=None, # noqa is_timeseries=True, timeseries_limit=15, timeseries_limit_metric=N...
Fetches the metadata for the table and merges it in def fetch_metadata(self): """Fetches the metadata for the table and merges it in""" try: table = self.get_sqla_table_object() except Exception as e: logging.exception(e) raise Exception(_( "T...
Imports the datasource from the object to the database. Metrics, columns and the datasource will be overridden if they exist. This function can be used to import/export dashboards between multiple superset instances. Audit metadata isn't copied over. def import_obj(cls, i_datasource, import_time=...
Loading lat/long data from a csv file in the repo def load_long_lat_data(): """Loading lat/long data from a csv file in the repo""" data = get_example_data('san_francisco.csv.gz', make_bytes=True) pdf = pd.read_csv(data, encoding='utf-8') start = datetime.datetime.now().replace( hour=0, minute=...
Gets column info from the source system def external_metadata(self, datasource_type=None, datasource_id=None): """Gets column info from the source system""" if datasource_type == 'druid': datasource = ConnectorRegistry.get_datasource( datasource_type, datasource_id, db.sessi...
Returns a list of non-empty values or None def filter_not_empty_values(value): """Returns a list of non-empty values or None""" if not value: return None data = [x for x in value if x] if not data: return None return data
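Behavior of the helper above, which is small enough to show end to end:

filter_not_empty_values(['a', '', None, 'b'])  # -> ['a', 'b']
filter_not_empty_values(['', None])            # -> None
filter_not_empty_values(None)                  # -> None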
If the user has access to the database or all datasources 1. if schemas_allowed_for_csv_upload is empty a) if database does not support schema user is able to upload csv without specifying schema name b) if database supports schema user ...
Filter queries to only those owned by current user if can_only_access_owned_queries permission is set. :returns: query def apply( self, query: BaseQuery, func: Callable) -> BaseQuery: """ Filter queries to only those owned by current user if ...
Simple hack to redirect to explore view after saving def edit(self, pk): """Simple hack to redirect to explore view after saving""" resp = super(TableModelView, self).edit(pk) if isinstance(resp, str): return resp return redirect('/superset/explore/table/{}/'.format(pk))
Get/cache a language pack Returns the language pack from cache if it exists, caches otherwise >>> get_language_pack('fr')['Dashboards'] "Tableaux de bords" def get_language_pack(locale): """Get/cache a language pack Returns the language pack from cache if it exists, caches otherwise >>> g...
Build `form_data` for chart GET request from dashboard's `default_filters`. When a dashboard has `default_filters` they need to be added as extra filters in the GET request for charts. def get_form_data(chart_id, dashboard=None): """ Build `form_data` for chart GET request from dashboard's `default_f...
Return external URL for warming up a given chart/table cache. def get_url(params): """Return external URL for warming up a given chart/table cache.""" baseurl = 'http://{SUPERSET_WEBSERVER_ADDRESS}:{SUPERSET_WEBSERVER_PORT}/'.format( **app.config) with app.test_request_context(): return url...
Warm up cache. This task periodically hits charts to warm up the cache. def cache_warmup(strategy_name, *args, **kwargs): """ Warm up cache. This task periodically hits charts to warm up the cache. """ logger.info('Loading strategy') class_ = None for class_ in strategies: if...
Mocked. Retrieve the logs produced by the execution of the query. Can be called multiple times to fetch the logs produced after the previous call. :returns: list<str> :raises: ``ProgrammingError`` when no query has been started .. note:: This is not a part of DB-API. def fetch_logs(self, ma...
Refresh metadata of all datasources in the cluster If ``datasource_name`` is specified, only that datasource is updated def refresh_datasources( self, datasource_name=None, merge_flag=True, refreshAll=True): """Refresh metadata of all datasources in the c...
Fetches metadata for the specified datasources and merges to the Superset database def refresh(self, datasource_names, merge_flag, refreshAll): """ Fetches metadata for the specified datasources and merges to the Superset database """ session = db.session ds_list...
Refresh metrics based on the column metadata def refresh_metrics(self): """Refresh metrics based on the column metadata""" metrics = self.get_metrics() dbmetrics = ( db.session.query(DruidMetric) .filter(DruidMetric.datasource_id == self.datasource_id) .filte...
Imports the datasource from the object to the database. Metrics, columns and the datasource will be overridden if they exist. This function can be used to import/export dashboards between multiple superset instances. Audit metadata isn't copied over. def import_obj(cls, i_datasource, import_time...
Merges the ds config from druid_config into one stored in the db. def sync_to_db_from_config( cls, druid_config, user, cluster, refresh=True): """Merges the ds config from druid_config into one stored in the db.""" session = db.session ...
For a metric specified as `postagg` returns the kind of post aggregation for pydruid. def get_post_agg(mconf): """ For a metric specified as `postagg` returns the kind of post aggregation for pydruid. """ if mconf.get('type') == 'javascript': return Javascrip...
Return a list of metrics that are post aggregations def find_postaggs_for(postagg_names, metrics_dict): """Return a list of metrics that are post aggregations""" postagg_metrics = [ metrics_dict[name] for name in postagg_names if metrics_dict[name].metric_type == POST_AGG_TYPE ...
Retrieve some values for the given column def values_for_column(self, column_name, limit=10000): """Retrieve some values for the given column""" logging.info( 'Getting values for columns [{}] limited to [{}]' .format(column_nam...
Returns a dictionary of aggregation metric names to aggregation json objects :param metrics_dict: dictionary of all the metrics :param saved_metrics: list of saved metric names :param adhoc_metrics: list of adhoc metric names :raise SupersetException: if one or more metr...
Replace dimensions specs with their `dimension` values, and ignore those without def _dimensions_to_values(dimensions): """ Replace dimensions specs with their `dimension` values, and ignore those without """ values = [] for dimension in dimensions: i...
Runs a query against Druid and returns a dataframe. def run_query( # noqa / druid self, groupby, metrics, granularity, from_dttm, to_dttm, filter=None, # noqa is_timeseries=True, timeseries_limit=None, timeseries_limit_me...
Converting all GROUPBY columns to strings. When grouping by a numeric (say FLOAT) column, pydruid returns strings in the dataframe. This creates issues downstream related to having mixed types in the dataframe. Here we replace None with <NULL> and make the whole series a str inst...
Given Superset filter data structure, returns pydruid Filter(s) def get_filters(cls, raw_filters, num_cols, columns_dict): # noqa """Given Superset filter data structure, returns pydruid Filter(s)""" filters = None for flt in raw_filters: col = flt.get('col') op = flt.g...
Get the environment variable or raise exception. def get_env_variable(var_name, default=None): """Get the environment variable or raise exception.""" try: return os.environ[var_name] except KeyError: if default is not None: return default else: error_msg = 'T...
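Typical usage of the helper above (the variable names are illustrative):

db_host = get_env_variable('DATABASE_HOST', 'localhost')  # falls back to the default
secret = get_env_variable('SECRET_KEY')  # raises if SECRET_KEY is unset and no default is given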
Returns datasource with columns and metrics. def get_eager_datasource(cls, session, datasource_type, datasource_id): """Returns datasource with columns and metrics.""" datasource_class = ConnectorRegistry.sources[datasource_type] return ( session.query(datasource_class) ...
Loading a dashboard featuring misc charts def load_misc_dashboard(): """Loading a dashboard featuring misc charts""" print('Creating the dashboard') db.session.expunge_all() dash = db.session.query(Dash).filter_by(slug=DASH_SLUG).first() if not dash: dash = Dash() js = textwrap.dedent...
Loads the world bank health dataset, slices and a dashboard def load_world_bank_health_n_pop(): """Loads the world bank health dataset, slices and a dashboard""" tbl_name = 'wb_health_population' data = get_example_data('countries.json.gz') pdf = pd.read_json(data) pdf.columns = [col.replace('.', '...
Loading data for the country map def load_country_map_data(): """Loading data for the country map""" csv_bytes = get_example_data( 'birth_france_data_for_country_map.csv', is_gzip=False, make_bytes=True) data = pd.read_csv(csv_bytes, encoding='utf-8') data['dttm'] = datetime.datetime....
Returns a list of SQL statements as strings, stripped def get_statements(self): """Returns a list of SQL statements as strings, stripped""" statements = [] for statement in self._parsed: if statement: sql = str(statement).strip(' \n;\t') if sql: ...
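The splitting and stripping above relies on sqlparse; a hedged standalone equivalent of the same steps:

import sqlparse

statements = [str(s).strip(' \n;\t') for s in sqlparse.parse('SELECT 1; SELECT 2;')]
# statements == ['SELECT 1', 'SELECT 2']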
Reformats the query into a CREATE TABLE AS query. Works only for single SELECT SQL statements; in all other cases the sql query is not modified. :param superset_query: string, sql query that will be executed :param table_name: string, will contain the results of the qu...
Returns the query with the specified limit def get_query_with_new_limit(self, new_limit): """Returns the query with the specified limit; does not change the underlying query""" if not self._limit: return self.sql + ' LIMIT ' + str(new_limit) limit_pos = None tok...
Read a url or post parameter and use it in your SQL Lab query When in SQL Lab, it's possible to add arbitrary URL "query string" parameters, and use those in your SQL code. For instance you can alter your url and add `?foo=bar`, as in `{domain}/superset/sqllab?foo=bar`. Then if your query is something ...
Gets the values for a particular filter as a list. This is useful if: - you want to use a filter box to filter a query where the name of the filter box column doesn't match the one in the select statement - you want to have the ability to filter inside the main query for speed purposes Thi...
Processes a sql template >>> sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'" >>> process_template(sql) "SELECT '2017-01-01T00:00:00'" def process_template(self, sql, **kwargs): """Processes a sql template >>> sql = "SELECT '{{ datetime(2017, 1, 1).isoformat() }}'" ...
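A minimal sketch of the templating idea, assuming plain Jinja2 with `datetime` exposed in the context (the real processor injects additional Superset-specific macros):

from datetime import datetime
from jinja2 import Template

def process_template(sql, **kwargs):
    # Render the SQL as a Jinja template with datetime available to the template
    context = {'datetime': datetime}
    context.update(kwargs)
    return Template(sql).render(**context)

process_template("SELECT '{{ datetime(2017, 1, 1).isoformat() }}'")
# "SELECT '2017-01-01T00:00:00'"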
Compatibility layer for handling of datasource info. datasource_id & datasource_type used to be passed in the URL directly; now they should come as part of the form_data. This function allows supporting both without duplicating code def get_datasource_info(datasource_id, datasource_type, form_data): "...
Protecting from has_access failing from missing perms/view def can_access(self, permission_name, view_name): """Protecting from has_access failing from missing perms/view""" user = g.user if user.is_anonymous: return self.is_item_public(permission_name, view_name) return sel...
Creates missing perms for datasources, schemas and metrics def create_missing_perms(self): """Creates missing perms for datasources, schemas and metrics""" from superset import db from superset.models import core as models logging.info( 'Fetching a set of all perms to looku...
FAB leaves faulty permissions that need to be cleaned up def clean_perms(self): """FAB leaves faulty permissions that need to be cleaned up""" logging.info('Cleaning faulty perms') sesh = self.get_session pvms = ( sesh.query(ab_models.PermissionView) .filter(or_(...
Inits the Superset application with security roles and such def sync_role_definitions(self): """Inits the Superset application with security roles and such""" from superset import conf logging.info('Syncing role definition') self.create_custom_permissions() # Creating default ...
Exports the supported import/export schema to a dictionary def export_schema_to_dict(back_references): """Exports the supported import/export schema to a dictionary""" databases = [Database.export_schema(recursive=True, include_parent_ref=back_references)] clusters = [DruidCluster.export_s...
Exports databases and druid clusters to a dictionary def export_to_dict(session, recursive, back_references, include_defaults): """Exports databases and druid clusters to a dictionary""" logging.info('Starting export') dbs = session.query(Database) ...
Imports databases and druid clusters from dictionary def import_from_dict(session, data, sync=[]): """Imports databases and druid clusters from dictionary""" if isinstance(data, dict): logging.info('Importing %d %s', len(data.get(DATABASES_KEY, [])), DATABASES_...
Takes a query_obj constructed in the client and returns payload data response for the given query_obj. params: query_context: json_blob def query(self): """ Takes a query_obj constructed in the client and returns payload data response for the given query_obj. params: que...
Get the formdata stored in the database for existing slice. params: slice_id: integer def query_form_data(self): """ Get the formdata stored in the database for existing slice. params: slice_id: integer """ form_data = {} slice_id = request.args.get('slice_id') ...
Loads 2 css templates to demonstrate the feature def load_css_templates(): """Loads 2 css templates to demonstrate the feature""" print('Creating default CSS templates') obj = db.session.query(CssTemplate).filter_by(template_name='Flat').first() if not obj: obj = CssTemplate(template_name='Fla...
Get a mapping of foreign name to the local name of foreign keys def _parent_foreign_key_mappings(cls): """Get a mapping of foreign name to the local name of foreign keys""" parent_rel = cls.__mapper__.relationships.get(cls.export_parent) if parent_rel: return {l.name: r.name for (l,...
Get all (single column and multi column) unique constraints def _unique_constrains(cls): """Get all (single column and multi column) unique constraints""" unique = [{c.name for c in u.columns} for u in cls.__table_args__ if isinstance(u, UniqueConstraint)] unique.extend({c.nam...
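To illustrate what the comprehension above collects, a hypothetical SQLAlchemy model with both a multi-column and a single-column unique constraint:

from sqlalchemy import Column, Integer, String, UniqueConstraint
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Widget(Base):
    __tablename__ = 'widget'
    __table_args__ = (UniqueConstraint('name', 'owner'),)
    id = Column(Integer, primary_key=True)
    name = Column(String(50), unique=True)
    owner = Column(String(50))

unique = [{c.name for c in u.columns}
          for u in Widget.__table_args__
          if isinstance(u, UniqueConstraint)]
# unique == [{'name', 'owner'}]; the extend() step then adds {'name'}
# for the single-column unique=True constraint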
Export schema as a dictionary def export_schema(cls, recursive=True, include_parent_ref=False): """Export schema as a dictionary""" parent_excludes = {} if not include_parent_ref: parent_ref = cls.__mapper__.relationships.get(cls.export_parent) if parent_ref: ...
Import obj from a dictionary def import_from_dict(cls, session, dict_rep, parent=None, recursive=True, sync=[]): """Import obj from a dictionary""" parent_refs = cls._parent_foreign_key_mappings() export_fields = set(cls.export_fields) | set(parent_refs.keys()) ...
Export obj to dictionary def export_to_dict(self, recursive=True, include_parent_ref=False, include_defaults=False): """Export obj to dictionary""" cls = self.__class__ parent_excludes = {} if recursive and not include_parent_ref: parent_ref = cls.__ma...
Overrides the plain fields of the dashboard. def override(self, obj): """Overrides the plain fields of the dashboard.""" for field in obj.__class__.export_fields: setattr(self, field, getattr(obj, field))
Move since and until to time_range. def update_time_range(form_data): """Move since and until to time_range.""" if 'since' in form_data or 'until' in form_data: form_data['time_range'] = '{} : {}'.format( form_data.pop('since', '') or '', form_data.pop('until', '') or '', ...
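The transformation above, shown on a hypothetical form_data dict:

form_data = {'since': '7 days ago', 'until': 'now', 'metrics': ['count']}
update_time_range(form_data)
# form_data is now {'time_range': '7 days ago : now', 'metrics': ['count']}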