PKNxredash_stmo/__init__.py# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, you can obtain one at http://mozilla.org/MPL/2.0/. """Extensions to Redash by Mozilla""" __version__ = "2019.6.1" PKN3svHredash_stmo/dockerflow.py# This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, you can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import absolute_import import logging from dockerflow.flask import Dockerflow from redash import migrate, redis_connection from redash.models import db logger = logging.getLogger(__name__) def extension(app): logger.info('Loading Redash Extension for Dockerflow') dockerflow = Dockerflow(app, db=db, migrate=migrate, redis=redis_connection) logger.info('Loaded Redash Extension for Dockerflow') return dockerflow PKN[CCredash_stmo/resources.pyfrom redash.handlers.api import api def add_resource(app, *args, **kwargs): """ After api.init_app() is called, api.app should be set by Flask (but it's not) so that further calls to add_resource() are handled immediately for the given app. """ api.app = app api.add_org_resource(*args, **kwargs) PKN/redash_stmo/settings.pyimport os from redash.settings.helpers import parse_boolean, set_from_string # Frequency of health query runs in minutes (12 hours by default) HEALTH_QUERIES_REFRESH_SCHEDULE = int( os.environ.get("REDASH_HEALTH_QUERIES_REFRESH_SCHEDULE", 720) ) # When enabled this will match the given remote groups request header with a # configured list of allowed user groups. REMOTE_GROUPS_ENABLED = parse_boolean( os.environ.get("REDASH_REMOTE_GROUPS_ENABLED", "false") ) REMOTE_GROUPS_HEADER = os.environ.get( "REDASH_REMOTE_GROUPS_HEADER", "X-Forwarded-Remote-Groups" ) REMOTE_GROUPS_ALLOWED = set_from_string( os.environ.get("REDASH_REMOTE_GROUPS_ALLOWED", "") ) PKN$redash_stmo/data_sources/__init__.pyPKNmY  "redash_stmo/data_sources/health.pyimport os import json import time from redash_stmo import settings from redash_stmo.resources import add_resource from flask import jsonify from flask_login import login_required from celery.utils.log import get_task_logger from redash import models, redis_connection, statsd_client from redash.worker import celery from redash.utils import parse_human_time from redash.monitor import get_status as original_get_status from redash.handlers.base import routes, BaseResource from redash.permissions import require_super_admin logger = get_task_logger(__name__) class DataSourceHealthResource(BaseResource): def get(self): health_data = json.loads(redis_connection.get('data_sources:health') or '{}') return jsonify(health_data) def store_health_status(data_source_id, data_source_name, query_text, data): key = "data_sources:health" cache = json.loads(redis_connection.get(key) or '{}') if data_source_id not in cache: cache[data_source_id] = { "metadata": {"name": data_source_name}, "queries": {} } cache[data_source_id]["queries"][query_text] = data cache[data_source_id]["status"] = "SUCCESS" for query_status in cache[data_source_id]["queries"].values(): if query_status["status"] == "FAIL": cache[data_source_id]["status"] = "FAIL" break redis_connection.set(key, json.dumps(cache)) @celery.task(name="redash_stmo.health.update_health_status", time_limit=90, soft_time_limit=60) def update_health_status(): for data_source in models.DataSource.query: logger.info(u"task=update_health_status state=start ds_id=%s", data_source.id) runtime = None query_text = data_source.query_runner.noop_query ds_id = str(data_source.id) custom_query_env_var = "REDASH_CUSTOM_HEALTH_QUERIES_{data_source_id}".format(data_source_id=ds_id) custom_query = os.environ.get(custom_query_env_var, "") query_text = custom_query or query_text try: start_time = time.time() test_connection(data_source.query_runner, query_text) runtime = time.time() - start_time except NotImplementedError: logger.info(u"Unable to compute health status without test query for %s", data_source.name) continue except Exception: logger.warning(u"Failed health check for the data source: %s", data_source.name, exc_info=1) statsd_client.incr('update_health_status.error') logger.info(u"task=update_health_status state=error ds_id=%s runtime=%.2f", data_source.id, time.time() - start_time) status = { "status": "FAIL" if runtime is None else "SUCCESS", "last_run": start_time, "last_run_human": str(parse_human_time(str(start_time))), "runtime": runtime } store_health_status(ds_id, data_source.name, query_text, status) def test_connection(self, custom_query_text=None): if self.noop_query is None: raise NotImplementedError() query_text = custom_query_text or self.noop_query data, error = self.run_query(query_text, None) if error is not None: raise Exception(error) @login_required @require_super_admin def stmo_status_api(): status = original_get_status() health_data = json.loads(redis_connection.get('data_sources:health') or '{}') # Get the top level status for each data source for health_data_point in health_data.values(): data_source_name = health_data_point["metadata"]["name"] dashboard_label = "[Data Source Health] {name}".format(name=data_source_name) status[dashboard_label] = health_data_point["status"] return jsonify(status) def extension(app=None): """A Redash extension to add datasource health status reporting.""" # Override the default status API view with our extended view app.view_functions['%s.status_api' % routes.name] = stmo_status_api # Add a new endpoint with full health data add_resource(app, DataSourceHealthResource, '/status/data_sources/health.json') def periodic_task(): """Add the update_health_status task to a list of periodic tasks""" return { 'sig': update_health_status.s(), 'schedule': settings.HEALTH_QUERIES_REFRESH_SCHEDULE, } PKN)redash_stmo/data_sources/link/__init__.pyPKN.MV V *redash_stmo/data_sources/link/extension.pyfrom redash.models import DataSource from redash.handlers.base import BaseResource, get_object_or_404 from redash.permissions import require_access, view_only from redash.query_runner import query_runners from redash_stmo.resources import add_resource DATASOURCE_URLS = { "bigquery": "https://cloud.google.com/bigquery/docs/reference/legacy-sql", "Cassandra": "http://cassandra.apache.org/doc/latest/cql/index.html", "dynamodb_sql": "https://dql.readthedocs.io/en/latest/", "baseelasticsearch": "https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html", "google_spreadsheets": "http://redash.readthedocs.io/en/latest/datasources.html#google-spreadsheets", "hive": "https://cwiki.apache.org/confluence/display/Hive/LanguageManual", "impala": "http://www.cloudera.com/documentation/enterprise/latest/topics/impala_langref.html", "influxdb": "https://docs.influxdata.com/influxdb/v1.0/query_language/spec/", "jirajql": "https://confluence.atlassian.com/jirasoftwarecloud/advanced-searching-764478330.html", "mongodb": "https://docs.mongodb.com/manual/reference/operator/query/", "mssql": "https://msdn.microsoft.com/en-us/library/bb510741.aspx", "mysql": "https://dev.mysql.com/doc/refman/5.7/en/", "oracle": "http://docs.oracle.com/database/121/SQLRF/toc.htm", "pg": "https://www.postgresql.org/docs/current/", "redshift": "http://docs.aws.amazon.com/redshift/latest/dg/cm_chap_SQLCommandRef.html", "presto": "https://prestodb.io/docs/current/", "python": "http://redash.readthedocs.io/en/latest/datasources.html#python", "insecure_script": "http://redash.readthedocs.io/en/latest/datasources.html#python", "sqlite": "http://sqlite.org/lang.html", "treasuredata": "https://docs.treasuredata.com/categories/hive", "url": "http://redash.readthedocs.io/en/latest/datasources.html#url", "vertica": ( "https://my.vertica.com/docs/8.0.x/HTML/index.htm#Authoring/" "ConceptsGuide/Other/SQLOverview.htm%3FTocPath%3DSQL" "%2520Reference%2520Manual%7C_____1" ) } class DataSourceLinkResource(BaseResource): def get(self, data_source_id): data_source = get_object_or_404( DataSource.get_by_id_and_org, data_source_id, self.current_org, ) require_access(data_source.groups, self.current_user, view_only) try: result = { "type_name": data_source.query_runner.name(), "doc_url": data_source.options.get("doc_url", None) } except Exception as e: return {"message": unicode(e), "ok": False} else: return {"message": result, "ok": True} def extension(app=None): for runner_type, runner_class in query_runners.items(): if runner_type not in DATASOURCE_URLS: continue runner_class.add_configuration_property("doc_url", { "type": "string", "title": "Documentation URL", "default": DATASOURCE_URLS[runner_type]}) add_resource(app, DataSourceLinkResource, '/api/data_sources//link') PKN$Y7redash_stmo/data_sources/link/bundle/datasource_link.js/* eslint-disable no-console, camelcase */ import React from 'react'; import PropTypes from 'prop-types'; import { react2angular } from 'react2angular'; class DatasourceLink extends React.Component { static propTypes = { clientConfig: PropTypes.object.isRequired, datasourceId: PropTypes.number.isRequired, } constructor(props) { super(props); this.state = { type_name: '', doc_url: '', }; } componentDidMount() { this.loadURLData(); } componentDidUpdate(prevProps) { if (this.props.datasourceId !== prevProps.datasourceId) { this.loadURLData(); } } loadURLData() { fetch(`${this.props.clientConfig.basePath}api/data_sources/${this.props.datasourceId}/link`) .then((response) => { if (response.status === 200) { return response.json(); } return {}; }) .catch((error) => { console.error(`Error loading data source URL: ${error}`); return {}; }) .then((json) => { const { type_name, doc_url } = json.message; this.setState({ type_name, doc_url }); }); } render() { if (!this.state.doc_url) { return null; } return ( {this.state.type_name} documentation ); } } export default function init(ngModule) { ngModule.component('datasourceLink', react2angular(DatasourceLink, ['datasourceId'], ['clientConfig'])); } init.init = true; PKN.redash_stmo/data_sources/validator/__init__.pyPKN="]]/redash_stmo/data_sources/validator/extension.pyimport subprocess import apiclient.errors from flask import request from redash import models from redash.handlers.base import BaseResource, get_object_or_404 from redash.permissions import not_view_only, has_access from redash.utils import collect_parameters_from_request, json_loads, mustache_render from redash_stmo.resources import add_resource class DataSourceValidatorResource(BaseResource): def validate_pg(self, query_runner, query): p = subprocess.Popen("pgsanity", stdin=subprocess.PIPE, stdout=subprocess.PIPE) stdout, _ = p.communicate(query + ";") return p.returncode == 0, stdout def validate_bigquery(self, query_runner, query): jobs = query_runner._get_bigquery_service().jobs() project_id = query_runner._get_project_id() job_data = { "configuration": { "query": { "query": query, }, "dryRun": True, } } if query_runner.configuration.get('useStandardSql', False): job_data['configuration']['query']['useLegacySql'] = False if query_runner.configuration.get('userDefinedFunctionResourceUri'): resource_uris = query_runner.configuration["userDefinedFunctionResourceUri"].split(',') job_data["configuration"]["query"]["userDefinedFunctionResources"] = map( lambda resource_uri: {"resourceUri": resource_uri}, resource_uris) if "maximumBillingTier" in query_runner.configuration: job_data["configuration"]["query"]["maximumBillingTier"] = query_runner.configuration["maximumBillingTier"] insert_response = jobs.insert(projectId=project_id, body=job_data).execute() if 'errorResult' in insert_response['status']: return False, insert_response['status']['errorResult'] return True, "This query will process %s bytes." % (insert_response['statistics']['totalBytesProcessed'],) def get_validator(self, query_runner): """Return the query validator for the given query runner""" try: validator = getattr(self, 'validate_%s' % query_runner.type()) if callable(validator): return validator except AttributeError: pass def post(self, data_source_id): params = request.get_json(force=True) parameter_values = collect_parameters_from_request(request.args) query = mustache_render(params['query'], parameter_values) data_source = get_object_or_404( models.DataSource.get_by_id_and_org, data_source_id, self.current_org, ) # get the validator method validator = self.get_validator(data_source.query_runner) if (validator is None or not has_access(data_source.groups, self.current_user, not_view_only)): return { 'valid': False, 'report': 'You do not have permission to run queries with this data source.' }, 403 try: valid, report = validator(data_source.query_runner, query) return {'valid': valid, 'report': report}, 200 except apiclient.errors.HttpError as e: if e.resp.status == 400: error = json_loads(e.content)['error']['message'] else: error = e.content return {'valid': False, 'report': error}, 200 except Exception as e: return {'valid': False, 'report': str(e)}, 500 def extension(app): add_resource(app, DataSourceValidatorResource, '/api/data_sources//validate') PKN&߃Aredash_stmo/data_sources/validator/bundle/datasource_validator.js/* eslint-disable camelcase */ import React from 'react'; import { render } from 'react-dom'; import { debounce } from 'lodash'; function getValidateQuery($http, dataSourceId, query) { return $http.post(`api/data_sources/${dataSourceId}/validate`, { query }); } class DataSourceValidator extends React.Component { constructor(props) { super(props); this.getValidateQuery = debounce(getValidateQuery, 500); this.state = { valid: false, report: '', }; } componentDidMount() { this.componentDidUpdate(); } componentDidUpdate(prevProps, prevState) { const queryTextChanged = !prevProps || prevProps.queryText !== this.props.queryText; const reportChanged = prevState && prevState.report !== this.state.report; if (queryTextChanged || reportChanged) { const p = this.getValidateQuery(this.props.$http, this.props.dataSourceId, this.props.queryText); if (p) { p.then((response) => { this.setState(response.data); }); } } } render() { return ( {this.state.report} ); } } export default function init(ngModule) { ngModule.decorator('queryEditorDirective', ['$delegate', '$http', ($delegate, $http) => { const controller = $delegate[0].controller; const controllerFunc = controller[controller.length - 1]; controllerFunc.prototype.origRender = controllerFunc.prototype.render; controllerFunc.prototype.render = function newRender() { this.origRender(); const container = document.querySelector('.editor__control div'); let status = document.querySelector('#stmo_datasource_validator'); if (!status) { status = document.createElement('div'); status.id = 'stmo_datasource_validator'; } else { container.removeChild(status); } container.appendChild(status); render( , status, ); }; return $delegate; }]); } init.init = true; PKN,redash_stmo/data_sources/version/__init__.pyPKN-redash_stmo/data_sources/version/extension.pyimport json import logging from redash_stmo.resources import add_resource from redash.models import DataSource from redash.handlers.base import BaseResource, get_object_or_404 from redash.permissions import require_access, view_only logger = logging.getLogger(__name__) DATASOURCE_VERSION_PARSE_INFO = { "pg": { "version_query": "SELECT version();", "delimiter": " ", "index": 1 }, "redshift": { "version_query": "SELECT version();", "delimiter": " ", "index": -1 }, "mysql": { "version_query": "SELECT VERSION() AS version;", "delimiter": "-", "index": 0 }, } class DataSourceVersionResource(BaseResource): def get(self, data_source_id): data_source = get_object_or_404( DataSource.get_by_id_and_org, data_source_id, self.current_org ) require_access(data_source.groups, self.current_user, view_only) version_info = get_data_source_version(data_source.query_runner) return {"version": version_info} def get_data_source_version(query_runner): parse_info = DATASOURCE_VERSION_PARSE_INFO.get(query_runner.type()) if parse_info is None: return None data, error = query_runner.run_query(parse_info["version_query"], None) if error is not None: logger.error( "Unable to run version query for %s: %s", query_runner.type(), error) return None try: version = json.loads(data)['rows'][0]['version'] except (KeyError, IndexError) as err: logger.exception( "Unable to parse data source version for %s: %s", query_runner.type(), err) return None version = version.split(parse_info["delimiter"])[parse_info["index"]] return version def extension(app=None): add_resource(app, DataSourceVersionResource, '/api/data_sources//version') PKNehh=redash_stmo/data_sources/version/bundle/datasource_version.js/* eslint-disable no-console, camelcase */ import React from 'react'; import PropTypes from 'prop-types'; import { react2angular } from 'react2angular'; class DatasourceVersion extends React.Component { static propTypes = { clientConfig: PropTypes.object.isRequired, datasourceId: PropTypes.number.isRequired, } constructor(props) { super(props); this.state = { version: '', }; } componentDidMount() { this.loadURLData(); } componentDidUpdate(prevProps) { if (this.props.datasourceId !== prevProps.datasourceId) { this.loadURLData(); } } loadURLData() { fetch(`${this.props.clientConfig.basePath}api/data_sources/${this.props.datasourceId}/version`) .then((response) => { if (response.status === 200) { return response.json(); } return {}; }) .catch((error) => { console.error(`Error loading data source version: ${error}`); return {}; }) .then((json) => { this.setState({ version: json.version }); }); } render() { if (!this.state.version) { return null; } return ( {this.state.version} ); } } export default function init(ngModule) { ngModule.component('datasourceVersion', react2angular(DatasourceVersion, ['datasourceId'], ['clientConfig'])); } init.init = true; PKN redash_stmo/handlers/__init__.pyPKN/redash_stmo/handlers/authentication/__init__.pyPKNO;7redash_stmo/handlers/authentication/remote_user_auth.pyfrom flask import redirect, request, url_for from redash import settings from redash.authentication import get_next_path from redash.authentication.org_resolving import current_org from redash.authentication.remote_user_auth import logger from redash_stmo import settings as extension_settings def check_remote_groups(): """Check if there is a header of user groups and if yes check it against a list of allowed user groups from the settings""" # Quick shortcut out if remote user auth or remote groups aren't enabled if ( not settings.REMOTE_USER_LOGIN_ENABLED or not extension_settings.REMOTE_GROUPS_ENABLED ): return # Generate the URL to the remote auth login endpoint if settings.MULTI_ORG: org = current_org._get_current_object() remote_auth_path = url_for("remote_user_auth.login", org_slug=org.slug) else: remote_auth_path = url_for("remote_user_auth.login") # Then only act if the request is for the remote user auth view if request.path.startswith(remote_auth_path): remote_groups = settings.set_from_string( request.headers.get(extension_settings.REMOTE_GROUPS_HEADER) or "" ) # Finally check if the remote groups found in the request header # intersect with the allowed remote groups if not extension_settings.REMOTE_GROUPS_ALLOWED.intersection(remote_groups): logger.error( "User groups provided in the %s header are not " "matching the allowed groups.", extension_settings.REMOTE_GROUPS_HEADER, ) # Otherwise redirect back to the frontpage unsafe_next_path = request.args.get("next") next_path = get_next_path(unsafe_next_path) if settings.MULTI_ORG: org = current_org._get_current_object() index_url = url_for("redash.index", org_slug=org.slug, next=next_path) else: index_url = url_for("redash.index", next=next_path) return redirect(index_url) def extension(app): """An extension that checks the REMOTE_GROUPS_HEADER.""" app.before_request(check_remote_groups) PKN.redash_stmo/handlers/query_results/__init__.pyPKNj j /redash_stmo/handlers/query_results/extension.pyfrom redash.handlers.query_results import QueryResultResource from redash.permissions import require_access, require_permission, view_only from redash.handlers.base import get_object_or_404 from redash import models from ...resources import add_resource from .parser import extract_table_names class StmoQueryResultResource(QueryResultResource): @require_permission('view_query') def get(self, query_id=None, query_result_id=None, filetype='json'): if query_result_id: query_result = get_object_or_404( models.QueryResult.get_by_id_and_org, query_result_id, self.current_org, ) # Look for queries matching this result whose data source is 'Query Results'. if models.Query.query.join( models.DataSource, ).filter( models.Query.query_hash == query_result.query_hash, models.DataSource.type == 'results', ).first(): table_names = extract_table_names(query_result.query_text) for table_name in table_names: # Look for query IDs being accessed. if table_name.startswith("query_"): try: qid = int(table_name.split('_', 1)[1]) except ValueError: # If it's not "query_NNN" it can't affect our permissions check here. continue upstream_q = models.Query.query.filter( models.Query.id == qid ).first() if upstream_q is None: continue # If the user making this request doesn't have permission to # view the query results being accessed in this query, deny # access. require_access(upstream_q.data_source.groups, self.current_user, view_only) require_access(query_result.data_source.groups, self.current_user, view_only) return super(StmoQueryResultResource, self).get(query_id, query_result_id, filetype) def extension(app): # remove the original resource del app.view_functions['query_result'] add_resource( app, StmoQueryResultResource, '/api/query_results/.', '/api/query_results/', '/api/queries//results.', '/api/queries//results/.', endpoint='query_result', ) PKNt9??,redash_stmo/handlers/query_results/parser.py#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2018 the sqlparse authors and contributors # # # This example is part of python-sqlparse and is released under # the BSD License: https://opensource.org/licenses/BSD-3-Clause # # This example illustrates how to extract table names from nested # SELECT statements. # # See: # https://groups.google.com/forum/#!forum/sqlparse/browse_thread/thread/b0bd9a022e9d4895 import sqlparse from sqlparse.sql import IdentifierList, Identifier from sqlparse.tokens import Keyword, DML def is_subselect(parsed): if not parsed.is_group: return False for item in parsed.tokens: if item.ttype is DML and item.value.upper() == 'SELECT': return True return False def extract_from_part(parsed): from_seen = False for item in parsed.tokens: if from_seen: if is_subselect(item): for x in extract_from_part(item): yield x elif item.ttype is Keyword: raise StopIteration else: yield item elif item.ttype is Keyword and item.value.upper() == 'FROM': from_seen = True def extract_table_identifiers(token_stream): for item in token_stream: if isinstance(item, IdentifierList): for identifier in item.get_identifiers(): yield identifier.get_real_name() elif isinstance(item, Identifier): yield item.get_real_name() # It's a bug to check for Keyword here, but in the example # above some tables names are identified as keywords... elif item.ttype is Keyword: yield item.value def extract_table_names(sql): stream = extract_from_part(sqlparse.parse(sql)[0]) return list(extract_table_identifiers(stream)) PKN$redash_stmo/query_runner/__init__.pyPKNs8ss&redash_stmo/query_runner/activedata.py""" A custom Redash query runner for Mozilla's ActiveData service: More information: - https://wiki.mozilla.org/EngineeringProductivity/Projects/ActiveData - https://github.com/mozilla/ActiveData/blob/dev/docs/redash.md Originally written by Github user @klahnakoski Original link: https://github.com/klahnakoski/ActiveData-redash-query-runner/blob/c0e7286c09c6f1eb6746a6c7cca581bea79f4757/active_data.py """ import json import logging import requests from redash.query_runner import (TYPE_INTEGER, TYPE_STRING, TYPE_FLOAT, BaseSQLQueryRunner, register) from redash.utils import JSONEncoder logger = logging.getLogger(__name__) TYPES_MAP = { bool: TYPE_INTEGER, str: TYPE_STRING, unicode: TYPE_STRING, dict: TYPE_STRING, list: TYPE_STRING, int: TYPE_INTEGER, long: TYPE_INTEGER, float: TYPE_FLOAT, "string": TYPE_STRING, "object": TYPE_STRING, "long": TYPE_STRING, "double": TYPE_FLOAT, "integer": TYPE_FLOAT } class ActiveData(BaseSQLQueryRunner): noop_query = "SELECT 1" @classmethod def configuration_schema(cls): return { "type": "object", "properties": { "host_url": { "type": "string", "title": "Host URL", "default": "https://activedata.allizom.org:80", "info": "Please include a port. Do not end with a trailing slash." }, "doc_url": { "type": "string", "title": "Documentation URL", "default": "https://github.com/klahnakoski/ActiveData/tree/dev/docs" }, "toggle_table_string": { "type": "string", "title": "Toggle Table String", "default": "_v", "info": "This string will be used to toggle visibility of tables in the schema browser when editing a query in order to remove non-useful tables from sight." } }, "required": ["host_url"] } @classmethod def name(cls): return "ActiveData" @classmethod def type(cls): return "activedata" @classmethod def enabled(cls): return True def _get_tables(self, schema): query = { "from": "meta.columns", "select": [ "name", "type", "table" ], "where": {"not": {"prefix": {"es_index": "meta."}}}, "limit": 1000, "format": "list" } results = self.run_jx_query(query, None) for row in results['data']: table_name = row['table'] if table_name not in schema: schema[table_name] = {'name': table_name, 'columns': []} schema[table_name]['columns'].append( row['name'] + ' (' + TYPES_MAP.get(row['type'], TYPE_STRING) + ')' ) return [ { 'name': table['name'], 'columns': sorted(table['columns']) } for table in schema.values() ] def run_jx_query(self, query, user): data = json.dumps(query, ensure_ascii=False) result = requests.post( self.configuration['host_url'] + "/query", data=data, ) response = json.loads(result.content) if response.get('type') == "ERROR": cause = self.find_error_cause(response) raise Exception(cause) return response def run_query(self, annotated_query, user): request = {} comment, request["sql"] = annotated_query.split("*/", 2) meta = request['meta'] = {} for kv in comment.strip()[2:].split(","): k, v = [s.strip() for s in kv.split(':')] meta[k] = v logger.debug("Send ActiveData a SQL query: %s", request['sql']) data = json.dumps(request, ensure_ascii=False) result = requests.post( self.configuration['host_url'] + "/sql", data=data, ) response = json.loads(result.content) if response.get('type') == "ERROR": cause = self.find_error_cause(response) return None, cause output = self.normalize_response(response) json_data = json.dumps(output, cls=JSONEncoder) return json_data, None def normalize_response(self, table): columns = {} # MAP FROM name TO (MAP FROM type TO (full_name)) output = [] def get_unique_name(name, type): all_types = columns.get(name) if all_types is None: all_types = columns[name] = {} specific_type = all_types.get(type) if specific_type is None: if all_types: specific_type = all_types[type] = name + "." + type else: specific_type = all_types[type] = name return specific_type for r in table['data']: new_row = {} for i, cname in enumerate(table['header']): val = r[i] if val is None: continue if isinstance(val, (dict, list)): val = json.dumps(val, cls=JSONEncoder) col = get_unique_name(cname, TYPES_MAP.get(type(val), TYPE_STRING)) new_row[col] = val output.append(new_row) output_columns = [ { "name": full_name, "type": ctype, "friendly_name": full_name } for cname, types in columns.items() for ctype, full_name in types.items() ] return { 'columns': output_columns, 'rows': output } def find_error_cause(self, response): while response.get('cause') is not None: cause = response['cause'] if isinstance(cause, list): response = cause[0] else: response = cause return response.get('template') register(ActiveData) PKNz z "redash_stmo/query_runner/presto.pyimport collections import logging import six from pyhive import presto from redash.query_runner.presto import Presto from redash.query_runner import register logger = logging.getLogger(__name__) class STMOPresto(Presto): """ A custom Presto query runner. Currently empty. """ class STMOConnection(presto.Connection): """ A custom Presto connection that uses the custom Presto cursor as the default cursor. """ def cursor(self): return STMOPrestoCursor(*self._args, **self._kwargs) class STMOPrestoCursor(presto.Cursor): """ A custom Presto cursor that processes the data after it has been handled by the parent cursor to apply various transformations. """ def _process_response(self, response): super(STMOPrestoCursor, self)._process_response(response) self._data = self._process_data() def _process_data(self): processed_data = collections.deque() for row in self._data: # the top-level is an iterable of records (i.e. rows) item = [] for column, row in zip(self._columns, row): item.append(self._format_data(column["typeSignature"], row)) processed_data.append(item) return processed_data def _format_data(self, column, data): """Given a Presto column and its data, return a more human-readable format of its data for some data types.""" type = column["rawType"] try: iter(data) # check if the data is iterable except TypeError: return data # non-iterables can simply be directly shown # records should have their fields associated with types # but keep the tuple format for backward-compatibility if type == "row": keys = column["literalArguments"] values = [ self._format_data(c, d) for c, d in zip(column["typeArguments"], data) ] return tuple(zip(keys, values)) # arrays should have their element types associated with each element elif type == "array": rep = [ column["typeArguments"][0] ] * len(data) return [ self._format_data(c, d) for c, d in zip(rep, data) ] # maps should have their value types associated with each value # (note that keys are always strings), but keep the tuple format # for backward-compatibility elif type == "map": value_type = column["typeArguments"][1] return [ (k, self._format_data(value_type, v)) for k, v in six.iteritems(data) ] else: # unknown type, don't process it return data def stmo_connect(*args, **kwargs): """ A custom connect function to be used to override the default pyhive.presto.connect function. """ return STMOConnection(*args, **kwargs) def extension(app): logger.info('Loading Redash Extension for the custom Presto query runner') # Monkeypatch the pyhive.presto.connect function presto.connect = stmo_connect # and register our own STMOPresto query runner class # which automatically overwrites the default presto query runner register(STMOPresto) logger.info('Loaded Redash Extension for the custom Presto query runner') return stmo_connect PK!Hn/redash_stmo-2019.6.1.dist-info/entry_points.txtan s0i'&%Bqfn,TKx}6'd)Ō x@|Dd({Y35]Yv%n?YUC#l-&ڋ/]2y]j $6jhwyZҭZć)$b[s\w6۫ RZwƙLRW"SN߿ m{$7ʤnhU]Ak;{78ǪOvi*1zQ[UMJqu%3qY 5w[mu[W>W6_[X}WOYm7q2qeZUuNExg[V{x#:-U~%,NFp T+uoUW=,_}R׮Wk;?v< L|VÇӭ2@'ծzKDߏ "ִSߛ?FFMןjN6#+mMS tZ)^'T UdY;~q5K ~ vRx3칥g&$qox.y9Kό?$2\?/?ҁ!6Yqs U*,6vImD.JD*BeWΑ"gI2 ]Ru۴Ptɏen}^S"vW9~|)vzfb*#iL b/V2ۨ|IQUt\"3X[wWg~Zg2?=,,PdQ[uxv'[%|EvOo.IU#n8al@~t+ ~q.>t]63!!/-jԋhɢ髡@A3FjНܛ 9DɊw]6$:t-E872B<4@Zt`g  ) ;9'NQQŅa'HS9 i}>ᮈҷP؊,Y%#l2ի| a25pի-L 9I.٣QDu 7HD)l۹oV)[^ fJ'P-JQ}o;>6\W#V kL,"di,(Xfl9o2.~4HJs"Q=*6LיN?Zy-rWȷI>p#XuWw>'X~RGQ, DnlA;>&(^n?!G&/UQ[jq(PV :@:.{V4L]p1:'.D.k-Bq9Z\3ʱ#o౱r"%Q %Pc_|PL]r]ܪ8|Y[hQ)79j|ơOUUW3F`B GFXX&ebTF{s(w %L_u-jAA`!-ŷo<1ZD!;:\1M3QNk*YLEQX\ȑ ># L7.dЍ!%xRS~.}9Q(ڙOD`TPDJVu@89عm뺀kPkրC3*v%X.,75]QE829Ɋ5v\v8̈m@` Q;#^62^( c5U/eI{[A^ݞsJa8AtBmd^ ڍXXrнt! @G=6v4ޢ\`CԸvU56z(~84 T]sf~c~TAJ,_NޗJ)u.jJú}vJ~Oth{`2rcUgc8oSሏ9><@;a=4Oqc0|cͶ㭣 1Ȧ;jVi2TaAVigV"jJ&G0 VVfMt!0!DEBQx$Q ̺C:(1jÓӾ"^IK=ϸ3CZڌ~yliPa;E"UEg<1*YV+wyGȏk8&+txNH!W8l J_۞ `9a>Ӆu,[q୕ 9 xIqL^G$x7a;?4錊yk2孔y^Igy(ǡ'L#nbv  u5/)Alt0KbkK `: b0j.WqVfL&ҵg9?e#tCZ`yL,r 3͟PO'}}L\DF+B76ϟ[lǭhzէwP /*C10!(XU d퉢Bn̚QHrѩEnK[ølA3ڥ͆uܓ̸|0yK=F͒JC!ԃJEBD3YAQ 7t6&PEFHy$3qIjᆲnao8d 7/T'; H:%1)@ C|܍Аh]CA˙s8ӞP .Sc Myu31/q!:j>v(کQ3og ]`~"6`|ZRq3L\`K߸@MW o$RYVqOD/F>jgnus3G}W?b8Õ| kp~'jO* Ga&ȽWOy9f2T9bw{uy %f,H-,>m ko,rrI3]CFʿxpAzL⨸30hNy4ecW'h|/^_/Y][3P}",<렴hk1c>P_ۿ %NNdSK$ۧMY,9Ǔ$ɰxC5>1gTk dn7A0/C%(Ti' c몾2ß=Wbuzk`ӹ{IFh#$~TK'k6K]fr8YZ\Yt44}o ^A!8=,|E >Ue݃`@d 5`sZ tcٻyROA9!CUpώwe7u5dvȌBeN!'m(Hغ2x iTbhpbV1*ȫ+ZO)F!3upρdUD߅*I}++ p0jfؙkt~#<ڔ\a)=1IEnASoؠk"iB2'^?X]_]| .PKNxredash_stmo/__init__.pyPKN3svH<redash_stmo/dockerflow.pyPKN[CCredash_stmo/resources.pyPKN/tredash_stmo/settings.pyPKN$Mredash_stmo/data_sources/__init__.pyPKNmY  "redash_stmo/data_sources/health.pyPKN)redash_stmo/data_sources/link/__init__.pyPKN.MV V *"redash_stmo/data_sources/link/extension.pyPKN$Y7&redash_stmo/data_sources/link/bundle/datasource_link.jsPKN.,redash_stmo/data_sources/validator/__init__.pyPKN="]]/B-redash_stmo/data_sources/validator/extension.pyPKN&߃A;redash_stmo/data_sources/validator/bundle/datasource_validator.jsPKN,Dredash_stmo/data_sources/version/__init__.pyPKN-Eredash_stmo/data_sources/version/extension.pyPKNehh=Lredash_stmo/data_sources/version/bundle/datasource_version.jsPKN Rredash_stmo/handlers/__init__.pyPKN/Rredash_stmo/handlers/authentication/__init__.pyPKNO;7=Sredash_stmo/handlers/authentication/remote_user_auth.pyPKN.C\redash_stmo/handlers/query_results/__init__.pyPKNj j /\redash_stmo/handlers/query_results/extension.pyPKNt9??,Fgredash_stmo/handlers/query_results/parser.pyPKN$nredash_stmo/query_runner/__init__.pyPKNs8ss&oredash_stmo/query_runner/activedata.pyPKNz z "ȇredash_stmo/query_runner/presto.pyPK!Hn/redash_stmo-2019.6.1.dist-info/entry_points.txtPKNi0UAUA&redash_stmo-2019.6.1.dist-info/LICENSEPK!HMuSa$Nredash_stmo-2019.6.1.dist-info/WHEELPK!Hyp5x-'redash_stmo-2019.6.1.dist-info/METADATAPK!H~p %redash_stmo-2019.6.1.dist-info/RECORDPK S