diff --git a/migrations/versions/0ec979123ba4_.py b/migrations/versions/0ec979123ba4_.py index 4dfbe1ba15..9f931b1c56 100644 --- a/migrations/versions/0ec979123ba4_.py +++ b/migrations/versions/0ec979123ba4_.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import JSON # revision identifiers, used by Alembic. revision = '0ec979123ba4' @@ -18,7 +18,7 @@ def upgrade(): # ### commands auto generated by Alembic - please adjust! ### - op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False)) + op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False)) # ### end Alembic commands ### diff --git a/migrations/versions/640888ce445d_.py b/migrations/versions/640888ce445d_.py index 0017a09527..876ce8b7ba 100644 --- a/migrations/versions/640888ce445d_.py +++ b/migrations/versions/640888ce445d_.py @@ -10,8 +10,7 @@ from alembic import op import sqlalchemy as sa from sqlalchemy.sql import table - -from redash.models import MutableDict, PseudoJSON +from redash.models import MutableDict # revision identifiers, used by Alembic. @@ -41,7 +40,7 @@ def upgrade(): "queries", sa.Column( "schedule", - MutableDict.as_mutable(PseudoJSON), + sa.Text(), nullable=False, server_default=json.dumps({}), ), @@ -51,7 +50,7 @@ def upgrade(): queries = table( "queries", sa.Column("id", sa.Integer, primary_key=True), - sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)), + sa.Column("schedule", sa.Text()), sa.Column("old_schedule", sa.String(length=10)), ) @@ -85,7 +84,7 @@ def downgrade(): "queries", sa.Column( "old_schedule", - MutableDict.as_mutable(PseudoJSON), + sa.Text(), nullable=False, server_default=json.dumps({}), ), @@ -93,8 +92,8 @@ def downgrade(): queries = table( "queries", - sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)), - sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)), + sa.Column("schedule", sa.Text()), + sa.Column("old_schedule", sa.Text()), ) op.execute(queries.update().values({"old_schedule": queries.c.schedule})) @@ -106,7 +105,7 @@ def downgrade(): "queries", sa.Column("id", sa.Integer, primary_key=True), sa.Column("schedule", sa.String(length=10)), - sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)), + sa.Column("old_schedule", sa.Text()), ) conn = op.get_bind() diff --git a/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py b/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py new file mode 100644 index 0000000000..7364893da6 --- /dev/null +++ b/migrations/versions/7205816877ec_change_type_of_json_fields_from_varchar_.py @@ -0,0 +1,146 @@ +"""change type of json fields from varchar to json + +Revision ID: 7205816877ec +Revises: 7ce5925f832b +Create Date: 2024-01-03 13:55:18.885021 + +""" +from alembic import op +import sqlalchemy as sa +from sqlalchemy.dialects.postgresql import JSONB, JSON + + +# revision identifiers, used by Alembic. 
+revision = '7205816877ec'
+down_revision = '7ce5925f832b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    connection = op.get_bind()
+    op.alter_column('queries', 'options',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='options::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('queries', 'schedule',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='schedule::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('events', 'additional_properties',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='additional_properties::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('organizations', 'settings',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='settings::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('alerts', 'options',
+                    existing_type=JSON(astext_type=sa.Text()),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='options::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('dashboards', 'options',
+                    existing_type=JSON(astext_type=sa.Text()),
+                    type_=JSONB(astext_type=sa.Text()),
+                    postgresql_using='options::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('dashboards', 'layout',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    postgresql_using='layout::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('query_results', 'data',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    nullable=True,
+                    postgresql_using='data::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('changes', 'change',
+                    existing_type=JSON(astext_type=sa.Text()),
+                    type_=JSONB(astext_type=sa.Text()),
+                    postgresql_using='change::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('visualizations', 'options',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    postgresql_using='options::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('widgets', 'options',
+                    existing_type=sa.Text(),
+                    type_=JSONB(astext_type=sa.Text()),
+                    postgresql_using='options::jsonb',
+                    server_default=sa.text("'{}'::jsonb"))
+
+
+def downgrade():
+    connection = op.get_bind()
+    op.alter_column('queries', 'options',
+                    existing_type=JSONB(astext_type=sa.Text()),
+                    type_=sa.Text(),
+                    postgresql_using='options::text',
+                    existing_nullable=True,
+                    server_default=sa.text("'{}'::text"))
+    op.alter_column('queries', 'schedule',
+                    existing_type=JSONB(astext_type=sa.Text()),
+                    type_=sa.Text(),
+                    postgresql_using='schedule::text',
+                    existing_nullable=True,
+                    server_default=sa.text("'{}'::text"))
+    op.alter_column('events', 'additional_properties',
+                    existing_type=JSONB(astext_type=sa.Text()),
+                    type_=sa.Text(),
+                    postgresql_using='additional_properties::text',
+                    existing_nullable=True,
+                    server_default=sa.text("'{}'::text"))
+    op.alter_column('organizations', 'settings',
+                    existing_type=JSONB(astext_type=sa.Text()),
+                    type_=sa.Text(),
+                    postgresql_using='settings::text',
+                    existing_nullable=True,
+                    server_default=sa.text("'{}'::text"))
+    op.alter_column('alerts', 'options',
+                    existing_type=JSONB(astext_type=sa.Text()),
+                    type_=JSON(astext_type=sa.Text()),
+                    postgresql_using='options::json',
+                    existing_nullable=True,
+                    server_default=sa.text("'{}'::json"))
+    op.alter_column('dashboards', 'options',
+ 
existing_type=JSONB(astext_type=sa.Text()), + type_=JSON(astext_type=sa.Text()), + postgresql_using='options::json', + server_default=sa.text("'{}'::json")) + op.alter_column('dashboards', 'layout', + existing_type=JSONB(astext_type=sa.Text()), + type_=sa.Text(), + postgresql_using='layout::text', + server_default=sa.text("'{}'::text")) + op.alter_column('query_results', 'data', + existing_type=JSONB(astext_type=sa.Text()), + type_=sa.Text(), + postgresql_using='data::text', + server_default=sa.text("'{}'::text")) + op.alter_column('changes', 'change', + existing_type=JSONB(astext_type=sa.Text()), + type_=JSON(astext_type=sa.Text()), + postgresql_using='change::json', + server_default=sa.text("'{}'::json")) + op.alter_column('visualizations', 'options', + type_=sa.Text(), + existing_type=JSONB(astext_type=sa.Text()), + postgresql_using='options::text', + server_default=sa.text("'{}'::text")) + op.alter_column('widgets', 'options', + type_=sa.Text(), + existing_type=JSONB(astext_type=sa.Text()), + postgresql_using='options::text', + server_default=sa.text("'{}'::text")) diff --git a/migrations/versions/73beceabb948_bring_back_null_schedule.py b/migrations/versions/73beceabb948_bring_back_null_schedule.py index b510639dd2..2a07a0b7bd 100644 --- a/migrations/versions/73beceabb948_bring_back_null_schedule.py +++ b/migrations/versions/73beceabb948_bring_back_null_schedule.py @@ -7,10 +7,9 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql from sqlalchemy.sql import table -from redash.models import MutableDict, PseudoJSON +from redash.models import MutableDict # revision identifiers, used by Alembic. revision = "73beceabb948" @@ -43,7 +42,7 @@ def upgrade(): queries = table( "queries", sa.Column("id", sa.Integer, primary_key=True), - sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)), + sa.Column("schedule", sa.Text()), ) conn = op.get_bind() diff --git a/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py b/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py index 23670adfee..0c22043613 100644 --- a/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py +++ b/migrations/versions/98af61feea92_add_encrypted_options_to_data_sources.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import BYTEA from sqlalchemy.sql import table from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine @@ -18,7 +18,6 @@ Configuration, MutableDict, MutableList, - PseudoJSON, ) # revision identifiers, used by Alembic. @@ -31,7 +30,7 @@ def upgrade(): op.add_column( "data_sources", - sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True), + sa.Column("encrypted_options", BYTEA(), nullable=True), ) # copy values diff --git a/migrations/versions/a92d92aa678e_inline_tags.py b/migrations/versions/a92d92aa678e_inline_tags.py index f79924dc62..40421cf468 100644 --- a/migrations/versions/a92d92aa678e_inline_tags.py +++ b/migrations/versions/a92d92aa678e_inline_tags.py @@ -9,7 +9,7 @@ from funcy import flatten, compact from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import ARRAY from redash import models # revision identifiers, used by Alembic. 
@@ -21,10 +21,10 @@ def upgrade(): op.add_column( - "dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True) + "dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True) ) op.add_column( - "queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True) + "queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True) ) diff --git a/migrations/versions/d7d747033183_encrypt_alert_destinations.py b/migrations/versions/d7d747033183_encrypt_alert_destinations.py index 252e5bc225..d66460c3f2 100644 --- a/migrations/versions/d7d747033183_encrypt_alert_destinations.py +++ b/migrations/versions/d7d747033183_encrypt_alert_destinations.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import BYTEA from sqlalchemy.sql import table from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine @@ -30,7 +30,7 @@ def upgrade(): op.add_column( "notification_destinations", - sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True) + sa.Column("encrypted_options", BYTEA(), nullable=True) ) # copy values diff --git a/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py b/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py index a5a827091c..77c4f54485 100644 --- a/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py +++ b/migrations/versions/e7f8a917aa8e_add_user_details_json_column.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import JSON # revision identifiers, used by Alembic. revision = "e7f8a917aa8e" @@ -21,7 +21,7 @@ def upgrade(): "users", sa.Column( "details", - postgresql.JSON(astext_type=sa.Text()), + JSON(astext_type=sa.Text()), server_default="{}", nullable=True, ), diff --git a/migrations/versions/fd4fc850d7ea_.py b/migrations/versions/fd4fc850d7ea_.py index 75177c3299..dae95586d6 100644 --- a/migrations/versions/fd4fc850d7ea_.py +++ b/migrations/versions/fd4fc850d7ea_.py @@ -7,7 +7,7 @@ """ from alembic import op import sqlalchemy as sa -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import JSON, JSONB from redash.models import db @@ -23,8 +23,8 @@ def upgrade(): ### commands auto generated by Alembic - please adjust! 
### op.alter_column('users', 'details', - existing_type=postgresql.JSON(astext_type=sa.Text()), - type_=postgresql.JSONB(astext_type=sa.Text()), + existing_type=JSON(astext_type=sa.Text()), + type_=JSONB(astext_type=sa.Text()), existing_nullable=True, existing_server_default=sa.text("'{}'::jsonb")) ### end Alembic commands ### @@ -52,8 +52,8 @@ def downgrade(): connection.execute(update_query) db.session.commit() op.alter_column('users', 'details', - existing_type=postgresql.JSONB(astext_type=sa.Text()), - type_=postgresql.JSON(astext_type=sa.Text()), + existing_type=JSONB(astext_type=sa.Text()), + type_=JSON(astext_type=sa.Text()), existing_nullable=True, existing_server_default=sa.text("'{}'::json")) diff --git a/redash/handlers/base.py b/redash/handlers/base.py index a3f7ffffba..1bd04ceda4 100644 --- a/redash/handlers/base.py +++ b/redash/handlers/base.py @@ -5,7 +5,7 @@ from flask_login import current_user, login_required from flask_restful import Resource, abort from sqlalchemy import cast -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.orm.exc import NoResultFound from sqlalchemy_utils.functions import sort_query @@ -114,7 +114,7 @@ def json_response(response): def filter_by_tags(result_set, column): if request.args.getlist("tags"): tags = request.args.getlist("tags") - result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags)) + result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags)) return result_set diff --git a/redash/handlers/dashboards.py b/redash/handlers/dashboards.py index 56813ea16c..2eee9b8bc5 100644 --- a/redash/handlers/dashboards.py +++ b/redash/handlers/dashboards.py @@ -96,7 +96,7 @@ def post(self): org=self.current_org, user=self.current_user, is_draft=True, - layout="[]", + layout=[], ) models.db.session.add(dashboard) models.db.session.commit() diff --git a/redash/handlers/visualizations.py b/redash/handlers/visualizations.py index d6f630bbb5..f29a1fb36c 100644 --- a/redash/handlers/visualizations.py +++ b/redash/handlers/visualizations.py @@ -7,7 +7,6 @@ require_permission, ) from redash.serializers import serialize_visualization -from redash.utils import json_dumps class VisualizationListResource(BaseResource): @@ -18,7 +17,6 @@ def post(self): query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org) require_object_modify_permission(query, self.current_user) - kwargs["options"] = json_dumps(kwargs["options"]) kwargs["query_rel"] = query vis = models.Visualization(**kwargs) @@ -34,8 +32,6 @@ def post(self, visualization_id): require_object_modify_permission(vis.query_rel, self.current_user) kwargs = request.get_json(force=True) - if "options" in kwargs: - kwargs["options"] = json_dumps(kwargs["options"]) kwargs.pop("id", None) kwargs.pop("query_id", None) diff --git a/redash/handlers/widgets.py b/redash/handlers/widgets.py index 6a16a7ba55..80e3482c70 100644 --- a/redash/handlers/widgets.py +++ b/redash/handlers/widgets.py @@ -9,7 +9,6 @@ view_only, ) from redash.serializers import serialize_widget -from redash.utils import json_dumps class WidgetListResource(BaseResource): @@ -30,7 +29,6 @@ def post(self): dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org) require_object_modify_permission(dashboard, self.current_user) - widget_properties["options"] = json_dumps(widget_properties["options"]) widget_properties.pop("id", None) visualization_id = 
widget_properties.pop("visualization_id") @@ -65,7 +63,7 @@ def post(self, widget_id): require_object_modify_permission(widget.dashboard, self.current_user) widget_properties = request.get_json(force=True) widget.text = widget_properties["text"] - widget.options = json_dumps(widget_properties["options"]) + widget.options = widget_properties["options"] models.db.session.commit() return serialize_widget(widget) diff --git a/redash/models/__init__.py b/redash/models/__init__.py index 4588f2340d..5beefac06b 100644 --- a/redash/models/__init__.py +++ b/redash/models/__init__.py @@ -6,7 +6,7 @@ import pytz from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_ -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB from sqlalchemy.event import listens_for from sqlalchemy.ext.hybrid import hybrid_property from sqlalchemy.orm import ( @@ -50,8 +50,7 @@ EncryptedConfiguration, MutableDict, MutableList, - PseudoJSON, - pseudo_json_cast_property, + json_cast_property, ) from redash.models.users import ( # noqa AccessPermission, @@ -127,7 +126,10 @@ class DataSource(BelongsToOrgMixin, db.Model): data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all") __tablename__ = "data_sources" - __table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),) + __table_args__ = ( + db.Index("data_sources_org_id_name", "org_id", "name"), + {"extend_existing": True}, + ) def __eq__(self, other): return self.id == other.id @@ -301,34 +303,11 @@ class DataSourceGroup(db.Model): view_only = Column(db.Boolean, default=False) __tablename__ = "data_source_groups" - - -DESERIALIZED_DATA_ATTR = "_deserialized_data" - - -class DBPersistence: - @property - def data(self): - if self._data is None: - return None - - if not hasattr(self, DESERIALIZED_DATA_ATTR): - setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data)) - - return self._deserialized_data - - @data.setter - def data(self, data): - if hasattr(self, DESERIALIZED_DATA_ATTR): - delattr(self, DESERIALIZED_DATA_ATTR) - self._data = data - - -QueryResultPersistence = settings.dynamic_settings.QueryResultPersistence or DBPersistence + __table_args__ = ({"extend_existing": True},) @generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at") -class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin): +class QueryResult(db.Model, BelongsToOrgMixin): id = primary_key("QueryResult") org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id")) org = db.relationship(Organization) @@ -336,8 +315,8 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin): data_source = db.relationship(DataSource, backref=backref("query_results")) query_hash = Column(db.String(32), index=True) query_text = Column("query", db.Text) - _data = Column("data", db.Text) - runtime = Column(postgresql.DOUBLE_PRECISION) + data = Column(MutableDict.as_mutable(JSONB), nullable=True) + runtime = Column(DOUBLE_PRECISION) retrieved_at = Column(db.DateTime(True)) __tablename__ = "query_results" @@ -478,11 +457,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id]) is_archived = Column(db.Boolean, default=False, index=True) is_draft = Column(db.Boolean, default=True, index=True) - schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True) - interval = 
pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0) + schedule = Column(MutableDict.as_mutable(JSONB), nullable=True) + interval = json_cast_property(db.Integer, "schedule", "interval", default=0) schedule_failures = Column(db.Integer, default=0) visualizations = db.relationship("Visualization", cascade="all, delete-orphan") - options = Column(MutableDict.as_mutable(PseudoJSON), default={}) + options = Column(MutableDict.as_mutable(JSONB), default={}) search_vector = Column( TSVectorType( "id", @@ -493,7 +472,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model): ), nullable=True, ) - tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True) + tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True) query_class = SearchBaseQuery __tablename__ = "queries" @@ -529,7 +508,7 @@ def create(cls, **kwargs): name="Table", description="", type="TABLE", - options="{}", + options={}, ) ) return query @@ -595,7 +574,7 @@ def by_api_key(cls, api_key): @classmethod def past_scheduled_queries(cls): now = utils.utcnow() - queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id) + queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id) return [ query for query in queries @@ -607,7 +586,7 @@ def past_scheduled_queries(cls): def outdated_queries(cls): queries = ( Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at")) - .filter(Query.schedule.isnot(None)) + .filter(func.jsonb_typeof(Query.schedule) != "null") .order_by(Query.id) .all() ) @@ -953,7 +932,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model): query_rel = db.relationship(Query, backref=backref("alerts", cascade="all")) user_id = Column(key_type("User"), db.ForeignKey("users.id")) user = db.relationship(User, backref="alerts") - options = Column(MutableDict.as_mutable(PseudoJSON)) + options = Column(MutableDict.as_mutable(JSONB), nullable=True) state = Column(db.String(255), default=UNKNOWN_STATE) subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan") last_triggered_at = Column(db.DateTime(True), nullable=True) @@ -1064,13 +1043,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model user_id = Column(key_type("User"), db.ForeignKey("users.id")) user = db.relationship(User) # layout is no longer used, but kept so we know how to render old dashboards. 
- layout = Column(db.Text) + layout = Column(MutableList.as_mutable(JSONB), default=[]) dashboard_filters_enabled = Column(db.Boolean, default=False) is_archived = Column(db.Boolean, default=False, index=True) is_draft = Column(db.Boolean, default=True, index=True) widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic") - tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True) - options = Column(MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={}) + tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True) + options = Column(MutableDict.as_mutable(JSONB), default={}) __tablename__ = "dashboards" __mapper_args__ = {"version_id_col": version} @@ -1183,7 +1162,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model): query_rel = db.relationship(Query, back_populates="visualizations") name = Column(db.String(255)) description = Column(db.String(4096), nullable=True) - options = Column(db.Text) + options = Column(MutableDict.as_mutable(JSONB), nullable=True) __tablename__ = "visualizations" @@ -1210,7 +1189,7 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model): visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete")) text = Column(db.Text, nullable=True) width = Column(db.Integer) - options = Column(db.Text) + options = Column(MutableDict.as_mutable(JSONB), default={}) dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True) __tablename__ = "widgets" @@ -1242,7 +1221,7 @@ class Event(db.Model): action = Column(db.String(255)) object_type = Column(db.String(255)) object_id = Column(db.String(255), nullable=True) - additional_properties = Column(MutableDict.as_mutable(PseudoJSON), nullable=True, default={}) + additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={}) created_at = Column(db.DateTime(True), default=db.func.now()) __tablename__ = "events" diff --git a/redash/models/base.py b/redash/models/base.py index 2edbb36fd4..2ed95c38fb 100644 --- a/redash/models/base.py +++ b/redash/models/base.py @@ -1,13 +1,13 @@ import functools from flask_sqlalchemy import BaseQuery, SQLAlchemy -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import UUID from sqlalchemy.orm import object_session from sqlalchemy.pool import NullPool from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer from redash import settings -from redash.utils import json_dumps +from redash.utils import json_dumps, json_loads class RedashSQLAlchemy(SQLAlchemy): @@ -28,7 +28,10 @@ def apply_pool_defaults(self, app, options): return options -db = RedashSQLAlchemy(session_options={"expire_on_commit": False}) +db = RedashSQLAlchemy( + session_options={"expire_on_commit": False}, + engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads}, +) # Make sure the SQLAlchemy mappers are all properly configured first. # This is required by SQLAlchemy-Searchable as it adds DDL listeners # on the configuration phase of models. 
@@ -50,7 +53,7 @@ def integer_vectorizer(column): return db.func.cast(column, db.Text) -@vectorizer(postgresql.UUID) +@vectorizer(UUID) def uuid_vectorizer(column): return db.func.cast(column, db.Text) diff --git a/redash/models/changes.py b/redash/models/changes.py index be676a475d..3858f91415 100644 --- a/redash/models/changes.py +++ b/redash/models/changes.py @@ -1,8 +1,8 @@ +from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.inspection import inspect from sqlalchemy_utils.models import generic_repr from .base import Column, GFKBase, db, key_type, primary_key -from .types import PseudoJSON @generic_repr("id", "object_type", "object_id", "created_at") @@ -13,7 +13,7 @@ class Change(GFKBase, db.Model): object_version = Column(db.Integer, default=0) user_id = Column(key_type("User"), db.ForeignKey("users.id")) user = db.relationship("User", backref="changes") - change = Column(PseudoJSON) + change = Column(JSONB) created_at = Column(db.DateTime(True), default=db.func.now()) __tablename__ = "changes" diff --git a/redash/models/organizations.py b/redash/models/organizations.py index 4f5ec03289..0bf57499d6 100644 --- a/redash/models/organizations.py +++ b/redash/models/organizations.py @@ -1,3 +1,4 @@ +from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm.attributes import flag_modified from sqlalchemy_utils.models import generic_repr @@ -5,7 +6,7 @@ from .base import Column, db, primary_key from .mixins import TimestampMixin -from .types import MutableDict, PseudoJSON +from .types import MutableDict from .users import Group, User @@ -17,7 +18,7 @@ class Organization(TimestampMixin, db.Model): id = primary_key("Organization") name = Column(db.String(255)) slug = Column(db.String(255), unique=True) - settings = Column(MutableDict.as_mutable(PseudoJSON)) + settings = Column(MutableDict.as_mutable(JSONB), default={}) groups = db.relationship("Group", lazy="dynamic") events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)") diff --git a/redash/models/types.py b/redash/models/types.py index ae1c4fd8e7..77c1cb4965 100644 --- a/redash/models/types.py +++ b/redash/models/types.py @@ -1,11 +1,8 @@ -from sqlalchemy import cast -from sqlalchemy.dialects.postgresql import JSON from sqlalchemy.ext.indexable import index_property from sqlalchemy.ext.mutable import Mutable from sqlalchemy.types import TypeDecorator from sqlalchemy_utils import EncryptedType -from redash.utils import json_dumps, json_loads from redash.utils.configuration import ConfigurationContainer from .base import db @@ -31,22 +28,6 @@ def process_result_value(self, value, dialect): ) -# XXX replace PseudoJSON and MutableDict with real JSON field -class PseudoJSON(TypeDecorator): - impl = db.Text - - def process_bind_param(self, value, dialect): - if value is None: - return value - - return json_dumps(value) - - def process_result_value(self, value, dialect): - if not value: - return value - return json_loads(value) - - class MutableDict(Mutable, dict): @classmethod def coerce(cls, key, value): @@ -107,19 +88,3 @@ def __init__(self, cast_type, *args, **kwargs): def expr(self, model): expr = super(json_cast_property, self).expr(model) return expr.astext.cast(self.cast_type) - - -class pseudo_json_cast_property(index_property): - """ - A SQLAlchemy index property that is able to cast the - entity attribute as the specified cast type. Useful - for PseudoJSON colums for easier querying/filtering. 
- """ - - def __init__(self, cast_type, *args, **kwargs): - super().__init__(*args, **kwargs) - self.cast_type = cast_type - - def expr(self, model): - expr = cast(getattr(model, self.attr_name), JSON)[self.index] - return expr.astext.cast(self.cast_type) diff --git a/redash/models/users.py b/redash/models/users.py index f46d3954aa..6b9a83db80 100644 --- a/redash/models/users.py +++ b/redash/models/users.py @@ -8,7 +8,7 @@ from flask import current_app, request_started, url_for from flask_login import AnonymousUserMixin, UserMixin, current_user from passlib.apps import custom_app_context as pwd_context -from sqlalchemy.dialects import postgresql +from sqlalchemy.dialects.postgresql import ARRAY, JSONB from sqlalchemy_utils import EmailType from sqlalchemy_utils.models import generic_repr @@ -84,14 +84,14 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh password_hash = Column(db.String(128), nullable=True) group_ids = Column( "groups", - MutableList.as_mutable(postgresql.ARRAY(key_type("Group"))), + MutableList.as_mutable(ARRAY(key_type("Group"))), nullable=True, ) api_key = Column(db.String(40), default=lambda: generate_token(40), unique=True) disabled_at = Column(db.DateTime(True), default=None, nullable=True) details = Column( - MutableDict.as_mutable(postgresql.JSONB), + MutableDict.as_mutable(JSONB), nullable=True, server_default="{}", default={}, @@ -267,7 +267,7 @@ class Group(db.Model, BelongsToOrgMixin): org = db.relationship("Organization", back_populates="groups") type = Column(db.String(255), default=REGULAR_GROUP) name = Column(db.String(100)) - permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS) + permissions = Column(ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS) created_at = Column(db.DateTime(True), default=db.func.now()) __tablename__ = "groups" diff --git a/redash/query_runner/__init__.py b/redash/query_runner/__init__.py index 6fe84514a0..d713a40d7c 100644 --- a/redash/query_runner/__init__.py +++ b/redash/query_runner/__init__.py @@ -9,7 +9,6 @@ from sshtunnel import open_tunnel from redash import settings, utils -from redash.utils import json_loads from redash.utils.requests_session import ( UnacceptableAddressException, requests_or_advocate, @@ -243,7 +242,7 @@ def _run_query_internal(self, query): if error is not None: raise Exception("Failed running query [%s]." 
% query) - return json_loads(results)["rows"] + return results["rows"] @classmethod def to_dict(cls): diff --git a/redash/query_runner/arango.py b/redash/query_runner/arango.py index 9bae862807..c47e7cd3b1 100644 --- a/redash/query_runner/arango.py +++ b/redash/query_runner/arango.py @@ -7,7 +7,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -81,12 +80,11 @@ def run_query(self, query, user): "rows": result, } - json_data = json_dumps(data, allow_nan=False) error = None except Exception: raise - return json_data, error + return data, error register(Arango) diff --git a/redash/query_runner/athena.py b/redash/query_runner/athena.py index 2287d193eb..acde734f02 100644 --- a/redash/query_runner/athena.py +++ b/redash/query_runner/athena.py @@ -12,7 +12,6 @@ register, ) from redash.settings import parse_boolean -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true")) @@ -210,7 +209,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) for row in results["rows"]: table_name = "{0}.{1}".format(row["table_schema"], row["table_name"]) if table_name not in schema: @@ -257,14 +255,13 @@ def run_query(self, query, user): }, } - json_data = json_dumps(data, allow_nan=False) error = None except Exception: if cursor.query_id: cursor.cancel() raise - return json_data, error + return data, error register(Athena) diff --git a/redash/query_runner/axibase_tsd.py b/redash/query_runner/axibase_tsd.py index ac86e8bf65..3c535c4568 100644 --- a/redash/query_runner/axibase_tsd.py +++ b/redash/query_runner/axibase_tsd.py @@ -13,7 +13,7 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -157,17 +157,16 @@ def run_query(self, query, user): columns, rows = generate_rows_and_columns(data) data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except SQLException as e: - json_data = None + data = None error = e.content except (KeyboardInterrupt, InterruptException, JobTimeoutException): sql.cancel_query(query_id) raise - return json_data, error + return data, error def get_schema(self, get_stats=False): connection = atsd_client.connect_url( diff --git a/redash/query_runner/azure_kusto.py b/redash/query_runner/azure_kusto.py index 217bc2ed31..c7372fe184 100644 --- a/redash/query_runner/azure_kusto.py +++ b/redash/query_runner/azure_kusto.py @@ -8,7 +8,7 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads try: from azure.kusto.data.exceptions import KustoServiceError @@ -124,16 +124,15 @@ def run_query(self, query, user): error = None data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) except KustoServiceError as err: - json_data = None + data = None try: error = err.args[1][0]["error"]["@message"] except (IndexError, KeyError): error = err.args[1] - return json_data, error + return data, error def get_schema(self, get_stats=False): query = ".show database schema as json" @@ -143,8 +142,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"]) tables_list = 
schema_as_json["Databases"][self.configuration["database"]]["Tables"].values() diff --git a/redash/query_runner/big_query.py b/redash/query_runner/big_query.py index 87b6454be9..b8d2e7f538 100644 --- a/redash/query_runner/big_query.py +++ b/redash/query_runner/big_query.py @@ -16,7 +16,7 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -318,7 +318,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) for row in results["rows"]: table_name = "{0}.{1}".format(row["table_schema"], row["table_name"]) if table_name not in schema: @@ -346,9 +345,8 @@ def run_query(self, query, user): data = self._get_query_result(jobs, query) error = None - json_data = json_dumps(data, allow_nan=False) except apiclient.errors.HttpError as e: - json_data = None + data = None if e.resp.status in [400, 404]: error = json_loads(e.content)["error"]["message"] else: @@ -363,7 +361,7 @@ def run_query(self, query, user): raise - return json_data, error + return data, error register(BigQuery) diff --git a/redash/query_runner/cass.py b/redash/query_runner/cass.py index 21081ae115..c95e1c68cd 100644 --- a/redash/query_runner/cass.py +++ b/redash/query_runner/cass.py @@ -5,7 +5,6 @@ from tempfile import NamedTemporaryFile from redash.query_runner import BaseQueryRunner, register -from redash.utils import JSONEncoder, json_dumps, json_loads logger = logging.getLogger(__name__) @@ -27,11 +26,10 @@ def generate_ssl_options_dict(protocol, cert_path=None): return ssl_options -class CassandraJSONEncoder(JSONEncoder): - def default(self, o): - if isinstance(o, sortedset): - return list(o) - return super(CassandraJSONEncoder, self).default(o) +def json_encoder(dec, o): + if isinstance(o, sortedset): + return list(o) + return None class Cassandra(BaseQueryRunner): @@ -86,7 +84,6 @@ def get_schema(self, get_stats=False): select release_version from system.local; """ results, error = self.run_query(query, None) - results = json_loads(results) release_version = results["rows"][0]["release_version"] query = """ @@ -107,7 +104,6 @@ def get_schema(self, get_stats=False): ) results, error = self.run_query(query, None) - results = json_loads(results) schema = {} for row in results["rows"]: @@ -155,9 +151,8 @@ def run_query(self, query, user): rows = [dict(zip(column_names, row)) for row in result] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data, cls=CassandraJSONEncoder) - return json_data, None + return data, None def _generate_cert_file(self): cert_encoded_bytes = self.configuration.get("sslCertificateFile", None) diff --git a/redash/query_runner/clickhouse.py b/redash/query_runner/clickhouse.py index dddff2712e..752aaee7a2 100644 --- a/redash/query_runner/clickhouse.py +++ b/redash/query_runner/clickhouse.py @@ -15,7 +15,6 @@ register, split_sql_statements, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -85,8 +84,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: table_name = "{}.{}".format(row["database"], row["table"]) @@ -200,25 +197,24 @@ def run_query(self, query, user): queries = split_multi_query(query) if not queries: - json_data = None + data = None error = "Query is empty" - return json_data, error + return data, error try: # If just one query was given no session is 
needed if len(queries) == 1: - results = self._clickhouse_query(queries[0]) + data = self._clickhouse_query(queries[0]) else: # If more than one query was given, a session is needed. Parameter session_check must be false # for the first query session_id = "redash_{}".format(uuid4().hex) - results = self._clickhouse_query(queries[0], session_id, session_check=False) + data = self._clickhouse_query(queries[0], session_id, session_check=False) for query in queries[1:]: - results = self._clickhouse_query(query, session_id, session_check=True) + data = self._clickhouse_query(query, session_id, session_check=True) - data = json_dumps(results) error = None except Exception as e: data = None diff --git a/redash/query_runner/cloudwatch.py b/redash/query_runner/cloudwatch.py index dbaf4aa22d..699834c0a9 100644 --- a/redash/query_runner/cloudwatch.py +++ b/redash/query_runner/cloudwatch.py @@ -3,7 +3,7 @@ import yaml from redash.query_runner import BaseQueryRunner, register -from redash.utils import json_dumps, parse_human_time +from redash.utils import parse_human_time try: import boto3 @@ -121,7 +121,7 @@ def run_query(self, query, user): rows, columns = parse_response(results) - return json_dumps({"rows": rows, "columns": columns}), None + return {"rows": rows, "columns": columns}, None register(CloudWatch) diff --git a/redash/query_runner/cloudwatch_insights.py b/redash/query_runner/cloudwatch_insights.py index 817b5e8170..f0ebcea117 100644 --- a/redash/query_runner/cloudwatch_insights.py +++ b/redash/query_runner/cloudwatch_insights.py @@ -4,7 +4,7 @@ import yaml from redash.query_runner import BaseQueryRunner, register -from redash.utils import json_dumps, parse_human_time +from redash.utils import parse_human_time try: import boto3 @@ -146,7 +146,7 @@ def run_query(self, query, user): time.sleep(POLL_INTERVAL) elapsed += POLL_INTERVAL - return json_dumps(data), None + return data, None register(CloudWatchInsights) diff --git a/redash/query_runner/corporate_memory.py b/redash/query_runner/corporate_memory.py index f5049ae816..0eb33c89f1 100644 --- a/redash/query_runner/corporate_memory.py +++ b/redash/query_runner/corporate_memory.py @@ -9,7 +9,6 @@ from os import environ from redash.query_runner import BaseQueryRunner -from redash.utils import json_dumps, json_loads from . 
import register @@ -115,7 +114,7 @@ def _transform_sparql_results(results): logger.info("results are: {}".format(results)) # Not sure why we do not use the json package here but all other # query runner do it the same way :-) - sparql_results = json_loads(results) + sparql_results = results # transform all bindings to redash rows rows = [] for sparql_row in sparql_results["results"]["bindings"]: @@ -133,7 +132,7 @@ def _transform_sparql_results(results): columns.append({"name": var, "friendly_name": var, "type": "string"}) # Not sure why we do not use the json package here but all other # query runner do it the same way :-) - return json_dumps({"columns": columns, "rows": rows}) + return {"columns": columns, "rows": rows} @classmethod def name(cls): diff --git a/redash/query_runner/couchbase.py b/redash/query_runner/couchbase.py index 99f6ce7e1a..4a40ad7499 100644 --- a/redash/query_runner/couchbase.py +++ b/redash/query_runner/couchbase.py @@ -10,7 +10,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) try: @@ -155,7 +154,7 @@ def run_query(self, query, user): rows, columns = parse_results(result.json()["results"]) data = {"columns": columns, "rows": rows} - return json_dumps(data), None + return data, None @classmethod def name(cls): diff --git a/redash/query_runner/csv.py b/redash/query_runner/csv.py index a4311317fa..3d3cf61b9c 100644 --- a/redash/query_runner/csv.py +++ b/redash/query_runner/csv.py @@ -4,7 +4,6 @@ import yaml from redash.query_runner import BaseQueryRunner, NotSupported, register -from redash.utils import json_dumps from redash.utils.requests_session import ( UnacceptableAddressException, requests_or_advocate, @@ -96,19 +95,18 @@ def run_query(self, query, user): break data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records") - json_data = json_dumps(data) error = None except KeyboardInterrupt: error = "Query cancelled by user." - json_data = None + data = None except UnacceptableAddressException: error = "Can't query private addresses." - json_data = None + data = None except Exception as e: error = "Error reading {0}. 
{1}".format(path, str(e)) - json_data = None + data = None - return json_data, error + return data, error def get_schema(self): raise NotSupported() diff --git a/redash/query_runner/databend.py b/redash/query_runner/databend.py index c2f7d111a7..5e8062061b 100644 --- a/redash/query_runner/databend.py +++ b/redash/query_runner/databend.py @@ -16,7 +16,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads class Databend(BaseQueryRunner): @@ -85,11 +84,10 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) finally: connection.close() - return json_data, error + return data, error def get_schema(self, get_stats=False): query = """ @@ -106,7 +104,6 @@ def get_schema(self, get_stats=False): self._handle_run_query_error(error) schema = {} - results = json_loads(results) for row in results["rows"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -133,7 +130,6 @@ def _get_tables(self): self._handle_run_query_error(error) schema = {} - results = json_loads(results) for row in results["rows"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) diff --git a/redash/query_runner/databricks.py b/redash/query_runner/databricks.py index b29906debe..886ba9b8b4 100644 --- a/redash/query_runner/databricks.py +++ b/redash/query_runner/databricks.py @@ -16,7 +16,6 @@ split_sql_statements, ) from redash.settings import cast_int_or_default -from redash.utils import json_dumps, json_loads try: import pyodbc @@ -115,16 +114,13 @@ def run_query(self, query, user): logger.warning("Truncated result set.") statsd_client.incr("redash.query_runner.databricks.truncated") data["truncated"] = True - json_data = json_dumps(data) error = None else: error = None - json_data = json_dumps( - { - "columns": [{"name": "result", "type": TYPE_STRING}], - "rows": [{"result": "No data was returned."}], - } - ) + data = { + "columns": [{"name": "result", "type": TYPE_STRING}], + "rows": [{"result": "No data was returned."}], + } cursor.close() except pyodbc.Error as e: @@ -132,9 +128,9 @@ def run_query(self, query, user): error = str(e.args[1]) else: error = str(e) - json_data = None + data = None - return json_data, error + return data, error def get_schema(self): raise NotSupported() @@ -146,8 +142,6 @@ def get_databases(self): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - first_column_name = results["columns"][0]["name"] return [row[first_column_name] for row in results["rows"]] diff --git a/redash/query_runner/db2.py b/redash/query_runner/db2.py index f05a9f84e0..88a843af98 100644 --- a/redash/query_runner/db2.py +++ b/redash/query_runner/db2.py @@ -11,7 +11,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -78,8 +77,6 @@ def _get_definitions(self, schema, query): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: if row["TABLE_SCHEMA"] != "public": table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"]) @@ -130,23 +127,22 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) else: error = "Query completed but it returned no data." - json_data = None + data = None except (select.error, OSError): error = "Query interrupted. Please retry." 
- json_data = None + data = None except ibm_db_dbi.DatabaseError as e: error = str(e) - json_data = None + data = None except (KeyboardInterrupt, InterruptException, JobTimeoutException): connection.cancel() raise finally: connection.close() - return json_data, error + return data, error register(DB2) diff --git a/redash/query_runner/dgraph.py b/redash/query_runner/dgraph.py index 0d83552bf3..302a474b91 100644 --- a/redash/query_runner/dgraph.py +++ b/redash/query_runner/dgraph.py @@ -8,7 +8,6 @@ enabled = False from redash.query_runner import BaseQueryRunner, register -from redash.utils import json_dumps def reduce_item(reduced_item, key, value): @@ -81,7 +80,7 @@ def run_dgraph_query_raw(self, query): client_stub.close() def run_query(self, query, user): - json_data = None + data = None error = None try: @@ -109,12 +108,10 @@ def run_query(self, query, user): # finally, assemble both the columns and data data = {"columns": columns, "rows": processed_data} - - json_data = json_dumps(data) except Exception as e: error = e - return json_data, error + return data, error def get_schema(self, get_stats=False): """Queries Dgraph for all the predicates, their types, their tokenizers, etc. diff --git a/redash/query_runner/drill.py b/redash/query_runner/drill.py index b91e7f5c9d..a011e8590c 100644 --- a/redash/query_runner/drill.py +++ b/redash/query_runner/drill.py @@ -13,7 +13,6 @@ guess_type, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -98,9 +97,7 @@ def run_query(self, query, user): if error is not None: return None, error - results = parse_response(response.json()) - - return json_dumps(results), None + return parse_response(response.json()), None def get_schema(self, get_stats=False): query = """ @@ -132,8 +129,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - schema = {} for row in results["rows"]: diff --git a/redash/query_runner/druid.py b/redash/query_runner/druid.py index 74c9861712..b20a01953d 100644 --- a/redash/query_runner/druid.py +++ b/redash/query_runner/druid.py @@ -12,7 +12,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN} @@ -59,12 +58,10 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) - print(json_data) finally: connection.close() - return json_data, error + return data, error def get_schema(self, get_stats=False): query = """ @@ -81,7 +78,6 @@ def get_schema(self, get_stats=False): self._handle_run_query_error(error) schema = {} - results = json_loads(results) for row in results["rows"]: table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"]) diff --git a/redash/query_runner/e6data.py b/redash/query_runner/e6data.py index 7e70b810c6..0087c22e1a 100644 --- a/redash/query_runner/e6data.py +++ b/redash/query_runner/e6data.py @@ -19,7 +19,6 @@ except ImportError: enabled = False -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -106,18 +105,17 @@ def run_query(self, query, user): columns.append({"name": column_name, "type": column_type}) rows = [dict(zip([c["name"] for c in columns], r)) for r in results] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except Exception as error: logger.debug(error) - json_data = None + data = None finally: if cursor is not None: cursor.clear() cursor.close() - 
return json_data, error
+        return data, error
 
     def test_connection(self):
         self.noop_query = "SELECT 1"
diff --git a/redash/query_runner/elasticsearch.py b/redash/query_runner/elasticsearch.py
index c1f7699743..5fc1d574ca 100644
--- a/redash/query_runner/elasticsearch.py
+++ b/redash/query_runner/elasticsearch.py
@@ -16,7 +16,7 @@
     JobTimeoutException,
     register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads
 
 try:
     import http.client as http_client
@@ -406,18 +406,18 @@ def run_query(self, query, user):
                 # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
                 raise Exception("Advanced queries are not supported")
 
-            json_data = json_dumps({"columns": result_columns, "rows": result_rows})
+            data = {"columns": result_columns, "rows": result_rows}
         except requests.HTTPError as e:
             logger.exception(e)
             r = e.response
             error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
-            json_data = None
+            data = None
         except requests.exceptions.RequestException as e:
             logger.exception(e)
             error = "Connection refused"
-            json_data = None
+            data = None
 
-        return json_data, error
+        return data, error
 
 
 class ElasticSearch(BaseElasticSearch):
@@ -460,20 +460,20 @@ def run_query(self, query, user):
             result_rows = []
             self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)
 
-            json_data = json_dumps({"columns": result_columns, "rows": result_rows})
+            data = {"columns": result_columns, "rows": result_rows}
         except (KeyboardInterrupt, JobTimeoutException) as e:
             logger.exception(e)
             raise
         except requests.HTTPError as e:
             logger.exception(e)
             error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
-            json_data = None
+            data = None
         except requests.exceptions.RequestException as e:
             logger.exception(e)
             error = "Connection refused"
-            json_data = None
+            data = None
 
-        return json_data, error
+        return data, error
 
 
 register(Kibana)
diff --git a/redash/query_runner/elasticsearch2.py b/redash/query_runner/elasticsearch2.py
index 1ed4f136f3..003dd2a4f8 100644
--- a/redash/query_runner/elasticsearch2.py
+++ b/redash/query_runner/elasticsearch2.py
@@ -10,7 +10,7 @@
     BaseHTTPQueryRunner,
     register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads
 
 logger = logging.getLogger(__name__)
 
@@ -62,11 +62,10 @@ def run_query(self, query, user):
         query_results = response.json()
         data = self._parse_results(result_fields, query_results)
         error = None
-        json_data = json_dumps(data)
-        return json_data, error
+        return data, error
 
     def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
         query = json_loads(query)
         index_name = query.pop("index", "")
         result_fields = query.pop("result_fields", None)
         url = "/{}/_search".format(index_name)
diff --git a/redash/query_runner/exasol.py b/redash/query_runner/exasol.py
index b7fd53c2a2..a5fdd7df13 100644
--- a/redash/query_runner/exasol.py
+++ b/redash/query_runner/exasol.py
@@ -9,7 +9,6 @@
     BaseQueryRunner,
     register,
 )
-from redash.utils import json_dumps
 
 
 def _exasol_type_mapper(val, data_type):
@@ -109,14 +108,13 @@ def run_query(self, query, user):
 
             rows = [dict(zip(cnames, row)) for row in statement]
 
             data = {"columns": columns, "rows": rows}
-            json_data = json_dumps(data)
         finally:
             if statement is not None:
                 statement.close()
             connection.close()
 
-        return json_data, error
+        return data, error
 
     def get_schema(self, get_stats=False):
         query = """
diff --git a/redash/query_runner/excel.py b/redash/query_runner/excel.py
index c1cb67fe50..488632e022 100644
--- 
a/redash/query_runner/excel.py +++ b/redash/query_runner/excel.py @@ -3,7 +3,6 @@ import yaml from redash.query_runner import BaseQueryRunner, NotSupported, register -from redash.utils import json_dumps from redash.utils.requests_session import ( UnacceptableAddressException, requests_or_advocate, @@ -94,19 +93,18 @@ def run_query(self, query, user): break data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records") - json_data = json_dumps(data) error = None except KeyboardInterrupt: error = "Query cancelled by user." - json_data = None + data = None except UnacceptableAddressException: error = "Can't query private addresses." - json_data = None + data = None except Exception as e: error = "Error reading {0}. {1}".format(path, str(e)) - json_data = None + data = None - return json_data, error + return data, error def get_schema(self): raise NotSupported() diff --git a/redash/query_runner/google_analytics.py b/redash/query_runner/google_analytics.py index 25d32e01c5..dede7340e7 100644 --- a/redash/query_runner/google_analytics.py +++ b/redash/query_runner/google_analytics.py @@ -12,7 +12,7 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -180,15 +180,14 @@ def run_query(self, query, user): response = api.get(**params).execute() data = parse_ga_response(response) error = None - json_data = json_dumps(data) except HttpError as e: # Make sure we return a more readable error to the end user error = e._get_reason() - json_data = None + data = None else: error = "Wrong query format." - json_data = None - return json_data, error + data = None + return data, error register(GoogleAnalytics) diff --git a/redash/query_runner/google_analytics4.py b/redash/query_runner/google_analytics4.py index ac557bf28d..302e5ae909 100644 --- a/redash/query_runner/google_analytics4.py +++ b/redash/query_runner/google_analytics4.py @@ -13,7 +13,7 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -160,9 +160,8 @@ def run_query(self, query, user): data = parse_ga_response(raw_result) error = None - json_data = json_dumps(data) - return json_data, error + return data, error def test_connection(self): try: diff --git a/redash/query_runner/google_search_console.py b/redash/query_runner/google_search_console.py index ba7785b941..e0106a023a 100644 --- a/redash/query_runner/google_search_console.py +++ b/redash/query_runner/google_search_console.py @@ -11,7 +11,7 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -151,15 +151,14 @@ def run_query(self, query, user): response = api.searchanalytics().query(siteUrl=site_url, body=params).execute() data = parse_ga_response(response, params["dimensions"]) error = None - json_data = json_dumps(data) except HttpError as e: # Make sure we return a more readable error to the end user error = e._get_reason() - json_data = None + data = None else: error = "Wrong query format." 
- json_data = None - return json_data, error + data = None + return data, error register(GoogleSearchConsole) diff --git a/redash/query_runner/google_spreadsheets.py b/redash/query_runner/google_spreadsheets.py index 17df3593f8..6ea9757c4e 100644 --- a/redash/query_runner/google_spreadsheets.py +++ b/redash/query_runner/google_spreadsheets.py @@ -16,7 +16,7 @@ guess_type, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -257,7 +257,7 @@ def run_query(self, query, user): data = parse_spreadsheet(SpreadsheetWrapper(spreadsheet), worksheet_num_or_title) - return json_dumps(data), None + return data, None except gspread.SpreadsheetNotFound: return ( None, diff --git a/redash/query_runner/graphite.py b/redash/query_runner/graphite.py index f306b9d3c7..06bdbc61db 100644 --- a/redash/query_runner/graphite.py +++ b/redash/query_runner/graphite.py @@ -10,7 +10,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -35,8 +34,7 @@ def _transform_result(response): } ) - data = {"columns": columns, "rows": rows} - return json_dumps(data) + return {"columns": columns, "rows": rows} class Graphite(BaseQueryRunner): diff --git a/redash/query_runner/hive_ds.py b/redash/query_runner/hive_ds.py index d66f5fbb9f..cb44a9457e 100644 --- a/redash/query_runner/hive_ds.py +++ b/redash/query_runner/hive_ds.py @@ -12,7 +12,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -139,7 +138,6 @@ def run_query(self, query, user): rows = [dict(zip(column_names, row)) for row in cursor] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except (KeyboardInterrupt, JobTimeoutException): if connection: @@ -150,12 +148,12 @@ def run_query(self, query, user): error = e.args[0].status.errorMessage except AttributeError: error = str(e) - json_data = None + data = None finally: if connection: connection.close() - return json_data, error + return data, error class HiveHttp(Hive): diff --git a/redash/query_runner/ignite.py b/redash/query_runner/ignite.py index 4c444c3256..bba24fb064 100644 --- a/redash/query_runner/ignite.py +++ b/redash/query_runner/ignite.py @@ -12,7 +12,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads ignite_available = importlib.util.find_spec("pyignite") is not None gridgain_available = importlib.util.find_spec("pygridgain") is not None @@ -81,8 +80,6 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json_loads(results) - for row in results["rows"]: if row["SCHEMA_NAME"] != self.configuration.get("schema", "PUBLIC"): table_name = "{}.{}".format(row["SCHEMA_NAME"], row["TABLE_NAME"]) @@ -160,8 +157,8 @@ def run_query(self, query, user): ) logger.debug("Ignite running query: %s", query) - data = self._parse_results(cursor) - json_data = json_dumps({"columns": data[0], "rows": data[1]}) + result = self._parse_results(cursor) + data = {"columns": result[0], "rows": result[1]} error = None except (KeyboardInterrupt, JobTimeoutException): @@ -171,7 +168,7 @@ def run_query(self, query, user): if connection: connection.close() - return json_data, error + return data, error register(Ignite) diff --git a/redash/query_runner/impala_ds.py b/redash/query_runner/impala_ds.py index 2c8b223af1..8a78147346 100644 --- a/redash/query_runner/impala_ds.py +++ b/redash/query_runner/impala_ds.py @@ 
-10,7 +10,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -120,14 +119,13 @@ def run_query(self, query, user): rows = [dict(zip(column_names, row)) for row in cursor] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None cursor.close() except DatabaseError as e: - json_data = None + data = None error = str(e) except RPCError as e: - json_data = None + data = None error = "Metastore Error [%s]" % str(e) except (KeyboardInterrupt, JobTimeoutException): connection.cancel() @@ -136,7 +134,7 @@ def run_query(self, query, user): if connection: connection.close() - return json_data, error + return data, error register(Impala) diff --git a/redash/query_runner/influx_db.py b/redash/query_runner/influx_db.py index 025318b059..7f5249b1ac 100644 --- a/redash/query_runner/influx_db.py +++ b/redash/query_runner/influx_db.py @@ -7,7 +7,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -64,7 +63,7 @@ def _transform_result(results): else: result_columns = [{"name": c, "type": TYPE_STRING} for c in column_names] - return json_dumps({"columns": result_columns, "rows": result_rows}) + return {"columns": result_columns, "rows": result_rows} class InfluxDB(BaseQueryRunner): diff --git a/redash/query_runner/influx_db_v2.py b/redash/query_runner/influx_db_v2.py index ab5add25ac..1c23ad5ac1 100644 --- a/redash/query_runner/influx_db_v2.py +++ b/redash/query_runner/influx_db_v2.py @@ -13,7 +13,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps try: from influxdb_client import InfluxDBClient @@ -188,7 +187,7 @@ def run_query(self, query: str, user: str) -> Tuple[Optional[str], Optional[str] 2. element: An error message, if an error occured. None, if no error occurred. 
""" - json_data = None + data = None error = None try: @@ -204,14 +203,12 @@ def run_query(self, query: str, user: str) -> Tuple[Optional[str], Optional[str] tables = client.query_api().query(query) data = self._get_data_from_tables(tables) - - json_data = json_dumps(data) except Exception as ex: error = str(ex) finally: self._cleanup_cert_files(influx_kwargs) - return json_data, error + return data, error register(InfluxDBv2) diff --git a/redash/query_runner/jql.py b/redash/query_runner/jql.py index 1079a3aba8..f7d26aa24c 100644 --- a/redash/query_runner/jql.py +++ b/redash/query_runner/jql.py @@ -2,7 +2,7 @@ from collections import OrderedDict from redash.query_runner import TYPE_STRING, BaseHTTPQueryRunner, register -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads # TODO: make this more general and move into __init__.py @@ -26,7 +26,7 @@ def add_column(self, column, column_type=TYPE_STRING): } def to_json(self): - return json_dumps({"rows": self.rows, "columns": list(self.columns.values())}) + return {"rows": self.rows, "columns": list(self.columns.values())} def merge(self, set): self.rows = self.rows + set.rows diff --git a/redash/query_runner/json_ds.py b/redash/query_runner/json_ds.py index 4508430fe0..4f47878c6c 100644 --- a/redash/query_runner/json_ds.py +++ b/redash/query_runner/json_ds.py @@ -14,7 +14,6 @@ BaseHTTPQueryRunner, register, ) -from redash.utils import json_dumps class QueryParseError(Exception): @@ -158,11 +157,10 @@ def test_connection(self): def run_query(self, query, user): query = parse_query(query) - results, error = self._run_json_query(query) + data, error = self._run_json_query(query) if error is not None: return None, error - data = json_dumps(results) if data: return data, None return None, "Got empty response from '{}'.".format(query["url"]) diff --git a/redash/query_runner/kylin.py b/redash/query_runner/kylin.py index caeead2e05..18f6ff9c6b 100644 --- a/redash/query_runner/kylin.py +++ b/redash/query_runner/kylin.py @@ -15,7 +15,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -102,7 +101,7 @@ def run_query(self, query, user): columns = self.get_columns(data["columnMetas"]) rows = self.get_rows(columns, data["results"]) - return json_dumps({"columns": columns, "rows": rows}), None + return {"columns": columns, "rows": rows}, None def get_schema(self, get_stats=False): url = self.configuration["url"] diff --git a/redash/query_runner/memsql_ds.py b/redash/query_runner/memsql_ds.py index fabe4b2ddc..f24c230816 100644 --- a/redash/query_runner/memsql_ds.py +++ b/redash/query_runner/memsql_ds.py @@ -10,7 +10,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -130,7 +129,6 @@ def run_query(self, query, user): columns.append({"name": column, "friendly_name": column, "type": TYPE_STRING}) data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except (KeyboardInterrupt, JobTimeoutException): cursor.close() @@ -139,7 +137,7 @@ def run_query(self, query, user): if cursor: cursor.close() - return json_data, error + return data, error register(MemSQL) diff --git a/redash/query_runner/mongodb.py b/redash/query_runner/mongodb.py index c79637f46c..000aaf2d9c 100644 --- a/redash/query_runner/mongodb.py +++ b/redash/query_runner/mongodb.py @@ -13,7 +13,7 @@ BaseQueryRunner, register, ) -from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time +from 
redash.utils import json_loads, parse_human_time logger = logging.getLogger(__name__) @@ -42,15 +42,14 @@ } -class MongoDBJSONEncoder(JSONEncoder): - def default(self, o): - if isinstance(o, ObjectId): - return str(o) - elif isinstance(o, Timestamp): - return super(MongoDBJSONEncoder, self).default(o.as_datetime()) - elif isinstance(o, Decimal128): - return o.to_decimal() - return super(MongoDBJSONEncoder, self).default(o) +def json_encoder(dec, o): + if isinstance(o, ObjectId): + return str(o) + elif isinstance(o, Timestamp): + return dec.default(o.as_datetime()) + elif isinstance(o, Decimal128): + return o.to_decimal() + return None date_regex = re.compile(r'ISODate\("(.*)"\)', re.IGNORECASE) @@ -348,9 +347,8 @@ def run_query(self, query, user): # noqa: C901 data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data, cls=MongoDBJSONEncoder) - return json_data, error + return data, error register(MongoDB) diff --git a/redash/query_runner/mssql.py b/redash/query_runner/mssql.py index 3d17fee596..9ba3904aab 100644 --- a/redash/query_runner/mssql.py +++ b/redash/query_runner/mssql.py @@ -8,7 +8,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -87,8 +86,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: if row["table_schema"] != self.configuration["db"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -140,11 +137,10 @@ def run_query(self, query, user): rows = [dict(zip((column["name"] for column in columns), row)) for row in data] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None else: error = "No data was returned." - json_data = None + data = None cursor.close() connection.commit() @@ -155,7 +151,7 @@ def run_query(self, query, user): except IndexError: # Connection errors are `args[0][1]` error = e.args[0][1] - json_data = None + data = None except (KeyboardInterrupt, JobTimeoutException): connection.cancel() raise @@ -163,7 +159,7 @@ def run_query(self, query, user): if connection: connection.close() - return json_data, error + return data, error register(SqlServer) diff --git a/redash/query_runner/mssql_odbc.py b/redash/query_runner/mssql_odbc.py index 1d088a1b1d..6a4f57da47 100644 --- a/redash/query_runner/mssql_odbc.py +++ b/redash/query_runner/mssql_odbc.py @@ -6,7 +6,6 @@ register, ) from redash.query_runner.mssql import types_map -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -94,8 +93,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: if row["table_schema"] != self.configuration["db"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -139,11 +136,10 @@ def run_query(self, query, user): rows = [dict(zip((column["name"] for column in columns), row)) for row in data] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None else: error = "No data was returned." 
- json_data = None + data = None cursor.close() except pyodbc.Error as e: @@ -153,7 +149,7 @@ def run_query(self, query, user): except IndexError: # Connection errors are `args[0][1]` error = e.args[0][1] - json_data = None + data = None except (KeyboardInterrupt, JobTimeoutException): connection.cancel() raise @@ -161,7 +157,7 @@ def run_query(self, query, user): if connection: connection.close() - return json_data, error + return data, error register(SQLServerODBC) diff --git a/redash/query_runner/mysql.py b/redash/query_runner/mysql.py index a7e59b318e..1788ce07fc 100644 --- a/redash/query_runner/mysql.py +++ b/redash/query_runner/mysql.py @@ -14,7 +14,6 @@ register, ) from redash.settings import parse_boolean -from redash.utils import json_dumps, json_loads try: import MySQLdb @@ -161,8 +160,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: if row["table_schema"] != self.configuration["db"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -194,7 +191,7 @@ def run_query(self, query, user): t.join() raise - return r.json_data, r.error + return r.data, r.error def _run_query(self, query, user, connection, r, ev): try: @@ -216,17 +213,17 @@ def _run_query(self, query, user, connection, r, ev): rows = [dict(zip((column["name"] for column in columns), row)) for row in data] data = {"columns": columns, "rows": rows} - r.json_data = json_dumps(data) + r.data = data r.error = None else: - r.json_data = None + r.data = None r.error = "No data was returned." cursor.close() except MySQLdb.Error as e: if cursor: cursor.close() - r.json_data = None + r.data = None r.error = e.args[1] finally: ev.set() diff --git a/redash/query_runner/nz.py b/redash/query_runner/nz.py index 6939cc23e8..51f68ef1ed 100644 --- a/redash/query_runner/nz.py +++ b/redash/query_runner/nz.py @@ -1,4 +1,3 @@ -import json import logging import traceback @@ -150,7 +149,7 @@ def type_map(self, typid, func): return typ def run_query(self, query, user): - json_data, error = None, None + data, error = None, None try: with self.connection.cursor() as cursor: cursor.execute(query) @@ -165,10 +164,10 @@ def run_query(self, query, user): ) rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor] - json_data = json.dumps({"columns": columns, "rows": rows}) + data = {"columns": columns, "rows": rows} except Exception: error = traceback.format_exc() - return json_data, error + return data, error register(Netezza) diff --git a/redash/query_runner/oracle.py b/redash/query_runner/oracle.py index 41a45a14c7..c721de9069 100644 --- a/redash/query_runner/oracle.py +++ b/redash/query_runner/oracle.py @@ -10,7 +10,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads try: import oracledb @@ -98,8 +97,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: if row["OWNER"] is not None: table_name = "{}.{}".format(row["OWNER"], row["TABLE_NAME"]) @@ -168,19 +165,17 @@ def run_query(self, query, user): rows = [dict(zip((c["name"] for c in columns), row)) for row in cursor] data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) else: columns = [{"name": "Row(s) Affected", "type": "TYPE_INTEGER"}] rows = [{"Row(s) Affected": rows_count}] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) connection.commit() except 
oracledb.DatabaseError as err: (err_args,) = err.args line_number = query.count("\n", 0, err_args.offset) + 1 column_number = err_args.offset - query.rfind("\n", 0, err_args.offset) - 1 error = "Query failed at line {}, column {}: {}".format(str(line_number), str(column_number), str(err)) - json_data = None + data = None except (KeyboardInterrupt, JobTimeoutException): connection.cancel() raise @@ -188,7 +183,7 @@ def run_query(self, query, user): os.environ.pop("NLS_LANG", None) connection.close() - return json_data, error + return data, error register(Oracle) diff --git a/redash/query_runner/pg.py b/redash/query_runner/pg.py index a9074530db..763dc2735d 100644 --- a/redash/query_runner/pg.py +++ b/redash/query_runner/pg.py @@ -20,7 +20,6 @@ JobTimeoutException, register, ) -from redash.utils import JSONEncoder, json_dumps, json_loads logger = logging.getLogger(__name__) @@ -56,18 +55,16 @@ } -class PostgreSQLJSONEncoder(JSONEncoder): - def default(self, o): - if isinstance(o, Range): - # From: https://github.com/psycopg/psycopg2/pull/779 - if o._bounds is None: - return "" +def json_encoder(dec, o): + if isinstance(o, Range): + # From: https://github.com/psycopg/psycopg2/pull/779 + if o._bounds is None: + return "" - items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]] + items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]] - return "".join(items) - - return super(PostgreSQLJSONEncoder, self).default(o) + return "".join(items) + return None def _wait(conn, timeout=None): @@ -204,8 +201,6 @@ def _get_definitions(self, schema, query): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - build_schema(results, schema) def _get_tables(self, schema): @@ -282,16 +277,15 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data, allow_nan=False, cls=PostgreSQLJSONEncoder) else: error = "Query completed but it returned no data." - json_data = None + data = None except (select.error, OSError): error = "Query interrupted. Please retry." 
- json_data = None + data = None except psycopg2.DatabaseError as e: error = str(e) - json_data = None + data = None except (KeyboardInterrupt, InterruptException, JobTimeoutException): connection.cancel() raise @@ -299,7 +293,7 @@ def run_query(self, query, user): connection.close() _cleanup_ssl_certs(self.ssl_config) - return json_data, error + return data, error class Redshift(PostgreSQL): diff --git a/redash/query_runner/phoenix.py b/redash/query_runner/phoenix.py index eb1e8df02a..e76b2f7d3c 100644 --- a/redash/query_runner/phoenix.py +++ b/redash/query_runner/phoenix.py @@ -9,7 +9,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -82,8 +81,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: table_name = "{}.{}".format(row["TABLE_SCHEM"], row["TABLE_NAME"]) @@ -105,17 +102,16 @@ def run_query(self, query, user): columns = self.fetch_columns(column_tuples) rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None cursor.close() except Error as e: - json_data = None + data = None error = "code: {}, sql state:{}, message: {}".format(e.code, e.sqlstate, str(e)) finally: if connection: connection.close() - return json_data, error + return data, error register(Phoenix) diff --git a/redash/query_runner/pinot.py b/redash/query_runner/pinot.py index 9b8a34fd70..0bcdcef9ed 100644 --- a/redash/query_runner/pinot.py +++ b/redash/query_runner/pinot.py @@ -19,7 +19,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -92,12 +91,11 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) logger.debug("Pinot execute query [%s]", query) finally: connection.close() - return json_data, error + return data, error def get_schema(self, get_stats=False): schema = {} diff --git a/redash/query_runner/presto.py b/redash/query_runner/presto.py index 721efc67e5..e6e7bc785e 100644 --- a/redash/query_runner/presto.py +++ b/redash/query_runner/presto.py @@ -11,7 +11,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -89,8 +88,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -120,10 +117,9 @@ def run_query(self, query, user): columns = self.fetch_columns(column_tuples) rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except DatabaseError as db: - json_data = None + data = None default_message = "Unspecified DatabaseError: {0}".format(str(db)) if isinstance(db.args[0], dict): message = db.args[0].get("failureInfo", {"message", None}).get("message") @@ -134,7 +130,7 @@ def run_query(self, query, user): cursor.cancel() raise - return json_data, error + return data, error register(Presto) diff --git a/redash/query_runner/prometheus.py b/redash/query_runner/prometheus.py index 29b6577b3e..34b5aa94d5 100644 --- a/redash/query_runner/prometheus.py +++ b/redash/query_runner/prometheus.py @@ 
-14,7 +14,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps def get_instant_rows(metrics_data): @@ -247,7 +246,7 @@ def run_query(self, query, user): else: rows = get_instant_rows(metrics) - json_data = json_dumps({"rows": rows, "columns": columns}) + data = {"rows": rows, "columns": columns} except requests.RequestException as e: return None, str(e) @@ -256,7 +255,7 @@ def run_query(self, query, user): finally: self._cleanup_cert_files(promehteus_kwargs) - return json_data, error + return data, error register(Prometheus) diff --git a/redash/query_runner/python.py b/redash/query_runner/python.py index 1fc994ea48..27f1146ae5 100644 --- a/redash/query_runner/python.py +++ b/redash/query_runner/python.py @@ -23,7 +23,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps, json_loads from redash.utils.pandas import pandas_installed if pandas_installed: @@ -228,7 +227,7 @@ def execute_query(data_source_name_or_id, query, result_type=None): raise Exception(error) # TODO: allow avoiding the JSON dumps/loads in same process - query_result = json_loads(data) + query_result = data if result_type == "dataframe" and pandas_installed: return pd.DataFrame(query_result["rows"]) @@ -357,15 +356,14 @@ def run_query(self, query, user): exec(code, restricted_globals, self._script_locals) - result = self._script_locals["result"] - self.validate_result(result) - result["log"] = self._custom_print.lines - json_data = json_dumps(result) + data = self._script_locals["result"] + self.validate_result(data) + data["log"] = self._custom_print.lines except Exception as e: error = str(type(e)) + " " + str(e) - json_data = None + data = None - return json_data, error + return data, error register(Python) diff --git a/redash/query_runner/qubole.py b/redash/query_runner/qubole.py index 55bb091566..717b2cdc48 100644 --- a/redash/query_runner/qubole.py +++ b/redash/query_runner/qubole.py @@ -10,7 +10,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps try: import qds_sdk # noqa: F401 @@ -125,13 +124,13 @@ def run_query(self, query, user): columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")]) rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data] - json_data = json_dumps({"columns": columns, "rows": rows}) + data = {"columns": columns, "rows": rows} except (KeyboardInterrupt, JobTimeoutException): logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id) cmd.cancel() raise - return json_data, error + return data, error def get_schema(self, get_stats=False): schemas = {} diff --git a/redash/query_runner/query_results.py b/redash/query_runner/query_results.py index 9217bc1497..ea83e5089b 100644 --- a/redash/query_runner/query_results.py +++ b/redash/query_runner/query_results.py @@ -13,7 +13,7 @@ guess_type, register, ) -from redash.utils import json_dumps, json_loads +from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -76,8 +76,6 @@ def get_query_results(user, query_id, bring_from_cache, params=None): results, error = query.data_source.query_runner.run_query(query_text, user) if error: raise Exception("Failed loading results for query id {}.".format(query.id)) - else: - results = json_loads(results) return results @@ -194,16 +192,15 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) else: error = "Query completed but it returned no data." 
- json_data = None + data = None except (KeyboardInterrupt, JobTimeoutException): connection.cancel() raise finally: connection.close() - return json_data, error + return data, error register(Results) diff --git a/redash/query_runner/rockset.py b/redash/query_runner/rockset.py index 1d358881c4..96910b8be9 100644 --- a/redash/query_runner/rockset.py +++ b/redash/query_runner/rockset.py @@ -8,7 +8,6 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps def _get_type(value): @@ -121,7 +120,7 @@ def run_query(self, query, user): columns = [] for k in rows[0]: columns.append({"name": k, "friendly_name": k, "type": _get_type(rows[0][k])}) - data = json_dumps({"columns": columns, "rows": rows}) + data = {"columns": columns, "rows": rows} return data, None diff --git a/redash/query_runner/salesforce.py b/redash/query_runner/salesforce.py index 4a937455c0..5e0e018c2e 100644 --- a/redash/query_runner/salesforce.py +++ b/redash/query_runner/salesforce.py @@ -12,7 +12,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -166,11 +165,10 @@ def run_query(self, query, user): columns = self.fetch_columns(cols) error = None data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) except SalesforceError as err: error = err.content - json_data = None - return json_data, error + data = None + return data, error def get_schema(self, get_stats=False): sf = self._get_sf() diff --git a/redash/query_runner/snowflake.py b/redash/query_runner/snowflake.py index fad1ba9548..bb67c20d68 100644 --- a/redash/query_runner/snowflake.py +++ b/redash/query_runner/snowflake.py @@ -17,7 +17,6 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps TYPES_MAP = { 0: TYPE_INTEGER, @@ -135,12 +134,11 @@ def run_query(self, query, user): data = self._parse_results(cursor) error = None - json_data = json_dumps(data) finally: cursor.close() connection.close() - return json_data, error + return data, error def _run_query_without_warehouse(self, query): connection = self._get_connection() diff --git a/redash/query_runner/sparql_endpoint.py b/redash/query_runner/sparql_endpoint.py index 2cc7768880..2f47c21db1 100644 --- a/redash/query_runner/sparql_endpoint.py +++ b/redash/query_runner/sparql_endpoint.py @@ -8,7 +8,6 @@ from os import environ from redash.query_runner import BaseQueryRunner -from redash.utils import json_dumps, json_loads from . 
import register @@ -83,7 +82,7 @@ def _transform_sparql_results(results): logger.info("results are: {}".format(results)) # Not sure why we do not use the json package here but all other # query runner do it the same way :-) - sparql_results = json_loads(results) + sparql_results = results # transform all bindings to redash rows rows = [] for sparql_row in sparql_results["results"]["bindings"]: @@ -101,7 +100,7 @@ def _transform_sparql_results(results): columns.append({"name": var, "friendly_name": var, "type": "string"}) # Not sure why we do not use the json package here but all other # query runner do it the same way :-) - return json_dumps({"columns": columns, "rows": rows}) + return {"columns": columns, "rows": rows} @classmethod def name(cls): diff --git a/redash/query_runner/sqlite.py b/redash/query_runner/sqlite.py index 6177187aab..97c416b0b1 100644 --- a/redash/query_runner/sqlite.py +++ b/redash/query_runner/sqlite.py @@ -6,7 +6,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -40,8 +39,6 @@ def _get_tables(self, schema): if error is not None: raise Exception("Failed getting schema.") - results = json_loads(results) - for row in results["rows"]: table_name = row["tbl_name"] schema[table_name] = {"name": table_name, "columns": []} @@ -49,7 +46,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results_table = json_loads(results_table) for row_column in results_table["rows"]: schema[table_name]["columns"].append(row_column["name"]) @@ -69,16 +65,15 @@ def run_query(self, query, user): data = {"columns": columns, "rows": rows} error = None - json_data = json_dumps(data) else: error = "Query completed but it returned no data." 
- json_data = None + data = None except (KeyboardInterrupt, JobTimeoutException): connection.cancel() raise finally: connection.close() - return json_data, error + return data, error register(Sqlite) diff --git a/redash/query_runner/treasuredata.py b/redash/query_runner/treasuredata.py index ff768cb94a..ddd1d50e46 100644 --- a/redash/query_runner/treasuredata.py +++ b/redash/query_runner/treasuredata.py @@ -9,7 +9,6 @@ BaseQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -112,15 +111,14 @@ def run_query(self, query, user): else: rows = [dict(zip(([column["name"] for column in columns]), r)) for r in cursor.fetchall()] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except errors.InternalError as e: - json_data = None + data = None error = "%s: %s" % ( str(e), cursor.show_job().get("debug", {}).get("stderr", "No stderr message in the response"), ) - return json_data, error + return data, error register(TreasureData) diff --git a/redash/query_runner/trino.py b/redash/query_runner/trino.py index c1fb082363..fbbfab9bd7 100644 --- a/redash/query_runner/trino.py +++ b/redash/query_runner/trino.py @@ -12,7 +12,6 @@ JobTimeoutException, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -100,8 +99,6 @@ def get_schema(self, get_stats=False): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: table_name = f'{catalog}.{row["table_schema"]}.{row["table_name"]}' @@ -122,8 +119,6 @@ def _get_catalogs(self): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - catalogs = [] for row in results["rows"]: catalog = row["Catalog"] @@ -158,10 +153,9 @@ def run_query(self, query, user): columns = self.fetch_columns([(c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description]) rows = [dict(zip([c["name"] for c in columns], r)) for r in results] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None except DatabaseError as db: - json_data = None + data = None default_message = "Unspecified DatabaseError: {0}".format(str(db)) if isinstance(db.args[0], dict): message = db.args[0].get("failureInfo", {"message", None}).get("message") @@ -172,7 +166,7 @@ def run_query(self, query, user): cursor.cancel() raise - return json_data, error + return data, error register(Trino) diff --git a/redash/query_runner/uptycs.py b/redash/query_runner/uptycs.py index c1b0ea2811..6e354181e5 100644 --- a/redash/query_runner/uptycs.py +++ b/redash/query_runner/uptycs.py @@ -5,7 +5,7 @@ import requests from redash.query_runner import BaseSQLQueryRunner, register -from redash.utils import json_dumps, json_loads +from redash.utils import json_loads logger = logging.getLogger(__name__) @@ -58,8 +58,7 @@ def transformed_to_redash_json(self, data): if "items" in data: rows = data["items"] - redash_json_data = {"columns": transformed_columns, "rows": rows} - return redash_json_data + return {"columns": transformed_columns, "rows": rows} def api_call(self, sql): # JWT encoded header @@ -86,22 +85,21 @@ def api_call(self, sql): else: error = "status_code " + str(response.status_code) + "\n" error = error + "failed to connect" - json_data = {} - return json_data, error + data = {} + return data, error # if we get right status code then call transfored_to_redash - json_data = self.transformed_to_redash_json(response_output) + data = 
self.transformed_to_redash_json(response_output) error = None # if we got error from Uptycs include error information if "error" in response_output: error = response_output["error"]["message"]["brief"] error = error + "\n" + response_output["error"]["message"]["detail"] - return json_data, error + return data, error def run_query(self, query, user): data, error = self.api_call(query) - json_data = json_dumps(data) - logger.debug("%s", json_data) - return json_data, error + logger.debug("%s", data) + return data, error def get_schema(self, get_stats=False): header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret")) diff --git a/redash/query_runner/vertica.py b/redash/query_runner/vertica.py index 64e7d06b22..e178ed15f9 100644 --- a/redash/query_runner/vertica.py +++ b/redash/query_runner/vertica.py @@ -10,7 +10,6 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps, json_loads logger = logging.getLogger(__name__) @@ -85,8 +84,6 @@ def _get_tables(self, schema): if error is not None: self._handle_run_query_error(error) - results = json_loads(results) - for row in results["rows"]: table_name = "{}.{}".format(row["table_schema"], row["table_name"]) @@ -101,9 +98,9 @@ def run_query(self, query, user): import vertica_python if query == "": - json_data = None + data = None error = "Query is empty" - return json_data, error + return data, error connection = None try: @@ -131,10 +128,9 @@ def run_query(self, query, user): rows = [dict(zip(([c["name"] for c in columns]), r)) for r in cursor.fetchall()] data = {"columns": columns, "rows": rows} - json_data = json_dumps(data) error = None else: - json_data = None + data = None error = "No data was returned." cursor.close() @@ -142,7 +138,7 @@ def run_query(self, query, user): if connection: connection.close() - return json_data, error + return data, error register(Vertica) diff --git a/redash/query_runner/yandex_disk.py b/redash/query_runner/yandex_disk.py index 145c52b212..5d305e23c8 100644 --- a/redash/query_runner/yandex_disk.py +++ b/redash/query_runner/yandex_disk.py @@ -5,7 +5,6 @@ import yaml from redash.query_runner import BaseSQLQueryRunner, register -from redash.utils import json_dumps from redash.utils.pandas import pandas_installed openpyxl_installed = find_spec("openpyxl") @@ -157,7 +156,7 @@ def run_query(self, query, user): new_df = pd.concat(new_df, ignore_index=True) df = new_df.copy() - data = json_dumps(pandas_to_result(df)) + data = pandas_to_result(df) error = None return data, error diff --git a/redash/query_runner/yandex_metrica.py b/redash/query_runner/yandex_metrica.py index 731fdbeca2..f8c7156ca8 100644 --- a/redash/query_runner/yandex_metrica.py +++ b/redash/query_runner/yandex_metrica.py @@ -13,7 +13,6 @@ BaseSQLQueryRunner, register, ) -from redash.utils import json_dumps logger = logging.getLogger(__name__) @@ -168,7 +167,7 @@ def run_query(self, query, user): return data, error try: - data = json_dumps(parse_ym_response(self._send_query(**params))) + data = parse_ym_response(self._send_query(**params)) error = None except Exception as e: logging.exception(e) diff --git a/redash/serializers/__init__.py b/redash/serializers/__init__.py index 0cdeaf612b..2041168f32 100644 --- a/redash/serializers/__init__.py +++ b/redash/serializers/__init__.py @@ -16,14 +16,13 @@ serialize_query_result_to_dsv, serialize_query_result_to_xlsx, ) -from redash.utils import json_loads def public_widget(widget): res = { "id": widget.id, "width": widget.width, - "options": 
json_loads(widget.options), + "options": widget.options, "text": widget.text, "updated_at": widget.updated_at, "created_at": widget.created_at, @@ -35,7 +34,7 @@ def public_widget(widget): "type": v.type, "name": v.name, "description": v.description, - "options": json_loads(v.options), + "options": v.options, "updated_at": v.updated_at, "created_at": v.created_at, "query": { @@ -146,7 +145,7 @@ def serialize_visualization(object, with_query=True): "type": object.type, "name": object.name, "description": object.description, - "options": json_loads(object.options), + "options": object.options, "updated_at": object.updated_at, "created_at": object.created_at, } @@ -161,7 +160,7 @@ def serialize_widget(object): d = { "id": object.id, "width": object.width, - "options": json_loads(object.options), + "options": object.options, "dashboard_id": object.dashboard_id, "text": object.text, "updated_at": object.updated_at, @@ -197,7 +196,7 @@ def serialize_alert(alert, full=True): def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True): - layout = json_loads(obj.layout) + layout = obj.layout widgets = [] diff --git a/redash/utils/__init__.py b/redash/utils/__init__.py index 2125055dbf..b69658a1ed 100644 --- a/redash/utils/__init__.py +++ b/redash/utils/__init__.py @@ -9,6 +9,7 @@ import os import random import re +import sys import uuid import pystache @@ -69,11 +70,21 @@ def generate_token(length): return "".join(rand.choice(chars) for x in range(length)) +json_encoders = [m.custom_json_encoder for m in sys.modules if hasattr(m, "custom_json_encoder")] + + class JSONEncoder(json.JSONEncoder): """Adapter for `json.dumps`.""" + def __init__(self, **kwargs): + self.encoders = json_encoders + super().__init__(**kwargs) + def default(self, o): - # Some SQLAlchemy collections are lazy. 
+ for encoder in self.encoders: + result = encoder(self, o) + if result: + return result if isinstance(o, Query): result = list(o) elif isinstance(o, decimal.Decimal): diff --git a/tests/factories.py b/tests/factories.py index 139be00a7b..ddb56de38a 100644 --- a/tests/factories.py +++ b/tests/factories.py @@ -70,7 +70,7 @@ def __call__(self): redash.models.Dashboard, name="test", user=user_factory.create, - layout="[]", + layout=[], is_draft=False, org=1, ) @@ -122,7 +122,7 @@ def __call__(self): query_result_factory = ModelFactory( redash.models.QueryResult, - data='{"columns":{}, "rows":[]}', + data={"columns": {}, "rows": []}, runtime=1, retrieved_at=utcnow, query_text="SELECT 1", @@ -137,13 +137,13 @@ def __call__(self): query_rel=query_factory.create, name="Chart", description="", - options="{}", + options={}, ) widget_factory = ModelFactory( redash.models.Widget, width=1, - options="{}", + options={}, dashboard=dashboard_factory.create, visualization=visualization_factory.create, ) diff --git a/tests/handlers/test_dashboards.py b/tests/handlers/test_dashboards.py index 5f81b27010..b20614d731 100644 --- a/tests/handlers/test_dashboards.py +++ b/tests/handlers/test_dashboards.py @@ -74,7 +74,7 @@ def test_get_dashboard_filters_unauthorized_widgets(self): vis = self.factory.create_visualization(query_rel=query) restricted_widget = self.factory.create_widget(visualization=vis, dashboard=dashboard) widget = self.factory.create_widget(dashboard=dashboard) - dashboard.layout = "[[{}, {}]]".format(widget.id, restricted_widget.id) + dashboard.layout = [[widget.id, restricted_widget.id]] db.session.commit() rv = self.make_request("get", "/api/dashboards/{0}".format(dashboard.id)) @@ -94,7 +94,7 @@ def test_update_dashboard(self): rv = self.make_request( "post", "/api/dashboards/{0}".format(d.id), - data={"name": new_name, "layout": "[]"}, + data={"name": new_name, "layout": []}, ) self.assertEqual(rv.status_code, 200) self.assertEqual(rv.json["name"], new_name) @@ -107,7 +107,7 @@ def test_raises_error_in_case_of_conflict(self): rv = self.make_request( "post", "/api/dashboards/{0}".format(d.id), - data={"name": new_name, "layout": "[]", "version": d.version - 1}, + data={"name": new_name, "layout": [], "version": d.version - 1}, ) self.assertEqual(rv.status_code, 409) @@ -120,7 +120,7 @@ def test_overrides_existing_if_no_version_specified(self): rv = self.make_request( "post", "/api/dashboards/{0}".format(d.id), - data={"name": new_name, "layout": "[]"}, + data={"name": new_name, "layout": []}, ) self.assertEqual(rv.status_code, 200) @@ -133,7 +133,7 @@ def test_works_for_non_owner_with_permission(self): rv = self.make_request( "post", "/api/dashboards/{0}".format(d.id), - data={"name": new_name, "layout": "[]", "version": d.version}, + data={"name": new_name, "layout": [], "version": d.version}, user=user, ) self.assertEqual(rv.status_code, 403) @@ -143,7 +143,7 @@ def test_works_for_non_owner_with_permission(self): rv = self.make_request( "post", "/api/dashboards/{0}".format(d.id), - data={"name": new_name, "layout": "[]", "version": d.version}, + data={"name": new_name, "layout": [], "version": d.version}, user=user, ) diff --git a/tests/handlers/test_query_results.py b/tests/handlers/test_query_results.py index c5139d8d89..1170d6941e 100644 --- a/tests/handlers/test_query_results.py +++ b/tests/handlers/test_query_results.py @@ -1,6 +1,5 @@ from redash.handlers.query_results import error_messages, run_query from redash.models import db -from redash.utils import json_dumps from tests import 
BaseTestCase @@ -362,7 +361,7 @@ def test_allows_access_if_unassociated_but_user_has_access(self): query_result = self.factory.create_query_result() data = {"rows": [], "columns": [{"name": "whatever"}]} - query_result = self.factory.create_query_result(data=json_dumps(data)) + query_result = self.factory.create_query_result(data=data) unrelated_dropdown_query = self.factory.create_query(latest_query_data=query_result) # unrelated_dropdown_query has not been associated with query @@ -378,7 +377,7 @@ def test_allows_access_if_unassociated_but_user_has_access(self): def test_allows_access_if_associated_and_has_access_to_parent(self): query_result = self.factory.create_query_result() data = {"rows": [], "columns": [{"name": "whatever"}]} - query_result = self.factory.create_query_result(data=json_dumps(data)) + query_result = self.factory.create_query_result(data=data) dropdown_query = self.factory.create_query(latest_query_data=query_result) options = {"parameters": [{"name": "param", "type": "query", "queryId": dropdown_query.id}]} @@ -423,7 +422,7 @@ def test_renders_excel_file_when_rows_have_missing_columns(self): "rows": [{"test": 1}, {"test": 2, "test2": 3}], "columns": [{"name": "test"}, {"name": "test2"}], } - query_result = self.factory.create_query_result(data=json_dumps(data)) + query_result = self.factory.create_query_result(data=data) rv = self.make_request( "get", diff --git a/tests/models/test_alerts.py b/tests/models/test_alerts.py index d4b1c9a8a5..e40a8b2e56 100644 --- a/tests/models/test_alerts.py +++ b/tests/models/test_alerts.py @@ -2,7 +2,6 @@ from unittest import TestCase from redash.models import OPERATORS, Alert, db, next_state -from redash.utils import json_dumps from tests import BaseTestCase @@ -43,7 +42,7 @@ def test_return_each_alert_only_once(self): def get_results(value): - return json_dumps({"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]}) + return {"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]} class TestAlertEvaluate(BaseTestCase): @@ -66,7 +65,7 @@ def test_evaluate_return_unknown_when_missing_column(self): self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE) def test_evaluate_return_unknown_when_empty_results(self): - results = json_dumps({"rows": [], "columns": [{"name": "foo", "type": "STRING"}]}) + results = {"rows": [], "columns": [{"name": "foo", "type": "STRING"}]} alert = self.create_alert(results) self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE) diff --git a/tests/models/test_dashboards.py b/tests/models/test_dashboards.py index 2aef2e566c..6a3f4f8341 100644 --- a/tests/models/test_dashboards.py +++ b/tests/models/test_dashboards.py @@ -15,7 +15,7 @@ def create_tagged_dashboard(self, tags): widget1 = self.factory.create_widget(visualization=vis1, dashboard=dashboard) widget2 = self.factory.create_widget(visualization=vis2, dashboard=dashboard) widget3 = self.factory.create_widget(visualization=vis3, dashboard=dashboard) - dashboard.layout = "[[{}, {}, {}]]".format(widget1.id, widget2.id, widget3.id) + dashboard.layout = [[widget1.id, widget2.id, widget3.id]] db.session.commit() return dashboard diff --git a/tests/models/test_queries.py b/tests/models/test_queries.py index e0a36108a0..e914ecd6ca 100644 --- a/tests/models/test_queries.py +++ b/tests/models/test_queries.py @@ -159,7 +159,7 @@ def test_search_query_parser_parenthesis(self): q2 = self.factory.create_query(name="Testing searching") q3 = self.factory.create_query(name="Testing finding") - queries = list(Query.search("(testing 
search) or finding", [self.factory.default_group.id])) + queries = list(Query.search("testing (search or finding)", [self.factory.default_group.id])) self.assertIn(q1, queries) self.assertIn(q2, queries) self.assertIn(q3, queries) @@ -373,16 +373,26 @@ def test_fork_with_visualizations(self): query = self.factory.create_query(data_source=data_source, description="this is description") # create default TABLE - query factory does not create it - self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}") + self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={}) visualization_chart = self.factory.create_visualization( query_rel=query, description="chart vis", type="CHART", - options="""{"yAxis": [{"type": "linear"}, {"type": "linear", "opposite": true}], "series": {"stacking": null}, "globalSeriesType": "line", "sortX": true, "seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}}, "xAxis": {"labels": {"enabled": true}, "type": "datetime"}, "columnMapping": {"count": "y", "created_at": "x"}, "bottomMargin": 50, "legend": {"enabled": true}}""", + options={ + "yAxis": [{"type": "linear"}, {"type": "linear", "opposite": True}], + "series": {"stacking": None}, + "globalSeriesType": "line", + "sortX": True, + "seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}}, + "xAxis": {"labels": {"enabled": True}, "type": "datetime"}, + "columnMapping": {"count": "y", "created_at": "x"}, + "bottomMargin": 50, + "legend": {"enabled": True}, + }, ) visualization_box = self.factory.create_visualization( - query_rel=query, description="box vis", type="BOXPLOT", options="{}" + query_rel=query, description="box vis", type="BOXPLOT", options={} ) fork_user = self.factory.create_user() forked_query = query.fork(fork_user) @@ -417,7 +427,7 @@ def test_fork_with_visualizations(self): self.assertEqual(count_table, 1) self.assertEqual(forked_table.name, "Table") self.assertEqual(forked_table.description, "") - self.assertEqual(forked_table.options, "{}") + self.assertEqual(forked_table.options, {}) def test_fork_from_query_that_has_no_visualization(self): # prepare original query and visualizations @@ -425,7 +435,7 @@ def test_fork_from_query_that_has_no_visualization(self): query = self.factory.create_query(data_source=data_source, description="this is description") # create default TABLE - query factory does not create it - self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}") + self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={}) fork_user = self.factory.create_user() @@ -457,7 +467,7 @@ def setUp(self): self.query_hash = gen_query_hash(self.query) self.runtime = 123 self.utcnow = utcnow() - self.data = "data" + self.data = {"columns": {}, "rows": []} def test_updates_existing_queries(self): query1 = self.factory.create_query(query_text=self.query) diff --git a/tests/models/test_query_results.py b/tests/models/test_query_results.py index bcaa5e5651..16ea2de3d7 100644 --- a/tests/models/test_query_results.py +++ b/tests/models/test_query_results.py @@ -1,10 +1,6 @@ import datetime -from unittest import TestCase - -from mock import patch from redash import models -from redash.models import DBPersistence from redash.utils import utcnow from tests import BaseTestCase @@ -71,28 +67,9 @@ def test_store_result_does_not_modify_query_update_at(self): 
query.data_source, query.query_hash, query.query_text, - "", + {}, 0, utcnow(), ) self.assertEqual(original_updated_at, query.updated_at) - - -class TestDBPersistence(TestCase): - def test_updating_data_removes_cached_result(self): - p = DBPersistence() - p.data = '{"test": 1}' - self.assertDictEqual(p.data, {"test": 1}) - p.data = '{"test": 2}' - self.assertDictEqual(p.data, {"test": 2}) - - @patch("redash.models.json_loads") - def test_calls_json_loads_only_once(self, json_loads_patch): - json_loads_patch.return_value = "1" - p = DBPersistence() - json_data = '{"test": 1}' - p.data = json_data - a = p.data # noqa - b = p.data # noqa - json_loads_patch.assert_called_once_with(json_data) diff --git a/tests/query_runner/test_clickhouse.py b/tests/query_runner/test_clickhouse.py index ffae88053a..9005fb8db7 100644 --- a/tests/query_runner/test_clickhouse.py +++ b/tests/query_runner/test_clickhouse.py @@ -87,7 +87,7 @@ def test_send_single_query(self, post_request): self.assertIsNone(error) self.assertEqual( - json.loads(data), + data, { "columns": [ {"name": "1", "friendly_name": "1", "type": TYPE_INTEGER}, @@ -139,7 +139,7 @@ def test_send_multi_query(self, post_request): self.assertIsNone(error) self.assertEqual( - json.loads(data), + data, { "columns": [ {"name": "1", "friendly_name": "1", "type": TYPE_INTEGER}, diff --git a/tests/query_runner/test_e6data.py b/tests/query_runner/test_e6data.py index e3832b8bf7..aa16a0df81 100644 --- a/tests/query_runner/test_e6data.py +++ b/tests/query_runner/test_e6data.py @@ -2,7 +2,6 @@ from redash.query_runner import TYPE_INTEGER, TYPE_STRING from redash.query_runner.e6data import e6data -from redash.utils import json_dumps runner = e6data( { @@ -28,15 +27,13 @@ def test_run_query(mock_cursor): json_data, error = runner.run_query(query, user) - expected_json_data = json_dumps( - { - "columns": [ - {"name": "id", "type": TYPE_INTEGER}, - {"name": "name", "type": TYPE_STRING}, - ], - "rows": [{"id": 1, "name": "John"}], - } - ) + expected_json_data = { + "columns": [ + {"name": "id", "type": TYPE_INTEGER}, + {"name": "name", "type": TYPE_STRING}, + ], + "rows": [{"id": 1, "name": "John"}], + } assert json_data == expected_json_data @@ -50,7 +47,7 @@ def test_test_connection(mock_cursor): json_data, error = runner.run_query(query, user) - expected_json_data = json_dumps({"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]}) + expected_json_data = {"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]} assert json_data == expected_json_data diff --git a/tests/query_runner/test_influx_db.py b/tests/query_runner/test_influx_db.py index c9b42b3e52..1c152d64d1 100644 --- a/tests/query_runner/test_influx_db.py +++ b/tests/query_runner/test_influx_db.py @@ -1,5 +1,3 @@ -import json - from influxdb.resultset import ResultSet from redash.query_runner import ( @@ -40,7 +38,7 @@ def test_influxdb_result_types_with_rows(): {"k1": "bar", "time": "2023-10-06T13:31:08.882953339Z", "v1": 0.6, "v2": 4}, ], } - assert json.loads(transformed) == expected + assert transformed == expected def test_influxdb_result_types_with_no_rows_are_string(): @@ -55,4 +53,4 @@ def test_influxdb_result_types_with_no_rows_are_string(): ], "rows": [], } - assert json.loads(transformed) == expected + assert transformed == expected diff --git a/tests/query_runner/test_influx_db_v2.py b/tests/query_runner/test_influx_db_v2.py index c8493bebd3..6e134c976f 100644 --- a/tests/query_runner/test_influx_db_v2.py +++ 
b/tests/query_runner/test_influx_db_v2.py @@ -1,5 +1,3 @@ -import json - import mock import pytest from influxdb_client.client.flux_table import ( @@ -277,10 +275,8 @@ def test_get_data_from_tables(self, influx_table_list: TableList): @mock.patch("redash.query_runner.influx_db_v2.InfluxDBClient") @mock.patch("redash.query_runner.influx_db_v2.InfluxDBv2." "_cleanup_cert_files") @mock.patch("redash.query_runner.influx_db_v2.logger") - @mock.patch("redash.query_runner.influx_db_v2.json_dumps") def test_run_query( self, - json_dumps_mock: mock.MagicMock, logger_mock: mock.MagicMock, cleanup_cert_files_mock: mock.MagicMock, influx_db_client_mock: mock.MagicMock, @@ -310,28 +306,24 @@ def test_run_query( ], "rows": [{"col_1": "col_value_1", "col_2": 1}, {"col_1": "col_value_2", "col_2": 2}, {"col_3": 3.0}], } - json_dumps_data = json.dumps(result_data) query_mock = influx_db_client_mock.return_value.__enter__().query_api().query query_mock.return_value = influx_table_list - json_dumps_mock.return_value = json_dumps_data # 1. case: successful query data data, error = influx_db_v2.run_query(query, "user") - assert data == json_dumps_data + assert data == result_data assert error is None influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs) logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}") query_mock.assert_called_once_with(query) - json_dumps_mock.assert_called_once_with(result_data) cleanup_cert_files_mock.assert_called_once_with(influx_kwargs) influx_db_client_mock.reset_mock() logger_mock.reset_mock() query_mock.reset_mock() - json_dumps_mock.reset_mock() cleanup_cert_files_mock.reset_mock() # 2. case: unsuccessful query data @@ -344,5 +336,4 @@ def test_run_query( influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs) logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}") query_mock.assert_called_once_with(query) - json_dumps_mock.assert_not_called() cleanup_cert_files_mock.assert_called_once_with(influx_kwargs) diff --git a/tests/query_runner/test_prometheus.py b/tests/query_runner/test_prometheus.py index a89c44321d..1a2124b914 100644 --- a/tests/query_runner/test_prometheus.py +++ b/tests/query_runner/test_prometheus.py @@ -5,7 +5,6 @@ import mock from redash.query_runner.prometheus import Prometheus, get_instant_rows, get_range_rows -from redash.utils import json_dumps class TestPrometheus(TestCase): @@ -350,7 +349,7 @@ def test_run_query( {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"}, ] - data_expected = json_dumps({"rows": rows, "columns": columns}) + data_expected = {"rows": rows, "columns": columns} requests_get_mock.return_value = mock.Mock( json=mock.Mock(return_value={"data": {"result": self.instant_query_result}}) @@ -424,7 +423,7 @@ def test_run_query( {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"}, ] - data_expected = json_dumps({"rows": rows, "columns": columns}) + data_expected = {"rows": rows, "columns": columns} requests_get_mock.return_value = mock.Mock( json=mock.Mock(return_value={"data": {"result": self.range_query_result}}) @@ -490,7 +489,7 @@ def test_run_query( {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"}, ] - data_expected = json_dumps({"rows": rows, "columns": columns}) + data_expected = {"rows": rows, "columns": columns} now_datetime = datetime(2023, 12, 12, 11, 00, 00) end_timestamp_expected = int(time.mktime(now_datetime.timetuple())) diff --git 
a/tests/query_runner/test_python.py b/tests/query_runner/test_python.py
index 66e4afc470..2ba51dd5b6 100644
--- a/tests/query_runner/test_python.py
+++ b/tests/query_runner/test_python.py
@@ -15,7 +15,7 @@ def test_print_in_query_string_success(self, mock_dt):
         query_string = "print('test')"
         mock_dt.utcnow = mock.Mock(return_value=datetime(1901, 12, 21))
         result = self.python.run_query(query_string, "user")
-        self.assertEqual(result[0], '{"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]}')
+        self.assertEqual(result[0], {"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]})

     def test_empty_result(self):
         query_string = "result={}"
@@ -68,11 +68,11 @@ def test_valid_result_type(self):
         result = self.python.run_query(query_string, "user")
         self.assertEqual(
             result[0],
-            '{"columns": [{"name": "col1", "type": "string"},'
-            ' {"name": "col2", "type": "integer"}],'
-            ' "rows": [{"col1": "foo", "col2": 100},'
-            ' {"col1": "bar", "col2": 200}],'
-            ' "log": []}',
+            {
+                "columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
+                "rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
+                "log": [],
+            },
         )

     @mock.patch("datetime.datetime")
@@ -89,11 +89,11 @@ def test_valid_result_type_with_print(self, mock_dt):
         result = self.python.run_query(query_string, "user")
         self.assertEqual(
             result[0],
-            '{"columns": [{"name": "col1", "type": "string"},'
-            ' {"name": "col2", "type": "integer"}],'
-            ' "rows": [{"col1": "foo", "col2": 100},'
-            ' {"col1": "bar", "col2": 200}],'
-            ' "log": ["[1901-12-21T00:00:00] test"]}',
+            {
+                "columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
+                "rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
+                "log": ["[1901-12-21T00:00:00] test"],
+            },
         )

diff --git a/tests/query_runner/test_query_results.py b/tests/query_runner/test_query_results.py
index 0d61169c84..d9eb45ae42 100644
--- a/tests/query_runner/test_query_results.py
+++ b/tests/query_runner/test_query_results.py
@@ -17,7 +17,6 @@
     prepare_parameterized_query,
     replace_query_parameters,
 )
-from redash.utils import json_dumps
 from tests import BaseTestCase

@@ -235,5 +234,5 @@ def test_non_cached_query_result(self):
         with mock.patch.object(PostgreSQL, "run_query") as qr:
             query_result_data = {"columns": [], "rows": []}
-            qr.return_value = (json_dumps(query_result_data), None)
+            qr.return_value = (query_result_data, None)
             self.assertEqual(query_result_data, get_query_results(self.factory.user, query.id, False))
diff --git a/tests/query_runner/test_tinybird.py b/tests/query_runner/test_tinybird.py
index 79fbf4c4bc..121d213f8e 100644
--- a/tests/query_runner/test_tinybird.py
+++ b/tests/query_runner/test_tinybird.py
@@ -83,7 +83,7 @@ def test_run_query(self, get_request):
         self.assertIsNone(error)

         self.assertEqual(
-            json.loads(data),
+            data,
             {
                 "columns": [
                     {"name": "string_attribute", "friendly_name": "string_attribute", "type": TYPE_STRING},
diff --git a/tests/query_runner/test_trino.py b/tests/query_runner/test_trino.py
index 87e5dd8b00..e77f243aa1 100644
--- a/tests/query_runner/test_trino.py
+++ b/tests/query_runner/test_trino.py
@@ -28,7 +28,16 @@ def test_get_schema_catalog_set(self, mock_run_query, mock__get_catalogs):

     def _assert_schema_catalog(self, mock_run_query, mock__get_catalogs, runner):
         mock_run_query.return_value = (
-            f'{{"rows": [{{"table_schema": "{TestTrino.schema_name}", "table_name": "{TestTrino.table_name}", "column_name": "{TestTrino.column_name}", "data_type": "{TestTrino.column_type}"}}]}}',
+            {
+                "rows": [
+                    {
+                        "table_schema": TestTrino.schema_name,
+                        "table_name": TestTrino.table_name,
+                        "column_name": TestTrino.column_name,
+                        "data_type": TestTrino.column_type,
+                    }
+                ]
+            },
             None,
         )
         mock__get_catalogs.return_value = [TestTrino.catalog_name]
@@ -36,14 +45,14 @@ def _assert_schema_catalog(self, mock_run_query, mock__get_catalogs, runner):
         expected_schema = [
             {
                 "name": f"{TestTrino.catalog_name}.{TestTrino.schema_name}.{TestTrino.table_name}",
-                "columns": [{"name": f"{TestTrino.column_name}", "type": f"{TestTrino.column_type}"}],
+                "columns": [{"name": TestTrino.column_name, "type": TestTrino.column_type}],
             }
         ]
         self.assertEqual(schema, expected_schema)

     @patch.object(Trino, "run_query")
     def test__get_catalogs(self, mock_run_query):
-        mock_run_query.return_value = (f'{{"rows": [{{"Catalog": "{TestTrino.catalog_name}"}}]}}', None)
+        mock_run_query.return_value = ({"rows": [{"Catalog": TestTrino.catalog_name}]}, None)
         runner = Trino({})
         catalogs = runner._get_catalogs()
         expected_catalogs = [TestTrino.catalog_name]
diff --git a/tests/query_runner/test_yandex_disk.py b/tests/query_runner/test_yandex_disk.py
index 437e8068bb..df8f3515be 100644
--- a/tests/query_runner/test_yandex_disk.py
+++ b/tests/query_runner/test_yandex_disk.py
@@ -4,7 +4,6 @@
 import yaml

 from redash.query_runner.yandex_disk import enabled
-from redash.utils import json_dumps

 if enabled:
     import pandas as pd
@@ -114,22 +113,20 @@ def test_run_query(mocked_requests, mock_yandex_disk):
     mock_readers = EXTENSIONS_READERS.copy()
     mock_readers["csv"] = mock_ext_readers_return

-    expected_data = json_dumps(
-        {
-            "columns": [
-                {"name": "id", "friendly_name": "id", "type": "integer"},
-                {"name": "name", "friendly_name": "name", "type": "string"},
-                {"name": "age", "friendly_name": "age", "type": "integer"},
-            ],
-            "rows": [
-                {"id": 1, "name": "Alice", "age": 20},
-                {"id": 2, "name": "Bob", "age": 21},
-                {"id": 3, "name": "Charlie", "age": 22},
-                {"id": 4, "name": "Dave", "age": 23},
-                {"id": 5, "name": "Eve", "age": 24},
-            ],
-        }
-    )
+    expected_data = {
+        "columns": [
+            {"name": "id", "friendly_name": "id", "type": "integer"},
+            {"name": "name", "friendly_name": "name", "type": "string"},
+            {"name": "age", "friendly_name": "age", "type": "integer"},
+        ],
+        "rows": [
+            {"id": 1, "name": "Alice", "age": 20},
+            {"id": 2, "name": "Bob", "age": 21},
+            {"id": 3, "name": "Charlie", "age": 22},
+            {"id": 4, "name": "Dave", "age": 23},
+            {"id": 5, "name": "Eve", "age": 24},
+        ],
+    }

     with mock.patch.dict("redash.query_runner.yandex_disk.EXTENSIONS_READERS", mock_readers, clear=True):
         data, error = mock_yandex_disk.run_query(yaml.dump({"path": "/tmp/file.csv"}), "user")
@@ -204,23 +201,21 @@ def test_run_query_multiple_sheets(mocked_requests, mock_yandex_disk):
     data, error = mock_yandex_disk.run_query(query, "user")

     assert error is None
-    assert data == json_dumps(
-        {
-            "columns": [
-                {"name": "id", "friendly_name": "id", "type": "integer"},
-                {"name": "name", "friendly_name": "name", "type": "string"},
-                {"name": "age", "friendly_name": "age", "type": "integer"},
-                {"name": "sheet_name", "friendly_name": "sheet_name", "type": "string"},
-            ],
-            "rows": [
-                {"id": 1, "name": "Alice", "age": 20, "sheet_name": "sheet1"},
-                {"id": 2, "name": "Bob", "age": 21, "sheet_name": "sheet1"},
-                {"id": 3, "name": "Charlie", "age": 22, "sheet_name": "sheet1"},
-                {"id": 4, "name": "Dave", "age": 23, "sheet_name": "sheet1"},
-                {"id": 5, "name": "Eve", "age": 24, "sheet_name": "sheet1"},
-            ],
-        }
-    )
+    assert data == {
+        "columns": [
+            {"name": "id", "friendly_name": "id", "type": "integer"},
+            {"name": "name", "friendly_name": "name", "type": "string"},
+            {"name": "age", "friendly_name": "age", "type": "integer"},
+            {"name": "sheet_name", "friendly_name": "sheet_name", "type": "string"},
+        ],
+        "rows": [
+            {"id": 1, "name": "Alice", "age": 20, "sheet_name": "sheet1"},
+            {"id": 2, "name": "Bob", "age": 21, "sheet_name": "sheet1"},
+            {"id": 3, "name": "Charlie", "age": 22, "sheet_name": "sheet1"},
+            {"id": 4, "name": "Dave", "age": 23, "sheet_name": "sheet1"},
+            {"id": 5, "name": "Eve", "age": 24, "sheet_name": "sheet1"},
+        ],
+    }


 @skip_condition
diff --git a/tests/query_runner/test_yandex_metrica.py b/tests/query_runner/test_yandex_metrica.py
index e483b48b40..0c53a64eea 100644
--- a/tests/query_runner/test_yandex_metrica.py
+++ b/tests/query_runner/test_yandex_metrica.py
@@ -98,7 +98,7 @@ def test_yandex_metrica_query(mocked_requests_get):
     data, error = query_runner.run_query(example_query, None)

     assert error is None
-    assert json.loads(data) == expected_data
+    assert data == expected_data


 def test_yandex_metrica_429(mocked_requests_get):
diff --git a/tests/serializers/test_query_results.py b/tests/serializers/test_query_results.py
index 2bb9b443a6..b1a2443871 100644
--- a/tests/serializers/test_query_results.py
+++ b/tests/serializers/test_query_results.py
@@ -5,7 +5,6 @@
     serialize_query_result,
     serialize_query_result_to_dsv,
 )
-from redash.utils import json_dumps
 from tests import BaseTestCase

 data = {
@@ -26,19 +25,19 @@ class QueryResultSerializationTest(BaseTestCase):
     def test_serializes_all_keys_for_authenticated_users(self):
-        query_result = self.factory.create_query_result(data=json_dumps({}))
+        query_result = self.factory.create_query_result(data={})
         serialized = serialize_query_result(query_result, False)
         self.assertSetEqual(set(query_result.to_dict().keys()), set(serialized.keys()))

     def test_doesnt_serialize_sensitive_keys_for_unauthenticated_users(self):
-        query_result = self.factory.create_query_result(data=json_dumps({}))
+        query_result = self.factory.create_query_result(data={})
         serialized = serialize_query_result(query_result, True)
         self.assertSetEqual(set(["data", "retrieved_at"]), set(serialized.keys()))


 class DsvSerializationTest(BaseTestCase):
     def delimited_content(self, delimiter):
-        query_result = self.factory.create_query_result(data=json_dumps(data))
+        query_result = self.factory.create_query_result(data=data)
         return serialize_query_result_to_dsv(query_result, delimiter)

     def test_serializes_booleans_correctly(self):
diff --git a/tests/tasks/test_queries.py b/tests/tasks/test_queries.py
index e7f849e149..93d5d73b86 100644
--- a/tests/tasks/test_queries.py
+++ b/tests/tasks/test_queries.py
@@ -10,7 +10,6 @@
     enqueue_query,
     execute_query,
 )
-from redash.utils import json_dumps
 from tests import BaseTestCase

@@ -181,7 +180,7 @@ def test_success(self, _):
         """
         with patch.object(PostgreSQL, "run_query") as qr:
             query_result_data = {"columns": [], "rows": []}
-            qr.return_value = (json_dumps(query_result_data), None)
+            qr.return_value = (query_result_data, None)
             result_id = execute_query("SELECT 1, 2", self.factory.data_source.id, {})
             self.assertEqual(1, qr.call_count)
             result = models.QueryResult.query.get(result_id)
@@ -193,7 +192,16 @@ def test_success_scheduled(self, _):
         """
         q = self.factory.create_query(query_text="SELECT 1, 2", schedule={"interval": 300})
         with patch.object(PostgreSQL, "run_query") as qr:
-            qr.return_value = ([1, 2], None)
+            qr.return_value = (
+                {
+                    "columns": [
+                        {"name": "_col0", "friendly_name": "_col0", "type": "integer"},
+                        {"name": "_col1", "friendly_name": "_col1", "type": "integer"},
+                    ],
+                    "rows": [{"_col0": 1, "_col1": 2}],
+                },
+                None,
+            )
             result_id = execute_query(
                 "SELECT 1, 2",
                 self.factory.data_source.id,
@@ -251,7 +259,16 @@ def test_success_after_failure(self, _):
         self.assertEqual(q.schedule_failures, 1)

         with patch.object(PostgreSQL, "run_query") as qr:
-            qr.return_value = ([1, 2], None)
+            qr.return_value = (
+                {
+                    "columns": [
+                        {"name": "_col0", "friendly_name": "_col0", "type": "integer"},
+                        {"name": "_col1", "friendly_name": "_col1", "type": "integer"},
+                    ],
+                    "rows": [{"_col0": 1, "_col1": 2}],
+                },
+                None,
+            )
             execute_query(
                 "SELECT 1, 2",
                 self.factory.data_source.id,
@@ -280,7 +297,16 @@ def test_adhoc_success_after_scheduled_failure(self, _):
         self.assertEqual(q.schedule_failures, 1)

         with patch.object(PostgreSQL, "run_query") as qr:
-            qr.return_value = ([1, 2], None)
+            qr.return_value = (
+                {
+                    "columns": [
+                        {"name": "_col0", "friendly_name": "_col0", "type": "integer"},
+                        {"name": "_col1", "friendly_name": "_col1", "type": "integer"},
+                    ],
+                    "rows": [{"_col0": 1, "_col1": 2}],
+                },
+                None,
+            )
             execute_query(
                 "SELECT 1, 2",
                 self.factory.data_source.id,
diff --git a/tests/test_models.py b/tests/test_models.py
index ec601adca4..f7d563aaae 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -328,7 +328,7 @@ def test_archived_query_doesnt_return_in_all(self):
             query.data_source,
             query.query_hash,
             query.query_text,
-            "1",
+            {"columns": {}, "rows": []},
             123,
             yesterday,
         )
@@ -504,7 +504,7 @@ def setUp(self):
         self.query_hash = gen_query_hash(self.query)
         self.runtime = 123
         self.utcnow = utcnow()
-        self.data = '{"a": 1}'
+        self.data = {"a": 1}

     def test_stores_the_result(self):
         query_result = models.QueryResult.store_result(
@@ -517,7 +517,7 @@ def test_stores_the_result(self):
             self.utcnow,
         )

-        self.assertEqual(query_result._data, self.data)
+        self.assertEqual(query_result.data, self.data)
         self.assertEqual(query_result.runtime, self.runtime)
         self.assertEqual(query_result.retrieved_at, self.utcnow)
         self.assertEqual(query_result.query_text, self.query)