Bug description

I am trying to build a deck.gl Scatterplot chart, and when I click the 'Update chart' button I get an 'Unexpected error' message.

I set FLASK_DEBUG=True hoping for more detail in the logs, but I didn't find anything suspicious apart from a 400 HTTP status code. I also disabled the Talisman config with TALISMAN_ENABLED=False. Here is my Superset configuration file:

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

#
# This file is included in the final Docker image and SHOULD be overridden when
# deploying the image to prod. Settings configured here are intended for use in local
# development environments. Also note that superset_config_docker.py is imported
# as a final step as a means to override "defaults" configured here

import logging
import os

from cachelib.redis import RedisCache
from celery.schedules import crontab
from superset.superset_typing import CacheConfig
from superset.utils import core as utils
from datetime import timedelta

logger = logging.getLogger()

# Attributes used to connect to the metadata database
DATABASE_DIALECT = os.getenv("DATABASE_DIALECT")
DATABASE_USER = os.getenv("DATABASE_USER")
DATABASE_PASSWORD = os.getenv("DATABASE_PASSWORD")
DATABASE_HOST = os.getenv("DATABASE_HOST")
DATABASE_PORT = os.getenv("DATABASE_PORT")
DATABASE_DB = os.getenv("DATABASE_DB")

# The SQLAlchemy connection string.
SQLALCHEMY_DATABASE_URI = (
    f"{DATABASE_DIALECT}://"
    f"{DATABASE_USER}:{DATABASE_PASSWORD}@"
    f"{DATABASE_HOST}:{DATABASE_PORT}/{DATABASE_DB}"
)

# Attributes used to connect to the Redis database
REDIS_HOST = os.getenv("REDIS_HOST")
REDIS_PORT = os.getenv("REDIS_PORT")
REDIS_PASSWORD = os.getenv("REDIS_PASSWORD", "redis")
REDIS_CELERY_DB = os.getenv("REDIS_CELERY_DB", "0")
REDIS_RESULT_DB = os.getenv("REDIS_RESULT_DB", "1")
REDIS_BACKEND_DB = os.getenv("REDIS_BACKEND_DB", "2")
REDIS_FILTER_DB = os.getenv("REDIS_FILTER_DB", "3")
REDIS_EXPLORE_DB = os.getenv("REDIS_EXPLORE_DB", "4")

# Caching Superset's own metadata
CACHE_CONFIG: CacheConfig = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": int(timedelta(days=3).total_seconds()),
    "CACHE_KEY_PREFIX": "superset_",
    "CACHE_REDIS_HOST": {"REDIS_HOST"},
    "CACHE_REDIS_PORT": {"REDIS_PORT"},
    "CACHE_REDIS_DB": {"REDIS_RESULT_DB"},
    "CACHE_REDIS_PASSWORD": {"REDIS_PASSWORD"},
    "CACHE_REDIS_URL": f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_RESULT_DB}",
}
# Caching charting data queried from connected datasources
DATA_CACHE_CONFIG = CACHE_CONFIG

# If enabled, it can be used to store the results of long-running queries
# in SQL Lab by using the "Run Async" button/feature
RESULTS_BACKEND = RedisCache(
    host=REDIS_HOST,
    port=REDIS_PORT,
    password=REDIS_PASSWORD,
    db=REDIS_BACKEND_DB,
    key_prefix="superset_sqllab",
)

# Cache for dashboard filter state (`CACHE_TYPE` defaults to `SimpleCache` when
#  running in debug mode unless overridden)
FILTER_STATE_CACHE_CONFIG: CacheConfig = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": int(timedelta(days=90).total_seconds()),
    "CACHE_KEY_PREFIX": "superset_filter_cache",
    "CACHE_REDIS_URL": f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_FILTER_DB}",
}

# Cache for explore form data state (`CACHE_TYPE` defaults to `SimpleCache` when
#  running in debug mode unless overridden)
EXPLORE_FORM_DATA_CACHE_CONFIG: CacheConfig = {
    "CACHE_TYPE": "RedisCache",
    "CACHE_DEFAULT_TIMEOUT": int(timedelta(days=7).total_seconds()),
    # should the timeout be reset when retrieving a cached value
    "REFRESH_TIMEOUT_ON_RETRIEVAL": True,
    "CACHE_KEY_PREFIX": "superset_explore_cache",
    "CACHE_REDIS_URL": f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_EXPLORE_DB}",
}


# Setup Async Queries via Celery
class CeleryConfig(object):
    broker_url = (
        f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
    )
    imports = (
        "superset.sql_lab",
        "superset.tasks",
    )
    result_backend = (
        f"redis://:{REDIS_PASSWORD}@{REDIS_HOST}:{REDIS_PORT}/{REDIS_CELERY_DB}"
    )
    worker_prefetch_multiplier = 0
    task_acks_late = True
    task_annotations = {
        "sql_lab.get_sql_results": {"rate_limit": "100/s"},
        "email_reports.send": {
            "rate_limit": "1/s",
            "time_limit": int(timedelta(seconds=600).total_seconds()),
            "soft_time_limit": int(timedelta(seconds=600).total_seconds()),
            "ignore_result": True,
        },
    }
    beat_schedule = {
        "cache-warmup-hourly": {
            "task": "cache-warmup",
            "schedule": crontab(minute=1, hour="*/6"),
            "kwargs": {
                "strategy_name": "top_n_dashboards",
                "top_n": 5,
                "since": "7 days ago",
            },
        },
        "reports.scheduler": {
            "task": "reports.scheduler",
            "schedule": crontab(minute="*", hour="*"),
        },
        "reports.prune_log": {
            "task": "reports.prune_log",
            "schedule": crontab(minute=0, hour=0),
        },
    }


CELERY_CONFIG = CeleryConfig

# Override the default row limit when requesting chart data
ROW_LIMIT = 10000

# Variables needed to build the URL for warming up a given chart/table cache
SUPERSET_WEBSERVER_ADDRESS = os.getenv("SUPERSET_WEBSERVER_ADDRESS")
SUPERSET_WEBSERVER_PORT = os.getenv("SUPERSET_WEBSERVER_PORT")

# The webdriver to use for generating reports
WEBDRIVER_TYPE = "chrome"
WEBDRIVER_OPTION_ARGS = [
    "--force-device-scale-factor=2.0",
    "--high-dpi-support=2.0",
    "--headless",
    "--disable-gpu",
    "--disable-dev-shm-usage",
    "--no-sandbox",
    "--disable-setuid-sandbox",
    "--disable-extensions",
]

# WebDriver configuration
# This is for internal use; you can keep http here, as it is used by the beat and worker processes
WEBDRIVER_BASEURL = (
    "http://" + SUPERSET_WEBSERVER_ADDRESS + ":" + SUPERSET_WEBSERVER_PORT + "/"
)

# This is the link sent to the recipient, change to your domain eg. https://superset.mydomain.com
WEBDRIVER_BASEURL_USER_FRIENDLY = os.getenv("SUPERSET_URL")

# Email configuration
# ALERT_REPORTS_NOTIFICATION_DRY_RUN = True
EMAIL_NOTIFICATIONS = True
SMTP_HOST = os.getenv("SMTP_HOST")
SMTP_STARTTLS = True
SMTP_SSL = False
SMTP_USER = os.getenv("SMTP_USER")
SMTP_PORT = 587
SMTP_PASSWORD = os.getenv("SMTP_PASSWORD")
SMTP_MAIL_FROM = os.getenv("SMTP_MAIL")

# Global async query config options.
# Requires GLOBAL_ASYNC_QUERIES feature flag to be enabled.
GLOBAL_ASYNC_QUERIES_REDIS_CONFIG = {
    "port": REDIS_PORT,
    "host": REDIS_HOST,
    "password": REDIS_PASSWORD,
    "db": REDIS_CELERY_DB,
}

# Secret key used to sign and validate the contents of the async queries JWT cookie
GLOBAL_ASYNC_QUERIES_JWT_SECRET = os.getenv("GLOBAL_ASYNC_QUERIES_JWT_SECRET")

# Flag that controls if limit should be enforced on the CTA (create table as queries).
SQLLAB_CTAS_NO_LIMIT = True

# This is useful if one wants to enable anonymous users to view
# dashboards. Explicit grant on specific datasets is still required.
PUBLIC_ROLE_LIKE = "Gamma"

# Prevent cookie from being read by frontend JS?
SESSION_COOKIE_HTTPONLY = False
SESSION_COOKIE_SAMESITE = None  # One of [None, 'Lax', 'Strict']


# Secret used to encrypt sensitive data in the metadata database
DEFINED_SECRET_KEY = os.getenv("DEFINED_SECRET_KEY")
SECRET_KEY = f"\2\1{DEFINED_SECRET_KEY}\1\2\e\y\y\h"

# This is an important setting, and should be lower than your
# [load balancer / proxy / envoy / kong / ...] timeout settings.
# You should also make sure to configure your WSGI server
# (gunicorn, nginx, apache, ...) timeout setting to be <= to this setting
SUPERSET_WEBSERVER_TIMEOUT = 120

# Timeout duration for SQL Lab synchronous queries
SQLLAB_TIMEOUT = 120
# Use all X-Forwarded headers when ENABLE_PROXY_FIX is True.
# When proxying to a different port, set "x_port" to 0 to avoid downstream issues.
ENABLE_PROXY_FIX = True

# Overwrite default FEATURE_FLAGS settings
FEATURE_FLAGS = {
    # Note that: RowLevelSecurityFilter is only given by default to the Admin role
    # and the Admin Role does have the all_datasources security permission.
    # But, if users create a specific role with access to RowLevelSecurityFilter MVC
    # and a custom datasource access, the table dropdown will not be correctly filtered
    # by that custom datasource access. So we are assuming a default security config,
    # a custom security config could potentially give access to setting filters on
    # tables that users do not have access to.
    "DASHBOARD_CACHE": True,
    "GLOBAL_ASYNC_QUERIES": True,
    "DASHBOARD_CROSS_FILTERS": True,
    "DASHBOARD_NATIVE_FILTERS_SET": True,
    "ALERT_REPORTS": True,
    # Enables the drag and drop interface in Explore control panel
    "ENABLE_EXPLORE_DRAG_AND_DROP": True,
    "ENABLE_DND_WITH_CLICK_UX": True,
    "ENABLE_JAVASCRIPT_CONTROLS": True,
    "ENABLE_TEMPLATE_PROCESSING": True,
}

EXTRA_CATEGORICAL_COLOR_SCHEMES = [
    {
        "id": "PAI3colores",
        "description": "colores para graficas PAI",
        "label": "colores",
        "isDefault": False,
        "colors": [
            "#5ac189",
            "#5ac189",
            "#5ac189",
            "#FF8000",
            "#FF8000",
            "#FF8000",
            "#FF0000",
            "#FF0000",
            "#FF0000",
        ],
    }
]

EXTRA_SEQUENTIAL_COLOR_SCHEMES = [
    {
        "id": "PAI3coloresSeq",
        "description": "colores para graficas PAI",
        "isDiverging": False,
        "label": "My 3colres",
        "isDefault": True,
        "colors": [
            "#5ac189",
            "#5ac189",
            "#5ac189",
            "#FF8000",
            "#FF8000",
            "#FF8000",
            "#FF0000",
            "#FF0000",
            "#FF0000",
        ],
    }
]

# Enable/disable CSP warning
CONTENT_SECURITY_POLICY_WARNING = False

# Do you want Talisman enabled?
TALISMAN_ENABLED = utils.cast_to_boolean(os.environ.get("TALISMAN_ENABLED", True))

# If you want Talisman, how do you want it configured??
TALISMAN_CONFIG = {
    "content_security_policy": {
        "base-uri": ["'self'"],
        "default-src": ["'self'"],
        "img-src": ["'self'", "blob:", "data:"],
        "worker-src": ["'self'", "blob:"],
        "connect-src": [
            "'self'",
            "https://api.mapbox.com",
            "https://events.mapbox.com",
        ],
        "object-src": "'none'",
        "style-src": [
            "'self'",
            "'unsafe-inline'",
        ],
        "script-src": ["'self'", "'strict-dynamic'"],
    },
    "content_security_policy_nonce_in": ["script-src"],
    "force_https": False,
    "session_cookie_secure": False,
}
# React requires `eval` to work correctly in dev mode
TALISMAN_DEV_CONFIG = {
    "content_security_policy": {
        "base-uri": ["'self'"],
        "default-src": ["'self'"],
        "img-src": ["'self'", "blob:", "data:", "https://gitlab.com"],
        "worker-src": ["'self'", "blob:"],
        "connect-src": [
            "'self'",
            "https://api.mapbox.com",
            "https://events.mapbox.com",
        ],
        "object-src": "'none'",
        "style-src": [
            "'self'",
            "'unsafe-inline'",
        ],
        "script-src": ["'self'", "'unsafe-inline'", "'unsafe-eval'"],
    },
    "content_security_policy_nonce_in": ["script-src"],
    "force_https": False,
    "session_cookie_secure": False,
}

# Flask-WTF flag for CSRF
WTF_CSRF_ENABLED = False

#
# Optionally import superset_config_docker.py (which will have been included on
# the PYTHONPATH) in order to allow for local settings to be overridden
#
try:
    import superset_config_docker
    from superset_config_docker import *  # noqa

    logger.info(
        f"Loaded your Docker configuration at " f"[{superset_config_docker.__file__}]"
    )
except ImportError:
    logger.info("Using default Docker config...")

How to reproduce the bug

  1. Create a deck.gl Scatterplot chart
  2. Set the Longitude & Latitude columns
  3. Set the JavaScript tooltip generator to: d => `Cantidad: ${d.object.extraProps.count}` (see the sketch below)
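
The tooltip generator in step 3 is a JavaScript arrow function returning a template literal; the backticks around the literal are easy to lose when pasting into an issue. A minimal sketch of what is entered in the control, assuming the hovered object exposes extraProps.count (the ENABLE_JAVASCRIPT_CONTROLS feature flag from the configuration above must be enabled):

// JavaScript tooltip generator pasted into the Explore control panel
// (sketch; assumes d.object.extraProps.count is present on the hovered point)
d => `Cantidad: ${d.object.extraProps.count}`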

Screenshots/recordings

Screenshot from 2024-09-05 15-15-39

Logs from UI:

Screenshot from 2024-09-05 15-27-12

Superset version

3.1.3

Python version

3.10

Node version

16

Browser

Chrome

Additional context

This chart worked fine in version 2.1.0, but after upgrading to version 3.1.0 or 3.1.3 this error appears.

Checklist

  • [X] I have searched Superset docs and Slack and didn't find a solution to my problem.
  • [X] I have searched the GitHub issue tracker and didn't find a similar bug report.
  • [X] I have checked Superset's logs for errors and if I found a relevant Python stacktrace, I included it here as text in the "additional context" section.

Comment From: dosubot[bot]

Hey there, @fgomezotero! I'm here to help you out with any bugs, questions, or contributions you have in mind. Let's tackle this issue together!

I found a similar closed issue that might be relevant to your problem: SQL not execute in new superset version (2.1.0 -> 3.0.1) [1].

The 'Unexpected error' message with a 400 HTTP code when building a deck.gl Scatterplot chart in Superset version 3.1.3 (a chart that worked fine in version 2.1.0) is likely due to the URL length exceeding the MAX_URL_LENGTH of 8000 characters. The getExploreLongUrl function constructs the URL for the request, and if the URL length exceeds this limit, it can cause a 400 error.

In version 3.1.3, the getExploreLongUrl function includes a check for the URL length and attempts to shorten it by using minimal form data if allowOverflow is set to false. However, if the URL still exceeds the limit, it can result in an error.

Here is the relevant part of the code:

const MAX_URL_LENGTH = 8000;

export function getExploreLongUrl(
  formData: JsonObject,
  endpointType: string,
  allowOverflow = true,
  extraSearch = {},
): string | undefined {
  // ... other code ...

  const url = uri.directory(directory).search(search).toString();
  if (!allowOverflow && url.length > MAX_URL_LENGTH) {
    const minimalFormData = {
      datasource: formData.datasource,
      viz_type: formData.viz_type,
    };
    return getExploreLongUrl(minimalFormData, endpointType, false, {
      URL_IS_TOO_LONG_TO_SHARE: null,
    });
  }

  return url;
}

To resolve this issue, you may need to ensure that the form data being sent does not result in a URL longer than 8000 characters. Simplifying or reducing the number of parameters in the form data might help in keeping the URL within the acceptable length [2][3].
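
As a hypothetical illustration (not Superset's actual call site), the helper above could be invoked with allowOverflow disabled so that oversized form data is replaced by the minimal datasource/viz_type payload before navigating:

// Hypothetical usage sketch of the getExploreLongUrl helper shown above
const exploreUrl = getExploreLongUrl(formData, 'base', /* allowOverflow */ false);
if (exploreUrl && exploreUrl.length <= 8000 /* MAX_URL_LENGTH */) {
  window.history.replaceState(null, '', exploreUrl);
}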

To continue talking to Dosu, mention @dosu.

Comment From: fgomezotero

@dosu In my case the URL doesn't exceed this value. Here is an example URL: http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&dashboard_page_id=x5w6LqPVu&slice_id=2
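
A quick length check on that URL in the browser console (a sketch using the example URL above) confirms it is well under the limit:

// Devtools console sketch: the example explore URL is far below MAX_URL_LENGTH (8000)
const exampleUrl = 'http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&dashboard_page_id=x5w6LqPVu&slice_id=2';
console.log(exampleUrl.length); // roughly 180 characters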

Comment From: dosubot[bot]

The 'Unexpected error' message with a 400 HTTP code when building a deck.gl Scatterplot chart in Superset version 3.1.3, despite the URL length not exceeding the maximum limit, could be due to the formData not containing a datasource. According to the provided code, the getExploreLongUrl function returns null if formData.datasource is not present:

export function getExploreLongUrl(
  formData,
  endpointType,
  allowOverflow = true,
  extraSearch = {},
) {
  if (!formData.datasource) {
    return null;
  }
  // ... rest of the code
}

Ensure that the formData object includes a valid datasource property [1][2].

To continue talking to Dosu, mention @dosu.

Comment From: fgomezotero

@dosu Here is an example of the logs:

2024-09-05 15:48:27,766:DEBUG:superset.stats_logger:[stats_logger] (incr) loaded_from_source
2024-09-05 15:48:27,766:DEBUG:superset.stats_logger:[stats_logger] (incr) loaded_from_source_without_force
2024-09-05 15:48:27,767:DEBUG:superset.stats_logger:[stats_logger] (incr) set_cache_key
2024-09-05 15:48:27,770:DEBUG:superset.stats_logger:[stats_logger] (incr) explore_json
10.255.65.33 - - [05/Sep/2024:15:48:27 -0300] "POST /superset/explore_json/?form_data=%7B%22slice_id%22%3A22%7D&results=true HTTP/1.1" 200 2209 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
2024-09-05 15:48:27,822:DEBUG:superset.stats_logger:[stats_logger] (incr) events
10.255.65.33 - - [05/Sep/2024:15:48:27 -0300] "GET /api/v1/async_event/?last_id=1725562098810-0 HTTP/1.1" 200 19 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
2024-09-05 15:48:28,488:DEBUG:superset.stats_logger:[stats_logger] (incr) events
10.255.65.33 - - [05/Sep/2024:15:48:28 -0300] "GET /api/v1/async_event/?last_id=1725562098810-0 HTTP/1.1" 200 332 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
2024-09-05 15:48:28,508:DEBUG:superset.stats_logger:[stats_logger] (incr) ExploreFormDataRestApi.put
2024-09-05 15:48:28,516:DEBUG:superset.stats_logger:[stats_logger] (incr) ExploreFormDataRestApi.put.success
2024-09-05 15:48:28,516:DEBUG:superset.stats_logger:[stats_logger] (timing) ExploreFormDataRestApi.put.time | 34.62638519704342 
10.255.65.33 - - [05/Sep/2024:15:48:28 -0300] "PUT /api/v1/explore/form_data/FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk?tab_id=7 HTTP/1.1" 200 80 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
2024-09-05 15:48:28,541:WARNING:superset.views.base:Superset.explore_json_data This API endpoint is deprecated and will be removed in version 4.0.0
2024-09-05 15:48:28,544:DEBUG:parsedatetime:eval - with context - False, False
2024-09-05 15:48:28,550:INFO:superset.viz:Cache key: ba813540d99cf23e90993efb383a9252
2024-09-05 15:48:28,550:WARNING:superset.viz:force_cached (viz.py): value not found for cache key ba813540d99cf23e90993efb383a9252
2024-09-05 15:48:28,551:DEBUG:superset.stats_logger:[stats_logger] (incr) explore_json_data
10.255.65.33 - - [05/Sep/2024:15:48:28 -0300] "GET /superset/explore_json/data/ejr-8e610f8794ab67ad8c8a483734575161 HTTP/1.1" 400 35 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"
2024-09-05 15:48:28,724:DEBUG:superset.stats_logger:[stats_logger] (incr) ChartRestApi.get.success
2024-09-05 15:48:28,724:DEBUG:superset.stats_logger:[stats_logger] (timing) ChartRestApi.get.time | 43.20465540513396 
10.255.65.33 - - [05/Sep/2024:15:48:28 -0300] "GET /api/v1/chart/22 HTTP/1.1" 200 2073 "http://superset-ui-superset-test.apps.ocp-te.paas.red.uy/explore/?form_data_key=FujCgcLr7lsLYQt5rc_6UJCDflhjCtdbjP1fQk4Dpz3mAgjetRhQfZVT3EIt1-Vk&slice_id=22" "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36"

Comment From: dosubot[bot]

The 400 HTTP error when building a deck.gl Scatterplot chart in Superset version 3.1.3 is likely due to the formData object missing the datasource property. The getExploreLongUrl function returns undefined if formData.datasource is not present, which can lead to a 400 error when the request is made.

Ensure that your formData object includes the datasource property before calling getExploreLongUrl. Here is an example structure of the formData object:

form_data: {
  color_picker: { a: 0.82, b: 3, g: 0, r: 205 },
  datasource: '5__table',
  granularity_sqla: 'dttm',
  groupby: [],
  mapbox_style: 'mapbox://styles/mapbox/light-v9',
  multiplier: 10,
  point_radius_fixed: { type: 'metric', value: 'count' },
  point_unit: 'square_m',
  row_limit: 5000,
  size: 'count',
  spatial: { latCol: 'LAT', lonCol: 'LON', type: 'latlong' },
  time_grain_sqla: null,
  viewport: {
    bearing: -4.952916738791771,
    latitude: 37.78926922909199,
    longitude: -122.42613341901688,
    pitch: 4.750411100577438,
    zoom: 12.729132798697304,
  },
  viz_type: 'deck_scatter',
  slice_id: 66,
  time_range: ' : ',
  adhoc_filters: [
    {
      clause: 'WHERE',
      expressionType: 'SIMPLE',
      filterOptionName: 'b0487cd7-2139-476f-a388-1f0f9759530f',
      comparator: '',
      operator: 'IS NOT NULL',
      subject: 'LAT',
    },
    {
      clause: 'WHERE',
      expressionType: 'SIMPLE',
      filterOptionName: '9398a511-3673-468c-b47e-634f59358b6f',
      comparator: '',
      operator: 'IS NOT NULL',
      subject: 'LON',
    },
  ],
  where: '',
  having: '',
  filters: [
    { col: 'LAT', op: 'IS NOT NULL', val: '' },
    { col: 'LON', op: 'IS NOT NULL', val: '' },
  ],
}

Additionally, ensure that the datasource is correctly set in the BaseViz class, as it is a required parameter. If the datasource is missing, the BaseViz class will raise a QueryObjectValidationError:

class BaseViz:  # pylint: disable=too-many-public-methods
    """All visualizations derive this base class"""

    # ... other attributes and methods

    def __init__(
        self,
        datasource: BaseDatasource,
        form_data: dict[str, Any],
        force: bool = False,
        force_cached: bool = False,
    ) -> None:
        if not datasource:
            raise QueryObjectValidationError(_("Viz is missing a datasource"))

        self.datasource = datasource
        self.form_data = form_data
        # ... rest of the constructor

Make sure the datasource property is correctly set to avoid the 400 error [1][2][3].

To continue talking to Dosu, mention @dosu.

Comment From: rusackas

This thread has sadly gone silent for quite a while now. Are you still able to reproduce this in 4.1.2 or 5.0.0rc2?

Comment From: msyavuz

I am closing this one as we don't have any recent activity around this. Feel free to open it again if you can still reproduce it.