chore: Use datetime.timedelta for defining durations in config (apache#16029)

* chore: use datetime.timedelta for defining durations

* Update config.py
john-bodley authored Aug 2, 2021
1 parent 5917407 commit 41e8190
Showing 2 changed files with 34 additions and 34 deletions.
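
The refactor is mechanical: every hard-coded duration (a bare integer of seconds, or arithmetic like 60 * 60 * 24) becomes int(timedelta(...).total_seconds()), which evaluates to the same integer while making the unit explicit. A minimal sketch of the equivalence, using values from the diff below:

from datetime import timedelta

# Old literal on the left, new expression on the right; each pair is equal.
assert 60 == int(timedelta(minutes=1).total_seconds())              # SUPERSET_WEBSERVER_TIMEOUT
assert 60 * 60 * 24 == int(timedelta(days=1).total_seconds())       # CACHE_DEFAULT_TIMEOUT
assert 60 * 60 * 6 == int(timedelta(hours=6).total_seconds())       # SQLLAB_ASYNC_TIME_LIMIT_SEC
assert 60 * 60 * 24 * 7 == int(timedelta(weeks=1).total_seconds())  # WTF_CSRF_TIME_LIMIT
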
61 changes: 30 additions & 31 deletions superset/config.py
@@ -28,7 +28,7 @@
 import re
 import sys
 from collections import OrderedDict
-from datetime import date
+from datetime import date, timedelta
 from typing import Any, Callable, Dict, List, Optional, Type, TYPE_CHECKING, Union

 from cachelib.base import BaseCache
@@ -139,7 +139,7 @@ def _try_json_readsha(  # pylint: disable=unused-argument
 # [load balancer / proxy / envoy / kong / ...] timeout settings.
 # You should also make sure to configure your WSGI server
 # (gunicorn, nginx, apache, ...) timeout setting to be <= to this setting
-SUPERSET_WEBSERVER_TIMEOUT = 60
+SUPERSET_WEBSERVER_TIMEOUT = int(timedelta(minutes=1).total_seconds())

 # this 2 settings are used by dashboard period force refresh feature
 # When user choose auto force refresh frequency
@@ -489,13 +489,12 @@ def _try_json_readsha(  # pylint: disable=unused-argument
     "CACHE_NO_NULL_WARNING": True,
 }

-# Time in seconds before selenium
-# times out after trying to locate an element on the page and wait
+# Time before selenium times out after trying to locate an element on the page and wait
 # for that element to load for a screenshot.
-SCREENSHOT_LOCATE_WAIT = 10
-# Time in seconds before selenium
-# times out after waiting for all DOM class elements named "loading" are gone.
-SCREENSHOT_LOAD_WAIT = 60
+SCREENSHOT_LOCATE_WAIT = int(timedelta(seconds=10).total_seconds())
+# Time before selenium times out after waiting for all DOM class elements named
+# "loading" are gone.
+SCREENSHOT_LOAD_WAIT = int(timedelta(minutes=1).total_seconds())
 # Selenium destroy retries
 SCREENSHOT_SELENIUM_RETRIES = 5
 # Give selenium an headstart, in seconds
@@ -518,9 +517,9 @@ def _try_json_readsha(  # pylint: disable=unused-argument
 # Setup image size default is (300, 200, True)
 # IMG_SIZE = (300, 200, True)

-# Default cache timeout (in seconds), applies to all cache backends unless
-# specifically overridden in each cache config.
-CACHE_DEFAULT_TIMEOUT = 60 * 60 * 24  # 1 day
+# Default cache timeout, applies to all cache backends unless specifically overridden in
+# each cache config.
+CACHE_DEFAULT_TIMEOUT = int(timedelta(days=1).total_seconds())

 # Default cache for Superset objects
 CACHE_CONFIG: CacheConfig = {"CACHE_TYPE": "null"}
@@ -682,8 +681,8 @@ class CeleryConfig:  # pylint: disable=too-few-public-methods
         "sql_lab.get_sql_results": {"rate_limit": "100/s"},
         "email_reports.send": {
             "rate_limit": "1/s",
-            "time_limit": 120,
-            "soft_time_limit": 150,
+            "time_limit": int(timedelta(seconds=120).total_seconds()),
+            "soft_time_limit": int(timedelta(seconds=150).total_seconds()),
             "ignore_result": True,
         },
     }
@@ -723,22 +722,21 @@ class CeleryConfig:  # pylint: disable=too-few-public-methods
 DEFAULT_DB_ID = None

 # Timeout duration for SQL Lab synchronous queries
-SQLLAB_TIMEOUT = 30
+SQLLAB_TIMEOUT = int(timedelta(seconds=30).total_seconds())

 # Timeout duration for SQL Lab query validation
-SQLLAB_VALIDATION_TIMEOUT = 10
+SQLLAB_VALIDATION_TIMEOUT = int(timedelta(seconds=10).total_seconds())

 # SQLLAB_DEFAULT_DBID
 SQLLAB_DEFAULT_DBID = None

-# The MAX duration (in seconds) a query can run for before being killed
-# by celery.
-SQLLAB_ASYNC_TIME_LIMIT_SEC = 60 * 60 * 6
+# The MAX duration a query can run for before being killed by celery.
+SQLLAB_ASYNC_TIME_LIMIT_SEC = int(timedelta(hours=6).total_seconds())

 # Some databases support running EXPLAIN queries that allow users to estimate
 # query costs before they run. These EXPLAIN queries should have a small
 # timeout.
-SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = 10  # seconds
+SQLLAB_QUERY_COST_ESTIMATE_TIMEOUT = int(timedelta(seconds=10).total_seconds())
 # The feature is off by default, and currently only supported in Presto and Postgres.
 # It also need to be enabled on a per-database basis, by adding the key/value pair
 # `cost_estimate_enabled: true` to the database `extra` attribute.
@@ -906,7 +904,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 TROUBLESHOOTING_LINK = ""

 # CSRF token timeout, set to None for a token that never expires
-WTF_CSRF_TIME_LIMIT = 60 * 60 * 24 * 7
+WTF_CSRF_TIME_LIMIT = int(timedelta(weeks=1).total_seconds())

 # This link should lead to a page with instructions on how to gain access to a
 # Datasource. It will be placed at the bottom of permissions errors.
@@ -922,11 +920,11 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 TRACKING_URL_TRANSFORMER = lambda x: x

 # Interval between consecutive polls when using Hive Engine
-HIVE_POLL_INTERVAL = 5
+HIVE_POLL_INTERVAL = int(timedelta(seconds=5).total_seconds())

 # Interval between consecutive polls when using Presto Engine
 # See here: https://github.com/dropbox/PyHive/blob/8eb0aeab8ca300f3024655419b93dad926c1a351/pyhive/presto.py#L93 # pylint: disable=line-too-long
-PRESTO_POLL_INTERVAL = 1
+PRESTO_POLL_INTERVAL = int(timedelta(seconds=1).total_seconds())

 # Allow for javascript controls components
 # this enables programmers to customize certain charts (like the
@@ -984,10 +982,10 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 ALERT_REPORTS_WORKING_TIME_OUT_KILL = True
 # if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout
 # Equal to working timeout + ALERT_REPORTS_WORKING_TIME_OUT_LAG
-ALERT_REPORTS_WORKING_TIME_OUT_LAG = 10
+ALERT_REPORTS_WORKING_TIME_OUT_LAG = int(timedelta(seconds=10).total_seconds())
 # if ALERT_REPORTS_WORKING_TIME_OUT_KILL is True, set a celery hard timeout
 # Equal to working timeout + ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG
-ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = 1
+ALERT_REPORTS_WORKING_SOFT_TIME_OUT_LAG = int(timedelta(seconds=1).total_seconds())
 # If set to true no notification is sent, the worker will just log a message.
 # Useful for debugging
 ALERT_REPORTS_NOTIFICATION_DRY_RUN = False
@@ -1020,7 +1018,7 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 # by celery.
 #
 # Warning: This config key is deprecated and will be removed in version 2.0.0"
-EMAIL_ASYNC_TIME_LIMIT_SEC = 300
+EMAIL_ASYNC_TIME_LIMIT_SEC = int(timedelta(minutes=5).total_seconds())

 # Send bcc of all reports to this address. Set to None to disable.
 # This is useful for maintaining an audit trail of all email deliveries.
@@ -1064,9 +1062,8 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 WEBDRIVER_BASEURL = "http://0.0.0.0:8080/"
 # The base URL for the email report hyperlinks.
 WEBDRIVER_BASEURL_USER_FRIENDLY = WEBDRIVER_BASEURL
-# Time in seconds, selenium will wait for the page to load
-# and render for the email report.
-EMAIL_PAGE_RENDER_WAIT = 30
+# Time selenium will wait for the page to load and render for the email report.
+EMAIL_PAGE_RENDER_WAIT = int(timedelta(seconds=30).total_seconds())

 # Send user to a link where they can report bugs
 BUG_REPORT_URL = None
@@ -1134,8 +1131,8 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 SESSION_COOKIE_SECURE = False  # Prevent cookie from being transmitted over non-tls?
 SESSION_COOKIE_SAMESITE = "Lax"  # One of [None, 'None', 'Lax', 'Strict']

-# Flask configuration variables
-SEND_FILE_MAX_AGE_DEFAULT = 60 * 60 * 24 * 365  # Cache static resources
+# Cache static resources.
+SEND_FILE_MAX_AGE_DEFAULT = int(timedelta(days=365).total_seconds())

 # URI to database storing the example data, points to
 # SQLALCHEMY_DATABASE_URI by default if set to `None`
@@ -1196,7 +1193,9 @@ def CSV_TO_HIVE_UPLOAD_DIRECTORY_FUNC(
 GLOBAL_ASYNC_QUERIES_JWT_COOKIE_DOMAIN = None
 GLOBAL_ASYNC_QUERIES_JWT_SECRET = "test-secret-change-me"
 GLOBAL_ASYNC_QUERIES_TRANSPORT = "polling"
-GLOBAL_ASYNC_QUERIES_POLLING_DELAY = 500
+GLOBAL_ASYNC_QUERIES_POLLING_DELAY = int(
+    timedelta(milliseconds=500).total_seconds() * 1000
+)
 GLOBAL_ASYNC_QUERIES_WEBSOCKET_URL = "ws://127.0.0.1:8080/"

 # A SQL dataset health check. Note if enabled it is strongly advised that the callable
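
One setting above is not in seconds: GLOBAL_ASYNC_QUERIES_POLLING_DELAY is a millisecond value, so the new expression converts the timedelta back to milliseconds rather than truncating to seconds. A quick sanity check of that expression (a sketch, not part of the commit):

from datetime import timedelta

# total_seconds() returns 0.5 for 500 ms; multiplying by 1000 recovers the
# original millisecond count before the int() truncation.
assert int(timedelta(milliseconds=500).total_seconds() * 1000) == 500
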
7 changes: 4 additions & 3 deletions tests/integration_tests/superset_test_config.py
@@ -16,6 +16,7 @@
 # under the License.
 # type: ignore
 from copy import copy
+from datetime import timedelta

 from superset.config import *
 from tests.integration_tests.superset_test_custom_template_processors import (
@@ -84,18 +85,18 @@ def GET_FEATURE_FLAGS_FUNC(ff):
 REDIS_RESULTS_DB = os.environ.get("REDIS_RESULTS_DB", 3)
 REDIS_CACHE_DB = os.environ.get("REDIS_CACHE_DB", 4)

-CACHE_DEFAULT_TIMEOUT = 600
+CACHE_DEFAULT_TIMEOUT = int(timedelta(minutes=10).total_seconds())

 CACHE_CONFIG = {
     "CACHE_TYPE": "redis",
-    "CACHE_DEFAULT_TIMEOUT": 60,
+    "CACHE_DEFAULT_TIMEOUT": int(timedelta(minutes=1).total_seconds()),
     "CACHE_KEY_PREFIX": "superset_cache",
     "CACHE_REDIS_URL": f"redis://{REDIS_HOST}:{REDIS_PORT}/{REDIS_CACHE_DB}",
 }

 DATA_CACHE_CONFIG = {
     **CACHE_CONFIG,
-    "CACHE_DEFAULT_TIMEOUT": 30,
+    "CACHE_DEFAULT_TIMEOUT": int(timedelta(seconds=30).total_seconds()),
     "CACHE_KEY_PREFIX": "superset_data_cache",
 }

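The test-config hunk shows the same pattern applied to overrides. A deployment's own superset_config.py can adopt it the same way; the snippet below is an illustrative sketch (the override value is hypothetical, not part of this commit):

from datetime import timedelta

# Hypothetical override: raise the synchronous SQL Lab timeout to two minutes.
SQLLAB_TIMEOUT = int(timedelta(minutes=2).total_seconds())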
