From dff1b613ddf87e4e72e8a47475bcfd1d55796a5c Mon Sep 17 00:00:00 2001
From: Fokko Driesprong
Date: Thu, 6 May 2021 23:32:07 +0200
Subject: [PATCH] Make the https:// optional (#165)

---
 CHANGELOG.md                      |  3 ++-
 dbt/adapters/spark/connections.py | 10 +++++++---
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 9075551c0..002a0a1e7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,11 +7,12 @@

 ### Under the hood
 - Parse information returned by `list_relations_without_caching` macro to speed up catalog generation ([#93](https://github.com/fishtown-analytics/dbt-spark/issues/93), [#160](https://github.com/fishtown-analytics/dbt-spark/pull/160))
+- More flexible host passing, https:// can be omitted ([#153](https://github.com/fishtown-analytics/dbt-spark/issues/153))

 ### Contributors
 - [@friendofasquid](https://github.com/friendofasquid) ([#159](https://github.com/fishtown-analytics/dbt-spark/pull/159))
 - [@franloza](https://github.com/franloza) ([#160](https://github.com/fishtown-analytics/dbt-spark/pull/160))
-
+- [@Fokko](https://github.com/Fokko) ([#165](https://github.com/fishtown-analytics/dbt-spark/pull/165))

 ## dbt-spark 0.19.1 (Release TBD)

diff --git a/dbt/adapters/spark/connections.py b/dbt/adapters/spark/connections.py
index 457a0d843..bd26f6efe 100644
--- a/dbt/adapters/spark/connections.py
+++ b/dbt/adapters/spark/connections.py
@@ -254,7 +254,7 @@ class SparkConnectionManager(SQLConnectionManager):
     SPARK_CLUSTER_HTTP_PATH = "/sql/protocolv1/o/{organization}/{cluster}"
     SPARK_SQL_ENDPOINT_HTTP_PATH = "/sql/1.0/endpoints/{endpoint}"
     SPARK_CONNECTION_URL = (
-        "https://{host}:{port}" + SPARK_CLUSTER_HTTP_PATH
+        "{host}:{port}" + SPARK_CLUSTER_HTTP_PATH
     )

     @contextmanager
@@ -320,8 +320,13 @@ def open(cls, connection):
                     cls.validate_creds(creds, ['token', 'host', 'port',
                                                'cluster', 'organization'])

+                    # Prepend https:// if it is missing
+                    host = creds.host
+                    if not host.startswith('https://'):
+                        host = 'https://' + creds.host
+
                     conn_url = cls.SPARK_CONNECTION_URL.format(
-                        host=creds.host,
+                        host=host,
                         port=creds.port,
                         organization=creds.organization,
                         cluster=creds.cluster
@@ -350,7 +355,6 @@ def open(cls, connection):
                         kerberos_service_name=creds.kerberos_service_name)  # noqa
                     handle = PyhiveConnectionWrapper(conn)
                 elif creds.method == SparkConnectionMethod.ODBC:
-                    http_path = None
                     if creds.cluster is not None:
                         required_fields = ['driver', 'host', 'port', 'token',
                                            'organization', 'cluster']