rename exceptions (#585)
* rename exceptions

* point back to main

* Delete Under the Hood-20230110-105904.yaml

* Update Under the Hood-20230110-101919.yaml
emmyoop authored Jan 11, 2023
1 parent 28a9d09 commit 94148f6
Showing 10 changed files with 44 additions and 42 deletions.
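
In summary, the hunks below apply the following renames to match dbt-core; the old raise_* helper calls are replaced by raising the new exception classes directly:

    dbt.exceptions.RuntimeException             -> dbt.exceptions.DbtRuntimeError
    dbt.exceptions.FailedToConnectException     -> dbt.exceptions.FailedToConnectError
    dbt.exceptions.raise_database_error(msg)    -> raise dbt.exceptions.DbtDatabaseError(msg)
    dbt.exceptions.raise_compiler_error(msg)    -> raise dbt.exceptions.CompilationError(msg)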
7 changes: 7 additions & 0 deletions .changes/unreleased/Under the Hood-20230110-101919.yaml
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Rename exceptions to match dbt-core.
time: 2023-01-10T10:19:19.675879-06:00
custom:
Author: emmyoop
Issue: "557"
PR: "585"
25 changes: 13 additions & 12 deletions dbt/adapters/spark/connections.py
@@ -90,7 +90,7 @@ def cluster_id(self):
def __post_init__(self):
# spark classifies database and schema as the same thing
if self.database is not None and self.database != self.schema:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f" schema: {self.schema} \n"
f" database: {self.database} \n"
f"On Spark, database must be omitted or have the same value as"
@@ -102,7 +102,7 @@ def __post_init__(self):
try:
import pyodbc # noqa: F401
except ImportError as e:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -111,7 +111,7 @@ def __post_init__(self):
) from e

if self.method == SparkConnectionMethod.ODBC and self.cluster and self.endpoint:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
"`cluster` and `endpoint` cannot both be set when"
f" using {self.method} method to connect to Spark"
)
@@ -120,7 +120,7 @@ def __post_init__(self):
self.method == SparkConnectionMethod.HTTP
or self.method == SparkConnectionMethod.THRIFT
) and not (ThriftState and THttpClient and hive):
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -131,7 +131,7 @@ def __post_init__(self):
try:
import pyspark # noqa: F401
except ImportError as e:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"{self.method} connection method requires "
"additional dependencies. \n"
"Install the additional required dependencies with "
@@ -233,12 +233,13 @@ def execute(self, sql, bindings=None):
if poll_state.errorMessage:
logger.debug("Poll response: {}".format(poll_state))
logger.debug("Poll status: {}".format(state))
- dbt.exceptions.raise_database_error(poll_state.errorMessage)
+ raise dbt.exceptions.DbtDatabaseError(poll_state.errorMessage)

elif state not in STATE_SUCCESS:
status_type = ThriftState._VALUES_TO_NAMES.get(state, "Unknown<{!r}>".format(state))

dbt.exceptions.raise_database_error("Query failed with status: {}".format(status_type))
raise dbt.exceptions.DbtDatabaseError(
"Query failed with status: {}".format(status_type)
)

logger.debug("Poll status: {}, query complete".format(state))

@@ -293,9 +294,9 @@ def exception_handler(self, sql):
thrift_resp = exc.args[0]
if hasattr(thrift_resp, "status"):
msg = thrift_resp.status.errorMessage
- raise dbt.exceptions.RuntimeException(msg)
+ raise dbt.exceptions.DbtRuntimeError(msg)
else:
- raise dbt.exceptions.RuntimeException(str(exc))
+ raise dbt.exceptions.DbtRuntimeError(str(exc))

def cancel(self, connection):
connection.handle.cancel()
@@ -462,7 +463,7 @@ def open(cls, connection):
msg = "Failed to connect"
if creds.token is not None:
msg += ", is your token valid?"
- raise dbt.exceptions.FailedToConnectException(msg) from e
+ raise dbt.exceptions.FailedToConnectError(msg) from e
retryable_message = _is_retryable_error(e)
if retryable_message and creds.connect_retries > 0:
msg = (
@@ -483,7 +484,7 @@ def open(cls, connection):
logger.warning(msg)
time.sleep(creds.connect_timeout)
else:
raise dbt.exceptions.FailedToConnectException("failed to connect") from e
raise dbt.exceptions.FailedToConnectError("failed to connect") from e
else:
raise exc

12 changes: 6 additions & 6 deletions dbt/adapters/spark/impl.py
@@ -124,7 +124,7 @@ def add_schema_to_cache(self, schema) -> str:
"""Cache a new schema in dbt. It will show up in `list relations`."""
if schema is None:
name = self.nice_connection_name()
- dbt.exceptions.raise_compiler_error(
+ raise dbt.exceptions.CompilationError(
"Attempted to cache a null schema for {}".format(name)
)
if dbt.flags.USE_CACHE: # type: ignore
@@ -138,7 +138,7 @@ def list_relations_without_caching(
kwargs = {"schema_relation": schema_relation}
try:
results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs)
- except dbt.exceptions.RuntimeException as e:
+ except dbt.exceptions.DbtRuntimeError as e:
errmsg = getattr(e, "msg", "")
if f"Database '{schema_relation}' not found" in errmsg:
return []
@@ -150,7 +150,7 @@ def list_relations_without_caching(
relations = []
for row in results:
if len(row) != 4:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f'Invalid value from "show table extended ...", '
f"got {len(row)} values, expected 4"
)
@@ -222,7 +222,7 @@ def get_columns_in_relation(self, relation: Relation) -> List[SparkColumn]:
GET_COLUMNS_IN_RELATION_RAW_MACRO_NAME, kwargs={"relation": relation}
)
columns = self.parse_describe_extended(relation, rows)
- except dbt.exceptions.RuntimeException as e:
+ except dbt.exceptions.DbtRuntimeError as e:
# spark would throw an error when the table doesn't exist, where other
# CDW would just return an empty list, normalizing the behavior here
errmsg = getattr(e, "msg", "")
@@ -280,7 +280,7 @@ def get_properties(self, relation: Relation) -> Dict[str, str]:
def get_catalog(self, manifest):
schema_map = self._get_catalog_schemas(manifest)
if len(schema_map) > 1:
- dbt.exceptions.raise_compiler_error(
+ raise dbt.exceptions.CompilationError(
f"Expected only one database in get_catalog, found " f"{list(schema_map)}"
)

@@ -308,7 +308,7 @@ def _get_one_catalog(
manifest,
) -> agate.Table:
if len(schemas) != 1:
- dbt.exceptions.raise_compiler_error(
+ raise dbt.exceptions.CompilationError(
f"Expected only one schema in spark _get_one_catalog, found " f"{schemas}"
)

22 changes: 11 additions & 11 deletions dbt/adapters/spark/python_submissions.py
@@ -53,7 +53,7 @@ def _create_work_dir(self, path: str) -> None:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error creating work_dir for python notebooks\n {response.content!r}"
)

@@ -71,7 +71,7 @@ def _upload_notebook(self, path: str, compiled_code: str) -> None:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error creating python notebook.\n {response.content!r}"
)

@@ -99,7 +99,7 @@ def _submit_job(self, path: str, cluster_spec: dict) -> str:
json=job_spec,
)
if submit_response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error creating python run.\n {submit_response.content!r}"
)
return submit_response.json()["run_id"]
@@ -135,7 +135,7 @@ def _submit_through_notebook(self, compiled_code: str, cluster_spec: dict) -> None:
json_run_output = run_output.json()
result_state = json_run_output["metadata"]["state"]["result_state"]
if result_state != "SUCCESS":
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
"Python model failed with traceback as:\n"
"(Note that the line number here does not "
"match the line number in your code due to dbt templating)\n"
@@ -169,9 +169,9 @@ def polling(
response = status_func(**status_func_kwargs)
state = get_state_func(response)
if exceeded_timeout:
raise dbt.exceptions.RuntimeException("python model run timed out")
raise dbt.exceptions.DbtRuntimeError("python model run timed out")
if state != expected_end_state:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
"python model run ended in state"
f"{state} with state_message\n{get_state_msg_func(response)}"
)
@@ -205,7 +205,7 @@ def create(self) -> str:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error creating an execution context.\n {response.content!r}"
)
return response.json()["id"]
@@ -221,7 +221,7 @@ def destroy(self, context_id: str) -> str:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error deleting an execution context.\n {response.content!r}"
)
return response.json()["id"]
@@ -246,7 +246,7 @@ def execute(self, context_id: str, command: str) -> str:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error creating a command.\n {response.content!r}"
)
return response.json()["id"]
@@ -263,7 +263,7 @@ def status(self, context_id: str, command_id: str) -> Dict[str, Any]:
},
)
if response.status_code != 200:
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Error getting status of command.\n {response.content!r}"
)
return response.json()
@@ -298,7 +298,7 @@ def submit(self, compiled_code: str) -> None:
get_state_msg_func=lambda response: response.json()["results"]["data"],
)
if response["results"]["resultType"] == "error":
- raise dbt.exceptions.RuntimeException(
+ raise dbt.exceptions.DbtRuntimeError(
f"Python model failed with traceback as:\n"
f"{response['results']['cause']}"
)
6 changes: 3 additions & 3 deletions dbt/adapters/spark/relation.py
@@ -3,7 +3,7 @@
from dataclasses import dataclass, field

from dbt.adapters.base.relation import BaseRelation, Policy
- from dbt.exceptions import RuntimeException
+ from dbt.exceptions import DbtRuntimeError


@dataclass
@@ -31,11 +31,11 @@ class SparkRelation(BaseRelation):

def __post_init__(self):
if self.database != self.schema and self.database:
raise RuntimeException("Cannot set database in spark!")
raise DbtRuntimeError("Cannot set database in spark!")

def render(self):
if self.include_policy.database and self.include_policy.schema:
- raise RuntimeException(
+ raise DbtRuntimeError(
"Got a spark relation with schema and database set to "
"include, but only one can be set"
)
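
A minimal usage sketch (illustrative only, not part of this commit) of catching the renamed exception from SparkRelation; the create() arguments mirror the unit test below:

    from dbt.adapters.spark import SparkRelation
    from dbt.exceptions import DbtRuntimeError

    try:
        # Spark classifies database and schema as the same thing, so a
        # distinct database raises the renamed error
        SparkRelation.create(database="something", schema="different", identifier="table")
    except DbtRuntimeError as exc:
        print(exc)  # Cannot set database in spark!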
@@ -1,6 +1,5 @@
from cProfile import run
from tests.integration.base import DBTIntegrationTest, use_profile
- import dbt.exceptions


class TestIncrementalOnSchemaChange(DBTIntegrationTest):
@@ -1,6 +1,5 @@
from cProfile import run
from tests.integration.base import DBTIntegrationTest, use_profile
- import dbt.exceptions


class TestIncrementalStrategies(DBTIntegrationTest):
3 changes: 0 additions & 3 deletions tests/integration/persist_docs/test_persist_docs.py
@@ -1,8 +1,5 @@
- from cProfile import run
from tests.integration.base import DBTIntegrationTest, use_profile
- import dbt.exceptions

import json


class TestPersistDocsDelta(DBTIntegrationTest):
@@ -1,6 +1,5 @@
from cProfile import run
from tests.integration.base import DBTIntegrationTest, use_profile
- import dbt.exceptions


class TestSeedColumnTypeCast(DBTIntegrationTest):
8 changes: 4 additions & 4 deletions tests/unit/test_adapter.py
@@ -2,7 +2,7 @@
from unittest import mock

import dbt.flags as flags
- from dbt.exceptions import RuntimeException
+ from dbt.exceptions import DbtRuntimeError
from agate import Row
from pyhive import hive
from dbt.adapters.spark import SparkAdapter, SparkRelation
@@ -453,7 +453,7 @@ def test_relation_with_database(self):
adapter = SparkAdapter(config)
# fine
adapter.Relation.create(schema='different', identifier='table')
- with self.assertRaises(RuntimeException):
+ with self.assertRaises(DbtRuntimeError):
# not fine - database set
adapter.Relation.create(
database='something', schema='different', identifier='table')
@@ -476,7 +476,7 @@ def test_profile_with_database(self):
},
'target': 'test'
}
- with self.assertRaises(RuntimeException):
+ with self.assertRaises(DbtRuntimeError):
config_from_parts_or_dicts(self.project_cfg, profile)

def test_profile_with_cluster_and_sql_endpoint(self):
@@ -496,7 +496,7 @@ def test_profile_with_cluster_and_sql_endpoint(self):
},
'target': 'test'
}
- with self.assertRaises(RuntimeException):
+ with self.assertRaises(DbtRuntimeError):
config_from_parts_or_dicts(self.project_cfg, profile)

def test_parse_columns_from_information_with_table_type_and_delta_provider(self):
