From eb200b46877c31469f410b7d47dfd245473ed7cb Mon Sep 17 00:00:00 2001 From: Emily Rockman Date: Tue, 10 Jan 2023 17:43:27 -0600 Subject: [PATCH] Rename exceptions (#6539) * rename InternalException * rename RuntimeException * rename DatabaseException * rename CompilationException * cleanup renames in tests and postgres * rename ValidationException * rename IncompatibleSchemaException * more renaming * more renaming * rename InternalException again * convert ParsingException * replace JSONValidationException and SemverException * replace VersionsNotCompatibleException * replace NotImplementedException * replace FailedToConnectException * replace InvalidConnectionException * replace InvalidSelectorException * replace DuplicateYamlKeyException * replace ConnectionException * minor cleanup * update comment * more cleanup * add class decorator * rename more exceptions * more renamed, add changelog * rename exception * rework class deprecations * removing testing line * fix failing test * rename newer exceptions * fix failing test * commit unsaved faile * convert back an rpc exception * remove class deprecations --- .../Breaking Changes-20221205-141937.yaml | 5 +- core/dbt/adapters/base/column.py | 12 +- core/dbt/adapters/base/connections.py | 36 +- core/dbt/adapters/base/impl.py | 114 ++- core/dbt/adapters/base/plugin.py | 4 +- core/dbt/adapters/base/query_headers.py | 4 +- core/dbt/adapters/base/relation.py | 18 +- core/dbt/adapters/cache.py | 20 +- core/dbt/adapters/factory.py | 12 +- core/dbt/adapters/sql/connections.py | 10 +- core/dbt/adapters/sql/impl.py | 4 +- core/dbt/clients/_jinja_blocks.py | 28 +- core/dbt/clients/agate_helper.py | 4 +- core/dbt/clients/git.py | 8 +- core/dbt/clients/jinja.py | 48 +- core/dbt/clients/jinja_static.py | 6 +- core/dbt/clients/system.py | 2 +- core/dbt/clients/yaml_helper.py | 2 +- core/dbt/compilation.py | 14 +- core/dbt/config/profile.py | 22 +- core/dbt/config/project.py | 16 +- core/dbt/config/renderer.py | 8 +- 
core/dbt/config/runtime.py | 22 +- core/dbt/config/selectors.py | 8 +- core/dbt/config/utils.py | 6 +- core/dbt/context/base.py | 20 +- core/dbt/context/configured.py | 6 +- core/dbt/context/context_config.py | 8 +- core/dbt/context/docs.py | 8 +- core/dbt/context/exceptions_jinja.py | 68 +- core/dbt/context/macro_resolver.py | 6 +- core/dbt/context/macros.py | 6 +- core/dbt/context/providers.py | 128 ++-- core/dbt/context/secret.py | 4 +- core/dbt/contracts/connection.py | 4 +- core/dbt/contracts/graph/manifest.py | 30 +- core/dbt/contracts/graph/model_config.py | 16 +- core/dbt/contracts/graph/unparsed.py | 6 +- core/dbt/contracts/relation.py | 14 +- core/dbt/contracts/results.py | 6 +- core/dbt/contracts/state.py | 10 +- core/dbt/contracts/util.py | 14 +- core/dbt/deps/git.py | 4 +- core/dbt/deps/registry.py | 18 +- core/dbt/deps/resolver.py | 16 +- core/dbt/events/functions.py | 4 +- core/dbt/events/proto_types.py | 26 +- core/dbt/events/types.proto | 28 +- core/dbt/events/types.py | 10 +- core/dbt/exceptions.py | 669 +++++++++--------- core/dbt/graph/cli.py | 22 +- core/dbt/graph/graph.py | 6 +- core/dbt/graph/selector.py | 8 +- core/dbt/graph/selector_methods.py | 30 +- core/dbt/graph/selector_spec.py | 14 +- core/dbt/internal_deprecations.py | 8 +- core/dbt/lib.py | 4 +- core/dbt/main.py | 12 +- core/dbt/parser/base.py | 10 +- core/dbt/parser/generic_test.py | 8 +- core/dbt/parser/generic_test_builders.py | 44 +- core/dbt/parser/hooks.py | 4 +- core/dbt/parser/macros.py | 8 +- core/dbt/parser/manifest.py | 26 +- core/dbt/parser/models.py | 30 +- core/dbt/parser/read_files.py | 8 +- core/dbt/parser/schemas.py | 100 +-- core/dbt/parser/search.py | 6 +- core/dbt/parser/snapshots.py | 4 +- core/dbt/parser/sources.py | 4 +- core/dbt/parser/sql.py | 4 +- core/dbt/semver.py | 18 +- core/dbt/task/base.py | 34 +- core/dbt/task/build.py | 6 +- core/dbt/task/compile.py | 10 +- core/dbt/task/freshness.py | 10 +- core/dbt/task/generate.py | 10 +- core/dbt/task/init.py | 
2 +- core/dbt/task/list.py | 14 +- core/dbt/task/run.py | 36 +- core/dbt/task/run_operation.py | 4 +- core/dbt/task/runnable.py | 36 +- core/dbt/task/seed.py | 4 +- core/dbt/task/snapshot.py | 4 +- core/dbt/task/sql.py | 4 +- core/dbt/task/test.py | 18 +- core/dbt/tests/fixtures/project.py | 6 +- core/dbt/utils.py | 16 +- .../dbt/adapters/postgres/connections.py | 6 +- .../postgres/dbt/adapters/postgres/impl.py | 20 +- .../dbt/adapters/postgres/relation.py | 4 +- .../035_docs_blocks_tests/test_docs_blocks.py | 8 +- .../062_defer_state_tests/test_defer_state.py | 2 +- .../test_modified_state.py | 6 +- .../test_run_results_state.py | 2 - .../test_partial_parsing.py | 12 +- .../test_pp_metrics.py | 6 +- .../068_partial_parsing_tests/test_pp_vars.py | 10 +- test/unit/test_adapter_connection_manager.py | 22 +- test/unit/test_cache.py | 2 +- test/unit/test_config.py | 2 +- test/unit/test_context.py | 4 +- test/unit/test_core_dbt_utils.py | 4 +- test/unit/test_deps.py | 8 +- test/unit/test_exceptions.py | 6 +- test/unit/test_graph_selection.py | 2 +- test/unit/test_graph_selector_methods.py | 4 +- test/unit/test_graph_selector_spec.py | 6 +- test/unit/test_jinja.py | 34 +- test/unit/test_parser.py | 22 +- test/unit/test_postgres_adapter.py | 6 +- .../test_registry_get_request_exception.py | 4 +- test/unit/test_semver.py | 4 +- .../query_comment/test_query_comment.py | 4 +- tests/functional/artifacts/test_override.py | 4 +- .../artifacts/test_previous_version_state.py | 4 +- .../basic/test_invalid_reference.py | 4 +- tests/functional/configs/test_configs.py | 4 +- .../configs/test_configs_in_schema_files.py | 6 +- .../functional/configs/test_disabled_model.py | 6 +- .../functional/configs/test_unused_configs.py | 4 +- .../context_methods/test_builtin_functions.py | 6 +- .../context_methods/test_cli_vars.py | 8 +- .../context_methods/test_secret_env_vars.py | 8 +- .../test_var_in_generate_name.py | 4 +- .../dependencies/test_local_dependency.py | 6 +- 
.../deprecations/test_deprecations.py | 10 +- .../duplicates/test_duplicate_analysis.py | 4 +- .../duplicates/test_duplicate_exposure.py | 4 +- .../duplicates/test_duplicate_macro.py | 6 +- .../duplicates/test_duplicate_metric.py | 4 +- .../duplicates/test_duplicate_model.py | 6 +- .../duplicates/test_duplicate_source.py | 4 +- .../fail_fast/test_fail_fast_run.py | 6 +- tests/functional/hooks/test_model_hooks.py | 4 +- .../test_invalid_models.py | 14 +- tests/functional/macros/test_macros.py | 4 +- .../materializations/test_incremental.py | 6 +- .../functional/metrics/test_metric_configs.py | 6 +- tests/functional/metrics/test_metrics.py | 32 +- .../schema_tests/test_schema_v2_tests.py | 10 +- .../test_missing_strategy_snapshot.py | 4 +- .../test_source_overrides_duplicate_model.py | 4 +- .../functional/sources/test_simple_source.py | 4 +- .../sources/test_source_fresher_state.py | 6 +- tests/unit/test_connection_retries.py | 4 +- tests/unit/test_deprecations.py | 84 +-- tests/unit/test_events.py | 8 +- 148 files changed, 1330 insertions(+), 1332 deletions(-) diff --git a/.changes/unreleased/Breaking Changes-20221205-141937.yaml b/.changes/unreleased/Breaking Changes-20221205-141937.yaml index 5f2a780d661..39506f9ab2b 100644 --- a/.changes/unreleased/Breaking Changes-20221205-141937.yaml +++ b/.changes/unreleased/Breaking Changes-20221205-141937.yaml @@ -1,8 +1,9 @@ kind: Breaking Changes -body: Cleaned up exceptions to directly raise in code. Removed use of all exception +body: Cleaned up exceptions to directly raise in code. Also updated the existing + exception to meet PEP guidelines.Removed use of all exception functions in the code base and marked them all as deprecated to be removed next minor release. 
time: 2022-12-05T14:19:37.863032-06:00 custom: Author: emmyoop - Issue: 6339 6393 + Issue: 6339 6393 6460 diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py index b47aac64062..3c6246b33a6 100644 --- a/core/dbt/adapters/base/column.py +++ b/core/dbt/adapters/base/column.py @@ -2,7 +2,7 @@ import re from typing import Dict, ClassVar, Any, Optional -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError @dataclass @@ -85,7 +85,7 @@ def is_numeric(self) -> bool: def string_size(self) -> int: if not self.is_string(): - raise RuntimeException("Called string_size() on non-string field!") + raise DbtRuntimeError("Called string_size() on non-string field!") if self.dtype == "text" or self.char_size is None: # char_size should never be None. Handle it reasonably just in case @@ -124,7 +124,7 @@ def __repr__(self) -> str: def from_description(cls, name: str, raw_data_type: str) -> "Column": match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type) if match is None: - raise RuntimeException(f'Could not interpret data type "{raw_data_type}"') + raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"') data_type, size_info = match.groups() char_size = None numeric_precision = None @@ -137,7 +137,7 @@ def from_description(cls, name: str, raw_data_type: str) -> "Column": try: char_size = int(parts[0]) except ValueError: - raise RuntimeException( + raise DbtRuntimeError( f'Could not interpret data_type "{raw_data_type}": ' f'could not convert "{parts[0]}" to an integer' ) @@ -145,14 +145,14 @@ def from_description(cls, name: str, raw_data_type: str) -> "Column": try: numeric_precision = int(parts[0]) except ValueError: - raise RuntimeException( + raise DbtRuntimeError( f'Could not interpret data_type "{raw_data_type}": ' f'could not convert "{parts[0]}" to an integer' ) try: numeric_scale = int(parts[1]) except ValueError: - raise RuntimeException( + raise DbtRuntimeError( f'Could not interpret data_type 
"{raw_data_type}": ' f'could not convert "{parts[1]}" to an integer' ) diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py index 577cdf6d9a6..73e87ae9600 100644 --- a/core/dbt/adapters/base/connections.py +++ b/core/dbt/adapters/base/connections.py @@ -91,13 +91,13 @@ def get_thread_connection(self) -> Connection: key = self.get_thread_identifier() with self.lock: if key not in self.thread_connections: - raise dbt.exceptions.InvalidConnectionException(key, list(self.thread_connections)) + raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections)) return self.thread_connections[key] def set_thread_connection(self, conn: Connection) -> None: key = self.get_thread_identifier() if key in self.thread_connections: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( "In set_thread_connection, existing connection exists for {}" ) self.thread_connections[key] = conn @@ -137,7 +137,7 @@ def exception_handler(self, sql: str) -> ContextManager: :return: A context manager that handles exceptions raised by the underlying database. """ - raise dbt.exceptions.NotImplementedException( + raise dbt.exceptions.NotImplementedError( "`exception_handler` is not implemented for this adapter!" ) @@ -211,7 +211,7 @@ def retry_connection( connect should trigger a retry. :type retryable_exceptions: Iterable[Type[Exception]] :param int retry_limit: How many times to retry the call to connect. If this limit - is exceeded before a successful call, a FailedToConnectException will be raised. + is exceeded before a successful call, a FailedToConnectError will be raised. Must be non-negative. :param retry_timeout: Time to wait between attempts to connect. 
Can also take a Callable that takes the number of attempts so far, beginning at 0, and returns an int @@ -220,14 +220,14 @@ def retry_connection( :param int _attempts: Parameter used to keep track of the number of attempts in calling the connect function across recursive calls. Passed as an argument to retry_timeout if it is a Callable. This parameter should not be set by the initial caller. - :raises dbt.exceptions.FailedToConnectException: Upon exhausting all retry attempts without + :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without successfully acquiring a handle. :return: The given connection with its appropriate state and handle attributes set depending on whether we successfully acquired a handle or not. """ timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout if timeout < 0: - raise dbt.exceptions.FailedToConnectException( + raise dbt.exceptions.FailedToConnectError( "retry_timeout cannot be negative or return a negative time." ) @@ -235,7 +235,7 @@ def retry_connection( # This guard is not perfect others may add to the recursion limit (e.g. built-ins). 
connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectException("retry_limit cannot be negative") + raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative") try: connection.handle = connect() @@ -246,7 +246,7 @@ def retry_connection( if retry_limit <= 0: connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectException(str(e)) + raise dbt.exceptions.FailedToConnectError(str(e)) logger.debug( f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n" @@ -268,12 +268,12 @@ def retry_connection( except Exception as e: connection.handle = None connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectException(str(e)) + raise dbt.exceptions.FailedToConnectError(str(e)) @abc.abstractmethod def cancel_open(self) -> Optional[List[str]]: """Cancel all open connections on the adapter. (passable)""" - raise dbt.exceptions.NotImplementedException( + raise dbt.exceptions.NotImplementedError( "`cancel_open` is not implemented for this adapter!" ) @@ -288,7 +288,7 @@ def open(cls, connection: Connection) -> Connection: This should be thread-safe, or hold the lock if necessary. The given connection should not be in either in_use or available. """ - raise dbt.exceptions.NotImplementedException("`open` is not implemented for this adapter!") + raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!") def release(self) -> None: with self.lock: @@ -320,16 +320,12 @@ def cleanup_all(self) -> None: @abc.abstractmethod def begin(self) -> None: """Begin a transaction. (passable)""" - raise dbt.exceptions.NotImplementedException( - "`begin` is not implemented for this adapter!" - ) + raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!") @abc.abstractmethod def commit(self) -> None: """Commit a transaction. 
(passable)""" - raise dbt.exceptions.NotImplementedException( - "`commit` is not implemented for this adapter!" - ) + raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!") @classmethod def _rollback_handle(cls, connection: Connection) -> None: @@ -365,7 +361,7 @@ def _close_handle(cls, connection: Connection) -> None: def _rollback(cls, connection: Connection) -> None: """Roll back the given connection.""" if connection.transaction_open is False: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Tried to rollback transaction on connection " f'"{connection.name}", but it does not have one open!' ) @@ -415,6 +411,4 @@ def execute( :return: A tuple of the query status and results (empty if fetch=False). :rtype: Tuple[AdapterResponse, agate.Table] """ - raise dbt.exceptions.NotImplementedException( - "`execute` is not implemented for this adapter!" - ) + raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!") diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py index 64ebbeac5dd..98b78217c14 100644 --- a/core/dbt/adapters/base/impl.py +++ b/core/dbt/adapters/base/impl.py @@ -22,20 +22,20 @@ import pytz from dbt.exceptions import ( - InternalException, - InvalidMacroArgType, - InvalidMacroResult, - InvalidQuoteConfigType, - NotImplementedException, - NullRelationCacheAttempted, - NullRelationDropAttempted, - RelationReturnedMultipleResults, - RenameToNoneAttempted, - RuntimeException, - SnapshotTargetIncomplete, - SnapshotTargetNotSnapshotTable, - UnexpectedNull, - UnexpectedNonTimestamp, + DbtInternalError, + MacroArgTypeError, + MacroResultError, + QuoteConfigTypeError, + NotImplementedError, + NullRelationCacheAttemptedError, + NullRelationDropAttemptedError, + RelationReturnedMultipleResultsError, + RenameToNoneAttemptedError, + DbtRuntimeError, + SnapshotTargetIncompleteError, + SnapshotTargetNotSnapshotTableError, + UnexpectedNullError, 
+ UnexpectedNonTimestampError, ) from dbt.adapters.protocol import ( @@ -75,7 +75,7 @@ def _expect_row_value(key: str, row: agate.Row): if key not in row.keys(): - raise InternalException( + raise DbtInternalError( 'Got a row without "{}" column, columns: {}'.format(key, row.keys()) ) return row[key] @@ -104,10 +104,10 @@ def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datet assume the datetime is already for UTC and add the timezone. """ if dt is None: - raise UnexpectedNull(field_name, source) + raise UnexpectedNullError(field_name, source) elif not hasattr(dt, "tzinfo"): - raise UnexpectedNonTimestamp(field_name, source, dt) + raise UnexpectedNonTimestampError(field_name, source, dt) elif dt.tzinfo: return dt.astimezone(pytz.UTC) @@ -433,7 +433,7 @@ def cache_added(self, relation: Optional[BaseRelation]) -> str: """Cache a new relation in dbt. It will show up in `list relations`.""" if relation is None: name = self.nice_connection_name() - raise NullRelationCacheAttempted(name) + raise NullRelationCacheAttemptedError(name) self.cache.add(relation) # so jinja doesn't render things return "" @@ -445,7 +445,7 @@ def cache_dropped(self, relation: Optional[BaseRelation]) -> str: """ if relation is None: name = self.nice_connection_name() - raise NullRelationDropAttempted(name) + raise NullRelationDropAttemptedError(name) self.cache.drop(relation) return "" @@ -462,7 +462,7 @@ def cache_renamed( name = self.nice_connection_name() src_name = _relation_name(from_relation) dst_name = _relation_name(to_relation) - raise RenameToNoneAttempted(src_name, dst_name, name) + raise RenameToNoneAttemptedError(src_name, dst_name, name) self.cache.rename(from_relation, to_relation) return "" @@ -474,12 +474,12 @@ def cache_renamed( @abc.abstractmethod def date_function(cls) -> str: """Get the date function used by this adapter's database.""" - raise NotImplementedException("`date_function` is not implemented for this adapter!") + raise 
NotImplementedError("`date_function` is not implemented for this adapter!") @classmethod @abc.abstractmethod def is_cancelable(cls) -> bool: - raise NotImplementedException("`is_cancelable` is not implemented for this adapter!") + raise NotImplementedError("`is_cancelable` is not implemented for this adapter!") ### # Abstract methods about schemas @@ -487,7 +487,7 @@ def is_cancelable(cls) -> bool: @abc.abstractmethod def list_schemas(self, database: str) -> List[str]: """Get a list of existing schemas in database""" - raise NotImplementedException("`list_schemas` is not implemented for this adapter!") + raise NotImplementedError("`list_schemas` is not implemented for this adapter!") @available.parse(lambda *a, **k: False) def check_schema_exists(self, database: str, schema: str) -> bool: @@ -510,13 +510,13 @@ def drop_relation(self, relation: BaseRelation) -> None: *Implementors must call self.cache.drop() to preserve cache state!* """ - raise NotImplementedException("`drop_relation` is not implemented for this adapter!") + raise NotImplementedError("`drop_relation` is not implemented for this adapter!") @abc.abstractmethod @available.parse_none def truncate_relation(self, relation: BaseRelation) -> None: """Truncate the given relation.""" - raise NotImplementedException("`truncate_relation` is not implemented for this adapter!") + raise NotImplementedError("`truncate_relation` is not implemented for this adapter!") @abc.abstractmethod @available.parse_none @@ -525,15 +525,13 @@ def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation Implementors must call self.cache.rename() to preserve cache state. 
""" - raise NotImplementedException("`rename_relation` is not implemented for this adapter!") + raise NotImplementedError("`rename_relation` is not implemented for this adapter!") @abc.abstractmethod @available.parse_list def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]: """Get a list of the columns in the given Relation.""" - raise NotImplementedException( - "`get_columns_in_relation` is not implemented for this adapter!" - ) + raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!") @available.deprecated("get_columns_in_relation", lambda *a, **k: []) def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]: @@ -555,7 +553,7 @@ def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None :param self.Relation current: A relation that currently exists in the database with columns of unspecified types. """ - raise NotImplementedException( + raise NotImplementedError( "`expand_target_column_types` is not implemented for this adapter!" ) @@ -570,7 +568,7 @@ def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[ :return: The relations in schema :rtype: List[self.Relation] """ - raise NotImplementedException( + raise NotImplementedError( "`list_relations_without_caching` is not implemented for this adapter!" ) @@ -612,7 +610,7 @@ def get_missing_columns( to_relation. """ if not isinstance(from_relation, self.Relation): - raise InvalidMacroArgType( + raise MacroArgTypeError( method_name="get_missing_columns", arg_name="from_relation", got_value=from_relation, @@ -620,7 +618,7 @@ def get_missing_columns( ) if not isinstance(to_relation, self.Relation): - raise InvalidMacroArgType( + raise MacroArgTypeError( method_name="get_missing_columns", arg_name="to_relation", got_value=to_relation, @@ -641,11 +639,11 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None: expected columns. 
:param Relation relation: The relation to check - :raises CompilationException: If the columns are + :raises InvalidMacroArgType: If the columns are incorrect. """ if not isinstance(relation, self.Relation): - raise InvalidMacroArgType( + raise MacroArgTypeError( method_name="valid_snapshot_target", arg_name="relation", got_value=relation, @@ -666,16 +664,16 @@ def valid_snapshot_target(self, relation: BaseRelation) -> None: if missing: if extra: - raise SnapshotTargetIncomplete(extra, missing) + raise SnapshotTargetIncompleteError(extra, missing) else: - raise SnapshotTargetNotSnapshotTable(missing) + raise SnapshotTargetNotSnapshotTableError(missing) @available.parse_none def expand_target_column_types( self, from_relation: BaseRelation, to_relation: BaseRelation ) -> None: if not isinstance(from_relation, self.Relation): - raise InvalidMacroArgType( + raise MacroArgTypeError( method_name="expand_target_column_types", arg_name="from_relation", got_value=from_relation, @@ -683,7 +681,7 @@ def expand_target_column_types( ) if not isinstance(to_relation, self.Relation): - raise InvalidMacroArgType( + raise MacroArgTypeError( method_name="expand_target_column_types", arg_name="to_relation", got_value=to_relation, @@ -765,7 +763,7 @@ def get_relation(self, database: str, schema: str, identifier: str) -> Optional[ "schema": schema, "database": database, } - raise RelationReturnedMultipleResults(kwargs, matches) + raise RelationReturnedMultipleResultsError(kwargs, matches) elif matches: return matches[0] @@ -787,20 +785,20 @@ def already_exists(self, schema: str, name: str) -> bool: @available.parse_none def create_schema(self, relation: BaseRelation): """Create the given schema if it does not exist.""" - raise NotImplementedException("`create_schema` is not implemented for this adapter!") + raise NotImplementedError("`create_schema` is not implemented for this adapter!") @abc.abstractmethod @available.parse_none def drop_schema(self, relation: BaseRelation): """Drop 
the given schema (and everything in it) if it exists.""" - raise NotImplementedException("`drop_schema` is not implemented for this adapter!") + raise NotImplementedError("`drop_schema` is not implemented for this adapter!") @available @classmethod @abc.abstractmethod def quote(cls, identifier: str) -> str: """Quote the given identifier, as appropriate for the database.""" - raise NotImplementedException("`quote` is not implemented for this adapter!") + raise NotImplementedError("`quote` is not implemented for this adapter!") @available def quote_as_configured(self, identifier: str, quote_key: str) -> str: @@ -829,7 +827,7 @@ def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: elif quote_config is None: pass else: - raise InvalidQuoteConfigType(quote_config) + raise QuoteConfigTypeError(quote_config) if quote_columns: return self.quote(column) @@ -850,7 +848,7 @@ def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. :return: The name of the type in the database """ - raise NotImplementedException("`convert_text_type` is not implemented for this adapter!") + raise NotImplementedError("`convert_text_type` is not implemented for this adapter!") @classmethod @abc.abstractmethod @@ -862,7 +860,7 @@ def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. :return: The name of the type in the database """ - raise NotImplementedException("`convert_number_type` is not implemented for this adapter!") + raise NotImplementedError("`convert_number_type` is not implemented for this adapter!") @classmethod @abc.abstractmethod @@ -874,9 +872,7 @@ def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. 
:return: The name of the type in the database """ - raise NotImplementedException( - "`convert_boolean_type` is not implemented for this adapter!" - ) + raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!") @classmethod @abc.abstractmethod @@ -888,9 +884,7 @@ def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. :return: The name of the type in the database """ - raise NotImplementedException( - "`convert_datetime_type` is not implemented for this adapter!" - ) + raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!") @classmethod @abc.abstractmethod @@ -902,7 +896,7 @@ def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. :return: The name of the type in the database """ - raise NotImplementedException("`convert_date_type` is not implemented for this adapter!") + raise NotImplementedError("`convert_date_type` is not implemented for this adapter!") @classmethod @abc.abstractmethod @@ -914,7 +908,7 @@ def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: :param col_idx: The index into the agate table for the column. :return: The name of the type in the database """ - raise NotImplementedException("`convert_time_type` is not implemented for this adapter!") + raise NotImplementedError("`convert_time_type` is not implemented for this adapter!") @available @classmethod @@ -981,7 +975,7 @@ def execute_macro( else: package_name = 'the "{}" package'.format(project) - raise RuntimeException( + raise DbtRuntimeError( 'dbt could not find a macro with the name "{}" in {}'.format( macro_name, package_name ) @@ -1079,7 +1073,7 @@ def calculate_freshness( # now we have a 1-row table of the maximum `loaded_at_field` value and # the current time according to the db. 
if len(table) != 1 or len(table[0]) != 2: - raise InvalidMacroResult(FRESHNESS_MACRO_NAME, table) + raise MacroResultError(FRESHNESS_MACRO_NAME, table) if table[0][0] is None: # no records in the table, so really the max_loaded_at was # infinitely long ago. Just call it 0:00 January 1 year UTC @@ -1156,7 +1150,7 @@ def string_add_sql( elif location == "prepend": return f"'{value}' || {add_to}" else: - raise RuntimeException(f'Got an unexpected location value of "{location}"') + raise DbtRuntimeError(f'Got an unexpected location value of "{location}"') def get_rows_different_sql( self, @@ -1214,7 +1208,7 @@ def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterRe return self.generate_python_submission_response(submission_result) def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse: - raise NotImplementedException( + raise NotImplementedError( "Your adapter need to implement generate_python_submission_response" ) @@ -1238,7 +1232,7 @@ def get_incremental_strategy_macro(self, model_context, strategy: str): valid_strategies.append("default") builtin_strategies = self.builtin_incremental_strategies() if strategy in builtin_strategies and strategy not in valid_strategies: - raise RuntimeException( + raise DbtRuntimeError( f"The incremental strategy '{strategy}' is not valid for this adapter" ) @@ -1246,7 +1240,7 @@ def get_incremental_strategy_macro(self, model_context, strategy: str): macro_name = f"get_incremental_{strategy}_sql" # The model_context should have MacroGenerator callable objects for all macros if macro_name not in model_context: - raise RuntimeException( + raise DbtRuntimeError( 'dbt could not find an incremental strategy macro with the name "{}" in {}'.format( macro_name, self.config.project_name ) diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py index f0d348d8f57..f1a77f89b9d 100644 --- a/core/dbt/adapters/base/plugin.py +++ b/core/dbt/adapters/base/plugin.py @@ 
-1,7 +1,7 @@ from typing import List, Optional, Type from dbt.adapters.base import Credentials -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.adapters.protocol import AdapterProtocol @@ -11,7 +11,7 @@ def project_name_from_path(include_path: str) -> str: partial = Project.partial_load(include_path) if partial.project_name is None: - raise CompilationException(f"Invalid project at {include_path}: name not set!") + raise CompilationError(f"Invalid project at {include_path}: name not set!") return partial.project_name diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py index dd88fdb2d41..bfacd2aee8c 100644 --- a/core/dbt/adapters/base/query_headers.py +++ b/core/dbt/adapters/base/query_headers.py @@ -7,7 +7,7 @@ from dbt.contracts.connection import AdapterRequiredConfig, QueryComment from dbt.contracts.graph.nodes import ResultNode from dbt.contracts.graph.manifest import Manifest -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError class NodeWrapper: @@ -48,7 +48,7 @@ def set(self, comment: Optional[str], append: bool): if isinstance(comment, str) and "*/" in comment: # tell the user "no" so they don't hurt themselves by writing # garbage - raise RuntimeException(f'query comment contains illegal value "*/": {comment}') + raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}') self.query_comment = comment self.append = append diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py index 5bc0c56b264..13f64c01742 100644 --- a/core/dbt/adapters/base/relation.py +++ b/core/dbt/adapters/base/relation.py @@ -11,7 +11,11 @@ Policy, Path, ) -from dbt.exceptions import ApproximateMatch, InternalException, MultipleDatabasesNotAllowed +from dbt.exceptions import ( + ApproximateMatchError, + DbtInternalError, + MultipleDatabasesNotAllowedError, +) from dbt.node_types import NodeType from 
dbt.utils import filter_null_values, deep_merge, classproperty @@ -83,7 +87,7 @@ def matches( if not search: # nothing was passed in - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( "Tried to match relation, but no search path was passed!" ) @@ -100,7 +104,7 @@ def matches( if approximate_match and not exact_match: target = self.create(database=database, schema=schema, identifier=identifier) - raise ApproximateMatch(target, self) + raise ApproximateMatchError(target, self) return exact_match @@ -249,14 +253,14 @@ def create_from( ) -> Self: if node.resource_type == NodeType.Source: if not isinstance(node, SourceDefinition): - raise InternalException( + raise DbtInternalError( "type mismatch, expected SourceDefinition but got {}".format(type(node)) ) return cls.create_from_source(node, **kwargs) else: # Can't use ManifestNode here because of parameterized generics if not isinstance(node, (ParsedNode)): - raise InternalException( + raise DbtInternalError( f"type mismatch, expected ManifestNode but got {type(node)}" ) return cls.create_from_node(config, node, **kwargs) @@ -354,7 +358,7 @@ class InformationSchema(BaseRelation): def __post_init__(self): if not isinstance(self.information_schema_view, (type(None), str)): - raise dbt.exceptions.CompilationException( + raise dbt.exceptions.CompilationError( "Got an invalid name: {}".format(self.information_schema_view) ) @@ -438,7 +442,7 @@ def flatten(self, allow_multiple_databases: bool = False): if not allow_multiple_databases: seen = {r.database.lower() for r in self if r.database} if len(seen) > 1: - raise MultipleDatabasesNotAllowed(seen) + raise MultipleDatabasesNotAllowedError(seen) for information_schema_name, schema in self.search(): path = {"database": information_schema_name.database, "schema": schema} diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py index 430c79d3b3a..24a0e469df1 100644 --- a/core/dbt/adapters/cache.py +++ b/core/dbt/adapters/cache.py @@ 
-9,11 +9,11 @@ _ReferenceKey, ) from dbt.exceptions import ( - DependentLinkNotCached, - NewNameAlreadyInCache, - NoneRelationFound, - ReferencedLinkNotCached, - TruncatedModelNameCausedCollision, + DependentLinkNotCachedError, + NewNameAlreadyInCacheError, + NoneRelationFoundError, + ReferencedLinkNotCachedError, + TruncatedModelNameCausedCollisionError, ) from dbt.events.functions import fire_event, fire_event_if from dbt.events.types import CacheAction, CacheDumpGraph @@ -141,7 +141,7 @@ def rename_key(self, old_key, new_key): :raises InternalError: If the new key already exists. """ if new_key in self.referenced_by: - raise NewNameAlreadyInCache(old_key, new_key) + raise NewNameAlreadyInCacheError(old_key, new_key) if old_key not in self.referenced_by: return @@ -257,11 +257,11 @@ def _add_link(self, referenced_key, dependent_key): if referenced is None: return if referenced is None: - raise ReferencedLinkNotCached(referenced_key) + raise ReferencedLinkNotCachedError(referenced_key) dependent = self.relations.get(dependent_key) if dependent is None: - raise DependentLinkNotCached(dependent_key) + raise DependentLinkNotCachedError(dependent_key) assert dependent is not None # we just raised! @@ -426,7 +426,7 @@ def _check_rename_constraints(self, old_key, new_key): if new_key in self.relations: # Tell user when collision caused by model names truncated during # materialization. 
- raise TruncatedModelNameCausedCollision(new_key, self.relations) + raise TruncatedModelNameCausedCollisionError(new_key, self.relations) if old_key not in self.relations: fire_event( @@ -490,7 +490,7 @@ def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[ ] if None in results: - raise NoneRelationFound() + raise NoneRelationFoundError() return results def clear(self): diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py index 16a0a3ffcd1..38c6bcb7894 100644 --- a/core/dbt/adapters/factory.py +++ b/core/dbt/adapters/factory.py @@ -10,7 +10,7 @@ from dbt.contracts.connection import AdapterRequiredConfig, Credentials from dbt.events.functions import fire_event from dbt.events.types import AdapterImportError, PluginLoadError -from dbt.exceptions import InternalException, RuntimeException +from dbt.exceptions import DbtInternalError, DbtRuntimeError from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME @@ -34,7 +34,7 @@ def get_plugin_by_name(self, name: str) -> AdapterPlugin: names = ", ".join(self.plugins.keys()) message = f"Invalid adapter type {name}! Must be one of {names}" - raise RuntimeException(message) + raise DbtRuntimeError(message) def get_adapter_class_by_name(self, name: str) -> Type[Adapter]: plugin = self.get_plugin_by_name(name) @@ -60,7 +60,7 @@ def load_plugin(self, name: str) -> Type[Credentials]: # the user about it via a runtime error if exc.name == "dbt.adapters." + name: fire_event(AdapterImportError(exc=str(exc))) - raise RuntimeException(f"Could not find adapter type {name}!") + raise DbtRuntimeError(f"Could not find adapter type {name}!") # otherwise, the error had to have come from some underlying # library. Log the stack trace. 
@@ -70,7 +70,7 @@ def load_plugin(self, name: str) -> Type[Credentials]: plugin_type = plugin.adapter.type() if plugin_type != name: - raise RuntimeException( + raise DbtRuntimeError( f"Expected to find adapter with type named {name}, got " f"adapter with type {plugin_type}" ) @@ -132,7 +132,7 @@ def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]: try: plugin = self.plugins[plugin_name] except KeyError: - raise InternalException(f"No plugin found for {plugin_name}") from None + raise DbtInternalError(f"No plugin found for {plugin_name}") from None plugins.append(plugin) seen.add(plugin_name) for dep in plugin.dependencies: @@ -151,7 +151,7 @@ def get_include_paths(self, name: Optional[str]) -> List[Path]: try: path = self.packages[package_name] except KeyError: - raise InternalException(f"No internal package listing found for {package_name}") + raise DbtInternalError(f"No internal package listing found for {package_name}") paths.append(path) return paths diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py index bc1a562ad86..e13cf12e319 100644 --- a/core/dbt/adapters/sql/connections.py +++ b/core/dbt/adapters/sql/connections.py @@ -27,9 +27,7 @@ class SQLConnectionManager(BaseConnectionManager): @abc.abstractmethod def cancel(self, connection: Connection): """Cancel the given connection.""" - raise dbt.exceptions.NotImplementedException( - "`cancel` is not implemented for this adapter!" - ) + raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!") def cancel_open(self) -> List[str]: names = [] @@ -95,7 +93,7 @@ def add_query( @abc.abstractmethod def get_response(cls, cursor: Any) -> AdapterResponse: """Get the status of the cursor.""" - raise dbt.exceptions.NotImplementedException( + raise dbt.exceptions.NotImplementedError( "`get_response` is not implemented for this adapter!" 
) @@ -151,7 +149,7 @@ def add_commit_query(self): def begin(self): connection = self.get_thread_connection() if connection.transaction_open is True: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( 'Tried to begin a new transaction on connection "{}", but ' "it already had one open!".format(connection.name) ) @@ -164,7 +162,7 @@ def begin(self): def commit(self): connection = self.get_thread_connection() if connection.transaction_open is False: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( 'Tried to commit transaction on connection "{}", but ' "it does not have one open!".format(connection.name) ) diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py index 4606b046f54..fc787f0c834 100644 --- a/core/dbt/adapters/sql/impl.py +++ b/core/dbt/adapters/sql/impl.py @@ -2,7 +2,7 @@ from typing import Any, Optional, Tuple, Type, List from dbt.contracts.connection import Connection -from dbt.exceptions import RelationTypeNull +from dbt.exceptions import RelationTypeNullError from dbt.adapters.base import BaseAdapter, available from dbt.adapters.cache import _make_ref_key_msg from dbt.adapters.sql import SQLConnectionManager @@ -131,7 +131,7 @@ def alter_column_type(self, relation, column_name, new_column_type) -> None: def drop_relation(self, relation): if relation.type is None: - raise RelationTypeNull(relation) + raise RelationTypeNullError(relation) self.cache_dropped(relation) self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation}) diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py index fa74a317649..1ada0a6234d 100644 --- a/core/dbt/clients/_jinja_blocks.py +++ b/core/dbt/clients/_jinja_blocks.py @@ -2,13 +2,13 @@ from collections import namedtuple from dbt.exceptions import ( - BlockDefinitionNotAtTop, - InternalException, - MissingCloseTag, - MissingControlFlowStartTag, - NestedTags, - UnexpectedControlFlowEndTag, - 
UnexpectedMacroEOF, + BlockDefinitionNotAtTopError, + DbtInternalError, + MissingCloseTagError, + MissingControlFlowStartTagError, + NestedTagsError, + UnexpectedControlFlowEndTagError, + UnexpectedMacroEOFError, ) @@ -147,7 +147,7 @@ def _first_match(self, *patterns, **kwargs): def _expect_match(self, expected_name, *patterns, **kwargs): match = self._first_match(*patterns, **kwargs) if match is None: - raise UnexpectedMacroEOF(expected_name, self.data[self.pos :]) + raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :]) return match def handle_expr(self, match): @@ -261,7 +261,7 @@ def find_tags(self): elif block_type_name is not None: yield self.handle_tag(match) else: - raise InternalException( + raise DbtInternalError( "Invalid regex match in next_block, expected block start, " "expr start, or comment start" ) @@ -317,16 +317,16 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True): found = self.stack.pop() else: expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name] - raise UnexpectedControlFlowEndTag(tag, expected, self.tag_parser) + raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser) expected = _CONTROL_FLOW_TAGS[found] if expected != tag.block_type_name: - raise MissingControlFlowStartTag(tag, expected, self.tag_parser) + raise MissingControlFlowStartTagError(tag, expected, self.tag_parser) if tag.block_type_name in allowed_blocks: if self.stack: - raise BlockDefinitionNotAtTop(self.tag_parser, tag.start) + raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start) if self.current is not None: - raise NestedTags(outer=self.current, inner=tag) + raise NestedTagsError(outer=self.current, inner=tag) if collect_raw_data: raw_data = self.data[self.last_position : tag.start] self.last_position = tag.start @@ -347,7 +347,7 @@ def find_blocks(self, allowed_blocks=None, collect_raw_data=True): if self.current: linecount = self.data[: self.current.end].count("\n") + 1 - raise 
MissingCloseTag(self.current.block_type_name, linecount) + raise MissingCloseTagError(self.current.block_type_name, linecount) if collect_raw_data: raw_data = self.data[self.last_position :] diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py index 11492a9faef..1d69a2bd17f 100644 --- a/core/dbt/clients/agate_helper.py +++ b/core/dbt/clients/agate_helper.py @@ -7,7 +7,7 @@ import dbt.utils from typing import Iterable, List, Dict, Union, Optional, Any -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError BOM = BOM_UTF8.decode("utf-8") # '\ufeff' @@ -168,7 +168,7 @@ def __setitem__(self, key, value): return elif not isinstance(value, type(existing_type)): # actual type mismatch! - raise RuntimeException( + raise DbtRuntimeError( f"Tables contain columns with the same names ({key}), " f"but different types ({value} vs {existing_type})" ) diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py index 4ddbb1969ee..d6cb3f3870c 100644 --- a/core/dbt/clients/git.py +++ b/core/dbt/clients/git.py @@ -16,8 +16,8 @@ CommandResultError, GitCheckoutError, GitCloningError, - GitCloningProblem, - RuntimeException, + UnknownGitCloningProblemError, + DbtRuntimeError, ) from packaging import version @@ -134,7 +134,7 @@ def clone_and_checkout( err = exc.stderr exists = re.match("fatal: destination path '(.+)' already exists", err) if not exists: - raise GitCloningProblem(repo) + raise UnknownGitCloningProblemError(repo) directory = None start_sha = None @@ -144,7 +144,7 @@ def clone_and_checkout( else: matches = re.match("Cloning into '(.+)'", err.decode("utf-8")) if matches is None: - raise RuntimeException(f'Error cloning {repo} - never saw "Cloning into ..." from git') + raise DbtRuntimeError(f'Error cloning {repo} - never saw "Cloning into ..." 
from git') directory = matches.group(1) fire_event(GitProgressPullingNewDependency(dir=directory)) full_path = os.path.join(cwd, directory) diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py index c1b8865e33e..e9dcb45017b 100644 --- a/core/dbt/clients/jinja.py +++ b/core/dbt/clients/jinja.py @@ -28,17 +28,17 @@ from dbt.contracts.graph.nodes import GenericTestNode from dbt.exceptions import ( - CaughtMacroException, - CaughtMacroExceptionWithNode, - CompilationException, - InternalException, - InvalidMaterializationArg, - JinjaRenderingException, + CaughtMacroError, + CaughtMacroErrorWithNodeError, + CompilationError, + DbtInternalError, + MaterializationArgError, + JinjaRenderingError, MacroReturn, - MaterializtionMacroNotUsed, - NoSupportedLanguagesFound, - UndefinedCompilation, - UndefinedMacroException, + MaterializtionMacroNotUsedError, + NoSupportedLanguagesFoundError, + UndefinedCompilationError, + UndefinedMacroError, ) from dbt import flags from dbt.node_types import ModelLanguage @@ -161,9 +161,9 @@ def quoted_native_concat(nodes): except (ValueError, SyntaxError, MemoryError): result = raw if isinstance(raw, BoolMarker) and not isinstance(result, bool): - raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'bool'") + raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'bool'") if isinstance(raw, NumberMarker) and not _is_number(result): - raise JinjaRenderingException(f"Could not convert value '{raw!s}' into type 'number'") + raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'number'") return result @@ -241,12 +241,12 @@ def exception_handler(self) -> Iterator[None]: try: yield except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e: - raise CaughtMacroException(e) + raise CaughtMacroError(e) def call_macro(self, *args, **kwargs): # called from __call__ methods if self.context is None: - raise InternalException("Context is still None in call_macro!") + raise 
DbtInternalError("Context is still None in call_macro!") assert self.context is not None macro = self.get_macro() @@ -273,7 +273,7 @@ def push(self, name): def pop(self, name): got = self.call_stack.pop() if got != name: - raise InternalException(f"popped {got}, expected {name}") + raise DbtInternalError(f"popped {got}, expected {name}") class MacroGenerator(BaseMacroGenerator): @@ -300,8 +300,8 @@ def exception_handler(self) -> Iterator[None]: try: yield except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e: - raise CaughtMacroExceptionWithNode(exc=e, node=self.macro) - except CompilationException as e: + raise CaughtMacroErrorWithNodeError(exc=e, node=self.macro) + except CompilationError as e: e.stack.append(self.macro) raise e @@ -380,7 +380,7 @@ def parse(self, parser): node.defaults.append(languages) else: - raise InvalidMaterializationArg(materialization_name, target.name) + raise MaterializationArgError(materialization_name, target.name) if SUPPORTED_LANG_ARG not in node.args: node.args.append(SUPPORTED_LANG_ARG) @@ -455,7 +455,7 @@ def __call__(self, *args, **kwargs): return self def __reduce__(self): - raise UndefinedCompilation(name=self.name, node=node) + raise UndefinedCompilationError(name=self.name, node=node) return Undefined @@ -513,10 +513,10 @@ def catch_jinja(node=None) -> Iterator[None]: yield except jinja2.exceptions.TemplateSyntaxError as e: e.translated = False - raise CompilationException(str(e), node) from e + raise CompilationError(str(e), node) from e except jinja2.exceptions.UndefinedError as e: - raise UndefinedMacroException(str(e), node) from e - except CompilationException as exc: + raise UndefinedMacroError(str(e), node) from e + except CompilationError as exc: exc.add_node(node) raise @@ -655,13 +655,13 @@ def _convert_function(value: Any, keypath: Tuple[Union[str, int], ...]) -> Any: def get_supported_languages(node: jinja2.nodes.Macro) -> List[ModelLanguage]: if "materialization" not in node.name: - raise 
MaterializtionMacroNotUsed(node=node) + raise MaterializtionMacroNotUsedError(node=node) no_kwargs = not node.defaults no_langs_found = SUPPORTED_LANG_ARG not in node.args if no_kwargs or no_langs_found: - raise NoSupportedLanguagesFound(node=node) + raise NoSupportedLanguagesFoundError(node=node) lang_idx = node.args.index(SUPPORTED_LANG_ARG) # indexing defaults from the end diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py index d71211cea6e..47790166ae5 100644 --- a/core/dbt/clients/jinja_static.py +++ b/core/dbt/clients/jinja_static.py @@ -1,6 +1,6 @@ import jinja2 from dbt.clients.jinja import get_environment -from dbt.exceptions import MacroNamespaceNotString, MacroNameNotString +from dbt.exceptions import MacroNamespaceNotStringError, MacroNameNotStringError def statically_extract_macro_calls(string, ctx, db_wrapper=None): @@ -117,14 +117,14 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper): func_name = kwarg.value.value possible_macro_calls.append(func_name) else: - raise MacroNameNotString(kwarg_value=kwarg.value.value) + raise MacroNameNotStringError(kwarg_value=kwarg.value.value) elif kwarg.key == "macro_namespace": # This will remain to enable static resolution kwarg_type = type(kwarg.value).__name__ if kwarg_type == "Const": macro_namespace = kwarg.value.value else: - raise MacroNamespaceNotString(kwarg_type) + raise MacroNamespaceNotStringError(kwarg_type) # positional arguments if packages_arg: diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py index 0382dcb98e8..6c72fadea52 100644 --- a/core/dbt/clients/system.py +++ b/core/dbt/clients/system.py @@ -412,7 +412,7 @@ def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: _handle_posix_error(exc, cwd, cmd) # this should not be reachable, raise _something_ at least! 
- raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( "Unhandled exception in _interpret_oserror: {}".format(exc) ) diff --git a/core/dbt/clients/yaml_helper.py b/core/dbt/clients/yaml_helper.py index bc0ada41ebb..d5a29b0309f 100644 --- a/core/dbt/clients/yaml_helper.py +++ b/core/dbt/clients/yaml_helper.py @@ -60,4 +60,4 @@ def load_yaml_text(contents, path=None): else: error = str(e) - raise dbt.exceptions.ValidationException(error) + raise dbt.exceptions.DbtValidationError(error) diff --git a/core/dbt/compilation.py b/core/dbt/compilation.py index 4ae78fd3485..19e603b6312 100644 --- a/core/dbt/compilation.py +++ b/core/dbt/compilation.py @@ -21,9 +21,9 @@ SeedNode, ) from dbt.exceptions import ( - GraphDependencyNotFound, - InternalException, - RuntimeException, + GraphDependencyNotFoundError, + DbtInternalError, + DbtRuntimeError, ) from dbt.graph import Graph from dbt.events.functions import fire_event @@ -257,7 +257,7 @@ def _recursively_prepend_ctes( inserting CTEs into the SQL. """ if model.compiled_code is None: - raise RuntimeException("Cannot inject ctes into an unparsed node", model) + raise DbtRuntimeError("Cannot inject ctes into an unparsed node", model) if model.extra_ctes_injected: return (model, model.extra_ctes) @@ -278,7 +278,7 @@ def _recursively_prepend_ctes( # ephemeral model. 
for cte in model.extra_ctes: if cte.id not in manifest.nodes: - raise InternalException( + raise DbtInternalError( f"During compilation, found a cte reference that " f"could not be resolved: {cte.id}" ) @@ -286,7 +286,7 @@ def _recursively_prepend_ctes( assert not isinstance(cte_model, SeedNode) if not cte_model.is_ephemeral_model: - raise InternalException(f"{cte.id} is not ephemeral") + raise DbtInternalError(f"{cte.id} is not ephemeral") # This model has already been compiled, so it's been # through here before @@ -399,7 +399,7 @@ def link_node(self, linker: Linker, node: GraphMemberNode, manifest: Manifest): elif dependency in manifest.metrics: linker.dependency(node.unique_id, (manifest.metrics[dependency].unique_id)) else: - raise GraphDependencyNotFound(node, dependency) + raise GraphDependencyNotFoundError(node, dependency) def link_graph(self, linker: Linker, manifest: Manifest, add_test_edges: bool = False): for source in manifest.sources.values(): diff --git a/core/dbt/config/profile.py b/core/dbt/config/profile.py index e8bf85dbd27..156c41445e9 100644 --- a/core/dbt/config/profile.py +++ b/core/dbt/config/profile.py @@ -10,12 +10,12 @@ from dbt.contracts.connection import Credentials, HasCredentials from dbt.contracts.project import ProfileConfig, UserConfig from dbt.exceptions import ( - CompilationException, + CompilationError, DbtProfileError, DbtProjectError, - ValidationException, - RuntimeException, - ProfileConfigInvalid, + DbtValidationError, + DbtRuntimeError, + ProfileConfigError, ) from dbt.events.types import MissingProfileTarget from dbt.events.functions import fire_event @@ -60,9 +60,9 @@ def read_profile(profiles_dir: str) -> Dict[str, Any]: msg = f"The profiles.yml file at {path} is empty" raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg)) return yaml_content - except ValidationException as e: + except DbtValidationError as e: msg = INVALID_PROFILE_MESSAGE.format(error_string=e) - raise ValidationException(msg) from 
e + raise DbtValidationError(msg) from e return {} @@ -75,7 +75,7 @@ def read_user_config(directory: str) -> UserConfig: if user_config is not None: UserConfig.validate(user_config) return UserConfig.from_dict(user_config) - except (RuntimeException, ValidationError): + except (DbtRuntimeError, ValidationError): pass return UserConfig() @@ -158,7 +158,7 @@ def validate(self): dct = self.to_profile_info(serialize_credentials=True) ProfileConfig.validate(dct) except ValidationError as exc: - raise ProfileConfigInvalid(exc) from exc + raise ProfileConfigError(exc) from exc @staticmethod def _credentials_from_profile( @@ -182,8 +182,8 @@ def _credentials_from_profile( data = cls.translate_aliases(profile) cls.validate(data) credentials = cls.from_dict(data) - except (RuntimeException, ValidationError) as e: - msg = str(e) if isinstance(e, RuntimeException) else e.message + except (DbtRuntimeError, ValidationError) as e: + msg = str(e) if isinstance(e, DbtRuntimeError) else e.message raise DbtProfileError( 'Credentials in profile "{}", target "{}" invalid: {}'.format( profile_name, target_name, msg @@ -299,7 +299,7 @@ def render_profile( try: profile_data = renderer.render_data(raw_profile_data) - except CompilationException as exc: + except CompilationError as exc: raise DbtProfileError(str(exc)) from exc return target_name, profile_data diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py index 69c6b79866c..7f0398f53c6 100644 --- a/core/dbt/config/project.py +++ b/core/dbt/config/project.py @@ -21,10 +21,10 @@ from dbt.contracts.connection import QueryComment from dbt.exceptions import ( DbtProjectError, - SemverException, - ProjectContractBroken, - ProjectContractInvalid, - RuntimeException, + SemverError, + ProjectContractBrokenError, + ProjectContractError, + DbtRuntimeError, ) from dbt.graph import SelectionSpec from dbt.helper_types import NoValue @@ -219,7 +219,7 @@ def _get_required_version( try: dbt_version = _parse_versions(dbt_raw_version) - 
except SemverException as e: + except SemverError as e: raise DbtProjectError(str(e)) from e if verify_version: @@ -325,7 +325,7 @@ def create_project(self, rendered: RenderComponents) -> "Project": ProjectContract.validate(rendered.project_dict) cfg = ProjectContract.from_dict(rendered.project_dict) except ValidationError as e: - raise ProjectContractInvalid(e) from e + raise ProjectContractError(e) from e # name/version are required in the Project definition, so we can assume # they are present name = cfg.name @@ -642,7 +642,7 @@ def validate(self): try: ProjectContract.validate(self.to_project_config()) except ValidationError as e: - raise ProjectContractBroken(e) from e + raise ProjectContractBrokenError(e) from e @classmethod def partial_load(cls, project_root: str, *, verify_version: bool = False) -> PartialProject: @@ -667,7 +667,7 @@ def hashed_name(self): def get_selector(self, name: str) -> Union[SelectionSpec, bool]: if name not in self.selectors: - raise RuntimeException( + raise DbtRuntimeError( f"Could not find selector named {name}, expected one of {list(self.selectors)}" ) return self.selectors[name]["definition"] diff --git a/core/dbt/config/renderer.py b/core/dbt/config/renderer.py index 434e30666a4..68958dbbce5 100644 --- a/core/dbt/config/renderer.py +++ b/core/dbt/config/renderer.py @@ -8,7 +8,7 @@ from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER from dbt.context.base import BaseContext from dbt.contracts.connection import HasCredentials -from dbt.exceptions import DbtProjectError, CompilationException, RecursionException +from dbt.exceptions import DbtProjectError, CompilationError, RecursionError from dbt.utils import deep_map_render @@ -40,14 +40,14 @@ def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any: try: with catch_jinja(): return get_rendered(value, self.context, native=True) - except CompilationException as exc: + except CompilationError as exc: msg = f"Could not render {value}: {exc.msg}" - 
raise CompilationException(msg) from exc + raise CompilationError(msg) from exc def render_data(self, data: Dict[str, Any]) -> Dict[str, Any]: try: return deep_map_render(self.render_entry, data) - except RecursionException: + except RecursionError: raise DbtProjectError( f"Cycle detected: {self.name} input has a reference to itself", project=data ) diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py index 8b1b30f383b..b0b74b9a222 100644 --- a/core/dbt/config/runtime.py +++ b/core/dbt/config/runtime.py @@ -25,11 +25,11 @@ from dbt.contracts.relation import ComponentName from dbt.dataclass_schema import ValidationError from dbt.exceptions import ( - ConfigContractBroken, + ConfigContractBrokenError, DbtProjectError, - NonUniquePackageName, - RuntimeException, - UninstalledPackagesFound, + NonUniquePackageNameError, + DbtRuntimeError, + UninstalledPackagesFoundError, ) from dbt.events.functions import warn_or_error from dbt.events.types import UnusedResourceConfigPath @@ -187,7 +187,7 @@ def validate(self): try: Configuration.validate(self.serialize()) except ValidationError as e: - raise ConfigContractBroken(e) from e + raise ConfigContractBrokenError(e) from e @classmethod def _get_rendered_profile( @@ -258,7 +258,7 @@ def from_args(cls, args: Any) -> "RuntimeConfig": :param args: The arguments as parsed from the cli. :raises DbtProjectError: If the project is invalid or missing. :raises DbtProfileError: If the profile is invalid or missing. - :raises ValidationException: If the cli variables are invalid. + :raises DbtValidationError: If the cli variables are invalid. 
""" project, profile = cls.collect_parts(args) @@ -353,7 +353,7 @@ def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]: count_packages_specified = len(self.packages.packages) # type: ignore count_packages_installed = len(tuple(self._get_project_directories())) if count_packages_specified > count_packages_installed: - raise UninstalledPackagesFound( + raise UninstalledPackagesFoundError( count_packages_specified, count_packages_installed, self.packages_install_path, @@ -361,7 +361,7 @@ def load_dependencies(self, base_only=False) -> Mapping[str, "RuntimeConfig"]: project_paths = itertools.chain(internal_packages, self._get_project_directories()) for project_name, project in self.load_projects(project_paths): if project_name in all_projects: - raise NonUniquePackageName(project_name) + raise NonUniquePackageNameError(project_name) all_projects[project_name] = project self.dependencies = all_projects return self.dependencies @@ -426,7 +426,7 @@ def to_target_dict(self): def __getattribute__(self, name): if name in {"profile_name", "target_name", "threads"}: - raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!') + raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!') return Profile.__getattribute__(self, name) @@ -453,7 +453,7 @@ def __post_init__(self): def __getattribute__(self, name): # Override __getattribute__ to check that the attribute isn't 'banned'. if name in {"profile_name", "target_name"}: - raise RuntimeException(f'Error: disallowed attribute "{name}" - no profile!') + raise DbtRuntimeError(f'Error: disallowed attribute "{name}" - no profile!') # avoid every attribute access triggering infinite recursion return RuntimeConfig.__getattribute__(self, name) @@ -602,7 +602,7 @@ def from_args(cls: Type[RuntimeConfig], args: Any) -> "RuntimeConfig": :param args: The arguments as parsed from the cli. :raises DbtProjectError: If the project is invalid or missing. 
:raises DbtProfileError: If the profile is invalid or missing. - :raises ValidationException: If the cli variables are invalid. + :raises DbtValidationError: If the cli variables are invalid. """ project, profile = cls.collect_parts(args) diff --git a/core/dbt/config/selectors.py b/core/dbt/config/selectors.py index 193a1bb70a8..e26ee01d316 100644 --- a/core/dbt/config/selectors.py +++ b/core/dbt/config/selectors.py @@ -12,7 +12,7 @@ resolve_path_from_base, ) from dbt.contracts.selection import SelectorFile -from dbt.exceptions import DbtSelectorsError, RuntimeException +from dbt.exceptions import DbtSelectorsError, DbtRuntimeError from dbt.graph import parse_from_selectors_definition, SelectionSpec from dbt.graph.selector_spec import SelectionCriteria @@ -46,7 +46,7 @@ def selectors_from_dict(cls, data: Dict[str, Any]) -> "SelectorConfig": f"yaml-selectors", result_type="invalid_selector", ) from exc - except RuntimeException as exc: + except DbtRuntimeError as exc: raise DbtSelectorsError( f"Could not read selector file data: {exc}", result_type="invalid_selector", @@ -62,7 +62,7 @@ def render_from_dict( ) -> "SelectorConfig": try: rendered = renderer.render_data(data) - except (ValidationError, RuntimeException) as exc: + except (ValidationError, DbtRuntimeError) as exc: raise DbtSelectorsError( f"Could not render selector data: {exc}", result_type="invalid_selector", @@ -77,7 +77,7 @@ def from_path( ) -> "SelectorConfig": try: data = load_yaml_text(load_file_contents(str(path))) - except (ValidationError, RuntimeException) as exc: + except (ValidationError, DbtRuntimeError) as exc: raise DbtSelectorsError( f"Could not read selector file: {exc}", result_type="invalid_selector", diff --git a/core/dbt/config/utils.py b/core/dbt/config/utils.py index 921626ba088..eb379b5d1f7 100644 --- a/core/dbt/config/utils.py +++ b/core/dbt/config/utils.py @@ -9,7 +9,7 @@ from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer from dbt.events.functions import 
fire_event from dbt.events.types import InvalidVarsYAML -from dbt.exceptions import ValidationException, VarsArgNotYamlDict +from dbt.exceptions import DbtValidationError, VarsArgNotYamlDictError def parse_cli_vars(var_string: str) -> Dict[str, Any]: @@ -19,8 +19,8 @@ def parse_cli_vars(var_string: str) -> Dict[str, Any]: if var_type is dict: return cli_vars else: - raise VarsArgNotYamlDict(var_type) - except ValidationException: + raise VarsArgNotYamlDictError(var_type) + except DbtValidationError: fire_event(InvalidVarsYAML()) raise diff --git a/core/dbt/context/base.py b/core/dbt/context/base.py index fc218538bac..edf0895fe31 100644 --- a/core/dbt/context/base.py +++ b/core/dbt/context/base.py @@ -10,12 +10,12 @@ from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER from dbt.contracts.graph.nodes import Resource from dbt.exceptions import ( - DisallowSecretEnvVar, - EnvVarMissing, + SecretEnvVarLocationError, + EnvVarMissingError, MacroReturn, - RequiredVarNotFound, - SetStrictWrongType, - ZipStrictWrongType, + RequiredVarNotFoundError, + SetStrictWrongTypeError, + ZipStrictWrongTypeError, ) from dbt.events.functions import fire_event, get_invocation_id from dbt.events.types import JinjaLogInfo, JinjaLogDebug @@ -153,7 +153,7 @@ def node_name(self): return "" def get_missing_var(self, var_name): - raise RequiredVarNotFound(var_name, self._merged, self._node) + raise RequiredVarNotFoundError(var_name, self._merged, self._node) def has_var(self, var_name: str): return var_name in self._merged @@ -297,7 +297,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: """ return_value = None if var.startswith(SECRET_ENV_PREFIX): - raise DisallowSecretEnvVar(var) + raise SecretEnvVarLocationError(var) if var in os.environ: return_value = os.environ[var] elif default is not None: @@ -312,7 +312,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: return return_value else: - raise EnvVarMissing(var) + raise 
EnvVarMissingError(var) if os.environ.get("DBT_MACRO_DEBUGGING"): @@ -493,7 +493,7 @@ def set_strict(value: Iterable[Any]) -> Set[Any]: try: return set(value) except TypeError as e: - raise SetStrictWrongType(e) + raise SetStrictWrongTypeError(e) @contextmember("zip") @staticmethod @@ -537,7 +537,7 @@ def zip_strict(*args: Iterable[Any]) -> Iterable[Any]: try: return zip(*args) except TypeError as e: - raise ZipStrictWrongType(e) + raise ZipStrictWrongTypeError(e) @contextmember @staticmethod diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py index ca1de35423b..da4132e8046 100644 --- a/core/dbt/context/configured.py +++ b/core/dbt/context/configured.py @@ -8,7 +8,7 @@ from dbt.context.base import contextproperty, contextmember, Var from dbt.context.target import TargetContext -from dbt.exceptions import EnvVarMissing, DisallowSecretEnvVar +from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError class ConfiguredContext(TargetContext): @@ -86,7 +86,7 @@ def var(self) -> ConfiguredVar: def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): - raise DisallowSecretEnvVar(var) + raise SecretEnvVarLocationError(var) if var in os.environ: return_value = os.environ[var] elif default is not None: @@ -104,7 +104,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: return return_value else: - raise EnvVarMissing(var) + raise EnvVarMissingError(var) class MacroResolvingContext(ConfiguredContext): diff --git a/core/dbt/context/context_config.py b/core/dbt/context/context_config.py index 2b0aafe7189..b497887ab45 100644 --- a/core/dbt/context/context_config.py +++ b/core/dbt/context/context_config.py @@ -5,7 +5,7 @@ from dbt.config import RuntimeConfig, Project, IsFQNResource from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from 
dbt.node_types import NodeType from dbt.utils import fqn_search @@ -89,7 +89,7 @@ def get_node_project(self, project_name: str): return self._active_project dependencies = self._active_project.load_dependencies() if project_name not in dependencies: - raise InternalException( + raise DbtInternalError( f"Project name {project_name} not found in dependencies " f"(found {list(dependencies)})" ) @@ -287,14 +287,14 @@ def _add_config_call(cls, config_call_dict, opts: Dict[str, Any]) -> None: elif k in BaseConfig.mergebehavior["update"]: if not isinstance(v, dict): - raise InternalException(f"expected dict, got {v}") + raise DbtInternalError(f"expected dict, got {v}") if k in config_call_dict and isinstance(config_call_dict[k], dict): config_call_dict[k].update(v) else: config_call_dict[k] = v elif k in BaseConfig.mergebehavior["dict_key_append"]: if not isinstance(v, dict): - raise InternalException(f"expected dict, got {v}") + raise DbtInternalError(f"expected dict, got {v}") if k in config_call_dict: # should always be a dict for key, value in v.items(): extend = False diff --git a/core/dbt/context/docs.py b/core/dbt/context/docs.py index 89a652736dd..3d5abf42e11 100644 --- a/core/dbt/context/docs.py +++ b/core/dbt/context/docs.py @@ -1,8 +1,8 @@ from typing import Any, Dict, Union from dbt.exceptions import ( - DocTargetNotFound, - InvalidDocArgs, + DocTargetNotFoundError, + DocArgsError, ) from dbt.config.runtime import RuntimeConfig from dbt.contracts.graph.manifest import Manifest @@ -52,7 +52,7 @@ def doc(self, *args: str) -> str: elif len(args) == 2: doc_package_name, doc_name = args else: - raise InvalidDocArgs(self.node, args) + raise DocArgsError(self.node, args) # Documentation target_doc = self.manifest.resolve_doc( @@ -68,7 +68,7 @@ def doc(self, *args: str) -> str: # TODO CT-211 source_file.add_node(self.node.unique_id) # type: ignore[union-attr] else: - raise DocTargetNotFound( + raise DocTargetNotFoundError( node=self.node, target_doc_name=doc_name, 
target_doc_package=doc_package_name ) diff --git a/core/dbt/context/exceptions_jinja.py b/core/dbt/context/exceptions_jinja.py index a1f49e416fb..98f19048f1a 100644 --- a/core/dbt/context/exceptions_jinja.py +++ b/core/dbt/context/exceptions_jinja.py @@ -6,23 +6,23 @@ from dbt.events.types import JinjaLogWarning from dbt.exceptions import ( - RuntimeException, - MissingConfig, - MissingMaterialization, - MissingRelation, - AmbiguousAlias, - AmbiguousCatalogMatch, - CacheInconsistency, - DataclassNotDict, - CompilationException, - DatabaseException, - DependencyNotFound, - DependencyException, - DuplicatePatchPath, - DuplicateResourceName, - InvalidPropertyYML, - NotImplementedException, - RelationWrongType, + DbtRuntimeError, + MissingConfigError, + MissingMaterializationError, + MissingRelationError, + AmbiguousAliasError, + AmbiguousCatalogMatchError, + CacheInconsistencyError, + DataclassNotDictError, + CompilationError, + DbtDatabaseError, + DependencyNotFoundError, + DependencyError, + DuplicatePatchPathError, + DuplicateResourceNameError, + PropertyYMLError, + NotImplementedError, + RelationWrongTypeError, ) @@ -32,69 +32,69 @@ def warn(msg, node=None): def missing_config(model, name) -> NoReturn: - raise MissingConfig(unique_id=model.unique_id, name=name) + raise MissingConfigError(unique_id=model.unique_id, name=name) def missing_materialization(model, adapter_type) -> NoReturn: - raise MissingMaterialization( + raise MissingMaterializationError( materialization=model.config.materialized, adapter_type=adapter_type ) def missing_relation(relation, model=None) -> NoReturn: - raise MissingRelation(relation, model) + raise MissingRelationError(relation, model) def raise_ambiguous_alias(node_1, node_2, duped_name=None) -> NoReturn: - raise AmbiguousAlias(node_1, node_2, duped_name) + raise AmbiguousAliasError(node_1, node_2, duped_name) def raise_ambiguous_catalog_match(unique_id, match_1, match_2) -> NoReturn: - raise AmbiguousCatalogMatch(unique_id, match_1, 
match_2) + raise AmbiguousCatalogMatchError(unique_id, match_1, match_2) def raise_cache_inconsistent(message) -> NoReturn: - raise CacheInconsistency(message) + raise CacheInconsistencyError(message) def raise_dataclass_not_dict(obj) -> NoReturn: - raise DataclassNotDict(obj) + raise DataclassNotDictError(obj) def raise_compiler_error(msg, node=None) -> NoReturn: - raise CompilationException(msg, node) + raise CompilationError(msg, node) def raise_database_error(msg, node=None) -> NoReturn: - raise DatabaseException(msg, node) + raise DbtDatabaseError(msg, node) def raise_dep_not_found(node, node_description, required_pkg) -> NoReturn: - raise DependencyNotFound(node, node_description, required_pkg) + raise DependencyNotFoundError(node, node_description, required_pkg) def raise_dependency_error(msg) -> NoReturn: - raise DependencyException(scrub_secrets(msg, env_secrets())) + raise DependencyError(scrub_secrets(msg, env_secrets())) def raise_duplicate_patch_name(patch_1, existing_patch_path) -> NoReturn: - raise DuplicatePatchPath(patch_1, existing_patch_path) + raise DuplicatePatchPathError(patch_1, existing_patch_path) def raise_duplicate_resource_name(node_1, node_2) -> NoReturn: - raise DuplicateResourceName(node_1, node_2) + raise DuplicateResourceNameError(node_1, node_2) def raise_invalid_property_yml_version(path, issue) -> NoReturn: - raise InvalidPropertyYML(path, issue) + raise PropertyYMLError(path, issue) def raise_not_implemented(msg) -> NoReturn: - raise NotImplementedException(msg) + raise NotImplementedError(msg) def relation_wrong_type(relation, expected_type, model=None) -> NoReturn: - raise RelationWrongType(relation, expected_type, model) + raise RelationWrongTypeError(relation, expected_type, model) # Update this when a new function should be added to the @@ -130,7 +130,7 @@ def wrap(func): def inner(*args, **kwargs): try: return func(*args, **kwargs) - except RuntimeException as exc: + except DbtRuntimeError as exc: exc.add_node(model) raise 
exc diff --git a/core/dbt/context/macro_resolver.py b/core/dbt/context/macro_resolver.py index 6e70bafd05e..20f97febcb0 100644 --- a/core/dbt/context/macro_resolver.py +++ b/core/dbt/context/macro_resolver.py @@ -1,6 +1,6 @@ from typing import Dict, MutableMapping, Optional from dbt.contracts.graph.nodes import Macro -from dbt.exceptions import DuplicateMacroName, PackageNotFoundForMacro +from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.clients.jinja import MacroGenerator @@ -86,7 +86,7 @@ def _add_macro_to( package_namespaces[macro.package_name] = namespace if macro.name in namespace: - raise DuplicateMacroName(macro, macro, macro.package_name) + raise DuplicateMacroNameError(macro, macro, macro.package_name) package_namespaces[macro.package_name][macro.name] = macro def add_macro(self, macro: Macro): @@ -187,7 +187,7 @@ def get_from_package(self, package_name: Optional[str], name: str) -> Optional[M elif package_name in self.macro_resolver.packages: macro = self.macro_resolver.packages[package_name].get(name) else: - raise PackageNotFoundForMacro(package_name) + raise PackageNotFoundForMacroError(package_name) if not macro: return None macro_func = MacroGenerator(macro, self.ctx, self.node, self.thread_ctx) diff --git a/core/dbt/context/macros.py b/core/dbt/context/macros.py index 921480ec05a..1c61e564e06 100644 --- a/core/dbt/context/macros.py +++ b/core/dbt/context/macros.py @@ -3,7 +3,7 @@ from dbt.clients.jinja import MacroGenerator, MacroStack from dbt.contracts.graph.nodes import Macro from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt.exceptions import DuplicateMacroName, PackageNotFoundForMacro +from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError FlatNamespace = Dict[str, MacroGenerator] @@ -75,7 +75,7 @@ def get_from_package(self, package_name: Optional[str], name: str) -> 
Optional[M elif package_name in self.packages: return self.packages[package_name].get(name) else: - raise PackageNotFoundForMacro(package_name) + raise PackageNotFoundForMacroError(package_name) # This class builds the MacroNamespace by adding macros to @@ -122,7 +122,7 @@ def _add_macro_to( hierarchy[macro.package_name] = namespace if macro.name in namespace: - raise DuplicateMacroName(macro_func.macro, macro, macro.package_name) + raise DuplicateMacroNameError(macro_func.macro, macro, macro.package_name) hierarchy[macro.package_name][macro.name] = macro_func def add_macro(self, macro: Macro, ctx: Dict[str, Any]): diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py index 2e7af0a79f2..fec5111e36c 100644 --- a/core/dbt/context/providers.py +++ b/core/dbt/context/providers.py @@ -41,28 +41,28 @@ from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference from dbt.events.functions import get_metadata_vars from dbt.exceptions import ( - CompilationException, - ConflictingConfigKeys, - DisallowSecretEnvVar, - EnvVarMissing, - InternalException, - InvalidInlineModelConfig, - InvalidNumberSourceArgs, - InvalidPersistDocsValueType, - LoadAgateTableNotSeed, + CompilationError, + ConflictingConfigKeysError, + SecretEnvVarLocationError, + EnvVarMissingError, + DbtInternalError, + InlineModelConfigError, + NumberSourceArgsError, + PersistDocsValueTypeError, + LoadAgateTableNotSeedError, LoadAgateTableValueError, - MacroInvalidDispatchArg, - MacrosSourcesUnWriteable, - MetricInvalidArgs, - MissingConfig, - OperationsCannotRefEphemeralNodes, - PackageNotInDeps, - ParsingException, - RefBadContext, - RefInvalidArgs, - RuntimeException, - TargetNotFound, - ValidationException, + MacroDispatchArgError, + MacrosSourcesUnWriteableError, + MetricArgsError, + MissingConfigError, + OperationsCannotRefEphemeralNodesError, + PackageNotInDepsError, + ParsingError, + RefBadContextError, + RefArgsError, + DbtRuntimeError, + TargetNotFoundError, + 
DbtValidationError, ) from dbt.config import IsFQNResource from dbt.node_types import NodeType, ModelLanguage @@ -144,10 +144,10 @@ def dispatch( f'`adapter.dispatch("{suggest_macro_name}", ' f'macro_namespace="{suggest_macro_namespace}")`?' ) - raise CompilationException(msg) + raise CompilationError(msg) if packages is not None: - raise MacroInvalidDispatchArg(macro_name) + raise MacroDispatchArgError(macro_name) namespace = macro_namespace @@ -159,7 +159,7 @@ def dispatch( search_packages = [self.config.project_name, namespace] else: # Not a string and not None so must be a list - raise CompilationException( + raise CompilationError( f"In adapter.dispatch, got a list macro_namespace argument " f'("{macro_namespace}"), but macro_namespace should be None or a string.' ) @@ -172,8 +172,8 @@ def dispatch( try: # this uses the namespace from the context macro = self._namespace.get_from_package(package_name, search_name) - except CompilationException: - # Only raise CompilationException if macro is not found in + except CompilationError: + # Only raise CompilationError if macro is not found in # any package macro = None @@ -187,7 +187,7 @@ def dispatch( searched = ", ".join(repr(a) for a in attempts) msg = f"In dispatch: No macro named '{macro_name}' found\n Searched for: {searched}" - raise CompilationException(msg) + raise CompilationError(msg) class BaseResolver(metaclass=abc.ABCMeta): @@ -223,12 +223,12 @@ def _repack_args(self, name: str, package: Optional[str]) -> List[str]: def validate_args(self, name: str, package: Optional[str]): if not isinstance(name, str): - raise CompilationException( + raise CompilationError( f"The name argument to ref() must be a string, got {type(name)}" ) if package is not None and not isinstance(package, str): - raise CompilationException( + raise CompilationError( f"The package argument to ref() must be a string or None, got {type(package)}" ) @@ -241,7 +241,7 @@ def __call__(self, *args: str) -> RelationProxy: elif len(args) == 2: 
package, name = args else: - raise RefInvalidArgs(node=self.model, args=args) + raise RefArgsError(node=self.model, args=args) self.validate_args(name, package) return self.resolve(name, package) @@ -253,19 +253,19 @@ def resolve(self, source_name: str, table_name: str): def validate_args(self, source_name: str, table_name: str): if not isinstance(source_name, str): - raise CompilationException( + raise CompilationError( f"The source name (first) argument to source() must be a " f"string, got {type(source_name)}" ) if not isinstance(table_name, str): - raise CompilationException( + raise CompilationError( f"The table name (second) argument to source() must be a " f"string, got {type(table_name)}" ) def __call__(self, *args: str) -> RelationProxy: if len(args) != 2: - raise InvalidNumberSourceArgs(args, node=self.model) + raise NumberSourceArgsError(args, node=self.model) self.validate_args(args[0], args[1]) return self.resolve(args[0], args[1]) @@ -282,12 +282,12 @@ def _repack_args(self, name: str, package: Optional[str]) -> List[str]: def validate_args(self, name: str, package: Optional[str]): if not isinstance(name, str): - raise CompilationException( + raise CompilationError( f"The name argument to metric() must be a string, got {type(name)}" ) if package is not None and not isinstance(package, str): - raise CompilationException( + raise CompilationError( f"The package argument to metric() must be a string or None, got {type(package)}" ) @@ -300,7 +300,7 @@ def __call__(self, *args: str) -> MetricReference: elif len(args) == 2: package, name = args else: - raise MetricInvalidArgs(node=self.model, args=args) + raise MetricArgsError(node=self.model, args=args) self.validate_args(name, package) return self.resolve(name, package) @@ -321,7 +321,7 @@ def _transform_config(self, config): if oldkey in config: newkey = oldkey.replace("_", "-") if newkey in config: - raise ConflictingConfigKeys(oldkey, newkey, node=self.model) + raise ConflictingConfigKeysError(oldkey, 
newkey, node=self.model) config[newkey] = config.pop(oldkey) return config @@ -331,14 +331,14 @@ def __call__(self, *args, **kwargs): elif len(args) == 0 and len(kwargs) > 0: opts = kwargs else: - raise InvalidInlineModelConfig(node=self.model) + raise InlineModelConfigError(node=self.model) opts = self._transform_config(opts) # it's ok to have a parse context with no context config, but you must # not call it! if self.context_config is None: - raise RuntimeException("At parse time, did not receive a context config") + raise DbtRuntimeError("At parse time, did not receive a context config") self.context_config.add_config_call(opts) return "" @@ -379,7 +379,7 @@ def _lookup(self, name, default=_MISSING): else: result = self.model.config.get(name, default) if result is _MISSING: - raise MissingConfig(unique_id=self.model.unique_id, name=name) + raise MissingConfigError(unique_id=self.model.unique_id, name=name) return result def require(self, name, validator=None): @@ -401,14 +401,14 @@ def get(self, name, default=None, validator=None): def persist_relation_docs(self) -> bool: persist_docs = self.get("persist_docs", default={}) if not isinstance(persist_docs, dict): - raise InvalidPersistDocsValueType(persist_docs) + raise PersistDocsValueTypeError(persist_docs) return persist_docs.get("relation", False) def persist_column_docs(self) -> bool: persist_docs = self.get("persist_docs", default={}) if not isinstance(persist_docs, dict): - raise InvalidPersistDocsValueType(persist_docs) + raise PersistDocsValueTypeError(persist_docs) return persist_docs.get("columns", False) @@ -467,7 +467,7 @@ def resolve(self, target_name: str, target_package: Optional[str] = None) -> Rel ) if target_model is None or isinstance(target_model, Disabled): - raise TargetNotFound( + raise TargetNotFoundError( node=self.model, target_name=target_name, target_kind="node", @@ -489,7 +489,7 @@ def validate( ) -> None: if resolved.unique_id not in self.model.depends_on.nodes: args = 
self._repack_args(target_name, target_package) - raise RefBadContext(node=self.model, args=args) + raise RefBadContextError(node=self.model, args=args) class OperationRefResolver(RuntimeRefResolver): @@ -505,7 +505,7 @@ def create_relation(self, target_model: ManifestNode, name: str) -> RelationProx if target_model.is_ephemeral_model: # In operations, we can't ref() ephemeral nodes, because # Macros do not support set_cte - raise OperationsCannotRefEphemeralNodes(target_model.name, node=self.model) + raise OperationsCannotRefEphemeralNodesError(target_model.name, node=self.model) else: return super().create_relation(target_model, name) @@ -528,7 +528,7 @@ def resolve(self, source_name: str, table_name: str): ) if target_source is None or isinstance(target_source, Disabled): - raise TargetNotFound( + raise TargetNotFoundError( node=self.model, target_name=f"{source_name}.{table_name}", target_kind="source", @@ -555,7 +555,7 @@ def resolve(self, target_name: str, target_package: Optional[str] = None) -> Met ) if target_metric is None or isinstance(target_metric, Disabled): - raise TargetNotFound( + raise TargetNotFoundError( node=self.model, target_name=target_name, target_kind="metric", @@ -584,7 +584,7 @@ def packages_for_node(self) -> Iterable[Project]: if package_name != self._config.project_name: if package_name not in dependencies: # I don't think this is actually reachable - raise PackageNotInDeps(package_name, node=self._node) + raise PackageNotInDepsError(package_name, node=self._node) yield dependencies[package_name] yield self._config @@ -674,7 +674,7 @@ def __init__( context_config: Optional[ContextConfig], ) -> None: if provider is None: - raise InternalException(f"Invalid provider given to context: {provider}") + raise DbtInternalError(f"Invalid provider given to context: {provider}") # mypy appeasement - we know it'll be a RuntimeConfig self.config: RuntimeConfig self.model: Union[Macro, ManifestNode] = model @@ -751,7 +751,7 @@ def inner(value: T) -> 
None: return elif value == arg: return - raise ValidationException( + raise DbtValidationError( 'Expected value "{}" to be one of {}'.format(value, ",".join(map(str, args))) ) @@ -767,7 +767,7 @@ def inner(value: T) -> None: def write(self, payload: str) -> str: # macros/source defs aren't 'writeable'. if isinstance(self.model, (Macro, SourceDefinition)): - raise MacrosSourcesUnWriteable(node=self.model) + raise MacrosSourcesUnWriteableError(node=self.model) self.model.build_path = self.model.write_node(self.config.target_path, "run", payload) return "" @@ -782,12 +782,12 @@ def try_or_compiler_error( try: return func(*args, **kwargs) except Exception: - raise CompilationException(message_if_exception, self.model) + raise CompilationError(message_if_exception, self.model) @contextmember def load_agate_table(self) -> agate.Table: if not isinstance(self.model, SeedNode): - raise LoadAgateTableNotSeed(self.model.resource_type, node=self.model) + raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model) assert self.model.root_path path = os.path.join(self.model.root_path, self.model.original_file_path) column_types = self.model.config.column_types @@ -1185,7 +1185,7 @@ def adapter_macro(self, name: str, *args, **kwargs): "https://docs.getdbt.com/reference/dbt-jinja-functions/dispatch)" " adapter_macro was called for: {macro_name}".format(macro_name=name) ) - raise CompilationException(msg) + raise CompilationError(msg) @contextmember def env_var(self, var: str, default: Optional[str] = None) -> str: @@ -1196,7 +1196,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: """ return_value = None if var.startswith(SECRET_ENV_PREFIX): - raise DisallowSecretEnvVar(var) + raise SecretEnvVarLocationError(var) if var in os.environ: return_value = os.environ[var] elif default is not None: @@ -1229,7 +1229,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: source_file.env_vars.append(var) # type: ignore[union-attr] return 
return_value else: - raise EnvVarMissing(var) + raise EnvVarMissingError(var) @contextproperty def selected_resources(self) -> List[str]: @@ -1248,7 +1248,7 @@ def submit_python_job(self, parsed_model: Dict, compiled_code: str) -> AdapterRe and self.context_macro_stack.call_stack[1] == "macro.dbt.statement" and "materialization" in self.context_macro_stack.call_stack[0] ): - raise RuntimeException( + raise DbtRuntimeError( f"submit_python_job is not intended to be called here, at model {parsed_model['alias']}, with macro call_stack {self.context_macro_stack.call_stack}." ) return self.adapter.submit_python_job(parsed_model, compiled_code) @@ -1410,7 +1410,7 @@ def generate_runtime_macro_context( class ExposureRefResolver(BaseResolver): def __call__(self, *args) -> str: if len(args) not in (1, 2): - raise RefInvalidArgs(node=self.model, args=args) + raise RefArgsError(node=self.model, args=args) self.model.refs.append(list(args)) return "" @@ -1418,7 +1418,7 @@ def __call__(self, *args) -> str: class ExposureSourceResolver(BaseResolver): def __call__(self, *args) -> str: if len(args) != 2: - raise InvalidNumberSourceArgs(args, node=self.model) + raise NumberSourceArgsError(args, node=self.model) self.model.sources.append(list(args)) return "" @@ -1426,7 +1426,7 @@ def __call__(self, *args) -> str: class ExposureMetricResolver(BaseResolver): def __call__(self, *args) -> str: if len(args) not in (1, 2): - raise MetricInvalidArgs(node=self.model, args=args) + raise MetricArgsError(node=self.model, args=args) self.model.metrics.append(list(args)) return "" @@ -1468,14 +1468,14 @@ def __call__(self, *args) -> str: elif len(args) == 2: package, name = args else: - raise RefInvalidArgs(node=self.model, args=args) + raise RefArgsError(node=self.model, args=args) self.validate_args(name, package) self.model.refs.append(list(args)) return "" def validate_args(self, name, package): if not isinstance(name, str): - raise ParsingException( + raise ParsingError( f"In a metrics 
section in {self.model.original_file_path} " "the name argument to ref() must be a string" ) @@ -1558,7 +1558,7 @@ def _build_test_namespace(self): def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): - raise DisallowSecretEnvVar(var) + raise SecretEnvVarLocationError(var) if var in os.environ: return_value = os.environ[var] elif default is not None: @@ -1584,7 +1584,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: source_file.add_env_var(var, yaml_key, name) # type: ignore[union-attr] return return_value else: - raise EnvVarMissing(var) + raise EnvVarMissingError(var) def generate_test_context( diff --git a/core/dbt/context/secret.py b/core/dbt/context/secret.py index da13509ef50..4d8ff342aff 100644 --- a/core/dbt/context/secret.py +++ b/core/dbt/context/secret.py @@ -4,7 +4,7 @@ from .base import BaseContext, contextmember from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.exceptions import EnvVarMissing +from dbt.exceptions import EnvVarMissingError SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$" @@ -50,7 +50,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: self.env_vars[var] = return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER return return_value else: - raise EnvVarMissing(var) + raise EnvVarMissingError(var) def generate_secret_context(cli_vars: Dict[str, Any]) -> Dict[str, Any]: diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py index fe4ae912229..3f12a603363 100644 --- a/core/dbt/contracts/connection.py +++ b/core/dbt/contracts/connection.py @@ -12,7 +12,7 @@ List, Callable, ) -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.utils import translate_aliases from dbt.events.functions import fire_event from dbt.events.types import NewConnectionOpening @@ -94,7 +94,7 @@ def handle(self): # this will actually 
change 'self._handle'. self._handle.resolve(self) except RecursionError as exc: - raise InternalException( + raise DbtInternalError( "A connection's open() method attempted to read the handle value" ) from exc return self._handle diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py index c43012ec521..4dd2ddc2f33 100644 --- a/core/dbt/contracts/graph/manifest.py +++ b/core/dbt/contracts/graph/manifest.py @@ -40,10 +40,10 @@ from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version from dbt.dataclass_schema import dbtClassMixin from dbt.exceptions import ( - CompilationException, - DuplicateResourceName, - DuplicateMacroInPackage, - DuplicateMaterializationName, + CompilationError, + DuplicateResourceNameError, + DuplicateMacroInPackageError, + DuplicateMaterializationNameError, ) from dbt.helper_types import PathSet from dbt.events.functions import fire_event @@ -102,7 +102,7 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest) -> Documentation: if unique_id not in manifest.docs: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Doc {unique_id} found in cache but not found in manifest" ) return manifest.docs[unique_id] @@ -135,7 +135,7 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SourceDefinition: if unique_id not in manifest.sources: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Source {unique_id} found in cache but not found in manifest" ) return manifest.sources[unique_id] @@ -173,7 +173,7 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest) -> ManifestNode: if unique_id not in manifest.nodes: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Node {unique_id} found in cache but not found in manifest" ) return manifest.nodes[unique_id] @@ -206,7 +206,7 @@ 
def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> Metric: if unique_id not in manifest.metrics: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Metric {unique_id} found in cache but not found in manifest" ) return manifest.metrics[unique_id] @@ -398,7 +398,7 @@ def __eq__(self, other: object) -> bool: return NotImplemented equal = self.specificity == other.specificity and self.locality == other.locality if equal: - raise DuplicateMaterializationName(self.macro, other) + raise DuplicateMaterializationNameError(self.macro, other) return equal @@ -480,13 +480,13 @@ def _update_into(dest: MutableMapping[str, T], new_item: T): """ unique_id = new_item.unique_id if unique_id not in dest: - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( f"got an update_{new_item.resource_type} call with an " f"unrecognized {new_item.resource_type}: {new_item.unique_id}" ) existing = dest[unique_id] if new_item.original_file_path != existing.original_file_path: - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( f"cannot update a {new_item.resource_type} to have a new file path!" 
) dest[unique_id] = new_item @@ -839,7 +839,7 @@ def expect(self, unique_id: str) -> GraphMemberNode: return self.metrics[unique_id] else: # something terrible has happened - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( "Expected node {} not found in manifest".format(unique_id) ) @@ -1035,7 +1035,7 @@ def merge_from_artifact( def add_macro(self, source_file: SourceFile, macro: Macro): if macro.unique_id in self.macros: # detect that the macro exists and emit an error - raise DuplicateMacroInPackage(macro=macro, macro_mapping=self.macros) + raise DuplicateMacroInPackageError(macro=macro, macro_mapping=self.macros) self.macros[macro.unique_id] = macro source_file.macros.append(macro.unique_id) @@ -1213,7 +1213,7 @@ def __post_serialize__(self, dct): def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]): if value.unique_id in src: - raise DuplicateResourceName(value, src[value.unique_id]) + raise DuplicateResourceNameError(value, src[value.unique_id]) K_T = TypeVar("K_T") @@ -1222,7 +1222,7 @@ def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]): def _expect_value(key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str) -> V_T: if key not in src: - raise CompilationException( + raise CompilationError( 'Expected to find "{}" in cached "result.{}" based ' "on cached file information: {}!".format(key, name, old_file) ) diff --git a/core/dbt/contracts/graph/model_config.py b/core/dbt/contracts/graph/model_config.py index b22f724de53..407c5435786 100644 --- a/core/dbt/contracts/graph/model_config.py +++ b/core/dbt/contracts/graph/model_config.py @@ -9,7 +9,7 @@ ) from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed, Docs from dbt.contracts.graph.utils import validate_color -from dbt.exceptions import InternalException, CompilationException +from dbt.exceptions import DbtInternalError, CompilationError from dbt.contracts.util import Replaceable, list_str from dbt import hooks from 
dbt.node_types import NodeType @@ -30,7 +30,7 @@ def _get_meta_value(cls: Type[M], fld: Field, key: str, default: Any) -> M: try: return cls(value) except ValueError as exc: - raise InternalException(f"Invalid {cls} value: {value}") from exc + raise DbtInternalError(f"Invalid {cls} value: {value}") from exc def _set_meta_value(obj: M, key: str, existing: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: @@ -140,17 +140,17 @@ def _merge_field_value( return _listify(self_value) + _listify(other_value) elif merge_behavior == MergeBehavior.Update: if not isinstance(self_value, dict): - raise InternalException(f"expected dict, got {self_value}") + raise DbtInternalError(f"expected dict, got {self_value}") if not isinstance(other_value, dict): - raise InternalException(f"expected dict, got {other_value}") + raise DbtInternalError(f"expected dict, got {other_value}") value = self_value.copy() value.update(other_value) return value elif merge_behavior == MergeBehavior.DictKeyAppend: if not isinstance(self_value, dict): - raise InternalException(f"expected dict, got {self_value}") + raise DbtInternalError(f"expected dict, got {self_value}") if not isinstance(other_value, dict): - raise InternalException(f"expected dict, got {other_value}") + raise DbtInternalError(f"expected dict, got {other_value}") new_dict = {} for key in self_value.keys(): new_dict[key] = _listify(self_value[key]) @@ -172,7 +172,7 @@ def _merge_field_value( return new_dict else: - raise InternalException(f"Got an invalid merge_behavior: {merge_behavior}") + raise DbtInternalError(f"Got an invalid merge_behavior: {merge_behavior}") def insensitive_patterns(*patterns: str): @@ -227,7 +227,7 @@ def __delitem__(self, key): msg = ( 'Error, tried to delete config key "{}": Cannot delete ' "built-in keys" ).format(key) - raise CompilationException(msg) + raise CompilationError(msg) else: del self._extra[key] diff --git a/core/dbt/contracts/graph/unparsed.py b/core/dbt/contracts/graph/unparsed.py index 
ba2e48c7c9c..6521e644542 100644 --- a/core/dbt/contracts/graph/unparsed.py +++ b/core/dbt/contracts/graph/unparsed.py @@ -11,7 +11,7 @@ # trigger the PathEncoder import dbt.helper_types # noqa:F401 -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError, ParsingError from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin, ValidationError @@ -222,7 +222,7 @@ class ExternalPartition(AdditionalPropertiesAllowed, Replaceable): def __post_init__(self): if self.name == "" or self.data_type == "": - raise CompilationException("External partition columns must have names and data types") + raise CompilationError("External partition columns must have names and data types") @dataclass @@ -514,7 +514,7 @@ def validate(cls, data): errors.append("must contain only letters, numbers and underscores") if errors: - raise ParsingException( + raise ParsingError( f"The metric name '{data['name']}' is invalid. It {', '.join(e for e in errors)}" ) diff --git a/core/dbt/contracts/relation.py b/core/dbt/contracts/relation.py index e8cba2ad155..e557c358966 100644 --- a/core/dbt/contracts/relation.py +++ b/core/dbt/contracts/relation.py @@ -9,7 +9,7 @@ from dbt.dataclass_schema import dbtClassMixin, StrEnum from dbt.contracts.util import Replaceable -from dbt.exceptions import CompilationException, DataclassNotDict +from dbt.exceptions import CompilationError, DataclassNotDictError from dbt.utils import deep_merge @@ -43,10 +43,10 @@ def __getitem__(self, key): raise KeyError(key) from None def __iter__(self): - raise DataclassNotDict(self) + raise DataclassNotDictError(self) def __len__(self): - raise DataclassNotDict(self) + raise DataclassNotDictError(self) def incorporate(self, **kwargs): value = self.to_dict(omit_none=True) @@ -88,13 +88,11 @@ class Path(FakeAPIObject): def __post_init__(self): # handle pesky jinja2.Undefined sneaking in here and messing up rende if not isinstance(self.database, 
(type(None), str)): - raise CompilationException("Got an invalid path database: {}".format(self.database)) + raise CompilationError("Got an invalid path database: {}".format(self.database)) if not isinstance(self.schema, (type(None), str)): - raise CompilationException("Got an invalid path schema: {}".format(self.schema)) + raise CompilationError("Got an invalid path schema: {}".format(self.schema)) if not isinstance(self.identifier, (type(None), str)): - raise CompilationException( - "Got an invalid path identifier: {}".format(self.identifier) - ) + raise CompilationError("Got an invalid path identifier: {}".format(self.identifier)) def get_lowered_part(self, key: ComponentName) -> Optional[str]: part = self.get_part(key) diff --git a/core/dbt/contracts/results.py b/core/dbt/contracts/results.py index 97c43396e33..9243750284f 100644 --- a/core/dbt/contracts/results.py +++ b/core/dbt/contracts/results.py @@ -7,7 +7,7 @@ Replaceable, schema_version, ) -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.events.functions import fire_event from dbt.events.types import TimingInfoCollected from dbt.events.proto_types import RunResultMsg, TimingInfoMsg @@ -343,14 +343,14 @@ def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput # we know that this must be a SourceFreshnessResult if not isinstance(result, SourceFreshnessResult): - raise InternalException( + raise DbtInternalError( "Got {} instead of a SourceFreshnessResult for a " "non-error result in freshness execution!".format(type(result)) ) # if we're here, we must have a non-None freshness threshold criteria = result.node.freshness if criteria is None: - raise InternalException( + raise DbtInternalError( "Somehow evaluated a freshness result for a source that has no freshness criteria!" 
) return SourceFreshnessOutput( diff --git a/core/dbt/contracts/state.py b/core/dbt/contracts/state.py index 9940a0cb93d..cb135e241ac 100644 --- a/core/dbt/contracts/state.py +++ b/core/dbt/contracts/state.py @@ -3,7 +3,7 @@ from .results import RunResultsArtifact from .results import FreshnessExecutionResultArtifact from typing import Optional -from dbt.exceptions import IncompatibleSchemaException +from dbt.exceptions import IncompatibleSchemaError class PreviousState: @@ -19,7 +19,7 @@ def __init__(self, path: Path, current_path: Path): if manifest_path.exists() and manifest_path.is_file(): try: self.manifest = WritableManifest.read_and_check_versions(str(manifest_path)) - except IncompatibleSchemaException as exc: + except IncompatibleSchemaError as exc: exc.add_filename(str(manifest_path)) raise @@ -27,7 +27,7 @@ def __init__(self, path: Path, current_path: Path): if results_path.exists() and results_path.is_file(): try: self.results = RunResultsArtifact.read_and_check_versions(str(results_path)) - except IncompatibleSchemaException as exc: + except IncompatibleSchemaError as exc: exc.add_filename(str(results_path)) raise @@ -37,7 +37,7 @@ def __init__(self, path: Path, current_path: Path): self.sources = FreshnessExecutionResultArtifact.read_and_check_versions( str(sources_path) ) - except IncompatibleSchemaException as exc: + except IncompatibleSchemaError as exc: exc.add_filename(str(sources_path)) raise @@ -47,6 +47,6 @@ def __init__(self, path: Path, current_path: Path): self.sources_current = FreshnessExecutionResultArtifact.read_and_check_versions( str(sources_current_path) ) - except IncompatibleSchemaException as exc: + except IncompatibleSchemaError as exc: exc.add_filename(str(sources_current_path)) raise diff --git a/core/dbt/contracts/util.py b/core/dbt/contracts/util.py index 99f7a35c66d..fb2af2dac59 100644 --- a/core/dbt/contracts/util.py +++ b/core/dbt/contracts/util.py @@ -5,9 +5,9 @@ from dbt.clients.system import write_json, read_json from 
dbt import deprecations from dbt.exceptions import ( - InternalException, - RuntimeException, - IncompatibleSchemaException, + DbtInternalError, + DbtRuntimeError, + IncompatibleSchemaError, ) from dbt.version import __version__ from dbt.events.functions import get_invocation_id, get_metadata_vars @@ -123,7 +123,7 @@ def read(cls, path: str): try: data = read_json(path) except (EnvironmentError, ValueError) as exc: - raise RuntimeException( + raise DbtRuntimeError( f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' ) from exc @@ -320,7 +320,7 @@ def read_and_check_versions(cls, path: str): try: data = read_json(path) except (EnvironmentError, ValueError) as exc: - raise RuntimeException( + raise DbtRuntimeError( f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' ) from exc @@ -332,7 +332,7 @@ def read_and_check_versions(cls, path: str): previous_schema_version = data["metadata"]["dbt_schema_version"] # cls.dbt_schema_version is a SchemaVersion object if not cls.is_compatible_version(previous_schema_version): - raise IncompatibleSchemaException( + raise IncompatibleSchemaError( expected=str(cls.dbt_schema_version), found=previous_schema_version, ) @@ -357,7 +357,7 @@ class ArtifactMixin(VersionedSchema, Writable, Readable): def validate(cls, data): super().validate(data) if cls.dbt_schema_version is None: - raise InternalException("Cannot call from_dict with no schema version!") + raise DbtInternalError("Cannot call from_dict with no schema version!") class Identifier(ValidatedStringMixin): diff --git a/core/dbt/deps/git.py b/core/dbt/deps/git.py index 5d7a1331c58..a32f91ee158 100644 --- a/core/dbt/deps/git.py +++ b/core/dbt/deps/git.py @@ -9,7 +9,7 @@ GitPackage, ) from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path -from dbt.exceptions import ExecutableError, MultipleVersionGitDeps +from dbt.exceptions import ExecutableError, MultipleVersionGitDepsError from dbt.events.functions import fire_event, warn_or_error from 
dbt.events.types import EnsureGitInstalled, DepsUnpinned @@ -143,7 +143,7 @@ def resolved(self) -> GitPinnedPackage: if len(requested) == 0: requested = {"HEAD"} elif len(requested) > 1: - raise MultipleVersionGitDeps(self.git, requested) + raise MultipleVersionGitDepsError(self.git, requested) return GitPinnedPackage( git=self.git, diff --git a/core/dbt/deps/registry.py b/core/dbt/deps/registry.py index f3398f4b16f..e1f39a7551d 100644 --- a/core/dbt/deps/registry.py +++ b/core/dbt/deps/registry.py @@ -10,10 +10,10 @@ ) from dbt.deps.base import PinnedPackage, UnpinnedPackage from dbt.exceptions import ( - DependencyException, - PackageNotFound, - PackageVersionNotFound, - VersionsNotCompatibleException, + DependencyError, + PackageNotFoundError, + PackageVersionNotFoundError, + VersionsNotCompatibleError, ) @@ -71,7 +71,7 @@ def __init__( def _check_in_index(self): index = registry.index_cached() if self.package not in index: - raise PackageNotFound(self.package) + raise PackageNotFoundError(self.package) @classmethod def from_contract(cls, contract: RegistryPackage) -> "RegistryUnpinnedPackage": @@ -95,9 +95,9 @@ def resolved(self) -> RegistryPinnedPackage: self._check_in_index() try: range_ = semver.reduce_versions(*self.versions) - except VersionsNotCompatibleException as e: + except VersionsNotCompatibleError as e: new_msg = "Version error for package {}: {}".format(self.name, e) - raise DependencyException(new_msg) from e + raise DependencyError(new_msg) from e should_version_check = bool(flags.VERSION_CHECK) dbt_version = get_installed_version() @@ -118,7 +118,9 @@ def resolved(self) -> RegistryPinnedPackage: target = None if not target: # raise an exception if no installable target version is found - raise PackageVersionNotFound(self.package, range_, installable, should_version_check) + raise PackageVersionNotFoundError( + self.package, range_, installable, should_version_check + ) latest_compatible = installable[-1] return RegistryPinnedPackage( 
package=self.package, version=target, version_latest=latest_compatible diff --git a/core/dbt/deps/resolver.py b/core/dbt/deps/resolver.py index 323e2f562c1..db57ef0f641 100644 --- a/core/dbt/deps/resolver.py +++ b/core/dbt/deps/resolver.py @@ -2,10 +2,10 @@ from typing import Dict, List, NoReturn, Union, Type, Iterator, Set from dbt.exceptions import ( - DuplicateDependencyToRoot, - DuplicateProjectDependency, - MismatchedDependencyTypes, - InternalException, + DuplicateDependencyToRootError, + DuplicateProjectDependencyError, + MismatchedDependencyTypeError, + DbtInternalError, ) from dbt.config import Project, RuntimeConfig @@ -56,7 +56,7 @@ def __setitem__(self, key: BasePackage, value): self.packages[key_str] = value def _mismatched_types(self, old: UnpinnedPackage, new: UnpinnedPackage) -> NoReturn: - raise MismatchedDependencyTypes(new, old) + raise MismatchedDependencyTypeError(new, old) def incorporate(self, package: UnpinnedPackage): key: str = self._pick_key(package) @@ -80,7 +80,7 @@ def update_from(self, src: List[PackageContract]) -> None: elif isinstance(contract, RegistryPackage): pkg = RegistryUnpinnedPackage.from_contract(contract) else: - raise InternalException("Invalid package type {}".format(type(contract))) + raise DbtInternalError("Invalid package type {}".format(type(contract))) self.incorporate(pkg) @classmethod @@ -107,9 +107,9 @@ def _check_for_duplicate_project_names( for package in final_deps: project_name = package.get_project_name(config, renderer) if project_name in seen: - raise DuplicateProjectDependency(project_name) + raise DuplicateProjectDependencyError(project_name) elif project_name == config.project_name: - raise DuplicateDependencyToRoot(project_name) + raise DuplicateDependencyToRootError(project_name) seen.add(project_name) diff --git a/core/dbt/events/functions.py b/core/dbt/events/functions.py index 9722fb5fecf..06e5a89965c 100644 --- a/core/dbt/events/functions.py +++ b/core/dbt/events/functions.py @@ -163,9 +163,9 @@ 
def msg_to_dict(msg: EventMsg) -> dict: def warn_or_error(event, node=None): if flags.WARN_ERROR: # TODO: resolve this circular import when at top - from dbt.exceptions import EventCompilationException + from dbt.exceptions import EventCompilationError - raise EventCompilationException(event.message(), node) + raise EventCompilationError(event.message(), node) else: fire_event(event) diff --git a/core/dbt/events/proto_types.py b/core/dbt/events/proto_types.py index 746ce294067..536e0c6c7cc 100644 --- a/core/dbt/events/proto_types.py +++ b/core/dbt/events/proto_types.py @@ -429,19 +429,19 @@ class ExposureNameDeprecationMsg(betterproto.Message): @dataclass -class FunctionDeprecated(betterproto.Message): +class InternalDeprecation(betterproto.Message): """D008""" - function_name: str = betterproto.string_field(1) + name: str = betterproto.string_field(1) reason: str = betterproto.string_field(2) suggested_action: str = betterproto.string_field(3) version: str = betterproto.string_field(4) @dataclass -class FunctionDeprecatedMsg(betterproto.Message): +class InternalDeprecationMsg(betterproto.Message): info: "EventInfo" = betterproto.message_field(1) - data: "FunctionDeprecated" = betterproto.message_field(2) + data: "InternalDeprecation" = betterproto.message_field(2) @dataclass @@ -999,20 +999,20 @@ class MacroFileParseMsg(betterproto.Message): @dataclass -class PartialParsingExceptionProcessingFile(betterproto.Message): +class PartialParsingErrorProcessingFile(betterproto.Message): """I014""" file: str = betterproto.string_field(1) @dataclass -class PartialParsingExceptionProcessingFileMsg(betterproto.Message): +class PartialParsingErrorProcessingFileMsg(betterproto.Message): info: "EventInfo" = betterproto.message_field(1) - data: "PartialParsingExceptionProcessingFile" = betterproto.message_field(2) + data: "PartialParsingErrorProcessingFile" = betterproto.message_field(2) @dataclass -class PartialParsingException(betterproto.Message): +class 
PartialParsingError(betterproto.Message): """I016""" exc_info: Dict[str, str] = betterproto.map_field( @@ -1021,9 +1021,9 @@ class PartialParsingException(betterproto.Message): @dataclass -class PartialParsingExceptionMsg(betterproto.Message): +class PartialParsingErrorMsg(betterproto.Message): info: "EventInfo" = betterproto.message_field(1) - data: "PartialParsingException" = betterproto.message_field(2) + data: "PartialParsingError" = betterproto.message_field(2) @dataclass @@ -2239,7 +2239,7 @@ class CatchableExceptionOnRunMsg(betterproto.Message): @dataclass -class InternalExceptionOnRun(betterproto.Message): +class InternalErrorOnRun(betterproto.Message): """W003""" build_path: str = betterproto.string_field(1) @@ -2247,9 +2247,9 @@ class InternalExceptionOnRun(betterproto.Message): @dataclass -class InternalExceptionOnRunMsg(betterproto.Message): +class InternalErrorOnRunMsg(betterproto.Message): info: "EventInfo" = betterproto.message_field(1) - data: "InternalExceptionOnRun" = betterproto.message_field(2) + data: "InternalErrorOnRun" = betterproto.message_field(2) @dataclass diff --git a/core/dbt/events/types.proto b/core/dbt/events/types.proto index 85d46692089..80510687f81 100644 --- a/core/dbt/events/types.proto +++ b/core/dbt/events/types.proto @@ -336,16 +336,16 @@ message ExposureNameDeprecationMsg { } //D008 -message FunctionDeprecated { - string function_name = 1; +message InternalDeprecation { + string name = 1; string reason = 2; string suggested_action = 3; string version = 4; } -message FunctionDeprecatedMsg { +message InternalDeprecationMsg { EventInfo info = 1; - FunctionDeprecated data = 2; + InternalDeprecation data = 2; } // E - DB Adapter @@ -797,23 +797,23 @@ message MacroFileParseMsg { // Skipping I013 // I014 -message PartialParsingExceptionProcessingFile { +message PartialParsingErrorProcessingFile { string file = 1; } -message PartialParsingExceptionProcessingFileMsg { +message PartialParsingErrorProcessingFileMsg { EventInfo info = 
1; - PartialParsingExceptionProcessingFile data = 2; + PartialParsingErrorProcessingFile data = 2; } // I016 -message PartialParsingException { +message PartialParsingError { map exc_info = 1; } -message PartialParsingExceptionMsg { +message PartialParsingErrorMsg { EventInfo info = 1; - PartialParsingException data = 2; + PartialParsingError data = 2; } // I017 @@ -825,10 +825,8 @@ message PartialParsingSkipParsingMsg { PartialParsingSkipParsing data = 2; } - // Skipped I018, I019, I020, I021, I022, I023 - // I024 message UnableToPartialParse { string reason = 1; @@ -1795,14 +1793,14 @@ message CatchableExceptionOnRunMsg { } // W003 -message InternalExceptionOnRun { +message InternalErrorOnRun { string build_path = 1; string exc = 2; } -message InternalExceptionOnRunMsg { +message InternalErrorOnRunMsg { EventInfo info = 1; - InternalExceptionOnRun data = 2; + InternalErrorOnRun data = 2; } // W004 diff --git a/core/dbt/events/types.py b/core/dbt/events/types.py index b76188a8c97..cfa0f1feaa4 100644 --- a/core/dbt/events/types.py +++ b/core/dbt/events/types.py @@ -378,7 +378,7 @@ def message(self): @dataclass -class FunctionDeprecated(WarnLevel, pt.FunctionDeprecated): +class InternalDeprecation(WarnLevel, pt.InternalDeprecation): def code(self): return "D008" @@ -387,7 +387,7 @@ def message(self): if self.reason: extra_reason = f"\n{self.reason}" msg = ( - f"`{self.function_name}` is deprecated and will be removed in dbt-core version {self.version}\n\n" + f"`{self.name}` is deprecated and will be removed in dbt-core version {self.version}\n\n" f"Adapter maintainers can resolve this deprecation by {self.suggested_action}. 
{extra_reason}" ) return warning_tag(msg) @@ -802,7 +802,7 @@ def message(self) -> str: @dataclass -class PartialParsingExceptionProcessingFile(DebugLevel, pt.PartialParsingExceptionProcessingFile): +class PartialParsingErrorProcessingFile(DebugLevel, pt.PartialParsingErrorProcessingFile): def code(self): return "I014" @@ -814,7 +814,7 @@ def message(self) -> str: @dataclass -class PartialParsingException(DebugLevel, pt.PartialParsingException): +class PartialParsingError(DebugLevel, pt.PartialParsingError): def code(self): return "I016" @@ -1879,7 +1879,7 @@ def message(self) -> str: @dataclass -class InternalExceptionOnRun(DebugLevel, pt.InternalExceptionOnRun): +class InternalErrorOnRun(DebugLevel, pt.InternalErrorOnRun): def code(self): return "W003" diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py index 7d8326cd352..f207496e9b1 100644 --- a/core/dbt/exceptions.py +++ b/core/dbt/exceptions.py @@ -38,7 +38,7 @@ def data(self): } -class InternalException(Exception): +class DbtInternalError(Exception): def __init__(self, msg: str): self.stack: List = [] self.msg = scrub_secrets(msg, env_secrets()) @@ -79,7 +79,7 @@ def __str__(self): return lines[0] + "\n" + "\n".join([" " + line for line in lines[1:]]) -class RuntimeException(RuntimeError, Exception): +class DbtRuntimeError(RuntimeError, Exception): CODE = 10001 MESSAGE = "Runtime error" @@ -172,72 +172,7 @@ def data(self): return result -class RPCFailureResult(RuntimeException): - CODE = 10002 - MESSAGE = "RPC execution error" - - -class RPCTimeoutException(RuntimeException): - CODE = 10008 - MESSAGE = "RPC timeout error" - - def __init__(self, timeout: Optional[float]): - super().__init__(self.MESSAGE) - self.timeout = timeout - - def data(self): - result = super().data() - result.update( - { - "timeout": self.timeout, - "message": f"RPC timed out after {self.timeout}s", - } - ) - return result - - -class RPCKilledException(RuntimeException): - CODE = 10009 - MESSAGE = "RPC process killed" - - def 
__init__(self, signum: int): - self.signum = signum - self.msg = f"RPC process killed by signal {self.signum}" - super().__init__(self.msg) - - def data(self): - return { - "signum": self.signum, - "message": self.msg, - } - - -class RPCCompiling(RuntimeException): - CODE = 10010 - MESSAGE = 'RPC server is compiling the project, call the "status" method for' " compile status" - - def __init__(self, msg: str = None, node=None): - if msg is None: - msg = "compile in progress" - super().__init__(msg, node) - - -class RPCLoadException(RuntimeException): - CODE = 10011 - MESSAGE = ( - 'RPC server failed to compile project, call the "status" method for' " compile status" - ) - - def __init__(self, cause: Dict[str, Any]): - self.cause = cause - self.msg = f'{self.MESSAGE}: {self.cause["message"]}' - super().__init__(self.msg) - - def data(self): - return {"cause": self.cause, "message": self.msg} - - -class DatabaseException(RuntimeException): +class DbtDatabaseError(DbtRuntimeError): CODE = 10003 MESSAGE = "Database Error" @@ -247,14 +182,14 @@ def process_stack(self): if hasattr(self.node, "build_path") and self.node.build_path: lines.append(f"compiled Code at {self.node.build_path}") - return lines + RuntimeException.process_stack(self) + return lines + DbtRuntimeError.process_stack(self) @property def type(self): return "Database" -class CompilationException(RuntimeException): +class CompilationError(DbtRuntimeError): CODE = 10004 MESSAGE = "Compilation Error" @@ -274,16 +209,16 @@ def _fix_dupe_msg(self, path_1: str, path_2: str, name: str, type_name: str) -> ) -class RecursionException(RuntimeException): +class RecursionError(DbtRuntimeError): pass -class ValidationException(RuntimeException): +class DbtValidationError(DbtRuntimeError): CODE = 10005 MESSAGE = "Validation Error" -class ParsingException(RuntimeException): +class ParsingError(DbtRuntimeError): CODE = 10015 MESSAGE = "Parsing Error" @@ -293,7 +228,7 @@ def type(self): # TODO: this isn't raised in the 
core codebase. Is it raised elsewhere? -class JSONValidationException(ValidationException): +class JSONValidationError(DbtValidationError): def __init__(self, typename, errors): self.typename = typename self.errors = errors @@ -303,11 +238,11 @@ def __init__(self, typename, errors): def __reduce__(self): # see https://stackoverflow.com/a/36342588 for why this is necessary - return (JSONValidationException, (self.typename, self.errors)) + return (JSONValidationError, (self.typename, self.errors)) -class IncompatibleSchemaException(RuntimeException): - def __init__(self, expected: str, found: Optional[str]): +class IncompatibleSchemaError(DbtRuntimeError): + def __init__(self, expected: str, found: Optional[str] = None): self.expected = expected self.found = found self.filename = "input file" @@ -334,11 +269,11 @@ def get_message(self) -> str: MESSAGE = "Incompatible Schema" -class JinjaRenderingException(CompilationException): +class JinjaRenderingError(CompilationError): pass -class UndefinedMacroException(CompilationException): +class UndefinedMacroError(CompilationError): def __str__(self, prefix: str = "! ") -> str: msg = super().__str__(prefix) return ( @@ -348,28 +283,16 @@ def __str__(self, prefix: str = "! ") -> str: ) -class UnknownAsyncIDException(Exception): - CODE = 10012 - MESSAGE = "RPC server got an unknown async ID" - - def __init__(self, task_id): - self.task_id = task_id - - def __str__(self): - return f"{self.MESSAGE}: {self.task_id}" - - -class AliasException(ValidationException): +class AliasError(DbtValidationError): pass -class DependencyException(Exception): - # this can happen due to raise_dependency_error and its callers +class DependencyError(Exception): CODE = 10006 MESSAGE = "Dependency Error" -class DbtConfigError(RuntimeException): +class DbtConfigError(DbtRuntimeError): CODE = 10007 MESSAGE = "DBT Configuration Error" @@ -387,7 +310,7 @@ def __str__(self, prefix="! 
") -> str: return f"{msg}\n\nError encountered in {self.path}" -class FailFastException(RuntimeException): +class FailFastError(DbtRuntimeError): CODE = 10013 MESSAGE = "FailFast Error" @@ -412,7 +335,7 @@ class DbtProfileError(DbtConfigError): pass -class SemverException(Exception): +class SemverError(Exception): def __init__(self, msg: str = None): self.msg = msg if msg is not None: @@ -421,22 +344,22 @@ def __init__(self, msg: str = None): super().__init__() -class VersionsNotCompatibleException(SemverException): +class VersionsNotCompatibleError(SemverError): pass -class NotImplementedException(Exception): +class NotImplementedError(Exception): def __init__(self, msg: str): self.msg = msg self.formatted_msg = f"ERROR: {self.msg}" super().__init__(self.formatted_msg) -class FailedToConnectException(DatabaseException): +class FailedToConnectError(DbtDatabaseError): pass -class CommandError(RuntimeException): +class CommandError(DbtRuntimeError): def __init__(self, cwd: str, cmd: List[str], msg: str = "Error running command"): cmd_scrubbed = list(scrub_secrets(cmd_txt, env_secrets()) for cmd_txt in cmd) super().__init__(msg) @@ -483,7 +406,7 @@ def __str__(self): return f"{self.msg} running: {self.cmd}" -class InvalidConnectionException(RuntimeException): +class InvalidConnectionError(DbtRuntimeError): def __init__(self, thread_id, known: List): self.thread_id = thread_id self.known = known @@ -492,17 +415,17 @@ def __init__(self, thread_id, known: List): ) -class InvalidSelectorException(RuntimeException): +class InvalidSelectorError(DbtRuntimeError): def __init__(self, name: str): self.name = name super().__init__(name) -class DuplicateYamlKeyException(CompilationException): +class DuplicateYamlKeyError(CompilationError): pass -class ConnectionException(Exception): +class ConnectionError(Exception): """ There was a problem with the connection that returned a bad response, timed out, or resulted in a file that is corrupt. 
@@ -512,7 +435,7 @@ class ConnectionException(Exception): # event level exception -class EventCompilationException(CompilationException): +class EventCompilationError(CompilationError): def __init__(self, msg: str, node): self.msg = scrub_secrets(msg, env_secrets()) self.node = node @@ -520,7 +443,7 @@ def __init__(self, msg: str, node): # compilation level exceptions -class GraphDependencyNotFound(CompilationException): +class GraphDependencyNotFoundError(CompilationError): def __init__(self, node, dependency: str): self.node = node self.dependency = dependency @@ -534,21 +457,21 @@ def get_message(self) -> str: # client level exceptions -class NoSupportedLanguagesFound(CompilationException): +class NoSupportedLanguagesFoundError(CompilationError): def __init__(self, node): self.node = node self.msg = f"No supported_languages found in materialization macro {self.node.name}" super().__init__(msg=self.msg) -class MaterializtionMacroNotUsed(CompilationException): +class MaterializtionMacroNotUsedError(CompilationError): def __init__(self, node): self.node = node self.msg = "Only materialization macros can be used with this function" super().__init__(msg=self.msg) -class UndefinedCompilation(CompilationException): +class UndefinedCompilationError(CompilationError): def __init__(self, name: str, node): self.name = name self.node = node @@ -556,20 +479,20 @@ def __init__(self, name: str, node): super().__init__(msg=self.msg) -class CaughtMacroExceptionWithNode(CompilationException): +class CaughtMacroErrorWithNodeError(CompilationError): def __init__(self, exc, node): self.exc = exc self.node = node super().__init__(msg=str(exc)) -class CaughtMacroException(CompilationException): +class CaughtMacroError(CompilationError): def __init__(self, exc): self.exc = exc super().__init__(msg=str(exc)) -class MacroNameNotString(CompilationException): +class MacroNameNotStringError(CompilationError): def __init__(self, kwarg_value): self.kwarg_value = kwarg_value 
super().__init__(msg=self.get_message()) @@ -582,7 +505,7 @@ def get_message(self) -> str: return msg -class MissingControlFlowStartTag(CompilationException): +class MissingControlFlowStartTagError(CompilationError): def __init__(self, tag, expected_tag: str, tag_parser): self.tag = tag self.expected_tag = expected_tag @@ -598,7 +521,7 @@ def get_message(self) -> str: return msg -class UnexpectedControlFlowEndTag(CompilationException): +class UnexpectedControlFlowEndTagError(CompilationError): def __init__(self, tag, expected_tag: str, tag_parser): self.tag = tag self.expected_tag = expected_tag @@ -614,7 +537,7 @@ def get_message(self) -> str: return msg -class UnexpectedMacroEOF(CompilationException): +class UnexpectedMacroEOFError(CompilationError): def __init__(self, expected_name: str, actual_name: str): self.expected_name = expected_name self.actual_name = actual_name @@ -625,7 +548,7 @@ def get_message(self) -> str: return msg -class MacroNamespaceNotString(CompilationException): +class MacroNamespaceNotStringError(CompilationError): def __init__(self, kwarg_type: Any): self.kwarg_type = kwarg_type super().__init__(msg=self.get_message()) @@ -638,7 +561,7 @@ def get_message(self) -> str: return msg -class NestedTags(CompilationException): +class NestedTagsError(CompilationError): def __init__(self, outer, inner): self.outer = outer self.inner = inner @@ -653,7 +576,7 @@ def get_message(self) -> str: return msg -class BlockDefinitionNotAtTop(CompilationException): +class BlockDefinitionNotAtTopError(CompilationError): def __init__(self, tag_parser, tag_start): self.tag_parser = tag_parser self.tag_start = tag_start @@ -668,7 +591,7 @@ def get_message(self) -> str: return msg -class MissingCloseTag(CompilationException): +class MissingCloseTagError(CompilationError): def __init__(self, block_type_name: str, linecount: int): self.block_type_name = block_type_name self.linecount = linecount @@ -679,7 +602,7 @@ def get_message(self) -> str: return msg -class 
GitCloningProblem(RuntimeException): +class UnknownGitCloningProblemError(DbtRuntimeError): def __init__(self, repo: str): self.repo = scrub_secrets(repo, env_secrets()) super().__init__(msg=self.get_message()) @@ -692,7 +615,7 @@ def get_message(self) -> str: return msg -class BadSpecError(InternalException): +class BadSpecError(DbtInternalError): def __init__(self, repo, revision, error): self.repo = repo self.revision = revision @@ -704,7 +627,7 @@ def get_message(self) -> str: return msg -class GitCloningError(InternalException): +class GitCloningError(DbtInternalError): def __init__(self, repo: str, revision: str, error: CommandResultError): self.repo = repo self.revision = revision @@ -727,7 +650,7 @@ class GitCheckoutError(BadSpecError): pass -class InvalidMaterializationArg(CompilationException): +class MaterializationArgError(CompilationError): def __init__(self, name: str, argument: str): self.name = name self.argument = argument @@ -738,7 +661,7 @@ def get_message(self) -> str: return msg -class OperationException(CompilationException): +class OperationError(CompilationError): def __init__(self, operation_name): self.operation_name = operation_name super().__init__(msg=self.get_message()) @@ -753,7 +676,7 @@ def get_message(self) -> str: return msg -class SymbolicLinkError(CompilationException): +class SymbolicLinkError(CompilationError): def __init__(self): super().__init__(msg=self.get_message()) @@ -768,23 +691,21 @@ def get_message(self) -> str: # context level exceptions - - -class ZipStrictWrongType(CompilationException): +class ZipStrictWrongTypeError(CompilationError): def __init__(self, exc): self.exc = exc msg = str(self.exc) super().__init__(msg=msg) -class SetStrictWrongType(CompilationException): +class SetStrictWrongTypeError(CompilationError): def __init__(self, exc): self.exc = exc msg = str(self.exc) super().__init__(msg=msg) -class LoadAgateTableValueError(CompilationException): +class LoadAgateTableValueError(CompilationError): def 
__init__(self, exc: ValueError, node): self.exc = exc self.node = node @@ -792,7 +713,7 @@ def __init__(self, exc: ValueError, node): super().__init__(msg=msg) -class LoadAgateTableNotSeed(CompilationException): +class LoadAgateTableNotSeedError(CompilationError): def __init__(self, resource_type, node): self.resource_type = resource_type self.node = node @@ -800,14 +721,14 @@ def __init__(self, resource_type, node): super().__init__(msg=msg) -class MacrosSourcesUnWriteable(CompilationException): +class MacrosSourcesUnWriteableError(CompilationError): def __init__(self, node): self.node = node msg = 'cannot "write" macros or sources' super().__init__(msg=msg) -class PackageNotInDeps(CompilationException): +class PackageNotInDepsError(CompilationError): def __init__(self, package_name: str, node): self.package_name = package_name self.node = node @@ -815,7 +736,7 @@ def __init__(self, package_name: str, node): super().__init__(msg=msg) -class OperationsCannotRefEphemeralNodes(CompilationException): +class OperationsCannotRefEphemeralNodesError(CompilationError): def __init__(self, target_name: str, node): self.target_name = target_name self.node = node @@ -823,7 +744,7 @@ def __init__(self, target_name: str, node): super().__init__(msg=msg) -class InvalidPersistDocsValueType(CompilationException): +class PersistDocsValueTypeError(CompilationError): def __init__(self, persist_docs: Any): self.persist_docs = persist_docs msg = ( @@ -833,14 +754,14 @@ def __init__(self, persist_docs: Any): super().__init__(msg=msg) -class InvalidInlineModelConfig(CompilationException): +class InlineModelConfigError(CompilationError): def __init__(self, node): self.node = node msg = "Invalid inline model config" super().__init__(msg=msg) -class ConflictingConfigKeys(CompilationException): +class ConflictingConfigKeysError(CompilationError): def __init__(self, oldkey: str, newkey: str, node): self.oldkey = oldkey self.newkey = newkey @@ -849,7 +770,7 @@ def __init__(self, oldkey: str, 
newkey: str, node): super().__init__(msg=msg) -class InvalidNumberSourceArgs(CompilationException): +class NumberSourceArgsError(CompilationError): def __init__(self, args, node): self.args = args self.node = node @@ -857,7 +778,7 @@ def __init__(self, args, node): super().__init__(msg=msg) -class RequiredVarNotFound(CompilationException): +class RequiredVarNotFoundError(CompilationError): def __init__(self, var_name: str, merged: Dict, node): self.var_name = var_name self.merged = merged @@ -877,14 +798,14 @@ def get_message(self) -> str: return msg -class PackageNotFoundForMacro(CompilationException): +class PackageNotFoundForMacroError(CompilationError): def __init__(self, package_name: str): self.package_name = package_name msg = f"Could not find package '{self.package_name}'" super().__init__(msg=msg) -class DisallowSecretEnvVar(ParsingException): +class SecretEnvVarLocationError(ParsingError): def __init__(self, env_var_name: str): self.env_var_name = env_var_name super().__init__(msg=self.get_message()) @@ -897,7 +818,7 @@ def get_message(self) -> str: return msg -class InvalidMacroArgType(CompilationException): +class MacroArgTypeError(CompilationError): def __init__(self, method_name: str, arg_name: str, got_value: Any, expected_type): self.method_name = method_name self.arg_name = arg_name @@ -915,7 +836,7 @@ def get_message(self) -> str: return msg -class InvalidBoolean(CompilationException): +class BooleanError(CompilationError): def __init__(self, return_value: Any, macro_name: str): self.return_value = return_value self.macro_name = macro_name @@ -929,7 +850,7 @@ def get_message(self) -> str: return msg -class RefInvalidArgs(CompilationException): +class RefArgsError(CompilationError): def __init__(self, node, args): self.node = node self.args = args @@ -940,7 +861,7 @@ def get_message(self) -> str: return msg -class MetricInvalidArgs(CompilationException): +class MetricArgsError(CompilationError): def __init__(self, node, args): self.node = node 
self.args = args @@ -951,7 +872,7 @@ def get_message(self) -> str: return msg -class RefBadContext(CompilationException): +class RefBadContextError(CompilationError): def __init__(self, node, args): self.node = node self.args = args @@ -980,7 +901,7 @@ def get_message(self) -> str: return msg -class InvalidDocArgs(CompilationException): +class DocArgsError(CompilationError): def __init__(self, node, args): self.node = node self.args = args @@ -991,8 +912,8 @@ def get_message(self) -> str: return msg -class DocTargetNotFound(CompilationException): - def __init__(self, node, target_doc_name: str, target_doc_package: Optional[str]): +class DocTargetNotFoundError(CompilationError): + def __init__(self, node, target_doc_name: str, target_doc_package: Optional[str] = None): self.node = node self.target_doc_name = target_doc_name self.target_doc_package = target_doc_package @@ -1006,7 +927,7 @@ def get_message(self) -> str: return msg -class MacroInvalidDispatchArg(CompilationException): +class MacroDispatchArgError(CompilationError): def __init__(self, macro_name: str): self.macro_name = macro_name super().__init__(msg=self.get_message()) @@ -1025,7 +946,7 @@ def get_message(self) -> str: return msg -class DuplicateMacroName(CompilationException): +class DuplicateMacroNameError(CompilationError): def __init__(self, node_1, node_2, namespace: str): self.node_1 = node_1 self.node_2 = node_2 @@ -1051,7 +972,7 @@ def get_message(self) -> str: # parser level exceptions -class InvalidDictParse(ParsingException): +class DictParseError(ParsingError): def __init__(self, exc: ValidationError, node): self.exc = exc self.node = node @@ -1059,7 +980,7 @@ def __init__(self, exc: ValidationError, node): super().__init__(msg=msg) -class InvalidConfigUpdate(ParsingException): +class ConfigUpdateError(ParsingError): def __init__(self, exc: ValidationError, node): self.exc = exc self.node = node @@ -1067,7 +988,7 @@ def __init__(self, exc: ValidationError, node): super().__init__(msg=msg) 
-class PythonParsingException(ParsingException): +class PythonParsingError(ParsingError): def __init__(self, exc: SyntaxError, node): self.exc = exc self.node = node @@ -1079,7 +1000,7 @@ def get_message(self) -> str: return msg -class PythonLiteralEval(ParsingException): +class PythonLiteralEvalError(ParsingError): def __init__(self, exc: Exception, node): self.exc = exc self.node = node @@ -1095,14 +1016,14 @@ def get_message(self) -> str: return msg -class InvalidModelConfig(ParsingException): +class ModelConfigError(ParsingError): def __init__(self, exc: ValidationError, node): self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) -class YamlParseListFailure(ParsingException): +class YamlParseListError(ParsingError): def __init__( self, path: str, @@ -1127,7 +1048,7 @@ def get_message(self) -> str: return msg -class YamlParseDictFailure(ParsingException): +class YamlParseDictError(ParsingError): def __init__( self, path: str, @@ -1152,8 +1073,13 @@ def get_message(self) -> str: return msg -class YamlLoadFailure(ParsingException): - def __init__(self, project_name: Optional[str], path: str, exc: ValidationException): +class YamlLoadError(ParsingError): + def __init__( + self, + path: str, + exc: DbtValidationError, + project_name: Optional[str] = None, + ): self.project_name = project_name self.path = path self.exc = exc @@ -1167,49 +1093,54 @@ def get_message(self) -> str: return msg -class InvalidTestConfig(ParsingException): +class TestConfigError(ParsingError): def __init__(self, exc: ValidationError, node): self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) -class InvalidSchemaConfig(ParsingException): +class SchemaConfigError(ParsingError): def __init__(self, exc: ValidationError, node): self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) -class InvalidSnapshopConfig(ParsingException): +class SnapshopConfigError(ParsingError): def 
__init__(self, exc: ValidationError, node): self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) -class SameKeyNested(CompilationException): +class SameKeyNestedError(CompilationError): def __init__(self): msg = "Test cannot have the same key at the top-level and in config" super().__init__(msg=msg) -class TestArgIncludesModel(CompilationException): +class TestArgIncludesModelError(CompilationError): def __init__(self): msg = 'Test arguments include "model", which is a reserved argument' super().__init__(msg=msg) -class UnexpectedTestNamePattern(CompilationException): +class UnexpectedTestNamePatternError(CompilationError): def __init__(self, test_name: str): self.test_name = test_name msg = f"Test name string did not match expected pattern: {self.test_name}" super().__init__(msg=msg) -class CustomMacroPopulatingConfigValues(CompilationException): +class CustomMacroPopulatingConfigValueError(CompilationError): def __init__( - self, target_name: str, column_name: Optional[str], name: str, key: str, err_msg: str + self, + target_name: str, + name: str, + key: str, + err_msg: str, + column_name: Optional[str] = None, ): self.target_name = target_name self.column_name = column_name @@ -1239,21 +1170,21 @@ def get_message(self) -> str: return msg -class TagsNotListOfStrings(CompilationException): +class TagsNotListOfStringsError(CompilationError): def __init__(self, tags: Any): self.tags = tags msg = f"got {self.tags} ({type(self.tags)}) for tags, expected a list of strings" super().__init__(msg=msg) -class TagNotString(CompilationException): +class TagNotStringError(CompilationError): def __init__(self, tag: Any): self.tag = tag msg = f"got {self.tag} ({type(self.tag)}) for tag, expected a str" super().__init__(msg=msg) -class TestNameNotString(ParsingException): +class TestNameNotStringError(ParsingError): def __init__(self, test_name: Any): self.test_name = test_name super().__init__(msg=self.get_message()) @@ -1264,7 +1195,7 
@@ def get_message(self) -> str: return msg -class TestArgsNotDict(ParsingException): +class TestArgsNotDictError(ParsingError): def __init__(self, test_args: Any): self.test_args = test_args super().__init__(msg=self.get_message()) @@ -1275,7 +1206,7 @@ def get_message(self) -> str: return msg -class TestDefinitionDictLength(ParsingException): +class TestDefinitionDictLengthError(ParsingError): def __init__(self, test): self.test = test super().__init__(msg=self.get_message()) @@ -1289,7 +1220,7 @@ def get_message(self) -> str: return msg -class TestInvalidType(ParsingException): +class TestTypeError(ParsingError): def __init__(self, test: Any): self.test = test super().__init__(msg=self.get_message()) @@ -1300,7 +1231,7 @@ def get_message(self) -> str: # This is triggered across multiple files -class EnvVarMissing(ParsingException): +class EnvVarMissingError(ParsingError): def __init__(self, var: str): self.var = var super().__init__(msg=self.get_message()) @@ -1310,7 +1241,7 @@ def get_message(self) -> str: return msg -class TargetNotFound(CompilationException): +class TargetNotFoundError(CompilationError): def __init__( self, node, @@ -1349,7 +1280,7 @@ def get_message(self) -> str: return msg -class DuplicateSourcePatchName(CompilationException): +class DuplicateSourcePatchNameError(CompilationError): def __init__(self, patch_1, patch_2): self.patch_1 = patch_1 self.patch_2 = patch_2 @@ -1371,7 +1302,7 @@ def get_message(self) -> str: return msg -class DuplicateMacroPatchName(CompilationException): +class DuplicateMacroPatchNameError(CompilationError): def __init__(self, patch_1, existing_patch_path): self.patch_1 = patch_1 self.existing_patch_path = existing_patch_path @@ -1392,7 +1323,7 @@ def get_message(self) -> str: # core level exceptions -class DuplicateAlias(AliasException): +class DuplicateAliasError(AliasError): def __init__(self, kwargs: Mapping[str, Any], aliases: Mapping[str, str], canonical_key: str): self.kwargs = kwargs self.aliases = aliases 
@@ -1409,9 +1340,7 @@ def get_message(self) -> str: # Postgres Exceptions - - -class UnexpectedDbReference(NotImplementedException): +class UnexpectedDbReferenceError(NotImplementedError): def __init__(self, adapter, database, expected): self.adapter = adapter self.database = database @@ -1423,7 +1352,7 @@ def get_message(self) -> str: return msg -class CrossDbReferenceProhibited(CompilationException): +class CrossDbReferenceProhibitedError(CompilationError): def __init__(self, adapter, exc_msg: str): self.adapter = adapter self.exc_msg = exc_msg @@ -1434,7 +1363,7 @@ def get_message(self) -> str: return msg -class IndexConfigNotDict(CompilationException): +class IndexConfigNotDictError(CompilationError): def __init__(self, raw_index: Any): self.raw_index = raw_index super().__init__(msg=self.get_message()) @@ -1448,7 +1377,7 @@ def get_message(self) -> str: return msg -class InvalidIndexConfig(CompilationException): +class IndexConfigError(CompilationError): def __init__(self, exc: TypeError): self.exc = exc super().__init__(msg=self.get_message()) @@ -1460,7 +1389,7 @@ def get_message(self) -> str: # adapters exceptions -class InvalidMacroResult(CompilationException): +class MacroResultError(CompilationError): def __init__(self, freshness_macro_name: str, table): self.freshness_macro_name = freshness_macro_name self.table = table @@ -1472,7 +1401,7 @@ def get_message(self) -> str: return msg -class SnapshotTargetNotSnapshotTable(CompilationException): +class SnapshotTargetNotSnapshotTableError(CompilationError): def __init__(self, missing: List): self.missing = missing super().__init__(msg=self.get_message()) @@ -1484,7 +1413,7 @@ def get_message(self) -> str: return msg -class SnapshotTargetIncomplete(CompilationException): +class SnapshotTargetIncompleteError(CompilationError): def __init__(self, extra: List, missing: List): self.extra = extra self.missing = missing @@ -1500,7 +1429,7 @@ def get_message(self) -> str: return msg -class 
RenameToNoneAttempted(CompilationException): +class RenameToNoneAttemptedError(CompilationError): def __init__(self, src_name: str, dst_name: str, name: str): self.src_name = src_name self.dst_name = dst_name @@ -1509,21 +1438,21 @@ def __init__(self, src_name: str, dst_name: str, name: str): super().__init__(msg=self.msg) -class NullRelationDropAttempted(CompilationException): +class NullRelationDropAttemptedError(CompilationError): def __init__(self, name: str): self.name = name self.msg = f"Attempted to drop a null relation for {self.name}" super().__init__(msg=self.msg) -class NullRelationCacheAttempted(CompilationException): +class NullRelationCacheAttemptedError(CompilationError): def __init__(self, name: str): self.name = name self.msg = f"Attempted to cache a null relation for {self.name}" super().__init__(msg=self.msg) -class InvalidQuoteConfigType(CompilationException): +class QuoteConfigTypeError(CompilationError): def __init__(self, quote_config: Any): self.quote_config = quote_config super().__init__(msg=self.get_message()) @@ -1536,7 +1465,7 @@ def get_message(self) -> str: return msg -class MultipleDatabasesNotAllowed(CompilationException): +class MultipleDatabasesNotAllowedError(CompilationError): def __init__(self, databases): self.databases = databases super().__init__(msg=self.get_message()) @@ -1546,14 +1475,14 @@ def get_message(self) -> str: return msg -class RelationTypeNull(CompilationException): +class RelationTypeNullError(CompilationError): def __init__(self, relation): self.relation = relation self.msg = f"Tried to drop relation {self.relation}, but its type is null." 
super().__init__(msg=self.msg) -class MaterializationNotAvailable(CompilationException): +class MaterializationNotAvailableError(CompilationError): def __init__(self, materialization, adapter_type: str): self.materialization = materialization self.adapter_type = adapter_type @@ -1564,7 +1493,7 @@ def get_message(self) -> str: return msg -class RelationReturnedMultipleResults(CompilationException): +class RelationReturnedMultipleResultsError(CompilationError): def __init__(self, kwargs: Mapping[str, Any], matches: List): self.kwargs = kwargs self.matches = matches @@ -1579,7 +1508,7 @@ def get_message(self) -> str: return msg -class ApproximateMatch(CompilationException): +class ApproximateMatchError(CompilationError): def __init__(self, target, relation): self.target = target self.relation = relation @@ -1597,8 +1526,7 @@ def get_message(self) -> str: return msg -# adapters exceptions -class UnexpectedNull(DatabaseException): +class UnexpectedNullError(DbtDatabaseError): def __init__(self, field_name: str, source): self.field_name = field_name self.source = source @@ -1609,7 +1537,7 @@ def __init__(self, field_name: str, source): super().__init__(msg) -class UnexpectedNonTimestamp(DatabaseException): +class UnexpectedNonTimestampError(DbtDatabaseError): def __init__(self, field_name: str, source, dt: Any): self.field_name = field_name self.source = source @@ -1622,7 +1550,7 @@ def __init__(self, field_name: str, source, dt: Any): # deps exceptions -class MultipleVersionGitDeps(DependencyException): +class MultipleVersionGitDepsError(DependencyError): def __init__(self, git: str, requested): self.git = git self.requested = requested @@ -1633,7 +1561,7 @@ def __init__(self, git: str, requested): super().__init__(msg) -class DuplicateProjectDependency(DependencyException): +class DuplicateProjectDependencyError(DependencyError): def __init__(self, project_name: str): self.project_name = project_name msg = ( @@ -1643,7 +1571,7 @@ def __init__(self, project_name: str): 
super().__init__(msg) -class DuplicateDependencyToRoot(DependencyException): +class DuplicateDependencyToRootError(DependencyError): def __init__(self, project_name: str): self.project_name = project_name msg = ( @@ -1654,7 +1582,7 @@ def __init__(self, project_name: str): super().__init__(msg) -class MismatchedDependencyTypes(DependencyException): +class MismatchedDependencyTypeError(DependencyError): def __init__(self, new, old): self.new = new self.old = old @@ -1665,7 +1593,7 @@ def __init__(self, new, old): super().__init__(msg) -class PackageVersionNotFound(DependencyException): +class PackageVersionNotFoundError(DependencyError): def __init__( self, package_name: str, @@ -1701,7 +1629,7 @@ def get_message(self) -> str: return msg -class PackageNotFound(DependencyException): +class PackageNotFoundError(DependencyError): def __init__(self, package_name: str): self.package_name = package_name msg = f"Package {self.package_name} was not found in the package index" @@ -1709,37 +1637,35 @@ def __init__(self, package_name: str): # config level exceptions - - -class ProfileConfigInvalid(DbtProfileError): +class ProfileConfigError(DbtProfileError): def __init__(self, exc: ValidationError): self.exc = exc msg = self.validator_error_message(self.exc) super().__init__(msg=msg) -class ProjectContractInvalid(DbtProjectError): +class ProjectContractError(DbtProjectError): def __init__(self, exc: ValidationError): self.exc = exc msg = self.validator_error_message(self.exc) super().__init__(msg=msg) -class ProjectContractBroken(DbtProjectError): +class ProjectContractBrokenError(DbtProjectError): def __init__(self, exc: ValidationError): self.exc = exc msg = self.validator_error_message(self.exc) super().__init__(msg=msg) -class ConfigContractBroken(DbtProjectError): +class ConfigContractBrokenError(DbtProjectError): def __init__(self, exc: ValidationError): self.exc = exc msg = self.validator_error_message(self.exc) super().__init__(msg=msg) -class 
NonUniquePackageName(CompilationException): +class NonUniquePackageNameError(CompilationError): def __init__(self, project_name: str): self.project_name = project_name super().__init__(msg=self.get_message()) @@ -1754,7 +1680,7 @@ def get_message(self) -> str: return msg -class UninstalledPackagesFound(CompilationException): +class UninstalledPackagesFoundError(CompilationError): def __init__( self, count_packages_specified: int, @@ -1777,7 +1703,7 @@ def get_message(self) -> str: return msg -class VarsArgNotYamlDict(CompilationException): +class VarsArgNotYamlDictError(CompilationError): def __init__(self, var_type): self.var_type = var_type super().__init__(msg=self.get_message()) @@ -1790,7 +1716,7 @@ def get_message(self) -> str: # contracts level -class UnrecognizedCredentialType(CompilationException): +class UnrecognizedCredentialTypeError(CompilationError): def __init__(self, typename: str, supported_types: List): self.typename = typename self.supported_types = supported_types @@ -1803,7 +1729,7 @@ def get_message(self) -> str: return msg -class DuplicateMacroInPackage(CompilationException): +class DuplicateMacroInPackageError(CompilationError): def __init__(self, macro, macro_mapping: Mapping): self.macro = macro self.macro_mapping = macro_mapping @@ -1832,7 +1758,7 @@ def get_message(self) -> str: return msg -class DuplicateMaterializationName(CompilationException): +class DuplicateMaterializationNameError(CompilationError): def __init__(self, macro, other_macro): self.macro = macro self.other_macro = other_macro @@ -1852,7 +1778,7 @@ def get_message(self) -> str: # jinja exceptions -class PatchTargetNotFound(CompilationException): +class PatchTargetNotFoundError(CompilationError): def __init__(self, patches: Dict): self.patches = patches super().__init__(msg=self.get_message()) @@ -1866,7 +1792,7 @@ def get_message(self) -> str: return msg -class MacroNotFound(CompilationException): +class MacroNotFoundError(CompilationError): def __init__(self, node, 
target_macro_id: str): self.node = node self.target_macro_id = target_macro_id @@ -1875,7 +1801,7 @@ def __init__(self, node, target_macro_id: str): super().__init__(msg=msg) -class MissingConfig(CompilationException): +class MissingConfigError(CompilationError): def __init__(self, unique_id: str, name: str): self.unique_id = unique_id self.name = name @@ -1885,7 +1811,7 @@ def __init__(self, unique_id: str, name: str): super().__init__(msg=msg) -class MissingMaterialization(CompilationException): +class MissingMaterializationError(CompilationError): def __init__(self, materialization, adapter_type): self.materialization = materialization self.adapter_type = adapter_type @@ -1902,7 +1828,7 @@ def get_message(self) -> str: return msg -class MissingRelation(CompilationException): +class MissingRelationError(CompilationError): def __init__(self, relation, model=None): self.relation = relation self.model = model @@ -1910,7 +1836,7 @@ def __init__(self, relation, model=None): super().__init__(msg=msg) -class AmbiguousAlias(CompilationException): +class AmbiguousAliasError(CompilationError): def __init__(self, node_1, node_2, duped_name=None): self.node_1 = node_1 self.node_2 = node_2 @@ -1931,7 +1857,7 @@ def get_message(self) -> str: return msg -class AmbiguousCatalogMatch(CompilationException): +class AmbiguousCatalogMatchError(CompilationError): def __init__(self, unique_id: str, match_1, match_2): self.unique_id = unique_id self.match_1 = match_1 @@ -1955,14 +1881,14 @@ def get_message(self) -> str: return msg -class CacheInconsistency(InternalException): +class CacheInconsistencyError(DbtInternalError): def __init__(self, msg: str): self.msg = msg formatted_msg = f"Cache inconsistency detected: {self.msg}" super().__init__(msg=formatted_msg) -class NewNameAlreadyInCache(CacheInconsistency): +class NewNameAlreadyInCacheError(CacheInconsistencyError): def __init__(self, old_key: str, new_key: str): self.old_key = old_key self.new_key = new_key @@ -1972,21 +1898,21 @@ 
def __init__(self, old_key: str, new_key: str): super().__init__(msg) -class ReferencedLinkNotCached(CacheInconsistency): +class ReferencedLinkNotCachedError(CacheInconsistencyError): def __init__(self, referenced_key: str): self.referenced_key = referenced_key msg = f"in add_link, referenced link key {self.referenced_key} not in cache!" super().__init__(msg) -class DependentLinkNotCached(CacheInconsistency): +class DependentLinkNotCachedError(CacheInconsistencyError): def __init__(self, dependent_key: str): self.dependent_key = dependent_key msg = f"in add_link, dependent link key {self.dependent_key} not in cache!" super().__init__(msg) -class TruncatedModelNameCausedCollision(CacheInconsistency): +class TruncatedModelNameCausedCollisionError(CacheInconsistencyError): def __init__(self, new_key, relations: Dict): self.new_key = new_key self.relations = relations @@ -2013,14 +1939,14 @@ def get_message(self) -> str: return msg -class NoneRelationFound(CacheInconsistency): +class NoneRelationFoundError(CacheInconsistencyError): def __init__(self): msg = "in get_relations, a None relation was found in the cache!" 
super().__init__(msg) # this is part of the context and also raised in dbt.contracts.relation.py -class DataclassNotDict(CompilationException): +class DataclassNotDictError(CompilationError): def __init__(self, obj: Any): self.obj = obj super().__init__(msg=self.get_message()) @@ -2034,7 +1960,7 @@ def get_message(self) -> str: return msg -class DependencyNotFound(CompilationException): +class DependencyNotFoundError(CompilationError): def __init__(self, node, node_description, required_pkg): self.node = node self.node_description = node_description @@ -2051,7 +1977,7 @@ def get_message(self) -> str: return msg -class DuplicatePatchPath(CompilationException): +class DuplicatePatchPathError(CompilationError): def __init__(self, patch_1, existing_patch_path): self.patch_1 = patch_1 self.existing_patch_path = existing_patch_path @@ -2073,8 +1999,8 @@ def get_message(self) -> str: return msg -# should this inherit ParsingException instead? -class DuplicateResourceName(CompilationException): +# should this inherit ParsingError instead? 
+class DuplicateResourceNameError(CompilationError): def __init__(self, node_1, node_2): self.node_1 = node_1 self.node_2 = node_2 @@ -2126,7 +2052,7 @@ def get_message(self) -> str: return msg -class InvalidPropertyYML(CompilationException): +class PropertyYMLError(CompilationError): def __init__(self, path: str, issue: str): self.path = path self.issue = issue @@ -2141,14 +2067,14 @@ def get_message(self) -> str: return msg -class PropertyYMLMissingVersion(InvalidPropertyYML): +class PropertyYMLMissingVersionError(PropertyYMLError): def __init__(self, path: str): self.path = path self.issue = f"the yml property file {self.path} is missing a version tag" super().__init__(self.path, self.issue) -class PropertyYMLVersionNotInt(InvalidPropertyYML): +class PropertyYMLVersionNotIntError(PropertyYMLError): def __init__(self, path: str, version: Any): self.path = path self.version = version @@ -2159,7 +2085,7 @@ def __init__(self, path: str, version: Any): super().__init__(self.path, self.issue) -class PropertyYMLInvalidTag(InvalidPropertyYML): +class PropertyYMLInvalidTagError(PropertyYMLError): def __init__(self, path: str, version: int): self.path = path self.version = version @@ -2167,7 +2093,7 @@ def __init__(self, path: str, version: int): super().__init__(self.path, self.issue) -class RelationWrongType(CompilationException): +class RelationWrongTypeError(CompilationError): def __init__(self, relation, expected_type, model=None): self.relation = relation self.expected_type = expected_type @@ -2185,6 +2111,83 @@ def get_message(self) -> str: return msg +# not modifying these since rpc should be deprecated soon +class UnknownAsyncIDException(Exception): + CODE = 10012 + MESSAGE = "RPC server got an unknown async ID" + + def __init__(self, task_id): + self.task_id = task_id + + def __str__(self): + return f"{self.MESSAGE}: {self.task_id}" + + +class RPCFailureResult(DbtRuntimeError): + CODE = 10002 + MESSAGE = "RPC execution error" + + +class 
RPCTimeoutException(DbtRuntimeError): + CODE = 10008 + MESSAGE = "RPC timeout error" + + def __init__(self, timeout: Optional[float] = None): + super().__init__(self.MESSAGE) + self.timeout = timeout + + def data(self): + result = super().data() + result.update( + { + "timeout": self.timeout, + "message": f"RPC timed out after {self.timeout}s", + } + ) + return result + + +class RPCKilledException(DbtRuntimeError): + CODE = 10009 + MESSAGE = "RPC process killed" + + def __init__(self, signum: int): + self.signum = signum + self.msg = f"RPC process killed by signal {self.signum}" + super().__init__(self.msg) + + def data(self): + return { + "signum": self.signum, + "message": self.msg, + } + + +class RPCCompiling(DbtRuntimeError): + CODE = 10010 + MESSAGE = 'RPC server is compiling the project, call the "status" method for' " compile status" + + def __init__(self, msg: str = None, node=None): + if msg is None: + msg = "compile in progress" + super().__init__(msg, node) + + +class RPCLoadException(DbtRuntimeError): + CODE = 10011 + MESSAGE = ( + 'RPC server failed to compile project, call the "status" method for' " compile status" + ) + + def __init__(self, cause: Dict[str, Any]): + self.cause = cause + self.msg = f'{self.MESSAGE}: {self.cause["message"]}' + super().__init__(self.msg) + + def data(self): + return {"cause": self.cause, "message": self.msg} + + # These are copies of what's in dbt/context/exceptions_jinja.py to not immediately break adapters # utilizing these functions as exceptions. These are direct copies to avoid circular imports. # They will be removed in 1 (or 2?) versions. Issue to be created to ensure it happens. 
@@ -2207,147 +2210,147 @@ def warn(msg, node=None): @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MissingConfig"), + suggested_action=SUGGESTED_ACTION.format(exception="MissingConfigError"), reason=REASON, ) def missing_config(model, name) -> NoReturn: - raise MissingConfig(unique_id=model.unique_id, name=name) + raise MissingConfigError(unique_id=model.unique_id, name=name) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MissingMaterialization"), + suggested_action=SUGGESTED_ACTION.format(exception="MissingMaterializationError"), reason=REASON, ) def missing_materialization(model, adapter_type) -> NoReturn: materialization = model.config.materialized - raise MissingMaterialization(materialization=materialization, adapter_type=adapter_type) + raise MissingMaterializationError(materialization=materialization, adapter_type=adapter_type) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MissingRelation"), + suggested_action=SUGGESTED_ACTION.format(exception="MissingRelationError"), reason=REASON, ) def missing_relation(relation, model=None) -> NoReturn: - raise MissingRelation(relation, model) + raise MissingRelationError(relation, model) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="AmbiguousAlias"), + suggested_action=SUGGESTED_ACTION.format(exception="AmbiguousAliasError"), reason=REASON, ) def raise_ambiguous_alias(node_1, node_2, duped_name=None) -> NoReturn: - raise AmbiguousAlias(node_1, node_2, duped_name) + raise AmbiguousAliasError(node_1, node_2, duped_name) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="AmbiguousCatalogMatch"), + suggested_action=SUGGESTED_ACTION.format(exception="AmbiguousCatalogMatchError"), reason=REASON, ) def raise_ambiguous_catalog_match(unique_id, match_1, match_2) -> NoReturn: - raise 
AmbiguousCatalogMatch(unique_id, match_1, match_2) + raise AmbiguousCatalogMatchError(unique_id, match_1, match_2) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="CacheInconsistency"), + suggested_action=SUGGESTED_ACTION.format(exception="CacheInconsistencyError"), reason=REASON, ) def raise_cache_inconsistent(message) -> NoReturn: - raise CacheInconsistency(message) + raise CacheInconsistencyError(message) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DataclassNotDict"), + suggested_action=SUGGESTED_ACTION.format(exception="DataclassNotDictError"), reason=REASON, ) def raise_dataclass_not_dict(obj) -> NoReturn: - raise DataclassNotDict(obj) + raise DataclassNotDictError(obj) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="CompilationException"), + suggested_action=SUGGESTED_ACTION.format(exception="CompilationError"), reason=REASON, ) def raise_compiler_error(msg, node=None) -> NoReturn: - raise CompilationException(msg, node) + raise CompilationError(msg, node) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DatabaseException"), + suggested_action=SUGGESTED_ACTION.format(exception="DbtDatabaseError"), reason=REASON, ) def raise_database_error(msg, node=None) -> NoReturn: - raise DatabaseException(msg, node) + raise DbtDatabaseError(msg, node) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DependencyNotFound"), + suggested_action=SUGGESTED_ACTION.format(exception="DependencyNotFoundError"), reason=REASON, ) def raise_dep_not_found(node, node_description, required_pkg) -> NoReturn: - raise DependencyNotFound(node, node_description, required_pkg) + raise DependencyNotFoundError(node, node_description, required_pkg) @deprecated( version=DEPRECATION_VERSION, - 
suggested_action=SUGGESTED_ACTION.format(exception="DependencyException"), + suggested_action=SUGGESTED_ACTION.format(exception="DependencyError"), reason=REASON, ) def raise_dependency_error(msg) -> NoReturn: - raise DependencyException(scrub_secrets(msg, env_secrets())) + raise DependencyError(scrub_secrets(msg, env_secrets())) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicatePatchPath"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicatePatchPathError"), reason=REASON, ) def raise_duplicate_patch_name(patch_1, existing_patch_path) -> NoReturn: - raise DuplicatePatchPath(patch_1, existing_patch_path) + raise DuplicatePatchPathError(patch_1, existing_patch_path) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicateResourceName"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicateResourceNameError"), reason=REASON, ) def raise_duplicate_resource_name(node_1, node_2) -> NoReturn: - raise DuplicateResourceName(node_1, node_2) + raise DuplicateResourceNameError(node_1, node_2) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="InvalidPropertyYML"), + suggested_action=SUGGESTED_ACTION.format(exception="PropertyYMLError"), reason=REASON, ) def raise_invalid_property_yml_version(path, issue) -> NoReturn: - raise InvalidPropertyYML(path, issue) + raise PropertyYMLError(path, issue) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="NotImplementedException"), + suggested_action=SUGGESTED_ACTION.format(exception="NotImplementedError"), reason=REASON, ) def raise_not_implemented(msg) -> NoReturn: - raise NotImplementedException(msg) + raise NotImplementedError(msg) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="RelationWrongType"), + 
suggested_action=SUGGESTED_ACTION.format(exception="RelationWrongTypeError"), reason=REASON, ) def relation_wrong_type(relation, expected_type, model=None) -> NoReturn: - raise RelationWrongType(relation, expected_type, model) + raise RelationWrongTypeError(relation, expected_type, model) # these were implemented in core so deprecating here by calling the new exception directly @@ -2355,81 +2358,81 @@ def relation_wrong_type(relation, expected_type, model=None) -> NoReturn: @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicateAlias"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicateAliasError"), reason=REASON, ) def raise_duplicate_alias( kwargs: Mapping[str, Any], aliases: Mapping[str, str], canonical_key: str ) -> NoReturn: - raise DuplicateAlias(kwargs, aliases, canonical_key) + raise DuplicateAliasError(kwargs, aliases, canonical_key) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicateSourcePatchName"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicateSourcePatchNameError"), reason=REASON, ) def raise_duplicate_source_patch_name(patch_1, patch_2): - raise DuplicateSourcePatchName(patch_1, patch_2) + raise DuplicateSourcePatchNameError(patch_1, patch_2) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicateMacroPatchName"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicateMacroPatchNameError"), reason=REASON, ) def raise_duplicate_macro_patch_name(patch_1, existing_patch_path): - raise DuplicateMacroPatchName(patch_1, existing_patch_path) + raise DuplicateMacroPatchNameError(patch_1, existing_patch_path) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DuplicateMacroName"), + suggested_action=SUGGESTED_ACTION.format(exception="DuplicateMacroNameError"), reason=REASON, ) def raise_duplicate_macro_name(node_1, node_2, 
namespace) -> NoReturn: - raise DuplicateMacroName(node_1, node_2, namespace) + raise DuplicateMacroNameError(node_1, node_2, namespace) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="ApproximateMatch"), + suggested_action=SUGGESTED_ACTION.format(exception="ApproximateMatchError"), reason=REASON, ) def approximate_relation_match(target, relation): - raise ApproximateMatch(target, relation) + raise ApproximateMatchError(target, relation) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="RelationReturnedMultipleResults"), + suggested_action=SUGGESTED_ACTION.format(exception="RelationReturnedMultipleResultsError"), reason=REASON, ) def get_relation_returned_multiple_results(kwargs, matches): - raise RelationReturnedMultipleResults(kwargs, matches) + raise RelationReturnedMultipleResultsError(kwargs, matches) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="OperationException"), + suggested_action=SUGGESTED_ACTION.format(exception="OperationError"), reason=REASON, ) def system_error(operation_name): - raise OperationException(operation_name) + raise OperationError(operation_name) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="InvalidMaterializationArg"), + suggested_action=SUGGESTED_ACTION.format(exception="InvalidMaterializationArgError"), reason=REASON, ) def invalid_materialization_argument(name, argument): - raise InvalidMaterializationArg(name, argument) + raise MaterializationArgError(name, argument) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="BadSpecException"), + suggested_action=SUGGESTED_ACTION.format(exception="BadSpecError"), reason=REASON, ) def bad_package_spec(repo, spec, error_message): @@ -2447,34 +2450,34 @@ def raise_git_cloning_error(error: CommandResultError) -> NoReturn: @deprecated( 
version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="GitCloningProblem"), + suggested_action=SUGGESTED_ACTION.format(exception="UnknownGitCloningProblemError"), reason=REASON, ) def raise_git_cloning_problem(repo) -> NoReturn: - raise GitCloningProblem(repo) + raise UnknownGitCloningProblemError(repo) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MacroInvalidDispatchArg"), + suggested_action=SUGGESTED_ACTION.format(exception="MacroDispatchArgError"), reason=REASON, ) def macro_invalid_dispatch_arg(macro_name) -> NoReturn: - raise MacroInvalidDispatchArg(macro_name) + raise MacroDispatchArgError(macro_name) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="GraphDependencyNotFound"), + suggested_action=SUGGESTED_ACTION.format(exception="GraphDependencyNotFoundError"), reason=REASON, ) def dependency_not_found(node, dependency): - raise GraphDependencyNotFound(node, dependency) + raise GraphDependencyNotFoundError(node, dependency) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="TargetNotFound"), + suggested_action=SUGGESTED_ACTION.format(exception="TargetNotFoundError"), reason=REASON, ) def target_not_found( @@ -2484,7 +2487,7 @@ def target_not_found( target_package: Optional[str] = None, disabled: Optional[bool] = None, ) -> NoReturn: - raise TargetNotFound( + raise TargetNotFoundError( node=node, target_name=target_name, target_kind=target_kind, @@ -2495,141 +2498,151 @@ def target_not_found( @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DocTargetNotFound"), + suggested_action=SUGGESTED_ACTION.format(exception="DocTargetNotFoundError"), reason=REASON, ) def doc_target_not_found( - model, target_doc_name: str, target_doc_package: Optional[str] + model, target_doc_name: str, target_doc_package: Optional[str] = None ) -> NoReturn: - raise 
DocTargetNotFound( + raise DocTargetNotFoundError( node=model, target_doc_name=target_doc_name, target_doc_package=target_doc_package ) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="InvalidDocArgs"), + suggested_action=SUGGESTED_ACTION.format(exception="DocArgsError"), reason=REASON, ) def doc_invalid_args(model, args) -> NoReturn: - raise InvalidDocArgs(node=model, args=args) + raise DocArgsError(node=model, args=args) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="RefBadContext"), + suggested_action=SUGGESTED_ACTION.format(exception="RefBadContextError"), reason=REASON, ) def ref_bad_context(model, args) -> NoReturn: - raise RefBadContext(node=model, args=args) + raise RefBadContextError(node=model, args=args) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MetricInvalidArgs"), + suggested_action=SUGGESTED_ACTION.format(exception="MetricArgsError"), reason=REASON, ) def metric_invalid_args(model, args) -> NoReturn: - raise MetricInvalidArgs(node=model, args=args) + raise MetricArgsError(node=model, args=args) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="RefInvalidArgs"), + suggested_action=SUGGESTED_ACTION.format(exception="RefArgsError"), reason=REASON, ) def ref_invalid_args(model, args) -> NoReturn: - raise RefInvalidArgs(node=model, args=args) + raise RefArgsError(node=model, args=args) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="InvalidBoolean"), + suggested_action=SUGGESTED_ACTION.format(exception="BooleanError"), reason=REASON, ) def invalid_bool_error(got_value, macro_name) -> NoReturn: - raise InvalidBoolean(return_value=got_value, macro_name=macro_name) + raise BooleanError(return_value=got_value, macro_name=macro_name) @deprecated( version=DEPRECATION_VERSION, - 
suggested_action=SUGGESTED_ACTION.format(exception="MacroArgTypeError"), + suggested_action=SUGGESTED_ACTION.format(exception="MacroArgTypeError"), reason=REASON, ) def invalid_type_error(method_name, arg_name, got_value, expected_type) -> NoReturn: - """Raise a CompilationException when an adapter method available to macros + """Raise a MacroArgTypeError when an adapter method available to macros has changed. """ - raise InvalidMacroArgType(method_name, arg_name, got_value, expected_type) + raise MacroArgTypeError(method_name, arg_name, got_value, expected_type) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="DisallowSecretEnvVar"), + suggested_action=SUGGESTED_ACTION.format(exception="SecretEnvVarLocationError"), reason=REASON, ) def disallow_secret_env_var(env_var_name) -> NoReturn: """Raise an error when a secret env var is referenced outside allowed rendering contexts""" - raise DisallowSecretEnvVar(env_var_name) + raise SecretEnvVarLocationError(env_var_name) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="ParsingException"), + suggested_action=SUGGESTED_ACTION.format(exception="ParsingError"), reason=REASON, ) def raise_parsing_error(msg, node=None) -> NoReturn: - raise ParsingException(msg, node) + raise ParsingError(msg, node) +# These are the exception functions that were not called within dbt-core but will remain +# here deprecated to give a chance for adapters to rework @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="CompilationException"), + suggested_action=SUGGESTED_ACTION.format(exception="UnrecognizedCredentialTypeError"), reason=REASON, ) def raise_unrecognized_credentials_type(typename, supported_types): - raise UnrecognizedCredentialType(typename, supported_types) + raise UnrecognizedCredentialTypeError(typename, supported_types) @deprecated( version=DEPRECATION_VERSION, -
suggested_action=SUGGESTED_ACTION.format(exception="CompilationException"), + suggested_action=SUGGESTED_ACTION.format(exception="PatchTargetNotFoundError"), reason=REASON, ) def raise_patch_targets_not_found(patches): - raise PatchTargetNotFound(patches) + raise PatchTargetNotFoundError(patches) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="RelationReturnedMultipleResults"), + suggested_action=SUGGESTED_ACTION.format(exception="RelationReturnedMultipleResultsError"), reason=REASON, ) def multiple_matching_relations(kwargs, matches): - raise RelationReturnedMultipleResults(kwargs, matches) + raise RelationReturnedMultipleResultsError(kwargs, matches) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MaterializationNotAvailable"), + suggested_action=SUGGESTED_ACTION.format(exception="MaterializationNotAvailableError"), reason=REASON, ) def materialization_not_available(model, adapter_type): materialization = model.config.materialized - raise MaterializationNotAvailable(materialization=materialization, adapter_type=adapter_type) + raise MaterializationNotAvailableError( + materialization=materialization, adapter_type=adapter_type + ) @deprecated( version=DEPRECATION_VERSION, - suggested_action=SUGGESTED_ACTION.format(exception="MacroNotFound"), + suggested_action=SUGGESTED_ACTION.format(exception="MacroNotFoundError"), reason=REASON, ) def macro_not_found(model, target_macro_id): - raise MacroNotFound(node=model, target_macro_id=target_macro_id) + raise MacroNotFoundError(node=model, target_macro_id=target_macro_id) # adapters use this to format messages. it should be deprecated but live on for now +# TODO: What should the message here be? 
+@deprecated( + version=DEPRECATION_VERSION, + suggested_action="Format this message in the adapter", + reason="`validator_error_message` is now a method on DbtRuntimeError", +) def validator_error_message(exc): """Given a dbt.dataclass_schema.ValidationError (which is basically a jsonschema.ValidationError), return the relevant parts as a string diff --git a/core/dbt/graph/cli.py b/core/dbt/graph/cli.py index 2ae0d814327..a5581ed1d78 100644 --- a/core/dbt/graph/cli.py +++ b/core/dbt/graph/cli.py @@ -7,7 +7,7 @@ from typing import Dict, List, Optional, Tuple, Any, Union from dbt.contracts.selection import SelectorDefinition, SelectorFile -from dbt.exceptions import InternalException, ValidationException +from dbt.exceptions import DbtInternalError, DbtValidationError from .selector_spec import ( SelectionUnion, @@ -94,15 +94,15 @@ def parse_difference( def _get_list_dicts(dct: Dict[str, Any], key: str) -> List[RawDefinition]: result: List[RawDefinition] = [] if key not in dct: - raise InternalException(f"Expected to find key {key} in dict, only found {list(dct)}") + raise DbtInternalError(f"Expected to find key {key} in dict, only found {list(dct)}") values = dct[key] if not isinstance(values, list): - raise ValidationException(f'Invalid value for key "{key}". Expected a list.') + raise DbtValidationError(f'Invalid value for key "{key}". Expected a list.') for value in values: if isinstance(value, dict): for value_key in value: if not isinstance(value_key, str): - raise ValidationException( + raise DbtValidationError( f'Expected all keys to "{key}" dict to be strings, ' f'but "{value_key}" is a "{type(value_key)}"' ) @@ -110,7 +110,7 @@ def _get_list_dicts(dct: Dict[str, Any], key: str) -> List[RawDefinition]: elif isinstance(value, str): result.append(value) else: - raise ValidationException( + raise DbtValidationError( f'Invalid value type {type(value)} in key "{key}", expected ' f"dict or str (value: {value}).
) @@ -140,7 +140,7 @@ def _parse_include_exclude_subdefs( # do not allow multiple exclude: defs at the same level if diff_arg is not None: yaml_sel_cfg = yaml.dump(definition) - raise ValidationException( + raise DbtValidationError( f"You cannot provide multiple exclude arguments to the " f"same selector set operator:\n{yaml_sel_cfg}" ) @@ -182,7 +182,7 @@ def parse_dict_definition(definition: Dict[str, Any], result={}) -> SelectionSpe key = list(definition)[0] value = definition[key] if not isinstance(key, str): - raise ValidationException( + raise DbtValidationError( f'Expected definition key to be a "str", got one of type ' f'"{type(key)}" ({key})' ) dct = { @@ -192,7 +192,7 @@ def parse_dict_definition(definition: Dict[str, Any], result={}) -> SelectionSpe elif definition.get("method") == "selector": sel_def = definition.get("value") if sel_def not in result: - raise ValidationException(f"Existing selector definition for {sel_def} not found.") + raise DbtValidationError(f"Existing selector definition for {sel_def} not found.") return result[definition["value"]]["definition"] elif "method" in definition and "value" in definition: dct = definition @@ -200,7 +200,7 @@ def parse_dict_definition(definition: Dict[str, Any], result={}) -> SelectionSpe diff_arg = _parse_exclusions(definition, result=result) dct = {k: v for k, v in dct.items() if k != "exclude"} else: - raise ValidationException( + raise DbtValidationError( f'Expected either 1 key or else "method" ' f'and "value" keys, but got {list(definition)}' ) @@ -226,7 +226,7 @@ def parse_from_definition( and len(definition) > 1 ): keys = ",".join(definition.keys()) - raise ValidationException( + raise DbtValidationError( f"Only a single 'union' or 'intersection' key is allowed " f"in a root level selector definition; found {keys}." 
) @@ -239,7 +239,7 @@ def parse_from_definition( elif isinstance(definition, dict): return parse_dict_definition(definition, result=result) else: - raise ValidationException( + raise DbtValidationError( f"Expected to find union, intersection, str or dict, instead " f"found {type(definition)}: {definition}" ) diff --git a/core/dbt/graph/graph.py b/core/dbt/graph/graph.py index 2dda596e073..9c20750cd54 100644 --- a/core/dbt/graph/graph.py +++ b/core/dbt/graph/graph.py @@ -2,7 +2,7 @@ from itertools import product import networkx as nx # type: ignore -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError UniqueId = NewType("UniqueId", str) @@ -27,7 +27,7 @@ def __iter__(self) -> Iterator[UniqueId]: def ancestors(self, node: UniqueId, max_depth: Optional[int] = None) -> Set[UniqueId]: """Returns all nodes having a path to `node` in `graph`""" if not self.graph.has_node(node): - raise InternalException(f"Node {node} not found in the graph!") + raise DbtInternalError(f"Node {node} not found in the graph!") return { child for _, child in nx.bfs_edges(self.graph, node, reverse=True, depth_limit=max_depth) @@ -36,7 +36,7 @@ def ancestors(self, node: UniqueId, max_depth: Optional[int] = None) -> Set[Uniq def descendants(self, node: UniqueId, max_depth: Optional[int] = None) -> Set[UniqueId]: """Returns all nodes reachable from `node` in `graph`""" if not self.graph.has_node(node): - raise InternalException(f"Node {node} not found in the graph!") + raise DbtInternalError(f"Node {node} not found in the graph!") return {child for _, child in nx.bfs_edges(self.graph, node, depth_limit=max_depth)} def select_childrens_parents(self, selected: Set[UniqueId]) -> Set[UniqueId]: diff --git a/core/dbt/graph/selector.py b/core/dbt/graph/selector.py index 8f9561c6519..fdae6327d0e 100644 --- a/core/dbt/graph/selector.py +++ b/core/dbt/graph/selector.py @@ -9,8 +9,8 @@ from dbt.events.types import SelectorReportInvalidSelector, 
NoNodesForSelectionCriteria from dbt.node_types import NodeType from dbt.exceptions import ( - InternalException, - InvalidSelectorException, + DbtInternalError, + InvalidSelectorError, ) from dbt.contracts.graph.nodes import GraphMemberNode from dbt.contracts.graph.manifest import Manifest @@ -78,7 +78,7 @@ def get_nodes_from_criteria( nodes = self.graph.nodes() try: collected = self.select_included(nodes, spec) - except InvalidSelectorException: + except InvalidSelectorError: valid_selectors = ", ".join(self.SELECTOR_METHODS) fire_event( SelectorReportInvalidSelector( @@ -183,7 +183,7 @@ def _is_match(self, unique_id: UniqueId) -> bool: elif unique_id in self.manifest.metrics: node = self.manifest.metrics[unique_id] else: - raise InternalException(f"Node {unique_id} not found in the manifest!") + raise DbtInternalError(f"Node {unique_id} not found in the manifest!") return self.node_is_match(node) def filter_selection(self, selected: Set[UniqueId]) -> Set[UniqueId]: diff --git a/core/dbt/graph/selector_methods.py b/core/dbt/graph/selector_methods.py index c77625649bc..2c73d480dae 100644 --- a/core/dbt/graph/selector_methods.py +++ b/core/dbt/graph/selector_methods.py @@ -19,8 +19,8 @@ ) from dbt.contracts.state import PreviousState from dbt.exceptions import ( - InternalException, - RuntimeException, + DbtInternalError, + DbtRuntimeError, ) from dbt.node_types import NodeType @@ -207,7 +207,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu "`${{source_name}}.${{target_name}}`, or " "`${{package_name}}.${{source_name}}.${{target_name}}" ).format(selector) - raise RuntimeException(msg) + raise DbtRuntimeError(msg) for node, real_node in self.source_nodes(included_nodes): if target_package not in (real_node.package_name, SELECTOR_GLOB): @@ -234,7 +234,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu "the form ${{exposure_name}} or " "${{exposure_package.exposure_name}}" ).format(selector) - 
raise RuntimeException(msg) + raise DbtRuntimeError(msg) for node, real_node in self.exposure_nodes(included_nodes): if target_package not in (real_node.package_name, SELECTOR_GLOB): @@ -259,7 +259,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu "the form ${{metric_name}} or " "${{metric_package.metric_name}}" ).format(selector) - raise RuntimeException(msg) + raise DbtRuntimeError(msg) for node, real_node in self.metric_nodes(included_nodes): if target_package not in (real_node.package_name, SELECTOR_GLOB): @@ -367,7 +367,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu try: resource_type = NodeType(selector) except ValueError as exc: - raise RuntimeException(f'Invalid resource_type selector "{selector}"') from exc + raise DbtRuntimeError(f'Invalid resource_type selector "{selector}"') from exc for node, real_node in self.parsed_nodes(included_nodes): if real_node.resource_type == resource_type: yield node @@ -390,7 +390,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu elif selector in ("singular", "data"): search_type = SingularTestNode else: - raise RuntimeException( + raise DbtRuntimeError( f'Invalid test type selector {selector}: expected "generic" or ' '"singular"' ) @@ -407,7 +407,7 @@ def __init__(self, *args, **kwargs): def _macros_modified(self) -> List[str]: # we checked in the caller! 
if self.previous_state is None or self.previous_state.manifest is None: - raise InternalException("No comparison manifest in _macros_modified") + raise DbtInternalError("No comparison manifest in _macros_modified") old_macros = self.previous_state.manifest.macros new_macros = self.manifest.macros @@ -496,7 +496,7 @@ def check_new(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool: def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: if self.previous_state is None or self.previous_state.manifest is None: - raise RuntimeException("Got a state selector method, but no comparison manifest") + raise DbtRuntimeError("Got a state selector method, but no comparison manifest") state_checks = { # it's new if there is no old version @@ -514,7 +514,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu if selector in state_checks: checker = state_checks[selector] else: - raise RuntimeException( + raise DbtRuntimeError( f'Got an invalid selector "{selector}", expected one of ' f'"{list(state_checks)}"' ) @@ -538,7 +538,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu class ResultSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: if self.previous_state is None or self.previous_state.results is None: - raise InternalException("No comparison run_results") + raise DbtInternalError("No comparison run_results") matches = set( result.unique_id for result in self.previous_state.results if result.status == selector ) @@ -551,13 +551,11 @@ class SourceStatusSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: if self.previous_state is None or self.previous_state.sources is None: - raise InternalException( + raise DbtInternalError( "No previous state comparison freshness results in sources.json" ) elif self.previous_state.sources_current is None: 
- raise InternalException( - "No current state comparison freshness results in sources.json" - ) + raise DbtInternalError("No current state comparison freshness results in sources.json") current_state_sources = { result.unique_id: getattr(result, "max_loaded_at", 0) @@ -633,7 +631,7 @@ def __init__( def get_method(self, method: MethodName, method_arguments: List[str]) -> SelectorMethod: if method not in self.SELECTOR_METHODS: - raise InternalException( + raise DbtInternalError( f'Method name "{method}" is a valid node selection ' f"method name, but it is not handled" ) diff --git a/core/dbt/graph/selector_spec.py b/core/dbt/graph/selector_spec.py index 5b8e4560d5e..af7ae014163 100644 --- a/core/dbt/graph/selector_spec.py +++ b/core/dbt/graph/selector_spec.py @@ -7,7 +7,7 @@ from typing import Set, Iterator, List, Optional, Dict, Union, Any, Iterable, Tuple from .graph import UniqueId from .selector_methods import MethodName -from dbt.exceptions import RuntimeException, InvalidSelectorException +from dbt.exceptions import DbtRuntimeError, InvalidSelectorError RAW_SELECTOR_PATTERN = re.compile( @@ -47,7 +47,7 @@ def _match_to_int(match: Dict[str, str], key: str) -> Optional[int]: try: return int(raw) except ValueError as exc: - raise RuntimeException(f"Invalid node spec - could not handle parent depth {raw}") from exc + raise DbtRuntimeError(f"Invalid node spec - could not handle parent depth {raw}") from exc SelectionSpec = Union[ @@ -73,7 +73,7 @@ class SelectionCriteria: def __post_init__(self): if self.children and self.childrens_parents: - raise RuntimeException( + raise DbtRuntimeError( f'Invalid node spec {self.raw} - "@" prefix and "+" suffix ' "are incompatible" ) @@ -96,9 +96,7 @@ def parse_method(cls, groupdict: Dict[str, Any]) -> Tuple[MethodName, List[str]] try: method_name = MethodName(method_parts[0]) except ValueError as exc: - raise InvalidSelectorException( - f"'{method_parts[0]}' is not a valid method name" - ) from exc + raise 
InvalidSelectorError(f"'{method_parts[0]}' is not a valid method name") from exc method_arguments: List[str] = method_parts[1:] @@ -112,7 +110,7 @@ def selection_criteria_from_dict( indirect_selection: IndirectSelection = IndirectSelection.Eager, ) -> "SelectionCriteria": if "value" not in dct: - raise RuntimeException(f'Invalid node spec "{raw}" - no search value!') + raise DbtRuntimeError(f'Invalid node spec "{raw}" - no search value!') method_name, method_arguments = cls.parse_method(dct) parents_depth = _match_to_int(dct, "parents_depth") @@ -163,7 +161,7 @@ def from_single_spec( result = RAW_SELECTOR_PATTERN.match(raw) if result is None: # bad spec! - raise RuntimeException(f'Invalid selector spec "{raw}"') + raise DbtRuntimeError(f'Invalid selector spec "{raw}"') return cls.selection_criteria_from_dict( raw, result.groupdict(), indirect_selection=indirect_selection diff --git a/core/dbt/internal_deprecations.py b/core/dbt/internal_deprecations.py index e6154329ca7..fbc435026b6 100644 --- a/core/dbt/internal_deprecations.py +++ b/core/dbt/internal_deprecations.py @@ -2,18 +2,18 @@ from typing import Optional from dbt.events.functions import warn_or_error -from dbt.events.types import FunctionDeprecated +from dbt.events.types import InternalDeprecation def deprecated(suggested_action: str, version: str, reason: Optional[str]): def inner(func): @functools.wraps(func) def wrapped(*args, **kwargs): - function_name = func.__name__ + name = func.__name__ warn_or_error( - FunctionDeprecated( - function_name=function_name, + InternalDeprecation( + name=name, suggested_action=suggested_action, version=version, reason=reason, diff --git a/core/dbt/lib.py b/core/dbt/lib.py index f4b9ab5be0e..2726f101b00 100644 --- a/core/dbt/lib.py +++ b/core/dbt/lib.py @@ -4,7 +4,7 @@ from dbt.contracts.results import RunningStatus, collect_timing_info from dbt.events.functions import fire_event from dbt.events.types import NodeCompiling, NodeExecuting -from dbt.exceptions import 
RuntimeException +from dbt.exceptions import DbtRuntimeError from dbt import flags from dbt.task.sql import SqlCompileRunner from dataclasses import dataclass @@ -125,7 +125,7 @@ def get_task_by_type(type): elif type == "run_operation": return RunOperationTask - raise RuntimeException("not a valid task") + raise DbtRuntimeError("not a valid task") def create_task(type, args, manifest, config): diff --git a/core/dbt/main.py b/core/dbt/main.py index 1bdd59fef1f..1d2dad9b259 100644 --- a/core/dbt/main.py +++ b/core/dbt/main.py @@ -46,9 +46,9 @@ from dbt.config.profile import read_user_config from dbt.exceptions import ( Exception as dbtException, - InternalException, - NotImplementedException, - FailedToConnectException, + DbtInternalError, + NotImplementedError, + FailedToConnectError, ) @@ -92,7 +92,7 @@ def add_optional_argument_inverse( ): mutex_group = self.add_mutually_exclusive_group() if not name.startswith("--"): - raise InternalException( + raise DbtInternalError( 'cannot handle optional argument without "--" prefix: ' f'got "{name}"' ) if dest is None: @@ -207,7 +207,7 @@ def track_run(task): try: yield dbt.tracking.track_invocation_end(config=task.config, args=task.args, result_type="ok") - except (NotImplementedException, FailedToConnectException) as e: + except (NotImplementedError, FailedToConnectError) as e: fire_event(MainEncounteredError(exc=str(e))) dbt.tracking.track_invocation_end(config=task.config, args=task.args, result_type="error") except Exception: @@ -220,7 +220,7 @@ def track_run(task): def run_from_args(parsed): log_cache_events(getattr(parsed, "log_cache_events", False)) - # this will convert DbtConfigErrors into RuntimeExceptions + # this will convert DbtConfigErrors into DbtRuntimeError # task could be any one of the task objects task = parsed.cls.from_args(args=parsed) diff --git a/core/dbt/parser/base.py b/core/dbt/parser/base.py index 9c245214d83..1f01aff36f1 100644 --- a/core/dbt/parser/base.py +++ b/core/dbt/parser/base.py @@ 
-18,7 +18,7 @@ from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ManifestNode, BaseNode from dbt.contracts.graph.unparsed import UnparsedNode, Docs -from dbt.exceptions import InternalException, InvalidConfigUpdate, InvalidDictParse +from dbt.exceptions import DbtInternalError, ConfigUpdateError, DictParseError from dbt import hooks from dbt.node_types import NodeType, ModelLanguage from dbt.parser.search import FileBlock @@ -76,7 +76,7 @@ def __init__(self, config: RuntimeConfig, manifest: Manifest, component: str) -> root_project_name=config.project_name, ) if macro is None: - raise InternalException(f"No macro with name generate_{component}_name found") + raise DbtInternalError(f"No macro with name generate_{component}_name found") root_context = generate_generate_name_macro_context(macro, config, manifest) self.updater = MacroGenerator(macro, root_context) @@ -224,7 +224,7 @@ def _create_parsetime_node( original_file_path=block.path.original_file_path, raw_code=block.contents, ) - raise InvalidDictParse(exc, node=node) + raise DictParseError(exc, node=node) def _context_for(self, parsed_node: IntermediateNode, config: ContextConfig) -> Dict[str, Any]: return generate_parser_model_context(parsed_node, self.root_project, self.manifest, config) @@ -345,7 +345,7 @@ def initial_config(self, fqn: List[str]) -> ContextConfig: self.project.project_name, ) else: - raise InternalException( + raise DbtInternalError( f"Got an unexpected project version={config_version}, expected 2" ) @@ -363,7 +363,7 @@ def render_update(self, node: IntermediateNode, config: ContextConfig) -> None: self.update_parsed_node_config(node, config, context=context) except ValidationError as exc: # we got a ValidationError - probably bad types in config() - raise InvalidConfigUpdate(exc, node=node) from exc + raise ConfigUpdateError(exc, node=node) from exc def add_result_node(self, block: FileBlock, node: ManifestNode): if node.config.enabled: diff --git 
a/core/dbt/parser/generic_test.py b/core/dbt/parser/generic_test.py index 822dd5b2d85..ea281e1c993 100644 --- a/core/dbt/parser/generic_test.py +++ b/core/dbt/parser/generic_test.py @@ -2,7 +2,7 @@ import jinja2 -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from dbt.clients import jinja from dbt.contracts.graph.nodes import GenericTestNode, Macro from dbt.contracts.graph.unparsed import UnparsedMacro @@ -51,14 +51,14 @@ def parse_unparsed_generic_test(self, base_node: UnparsedMacro) -> Iterable[Macr ) if isinstance(t, jinja.BlockTag) ] - except ParsingException as exc: + except ParsingError as exc: exc.add_node(base_node) raise for block in blocks: try: ast = jinja.parse(block.full_block) - except ParsingException as e: + except ParsingError as e: e.add_node(base_node) raise @@ -68,7 +68,7 @@ def parse_unparsed_generic_test(self, base_node: UnparsedMacro) -> Iterable[Macr if len(generic_test_nodes) != 1: # things have gone disastrously wrong, we thought we only # parsed one block! 
- raise ParsingException( + raise ParsingError( f"Found multiple generic tests in {block.full_block}, expected 1", node=base_node, ) diff --git a/core/dbt/parser/generic_test_builders.py b/core/dbt/parser/generic_test_builders.py index af0282c953f..206e9c51438 100644 --- a/core/dbt/parser/generic_test_builders.py +++ b/core/dbt/parser/generic_test_builders.py @@ -22,17 +22,17 @@ UnparsedExposure, ) from dbt.exceptions import ( - CustomMacroPopulatingConfigValues, - SameKeyNested, - TagNotString, - TagsNotListOfStrings, - TestArgIncludesModel, - TestArgsNotDict, - TestDefinitionDictLength, - TestInvalidType, - TestNameNotString, - UnexpectedTestNamePattern, - UndefinedMacroException, + CustomMacroPopulatingConfigValueError, + SameKeyNestedError, + TagNotStringError, + TagsNotListOfStringsError, + TestArgIncludesModelError, + TestArgsNotDictError, + TestDefinitionDictLengthError, + TestTypeError, + TestNameNotStringError, + UnexpectedTestNamePatternError, + UndefinedMacroError, ) from dbt.parser.search import FileBlock @@ -234,7 +234,7 @@ def __init__( test_name, test_args = self.extract_test_args(test, column_name) self.args: Dict[str, Any] = test_args if "model" in self.args: - raise TestArgIncludesModel() + raise TestArgIncludesModelError() self.package_name: str = package_name self.target: Testable = target @@ -242,7 +242,7 @@ def __init__( match = self.TEST_NAME_PATTERN.match(test_name) if match is None: - raise UnexpectedTestNamePattern(test_name) + raise UnexpectedTestNamePatternError(test_name) groups = match.groupdict() self.name: str = groups["test_name"] @@ -259,15 +259,15 @@ def __init__( value = self.args.pop(key, None) # 'modifier' config could be either top level arg or in config if value and "config" in self.args and key in self.args["config"]: - raise SameKeyNested() + raise SameKeyNestedError() if not value and "config" in self.args: value = self.args["config"].pop(key, None) if isinstance(value, str): try: value = get_rendered(value, render_ctx, 
native=True) - except UndefinedMacroException as e: - raise CustomMacroPopulatingConfigValues( + except UndefinedMacroError as e: + raise CustomMacroPopulatingConfigValueError( target_name=self.target.name, column_name=column_name, name=self.name, @@ -310,7 +310,7 @@ def _bad_type(self) -> TypeError: @staticmethod def extract_test_args(test, name=None) -> Tuple[str, Dict[str, Any]]: if not isinstance(test, dict): - raise TestInvalidType(test) + raise TestTypeError(test) # If the test is a dictionary with top-level keys, the test name is "test_name" # and the rest are arguments @@ -324,13 +324,13 @@ def extract_test_args(test, name=None) -> Tuple[str, Dict[str, Any]]: else: test = list(test.items()) if len(test) != 1: - raise TestDefinitionDictLength(test) + raise TestDefinitionDictLengthError(test) test_name, test_args = test[0] if not isinstance(test_args, dict): - raise TestArgsNotDict(test_args) + raise TestArgsNotDictError(test_args) if not isinstance(test_name, str): - raise TestNameNotString(test_name) + raise TestNameNotStringError(test_name) test_args = deepcopy(test_args) if name is not None: test_args["column_name"] = name @@ -421,10 +421,10 @@ def tags(self) -> List[str]: if isinstance(tags, str): tags = [tags] if not isinstance(tags, list): - raise TagsNotListOfStrings(tags) + raise TagsNotListOfStringsError(tags) for tag in tags: if not isinstance(tag, str): - raise TagNotString(tag) + raise TagNotStringError(tag) return tags[:] def macro_name(self) -> str: diff --git a/core/dbt/parser/hooks.py b/core/dbt/parser/hooks.py index d05ea136dc5..d96257a0e71 100644 --- a/core/dbt/parser/hooks.py +++ b/core/dbt/parser/hooks.py @@ -4,7 +4,7 @@ from dbt.context.context_config import ContextConfig from dbt.contracts.files import FilePath from dbt.contracts.graph.nodes import HookNode -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType, RunHookType from dbt.parser.base import SimpleParser 
from dbt.parser.search import FileBlock @@ -46,7 +46,7 @@ def get_hook_defs(self) -> List[str]: elif self.hook_type == RunHookType.End: hooks = self.project.on_run_end else: - raise InternalException( + raise DbtInternalError( 'hook_type must be one of "{}" or "{}" (got {})'.format( RunHookType.Start, RunHookType.End, self.hook_type ) diff --git a/core/dbt/parser/macros.py b/core/dbt/parser/macros.py index 7c5336b8ccf..1a9ee03d57d 100644 --- a/core/dbt/parser/macros.py +++ b/core/dbt/parser/macros.py @@ -6,7 +6,7 @@ from dbt.contracts.graph.unparsed import UnparsedMacro from dbt.contracts.graph.nodes import Macro from dbt.contracts.files import FilePath, SourceFile -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from dbt.events.functions import fire_event from dbt.events.types import MacroFileParse from dbt.node_types import NodeType @@ -56,14 +56,14 @@ def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]: ) if isinstance(t, jinja.BlockTag) ] - except ParsingException as exc: + except ParsingError as exc: exc.add_node(base_node) raise for block in blocks: try: ast = jinja.parse(block.full_block) - except ParsingException as e: + except ParsingError as e: e.add_node(base_node) raise @@ -72,7 +72,7 @@ def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]: if len(macro_nodes) != 1: # things have gone disastrously wrong, we thought we only # parsed one block! 
- raise ParsingException( + raise ParsingError( f"Found multiple macros in {block.full_block}, expected 1", node=base_node ) diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py index 988c4539c9e..fbfada4fc2a 100644 --- a/core/dbt/parser/manifest.py +++ b/core/dbt/parser/manifest.py @@ -21,8 +21,8 @@ from dbt.helper_types import PathSet from dbt.events.functions import fire_event, get_invocation_id, warn_or_error from dbt.events.types import ( - PartialParsingExceptionProcessingFile, - PartialParsingException, + PartialParsingErrorProcessingFile, + PartialParsingError, PartialParsingSkipParsing, UnableToPartialParse, PartialParsingNotEnabled, @@ -61,7 +61,7 @@ ResultNode, ) from dbt.contracts.util import Writable -from dbt.exceptions import TargetNotFound, AmbiguousAlias +from dbt.exceptions import TargetNotFoundError, AmbiguousAliasError from dbt.parser.base import Parser from dbt.parser.analysis import AnalysisParser from dbt.parser.generic_test import GenericTestParser @@ -278,9 +278,9 @@ def load(self): source_file = self.manifest.files[file_id] if source_file: parse_file_type = source_file.parse_file_type - fire_event(PartialParsingExceptionProcessingFile(file=file_id)) + fire_event(PartialParsingErrorProcessingFile(file=file_id)) exc_info["parse_file_type"] = parse_file_type - fire_event(PartialParsingException(exc_info=exc_info)) + fire_event(PartialParsingError(exc_info=exc_info)) # Send event if dbt.tracking.active_user is not None: @@ -989,7 +989,7 @@ def invalid_target_fail_unless_test( ) ) else: - raise TargetNotFound( + raise TargetNotFoundError( node=node, target_name=target_name, target_kind=target_kind, @@ -1017,11 +1017,13 @@ def _check_resource_uniqueness( existing_node = names_resources.get(name) if existing_node is not None: - raise dbt.exceptions.DuplicateResourceName(existing_node, node) + raise dbt.exceptions.DuplicateResourceNameError(existing_node, node) existing_alias = alias_resources.get(full_node_name) if 
existing_alias is not None: - raise AmbiguousAlias(node_1=existing_alias, node_2=node, duped_name=full_node_name) + raise AmbiguousAliasError( + node_1=existing_alias, node_2=node, duped_name=full_node_name + ) names_resources[name] = node alias_resources[full_node_name] = node @@ -1113,7 +1115,7 @@ def _process_refs_for_exposure(manifest: Manifest, current_project: str, exposur elif len(ref) == 2: target_model_package, target_model_name = ref else: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Refs should always be 1 or 2 arguments - got {len(ref)}" ) @@ -1157,7 +1159,7 @@ def _process_refs_for_metric(manifest: Manifest, current_project: str, metric: M elif len(ref) == 2: target_model_package, target_model_name = ref else: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Refs should always be 1 or 2 arguments - got {len(ref)}" ) @@ -1208,7 +1210,7 @@ def _process_metrics_for_node( elif len(metric) == 2: target_metric_package, target_metric_name = metric else: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Metric references should always be 1 or 2 arguments - got {len(metric)}" ) @@ -1253,7 +1255,7 @@ def _process_refs_for_node(manifest: Manifest, current_project: str, node: Manif elif len(ref) == 2: target_model_package, target_model_name = ref else: - raise dbt.exceptions.InternalException( + raise dbt.exceptions.DbtInternalError( f"Refs should always be 1 or 2 arguments - got {len(ref)}" ) diff --git a/core/dbt/parser/models.py b/core/dbt/parser/models.py index 39bb18be714..597200abba5 100644 --- a/core/dbt/parser/models.py +++ b/core/dbt/parser/models.py @@ -30,11 +30,11 @@ import ast from dbt.dataclass_schema import ValidationError from dbt.exceptions import ( - InvalidModelConfig, - ParsingException, - PythonLiteralEval, - PythonParsingException, - UndefinedMacroException, + ModelConfigError, + ParsingError, + PythonLiteralEvalError, + 
PythonParsingError, + UndefinedMacroError, ) dbt_function_key_words = set(["ref", "source", "config", "get"]) @@ -66,13 +66,13 @@ def visit_FunctionDef(self, node: ast.FunctionDef) -> None: def check_error(self, node): if self.num_model_def != 1: - raise ParsingException( + raise ParsingError( f"dbt allows exactly one model defined per python file, found {self.num_model_def}", node=node, ) if len(self.dbt_errors) != 0: - raise ParsingException("\n".join(self.dbt_errors), node=node) + raise ParsingError("\n".join(self.dbt_errors), node=node) class PythonParseVisitor(ast.NodeVisitor): @@ -96,7 +96,7 @@ def _safe_eval(self, node): try: return ast.literal_eval(node) except (SyntaxError, ValueError, TypeError, MemoryError, RecursionError) as exc: - raise PythonLiteralEval(exc, node=self.dbt_node) from exc + raise PythonLiteralEvalError(exc, node=self.dbt_node) from exc def _get_call_literals(self, node): # List of literals @@ -176,9 +176,9 @@ def verify_python_model_code(node): node, ) if rendered_python != node.raw_code: - raise ParsingException("") - except (UndefinedMacroException, ParsingException): - raise ParsingException("No jinja in python model code is allowed", node=node) + raise ParsingError("") + except (UndefinedMacroError, ParsingError): + raise ParsingError("No jinja in python model code is allowed", node=node) class ModelParser(SimpleSQLParser[ModelNode]): @@ -202,7 +202,7 @@ def parse_python_model(self, node, config, context): try: tree = ast.parse(node.raw_code, filename=node.original_file_path) except SyntaxError as exc: - raise PythonParsingException(exc, node=node) from exc + raise PythonParsingError(exc, node=node) from exc # Only parse if AST tree has instructions in body if tree.body: @@ -219,12 +219,12 @@ def parse_python_model(self, node, config, context): if func == "get": num_args = len(args) if num_args == 0: - raise ParsingException( + raise ParsingError( "dbt.config.get() requires at least one argument", node=node, ) if num_args > 2: - 
raise ParsingException( + raise ParsingError( f"dbt.config.get() takes at most 2 arguments ({num_args} given)", node=node, ) @@ -255,7 +255,7 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: except ValidationError as exc: # we got a ValidationError - probably bad types in config() - raise InvalidModelConfig(exc, node=node) from exc + raise ModelConfigError(exc, node=node) from exc return elif not flags.STATIC_PARSER: diff --git a/core/dbt/parser/read_files.py b/core/dbt/parser/read_files.py index ccb6b1b0790..531e5f39560 100644 --- a/core/dbt/parser/read_files.py +++ b/core/dbt/parser/read_files.py @@ -12,7 +12,7 @@ ) from dbt.parser.schemas import yaml_from_file, schema_file_keys, check_format_version -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from dbt.parser.search import filesystem_search from typing import Optional @@ -75,21 +75,21 @@ def validate_yaml(file_path, dct): f"The schema file at {file_path} is " f"invalid because the value of '{key}' is not a list" ) - raise ParsingException(msg) + raise ParsingError(msg) for element in dct[key]: if not isinstance(element, dict): msg = ( f"The schema file at {file_path} is " f"invalid because a list element for '{key}' is not a dictionary" ) - raise ParsingException(msg) + raise ParsingError(msg) if "name" not in element: msg = ( f"The schema file at {file_path} is " f"invalid because a list element for '{key}' does not have a " "name attribute." 
) - raise ParsingException(msg) + raise ParsingError(msg) # Special processing for big seed files diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py index 32bfbb559a1..482eb5b6e35 100644 --- a/core/dbt/parser/schemas.py +++ b/core/dbt/parser/schemas.py @@ -50,22 +50,22 @@ UnparsedSourceDefinition, ) from dbt.exceptions import ( - CompilationException, - DuplicateMacroPatchName, - DuplicatePatchPath, - DuplicateSourcePatchName, - JSONValidationException, - InternalException, - InvalidSchemaConfig, - InvalidTestConfig, - ParsingException, - PropertyYMLInvalidTag, - PropertyYMLMissingVersion, - PropertyYMLVersionNotInt, - ValidationException, - YamlLoadFailure, - YamlParseDictFailure, - YamlParseListFailure, + CompilationError, + DuplicateMacroPatchNameError, + DuplicatePatchPathError, + DuplicateSourcePatchNameError, + JSONValidationError, + DbtInternalError, + SchemaConfigError, + TestConfigError, + ParsingError, + PropertyYMLInvalidTagError, + PropertyYMLMissingVersionError, + PropertyYMLVersionNotIntError, + DbtValidationError, + YamlLoadError, + YamlParseDictError, + YamlParseListError, ) from dbt.events.functions import warn_or_error from dbt.events.types import WrongResourceSchemaFile, NoNodeForYamlKey, MacroNotFoundForPatch @@ -102,8 +102,10 @@ def yaml_from_file(source_file: SchemaSourceFile) -> Dict[str, Any]: try: # source_file.contents can sometimes be None return load_yaml_text(source_file.contents or "", source_file.path) - except ValidationException as e: - raise YamlLoadFailure(source_file.project_name, source_file.path.relative_path, e) + except DbtValidationError as e: + raise YamlLoadError( + project_name=source_file.project_name, path=source_file.path.relative_path, exc=e + ) class ParserRef: @@ -255,7 +257,7 @@ def get_hashable_md(data: Union[str, int, float, List, Dict]) -> Union[str, List original_file_path=target.original_file_path, raw_code=raw_code, ) - raise InvalidTestConfig(exc, node) + raise TestConfigError(exc, node) # 
lots of time spent in this method def _parse_generic_test( @@ -278,20 +280,20 @@ def _parse_generic_test( self.store_env_vars(target, schema_file_id, self.schema_yaml_vars.env_vars) self.schema_yaml_vars.env_vars = {} - except ParsingException as exc: + except ParsingError as exc: context = _trimmed(str(target)) msg = "Invalid test config given in {}:\n\t{}\n\t@: {}".format( target.original_file_path, exc.msg, context ) - raise ParsingException(msg) from exc + raise ParsingError(msg) from exc - except CompilationException as exc: + except CompilationError as exc: context = _trimmed(str(target)) msg = ( "Invalid generic test configuration given in " f"{target.original_file_path}: \n{exc.msg}\n\t@: {context}" ) - raise CompilationException(msg) from exc + raise CompilationError(msg) from exc original_name = os.path.basename(target.original_file_path) compiled_path = get_pseudo_test_path(builder.compiled_name, original_name) @@ -397,7 +399,7 @@ def render_test_update(self, node, config, builder, schema_file_id): # env_vars should have been updated in the context env_var method except ValidationError as exc: # we got a ValidationError - probably bad types in config() - raise InvalidSchemaConfig(exc, node=node) from exc + raise SchemaConfigError(exc, node=node) from exc def parse_node(self, block: GenericTestBlock) -> GenericTestNode: """In schema parsing, we rewrite most of the part of parse_node that @@ -537,16 +539,16 @@ def parse_file(self, block: FileBlock, dct: Dict = None) -> None: def check_format_version(file_path, yaml_dct) -> None: if "version" not in yaml_dct: - raise PropertyYMLMissingVersion(file_path) + raise PropertyYMLMissingVersionError(file_path) version = yaml_dct["version"] # if it's not an integer, the version is malformed, or not # set. Either way, only 'version: 2' is supported. 
if not isinstance(version, int): - raise PropertyYMLVersionNotInt(file_path, version) + raise PropertyYMLVersionNotIntError(file_path, version) if version != 2: - raise PropertyYMLInvalidTag(file_path, version) + raise PropertyYMLInvalidTagError(file_path, version) Parsed = TypeVar("Parsed", UnpatchedSourceDefinition, ParsedNodePatch, ParsedMacroPatch) @@ -594,7 +596,7 @@ def root_project(self): def get_key_dicts(self) -> Iterable[Dict[str, Any]]: data = self.yaml.data.get(self.key, []) if not isinstance(data, list): - raise ParsingException( + raise ParsingError( "{} must be a list, got {} instead: ({})".format( self.key, type(data), _trimmed(str(data)) ) @@ -607,12 +609,10 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]: # check that entry is a dict and that all dict values # are strings if coerce_dict_str(entry) is None: - raise YamlParseListFailure( - path, self.key, data, "expected a dict with string keys" - ) + raise YamlParseListError(path, self.key, data, "expected a dict with string keys") if "name" not in entry: - raise ParsingException("Entry did not contain a name") + raise ParsingError("Entry did not contain a name") # Render the data (except for tests and descriptions). 
# See the SchemaYamlRenderer @@ -631,8 +631,8 @@ def render_entry(self, dct): try: # This does a deep_map which will fail if there are circular references dct = self.renderer.render_data(dct) - except ParsingException as exc: - raise ParsingException( + except ParsingError as exc: + raise ParsingError( f"Failed to render {self.yaml.file.path.original_file_path} from " f"project {self.project.project_name}: {exc}" ) from exc @@ -655,8 +655,8 @@ def _target_from_dict(self, cls: Type[T], data: Dict[str, Any]) -> T: try: cls.validate(data) return cls.from_dict(data) - except (ValidationError, JSONValidationException) as exc: - raise YamlParseDictFailure(path, self.key, data, exc) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(path, self.key, data, exc) # The other parse method returns TestBlocks. This one doesn't. # This takes the yaml dictionaries in 'sources' keys and uses them @@ -677,7 +677,7 @@ def parse(self) -> List[TestBlock]: # source patches must be unique key = (patch.overrides, patch.name) if key in self.manifest.source_patches: - raise DuplicateSourcePatchName(patch, self.manifest.source_patches[key]) + raise DuplicateSourcePatchNameError(patch, self.manifest.source_patches[key]) self.manifest.source_patches[key] = patch source_file.source_patches.append(key) else: @@ -780,8 +780,8 @@ def get_unparsed_target(self) -> Iterable[NonSourceTarget]: self.normalize_meta_attribute(data, path) self.normalize_docs_attribute(data, path) node = self._target_type().from_dict(data) - except (ValidationError, JSONValidationException) as exc: - raise YamlParseDictFailure(path, self.key, data, exc) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(path, self.key, data, exc) else: yield node @@ -790,7 +790,7 @@ def get_unparsed_target(self) -> Iterable[NonSourceTarget]: def normalize_attribute(self, data, path, attribute): if attribute in data: if "config" in data and attribute in data["config"]: - raise 
ParsingException( + raise ParsingError( f""" In {path}: found {attribute} dictionary in 'config' dictionary and as top-level key. Remove the top-level key and define it under 'config' dictionary only. @@ -858,7 +858,7 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: elif patch.yaml_key == "analyses": unique_id = self.manifest.analysis_lookup.get_unique_id(patch.name, None) else: - raise InternalException( + raise DbtInternalError( f"Unexpected yaml_key {patch.yaml_key} for patch in " f"file {source_file.path.original_file_path}" ) @@ -877,7 +877,7 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: "unique id cannot be enabled in the schema file. They must be enabled " "in `dbt_project.yml` or in the sql files." ) - raise ParsingException(msg) + raise ParsingError(msg) # all nodes in the disabled dict have the same unique_id so just grab the first one # to append with the uniqe id @@ -905,7 +905,7 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: if node: if node.patch_path: package_name, existing_file_path = node.patch_path.split("://") - raise DuplicatePatchPath(patch, existing_file_path) + raise DuplicatePatchPathError(patch, existing_file_path) source_file.append_patch(patch.yaml_key, node.unique_id) # re-calculate the node config with the patch config. 
Always do this @@ -961,7 +961,7 @@ def parse_patch(self, block: TargetBlock[UnparsedMacroUpdate], refs: ParserRef) return if macro.patch_path: package_name, existing_file_path = macro.patch_path.split("://") - raise DuplicateMacroPatchName(patch, existing_file_path) + raise DuplicateMacroPatchNameError(patch, existing_file_path) source_file.macro_patches[patch.name] = unique_id macro.patch(patch) @@ -997,7 +997,7 @@ def parse_exposure(self, unparsed: UnparsedExposure): ) if not isinstance(config, ExposureConfig): - raise InternalException( + raise DbtInternalError( f"Calculated a {type(config)} for an exposure, but expected an ExposureConfig" ) @@ -1063,8 +1063,8 @@ def parse(self): try: UnparsedExposure.validate(data) unparsed = UnparsedExposure.from_dict(data) - except (ValidationError, JSONValidationException) as exc: - raise YamlParseDictFailure(self.yaml.path, self.key, data, exc) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(self.yaml.path, self.key, data, exc) self.parse_exposure(unparsed) @@ -1100,7 +1100,7 @@ def parse_metric(self, unparsed: UnparsedMetric): ) if not isinstance(config, MetricConfig): - raise InternalException( + raise DbtInternalError( f"Calculated a {type(config)} for a metric, but expected a MetricConfig" ) @@ -1180,6 +1180,6 @@ def parse(self): UnparsedMetric.validate(data) unparsed = UnparsedMetric.from_dict(data) - except (ValidationError, JSONValidationException) as exc: - raise YamlParseDictFailure(self.yaml.path, self.key, data, exc) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(self.yaml.path, self.key, data, exc) self.parse_metric(unparsed) diff --git a/core/dbt/parser/search.py b/core/dbt/parser/search.py index f8ccc974be4..75e7fa6636c 100644 --- a/core/dbt/parser/search.py +++ b/core/dbt/parser/search.py @@ -7,7 +7,7 @@ from dbt.clients.system import find_matching from dbt.config import Project from dbt.contracts.files import FilePath, AnySourceFile -from 
dbt.exceptions import ParsingException, InternalException +from dbt.exceptions import ParsingError, DbtInternalError # What's the point of wrapping a SourceFile with this class? @@ -73,7 +73,7 @@ def filesystem_search( file_path_list = [] for result in find_matching(root, relative_dirs, ext, ignore_spec): if "searched_path" not in result or "relative_path" not in result: - raise InternalException("Invalid result from find_matching: {}".format(result)) + raise DbtInternalError("Invalid result from find_matching: {}".format(result)) file_match = FilePath( searched_path=result["searched_path"], relative_path=result["relative_path"], @@ -113,7 +113,7 @@ def extract_blocks(self, source_file: FileBlock) -> Iterable[BlockTag]: assert isinstance(block, BlockTag) yield block - except ParsingException as exc: + except ParsingError as exc: if exc.node is None: exc.add_node(source_file) raise diff --git a/core/dbt/parser/snapshots.py b/core/dbt/parser/snapshots.py index dffc7d90641..72aec4ee976 100644 --- a/core/dbt/parser/snapshots.py +++ b/core/dbt/parser/snapshots.py @@ -4,7 +4,7 @@ from dbt.dataclass_schema import ValidationError from dbt.contracts.graph.nodes import IntermediateSnapshotNode, SnapshotNode -from dbt.exceptions import InvalidSnapshopConfig +from dbt.exceptions import SnapshopConfigError from dbt.node_types import NodeType from dbt.parser.base import SQLParser from dbt.parser.search import BlockContents, BlockSearcher, FileBlock @@ -68,7 +68,7 @@ def transform(self, node: IntermediateSnapshotNode) -> SnapshotNode: self.set_snapshot_attributes(parsed_node) return parsed_node except ValidationError as exc: - raise InvalidSnapshopConfig(exc, node) + raise SnapshopConfigError(exc, node) def parse_file(self, file_block: FileBlock) -> None: blocks = BlockSearcher( diff --git a/core/dbt/parser/sources.py b/core/dbt/parser/sources.py index cc9acea98c3..098ebde09c6 100644 --- a/core/dbt/parser/sources.py +++ b/core/dbt/parser/sources.py @@ -26,7 +26,7 @@ ) from 
dbt.events.functions import warn_or_error from dbt.events.types import UnusedTables -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.parser.schemas import SchemaParser, ParserRef @@ -150,7 +150,7 @@ def parse_source(self, target: UnpatchedSourceDefinition) -> SourceDefinition: ) if not isinstance(config, SourceConfig): - raise InternalException( + raise DbtInternalError( f"Calculated a {type(config)} for a source, but expected a SourceConfig" ) diff --git a/core/dbt/parser/sql.py b/core/dbt/parser/sql.py index 82d09c12d6b..98e28aadc19 100644 --- a/core/dbt/parser/sql.py +++ b/core/dbt/parser/sql.py @@ -5,7 +5,7 @@ from dbt.contracts.graph.manifest import SourceFile from dbt.contracts.graph.nodes import SqlNode, Macro from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.parser.base import SimpleSQLParser from dbt.parser.macros import MacroParser @@ -35,7 +35,7 @@ def resource_type(self) -> NodeType: def get_compiled_path(block: FileBlock): # we do it this way to make mypy happy if not isinstance(block, SqlBlock): - raise InternalException( + raise DbtInternalError( "While parsing SQL operation, got an actual file block instead of " "an SQL block: {}".format(block) ) diff --git a/core/dbt/semver.py b/core/dbt/semver.py index 7f8913c3600..24f00b333a1 100644 --- a/core/dbt/semver.py +++ b/core/dbt/semver.py @@ -5,7 +5,7 @@ from packaging import version as packaging_version -from dbt.exceptions import VersionsNotCompatibleException +from dbt.exceptions import VersionsNotCompatibleError import dbt.utils from dbt.dataclass_schema import dbtClassMixin, StrEnum @@ -94,7 +94,7 @@ def from_version_string(cls, version_string): match = _VERSION_REGEX.match(version_string) if not match: - raise dbt.exceptions.SemverException( + raise 
dbt.exceptions.SemverError( f'"{version_string}" is not a valid semantic version.' ) @@ -222,7 +222,7 @@ def _try_combine_exact(self, a, b): if a.compare(b) == 0: return a else: - raise VersionsNotCompatibleException() + raise VersionsNotCompatibleError() def _try_combine_lower_bound_with_exact(self, lower, exact): comparison = lower.compare(exact) @@ -230,7 +230,7 @@ def _try_combine_lower_bound_with_exact(self, lower, exact): if comparison < 0 or (comparison == 0 and lower.matcher == Matchers.GREATER_THAN_OR_EQUAL): return exact - raise VersionsNotCompatibleException() + raise VersionsNotCompatibleError() def _try_combine_lower_bound(self, a, b): if b.is_unbounded: @@ -258,7 +258,7 @@ def _try_combine_upper_bound_with_exact(self, upper, exact): if comparison > 0 or (comparison == 0 and upper.matcher == Matchers.LESS_THAN_OR_EQUAL): return exact - raise VersionsNotCompatibleException() + raise VersionsNotCompatibleError() def _try_combine_upper_bound(self, a, b): if b.is_unbounded: @@ -291,7 +291,7 @@ def reduce(self, other): end = self._try_combine_upper_bound(self.end, other.end) if start.compare(end) > 0: - raise VersionsNotCompatibleException() + raise VersionsNotCompatibleError() return VersionRange(start=start, end=end) @@ -379,8 +379,8 @@ def reduce_versions(*args): for version_specifier in version_specifiers: to_return = to_return.reduce(version_specifier.to_range()) - except VersionsNotCompatibleException: - raise VersionsNotCompatibleException( + except VersionsNotCompatibleError: + raise VersionsNotCompatibleError( "Could not find a satisfactory version from options: {}".format([str(a) for a in args]) ) @@ -394,7 +394,7 @@ def versions_compatible(*args): try: reduce_versions(*args) return True - except VersionsNotCompatibleException: + except VersionsNotCompatibleError: return False diff --git a/core/dbt/task/base.py b/core/dbt/task/base.py index b7ababdd067..e13f963cc7b 100644 --- a/core/dbt/task/base.py +++ b/core/dbt/task/base.py @@ -16,10 +16,10 @@ 
RunningStatus, ) from dbt.exceptions import ( - NotImplementedException, - CompilationException, - RuntimeException, - InternalException, + NotImplementedError, + CompilationError, + DbtRuntimeError, + DbtInternalError, ) from dbt.logger import log_manager from dbt.events.functions import fire_event @@ -27,7 +27,7 @@ LogDbtProjectError, LogDbtProfileError, CatchableExceptionOnRun, - InternalExceptionOnRun, + InternalErrorOnRun, GenericExceptionOnRun, NodeConnectionReleaseError, LogDebugStackTrace, @@ -99,17 +99,17 @@ def from_args(cls, args): fire_event(LogDbtProjectError(exc=str(exc))) tracking.track_invalid_invocation(args=args, result_type=exc.result_type) - raise dbt.exceptions.RuntimeException("Could not run dbt") from exc + raise dbt.exceptions.DbtRuntimeError("Could not run dbt") from exc except dbt.exceptions.DbtProfileError as exc: all_profile_names = list(read_profiles(flags.PROFILES_DIR).keys()) fire_event(LogDbtProfileError(exc=str(exc), profiles=all_profile_names)) tracking.track_invalid_invocation(args=args, result_type=exc.result_type) - raise dbt.exceptions.RuntimeException("Could not run dbt") from exc + raise dbt.exceptions.DbtRuntimeError("Could not run dbt") from exc return cls(args, config) @abstractmethod def run(self): - raise dbt.exceptions.NotImplementedException("Not Implemented") + raise dbt.exceptions.NotImplementedError("Not Implemented") def interpret_results(self, results): return True @@ -123,7 +123,7 @@ def get_nearest_project_dir(args): if os.path.exists(project_file): return args.project_dir else: - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( "fatal: Invalid --project-dir flag. Not a dbt project. " "Missing dbt_project.yml file" ) @@ -137,7 +137,7 @@ def get_nearest_project_dir(args): return cwd cwd = os.path.dirname(cwd) - raise dbt.exceptions.RuntimeException( + raise dbt.exceptions.DbtRuntimeError( "fatal: Not a dbt project (or any of the parent directories). 
" "Missing dbt_project.yml file" ) @@ -328,7 +328,7 @@ def _handle_catchable_exception(self, e, ctx): return str(e) def _handle_internal_exception(self, e, ctx): - fire_event(InternalExceptionOnRun(build_path=self.node.build_path, exc=str(e))) + fire_event(InternalErrorOnRun(build_path=self.node.build_path, exc=str(e))) return str(e) def _handle_generic_exception(self, e, ctx): @@ -344,10 +344,10 @@ def _handle_generic_exception(self, e, ctx): return str(e) def handle_exception(self, e, ctx): - catchable_errors = (CompilationException, RuntimeException) + catchable_errors = (CompilationError, DbtRuntimeError) if isinstance(e, catchable_errors): error = self._handle_catchable_exception(e, ctx) - elif isinstance(e, InternalException): + elif isinstance(e, DbtInternalError): error = self._handle_internal_exception(e, ctx) else: error = self._handle_generic_exception(e, ctx) @@ -402,16 +402,16 @@ def _safe_release_connection(self): return None def before_execute(self): - raise NotImplementedException() + raise NotImplementedError() def execute(self, compiled_node, manifest): - raise NotImplementedException() + raise NotImplementedError() def run(self, compiled_node, manifest): return self.execute(compiled_node, manifest) def after_execute(self, result): - raise NotImplementedException() + raise NotImplementedError() def _skip_caused_by_ephemeral_failure(self): if self.skip_cause is None or self.skip_cause.node is None: @@ -437,7 +437,7 @@ def on_skip(self): ) print_run_result_error(result=self.skip_cause, newline=False) if self.skip_cause is None: # mypy appeasement - raise InternalException( + raise DbtInternalError( "Skip cause not set but skip was somehow caused by an ephemeral failure" ) # set an error so dbt will exit with an error code diff --git a/core/dbt/task/build.py b/core/dbt/task/build.py index aabc561bd7c..8a5dc39c9b7 100644 --- a/core/dbt/task/build.py +++ b/core/dbt/task/build.py @@ -5,7 +5,7 @@ from dbt.adapters.factory import get_adapter from 
dbt.contracts.results import NodeStatus -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.graph import ResourceTypeSelector from dbt.node_types import NodeType from dbt.task.test import TestSelector @@ -44,7 +44,7 @@ def resource_types(self): def get_node_selector(self) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get node selection") + raise DbtInternalError("manifest and graph must be set to get node selection") resource_types = self.resource_types @@ -66,7 +66,7 @@ def get_runner_type(self, node): def compile_manifest(self): if self.manifest is None: - raise InternalException("compile_manifest called before manifest was loaded") + raise DbtInternalError("compile_manifest called before manifest was loaded") adapter = get_adapter(self.config) compiler = adapter.get_compiler() self.graph = compiler.compile(self.manifest, add_test_edges=True) diff --git a/core/dbt/task/compile.py b/core/dbt/task/compile.py index 740d35d37e9..995063491f6 100644 --- a/core/dbt/task/compile.py +++ b/core/dbt/task/compile.py @@ -6,7 +6,7 @@ from dbt.contracts.graph.manifest import WritableManifest from dbt.contracts.results import RunStatus, RunResult -from dbt.exceptions import InternalException, RuntimeException +from dbt.exceptions import DbtInternalError, DbtRuntimeError from dbt.graph import ResourceTypeSelector from dbt.events.functions import fire_event from dbt.events.types import CompileComplete @@ -43,7 +43,7 @@ def raise_on_first_error(self): def get_node_selector(self) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, @@ -63,12 +63,12 @@ def _get_deferred_manifest(self) 
-> Optional[WritableManifest]: state = self.previous_state if state is None: - raise RuntimeException( + raise DbtRuntimeError( "Received a --defer argument, but no value was provided to --state" ) if state.manifest is None: - raise RuntimeException(f'Could not find manifest in --state path: "{self.args.state}"') + raise DbtRuntimeError(f'Could not find manifest in --state path: "{self.args.state}"') return state.manifest def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): @@ -76,7 +76,7 @@ def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): if deferred_manifest is None: return if self.manifest is None: - raise InternalException( + raise DbtInternalError( "Expected to defer to manifest, but there is no runtime manifest to defer from!" ) self.manifest.merge_from_artifact( diff --git a/core/dbt/task/freshness.py b/core/dbt/task/freshness.py index c4898b779fa..819bc4164a3 100644 --- a/core/dbt/task/freshness.py +++ b/core/dbt/task/freshness.py @@ -15,7 +15,7 @@ SourceFreshnessResult, FreshnessStatus, ) -from dbt.exceptions import RuntimeException, InternalException +from dbt.exceptions import DbtRuntimeError, DbtInternalError from dbt.events.functions import fire_event from dbt.events.types import ( FreshnessCheckComplete, @@ -33,7 +33,7 @@ class FreshnessRunner(BaseRunner): def on_skip(self): - raise RuntimeException("Freshness: nodes cannot be skipped!") + raise DbtRuntimeError("Freshness: nodes cannot be skipped!") def before_execute(self): description = "freshness of {0.source_name}.{0.name}".format(self.node) @@ -100,7 +100,7 @@ def execute(self, compiled_node, manifest): # therefore loaded_at_field should be a str. If this invariant is # broken, raise! if compiled_node.loaded_at_field is None: - raise InternalException( + raise DbtInternalError( "Got to execute for source freshness of a source that has no loaded_at_field!" 
) @@ -132,7 +132,7 @@ def execute(self, compiled_node, manifest): def compile(self, manifest): if self.node.resource_type != NodeType.Source: # should be unreachable... - raise RuntimeException("fresnhess runner: got a non-Source") + raise DbtRuntimeError("freshness runner: got a non-Source") # we don't do anything interesting when we compile a source node return self.node @@ -162,7 +162,7 @@ def raise_on_first_error(self): def get_node_selector(self): if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return FreshnessSelector( graph=self.graph, manifest=self.manifest, diff --git a/core/dbt/task/generate.py b/core/dbt/task/generate.py index 87723a530a1..19fa4c1bde9 100644 --- a/core/dbt/task/generate.py +++ b/core/dbt/task/generate.py @@ -22,7 +22,7 @@ ColumnMetadata, CatalogArtifact, ) -from dbt.exceptions import InternalException, AmbiguousCatalogMatch +from dbt.exceptions import DbtInternalError, AmbiguousCatalogMatchError from dbt.include.global_project import DOCS_INDEX_FILE_PATH from dbt.events.functions import fire_event from dbt.events.types import ( @@ -81,7 +81,7 @@ def get_table(self, data: PrimitiveDict) -> CatalogTable: str(data["table_name"]), ) except KeyError as exc: - raise dbt.exceptions.CompilationException( + raise dbt.exceptions.CompilationError( "Catalog information missing required key {} (got {})".format(exc, data) ) table: CatalogTable @@ -119,7 +119,7 @@ def make_unique_id_map( unique_ids = source_map.get(table.key(), set()) for unique_id in unique_ids: if unique_id in sources: - raise AmbiguousCatalogMatch( + raise AmbiguousCatalogMatchError( unique_id, sources[unique_id].to_dict(omit_none=True), table.to_dict(omit_none=True), @@ -201,7 +201,7 @@ def get_unique_id_mapping( class GenerateTask(CompileTask): def _get_manifest(self) -> Manifest: if self.manifest is
None: - raise InternalException("manifest should not be None in _get_manifest") + raise DbtInternalError("manifest should not be None in _get_manifest") return self.manifest def run(self) -> CatalogArtifact: @@ -232,7 +232,7 @@ def run(self) -> CatalogArtifact: shutil.copytree(asset_path, to_asset_path) if self.manifest is None: - raise InternalException("self.manifest was None in run!") + raise DbtInternalError("self.manifest was None in run!") adapter = get_adapter(self.config) with adapter.connection_named("generate_catalog"): diff --git a/core/dbt/task/init.py b/core/dbt/task/init.py index b1769d2e729..f3a7dd28e75 100644 --- a/core/dbt/task/init.py +++ b/core/dbt/task/init.py @@ -252,7 +252,7 @@ def run(self): try: move_to_nearest_project_dir(self.args) in_project = True - except dbt.exceptions.RuntimeException: + except dbt.exceptions.DbtRuntimeError: in_project = False if in_project: diff --git a/core/dbt/task/list.py b/core/dbt/task/list.py index 49fb07b359a..fa8d3ccd8d2 100644 --- a/core/dbt/task/list.py +++ b/core/dbt/task/list.py @@ -7,7 +7,7 @@ from dbt.node_types import NodeType from dbt.events.functions import warn_or_error from dbt.events.types import NoNodesSelected -from dbt.exceptions import RuntimeException, InternalException +from dbt.exceptions import DbtRuntimeError, DbtInternalError from dbt.logger import log_manager from dbt.events.eventmgr import EventLevel @@ -44,9 +44,9 @@ def __init__(self, args, config): super().__init__(args, config) if self.args.models: if self.args.select: - raise RuntimeException('"models" and "select" are mutually exclusive arguments') + raise DbtRuntimeError('"models" and "select" are mutually exclusive arguments') if self.args.resource_types: - raise RuntimeException( + raise DbtRuntimeError( '"models" and "resource_type" are mutually exclusive ' "arguments" ) @@ -72,7 +72,7 @@ def _iterate_selected_nodes(self): warn_or_error(NoNodesSelected()) return if self.manifest is None: - raise InternalException("manifest 
is None in _iterate_selected_nodes") + raise DbtInternalError("manifest is None in _iterate_selected_nodes") for node in nodes: if node in self.manifest.nodes: yield self.manifest.nodes[node] @@ -83,7 +83,7 @@ def _iterate_selected_nodes(self): elif node in self.manifest.metrics: yield self.manifest.metrics[node] else: - raise RuntimeException( + raise DbtRuntimeError( f'Got an unexpected result from node selection: "{node}"' f"Expected a source or a node!" ) @@ -143,7 +143,7 @@ def run(self): elif output == "path": generator = self.generate_paths else: - raise InternalException("Invalid output {}".format(output)) + raise DbtInternalError("Invalid output {}".format(output)) return self.output_results(generator()) @@ -185,7 +185,7 @@ def defer_to_manifest(self, adapter, selected_uids): def get_node_selector(self): if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") if self.resource_types == [NodeType.Test]: return TestSelector( graph=self.graph, diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py index ff468f4dd41..145225be9d5 100644 --- a/core/dbt/task/run.py +++ b/core/dbt/task/run.py @@ -21,11 +21,11 @@ from dbt.contracts.graph.nodes import HookNode, ResultNode from dbt.contracts.results import NodeStatus, RunResult, RunStatus, RunningStatus, BaseResult from dbt.exceptions import ( - CompilationException, - InternalException, - MissingMaterialization, - RuntimeException, - ValidationException, + CompilationError, + DbtInternalError, + MissingMaterializationError, + DbtRuntimeError, + DbtValidationError, ) from dbt.events.functions import fire_event, get_invocation_id from dbt.events.types import ( @@ -106,7 +106,7 @@ def get_hook(source, index): def track_model_run(index, num_nodes, run_model_result): if tracking.active_user is None: - raise InternalException("cannot track model run 
with no active user") + raise DbtInternalError("cannot track model run with no active user") invocation_id = get_invocation_id() tracking.track_model_run( { @@ -135,14 +135,14 @@ def _validate_materialization_relations_dict(inp: Dict[Any, Any], model) -> List 'Invalid return value from materialization, "relations" ' "not found, got keys: {}".format(list(inp)) ) - raise CompilationException(msg, node=model) from None + raise CompilationError(msg, node=model) from None if not isinstance(relations_value, list): msg = ( 'Invalid return value from materialization, "relations" ' "not a list, got: {}".format(relations_value) ) - raise CompilationException(msg, node=model) from None + raise CompilationError(msg, node=model) from None relations: List[BaseRelation] = [] for relation in relations_value: @@ -151,7 +151,7 @@ def _validate_materialization_relations_dict(inp: Dict[Any, Any], model) -> List "Invalid return value from materialization, " '"relations" contains non-Relation: {}'.format(relation) ) - raise CompilationException(msg, node=model) + raise CompilationError(msg, node=model) assert isinstance(relation, BaseRelation) relations.append(relation) @@ -213,7 +213,7 @@ def after_execute(self, result): def _build_run_model_result(self, model, context): result = context["load_result"]("main") if not result: - raise RuntimeException("main is not being called during running model") + raise DbtRuntimeError("main is not being called during running model") adapter_response = {} if isinstance(result.response, dbtClassMixin): adapter_response = result.response.to_dict(omit_none=True) @@ -234,7 +234,7 @@ def _materialization_relations(self, result: Any, model) -> List[BaseRelation]: 'The materialization ("{}") did not explicitly return a ' "list of relations to add to the cache.".format(str(model.get_materialization())) ) - raise CompilationException(msg, node=model) + raise CompilationError(msg, node=model) if isinstance(result, dict): return 
_validate_materialization_relations_dict(result, model) @@ -243,7 +243,7 @@ def _materialization_relations(self, result: Any, model) -> List[BaseRelation]: "Invalid return value from materialization, expected a dict " 'with key "relations", got: {}'.format(str(result)) ) - raise CompilationException(msg, node=model) + raise CompilationError(msg, node=model) def execute(self, model, manifest): context = generate_runtime_model_context(model, self.config, manifest) @@ -253,12 +253,12 @@ def execute(self, model, manifest): ) if materialization_macro is None: - raise MissingMaterialization( + raise MissingMaterializationError( materialization=model.get_materialization(), adapter_type=self.adapter.type() ) if "config" not in context: - raise InternalException( + raise DbtInternalError( "Invalid materialization context generated, missing config: {}".format(context) ) context_config = context["config"] @@ -267,7 +267,7 @@ def execute(self, model, manifest): model_lang_supported = model.language in materialization_macro.supported_languages if mat_has_supported_langs and not model_lang_supported: str_langs = [str(lang) for lang in materialization_macro.supported_languages] - raise ValidationException( + raise DbtValidationError( f'Materialization "{materialization_macro.name}" only supports languages {str_langs}; ' f'got "{model.language}"' ) @@ -315,7 +315,7 @@ def _hook_keyfunc(self, hook: HookNode) -> Tuple[str, Optional[int]]: def get_hooks_by_type(self, hook_type: RunHookType) -> List[HookNode]: if self.manifest is None: - raise InternalException("self.manifest was None in get_hooks_by_type") + raise DbtInternalError("self.manifest was None in get_hooks_by_type") nodes = self.manifest.nodes.values() # find all hooks defined in the manifest (could be multiple projects) @@ -395,7 +395,7 @@ def safe_run_hooks( ) -> None: try: self.run_hooks(adapter, hook_type, extra_context) - except RuntimeException as exc: + except DbtRuntimeError as exc: 
fire_event(DatabaseErrorRunningHook(hook_type=hook_type.value)) self.node_results.append( BaseResult( @@ -457,7 +457,7 @@ def after_run(self, adapter, results): def get_node_selector(self) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, diff --git a/core/dbt/task/run_operation.py b/core/dbt/task/run_operation.py index e510c70c37d..63384f1c21f 100644 --- a/core/dbt/task/run_operation.py +++ b/core/dbt/task/run_operation.py @@ -10,7 +10,7 @@ from dbt.adapters.factory import get_adapter from dbt.config.utils import parse_cli_vars from dbt.contracts.results import RunOperationResultsArtifact -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.events.functions import fire_event from dbt.events.types import ( RunningOperationCaughtError, @@ -34,7 +34,7 @@ def _get_kwargs(self) -> Dict[str, Any]: def compile_manifest(self) -> None: if self.manifest is None: - raise InternalException("manifest was None in compile_manifest") + raise DbtInternalError("manifest was None in compile_manifest") def _run_unsafe(self) -> agate.Table: adapter = get_adapter(self.config) diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py index 7143c286675..58504332ad2 100644 --- a/core/dbt/task/runnable.py +++ b/core/dbt/task/runnable.py @@ -44,10 +44,10 @@ from dbt.contracts.results import NodeStatus, RunExecutionResult, RunningStatus from dbt.contracts.state import PreviousState from dbt.exceptions import ( - InternalException, - NotImplementedException, - RuntimeException, - FailFastException, + DbtInternalError, + NotImplementedError, + DbtRuntimeError, + FailFastError, ) from dbt.graph import GraphQueue, NodeSelector, SelectionSpec, parse_difference, Graph @@ 
-83,7 +83,7 @@ def load_manifest(self): def compile_manifest(self): if self.manifest is None: - raise InternalException("compile_manifest called before manifest was loaded") + raise DbtInternalError("compile_manifest called before manifest was loaded") # we cannot get adapter in init since it will break rpc #5579 adapter = get_adapter(self.config) @@ -150,7 +150,7 @@ def get_selection_spec(self) -> SelectionSpec: @abstractmethod def get_node_selector(self) -> NodeSelector: - raise NotImplementedException(f"get_node_selector not implemented for task {type(self)}") + raise NotImplementedError(f"get_node_selector not implemented for task {type(self)}") @abstractmethod def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): @@ -164,7 +164,7 @@ def get_graph_queue(self) -> GraphQueue: def _runtime_initialize(self): super()._runtime_initialize() if self.manifest is None or self.graph is None: - raise InternalException("_runtime_initialize never loaded the manifest and graph!") + raise DbtInternalError("_runtime_initialize never loaded the manifest and graph!") self.job_queue = self.get_graph_queue() @@ -176,7 +176,7 @@ def _runtime_initialize(self): elif uid in self.manifest.sources: self._flattened_nodes.append(self.manifest.sources[uid]) else: - raise InternalException( + raise DbtInternalError( f"Node selection returned {uid}, expected a node or a source" ) @@ -186,7 +186,7 @@ def raise_on_first_error(self): return False def get_runner_type(self, node): - raise NotImplementedException("Not Implemented") + raise NotImplementedError("Not Implemented") def result_path(self): return os.path.join(self.config.target_path, RESULT_FILE_NAME) @@ -246,7 +246,7 @@ def call_runner(self, runner): fail_fast = flags.FAIL_FAST if result.status in (NodeStatus.Error, NodeStatus.Fail) and fail_fast: - self._raise_next_tick = FailFastException( + self._raise_next_tick = FailFastError( msg="Failing early due to test failure or runtime error", result=result, 
node=getattr(result, "node", None), @@ -255,7 +255,7 @@ def call_runner(self, runner): # if we raise inside a thread, it'll just get silently swallowed. # stash the error message we want here, and it will check the # next 'tick' - should be soon since our thread is about to finish! - self._raise_next_tick = RuntimeException(result.message) + self._raise_next_tick = DbtRuntimeError(result.message) return result @@ -280,7 +280,7 @@ def _raise_set_error(self): def run_queue(self, pool): """Given a pool, submit jobs from the queue to the pool.""" if self.job_queue is None: - raise InternalException("Got to run_queue with no job queue set") + raise DbtInternalError("Got to run_queue with no job queue set") def callback(result): """Note: mark_done, at a minimum, must happen here or dbt will @@ -289,7 +289,7 @@ def callback(result): self._handle_result(result) if self.job_queue is None: - raise InternalException("Got to run_queue callback with no job queue set") + raise DbtInternalError("Got to run_queue callback with no job queue set") self.job_queue.mark_done(result.node.unique_id) while not self.job_queue.empty(): @@ -331,7 +331,7 @@ def _handle_result(self, result): node = result.node if self.manifest is None: - raise InternalException("manifest was None in _handle_result") + raise DbtInternalError("manifest was None in _handle_result") if isinstance(node, SourceDefinition): self.manifest.update_source(node) @@ -387,7 +387,7 @@ def execute_nodes(self): try: self.run_queue(pool) - except FailFastException as failure: + except FailFastError as failure: self._cancel_connections(pool) print_run_result_error(failure.result) raise @@ -404,7 +404,7 @@ def execute_nodes(self): def _mark_dependent_errors(self, node_id, result, cause): if self.graph is None: - raise InternalException("graph is None in _mark_dependent_errors") + raise DbtInternalError("graph is None in _mark_dependent_errors") for dep_node_id in self.graph.get_dependent_nodes(node_id): 
self._skipped_children[dep_node_id] = cause @@ -458,7 +458,7 @@ def run(self): self._runtime_initialize() if self._flattened_nodes is None: - raise InternalException("after _runtime_initialize, _flattened_nodes was still None") + raise DbtInternalError("after _runtime_initialize, _flattened_nodes was still None") if len(self._flattened_nodes) == 0: with TextOnly(): @@ -514,7 +514,7 @@ def interpret_results(cls, results): def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRelation]: if self.manifest is None: - raise InternalException("manifest was None in get_model_schemas") + raise DbtInternalError("manifest was None in get_model_schemas") result: Set[BaseRelation] = set() for node in self.manifest.nodes.values(): diff --git a/core/dbt/task/seed.py b/core/dbt/task/seed.py index 564a55b1577..58b6aa25bda 100644 --- a/core/dbt/task/seed.py +++ b/core/dbt/task/seed.py @@ -6,7 +6,7 @@ ) from dbt.contracts.results import RunStatus -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.graph import ResourceTypeSelector from dbt.logger import TextOnly from dbt.events.functions import fire_event @@ -73,7 +73,7 @@ def raise_on_first_error(self): def get_node_selector(self): if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, diff --git a/core/dbt/task/snapshot.py b/core/dbt/task/snapshot.py index 8de99864b96..f5e8a549bb2 100644 --- a/core/dbt/task/snapshot.py +++ b/core/dbt/task/snapshot.py @@ -1,6 +1,6 @@ from .run import ModelRunner, RunTask -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.events.functions import fire_event from dbt.events.base_types import EventLevel from dbt.events.types import 
LogSnapshotResult @@ -37,7 +37,7 @@ def raise_on_first_error(self): def get_node_selector(self): if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, diff --git a/core/dbt/task/sql.py b/core/dbt/task/sql.py index 4a267bd91bf..4f662383d74 100644 --- a/core/dbt/task/sql.py +++ b/core/dbt/task/sql.py @@ -25,7 +25,7 @@ def __init__(self, config, adapter, node, node_index, num_nodes): def handle_exception(self, e, ctx): fire_event(SQLRunnerException(exc=str(e), exc_info=traceback.format_exc())) if isinstance(e, dbt.exceptions.Exception): - if isinstance(e, dbt.exceptions.RuntimeException): + if isinstance(e, dbt.exceptions.DbtRuntimeError): e.add_node(ctx.node) return e @@ -51,7 +51,7 @@ def error_result(self, node, error, start_time, timing_info): raise error def ephemeral_result(self, node, start_time, timing_info): - raise dbt.exceptions.NotImplementedException("cannot execute ephemeral nodes remotely!") + raise dbt.exceptions.NotImplementedError("cannot execute ephemeral nodes remotely!") class SqlCompileRunner(GenericSqlRunner[RemoteCompileResult]): diff --git a/core/dbt/task/test.py b/core/dbt/task/test.py index b55eed940ac..48422b5e726 100644 --- a/core/dbt/task/test.py +++ b/core/dbt/task/test.py @@ -22,9 +22,9 @@ LogStartLine, ) from dbt.exceptions import ( - InternalException, - InvalidBoolean, - MissingMaterialization, + DbtInternalError, + BooleanError, + MissingMaterializationError, ) from dbt.graph import ( ResourceTypeSelector, @@ -51,7 +51,7 @@ def convert_bool_type(field) -> bool: try: return bool(strtobool(field)) # type: ignore except ValueError: - raise InvalidBoolean(field, "get_test_sql") + raise BooleanError(field, "get_test_sql") # need this so we catch both true bools and 0/1 return bool(field) @@ -101,10 
+101,10 @@ def execute_test( ) if materialization_macro is None: - raise MissingMaterialization(materialization=test.get_materialization(), adapter_type=self.adapter.type()) + raise MissingMaterializationError(materialization=test.get_materialization(), adapter_type=self.adapter.type()) if "config" not in context: - raise InternalException( + raise DbtInternalError( "Invalid materialization context generated, missing config: {}".format(context) ) @@ -118,14 +118,14 @@ def execute_test( table = result["table"] num_rows = len(table.rows) if num_rows != 1: - raise InternalException( + raise DbtInternalError( f"dbt internally failed to execute {test.unique_id}: " f"Returned {num_rows} rows, but expected " f"1 row" ) num_cols = len(table.columns) if num_cols != 3: - raise InternalException( + raise DbtInternalError( f"dbt internally failed to execute {test.unique_id}: " f"Returned {num_cols} columns, but expected " f"3 columns" @@ -203,7 +203,7 @@ def raise_on_first_error(self): def get_node_selector(self) -> TestSelector: if self.manifest is None or self.graph is None: - raise InternalException("manifest and graph must be set to get perform node selection") + raise DbtInternalError("manifest and graph must be set to get perform node selection") return TestSelector( graph=self.graph, manifest=self.manifest, diff --git a/core/dbt/tests/fixtures/project.py b/core/dbt/tests/fixtures/project.py index 2d7ae5ded67..9fb34ff59a4 100644 --- a/core/dbt/tests/fixtures/project.py +++ b/core/dbt/tests/fixtures/project.py @@ -6,7 +6,7 @@ import warnings import yaml -from dbt.exceptions import CompilationException, DatabaseException +from dbt.exceptions import CompilationError, DbtDatabaseError import dbt.flags as flags from dbt.config.runtime import RuntimeConfig from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters, get_adapter_by_type @@ -494,10 +494,10 @@ def project( # a `load_dependencies` method. 
# Macros gets executed as part of drop_scheme in core/dbt/adapters/sql/impl.py. When # the macros have errors (which is what we're actually testing for...) they end up - # throwing CompilationExceptions or DatabaseExceptions + # throwing CompilationErrors or DbtDatabaseErrors try: project.drop_test_schema() - except (KeyError, AttributeError, CompilationException, DatabaseException): + except (KeyError, AttributeError, CompilationError, DbtDatabaseError): pass os.chdir(orig_cwd) cleanup_event_logger() diff --git a/core/dbt/utils.py b/core/dbt/utils.py index 6afe9d1e26d..e9c4677130d 100644 --- a/core/dbt/utils.py +++ b/core/dbt/utils.py @@ -15,7 +15,7 @@ from pathlib import PosixPath, WindowsPath from contextlib import contextmanager -from dbt.exceptions import ConnectionException, DuplicateAlias +from dbt.exceptions import ConnectionError, DuplicateAliasError from dbt.events.functions import fire_event from dbt.events.types import RetryExternalCall, RecordRetryException from dbt import flags @@ -92,13 +92,13 @@ def get_model_name_or_none(model): def get_dbt_macro_name(name): if name is None: - raise dbt.exceptions.InternalException("Got None for a macro name!") + raise dbt.exceptions.DbtInternalError("Got None for a macro name!") return f"{MACRO_PREFIX}{name}" def get_dbt_docs_name(name): if name is None: - raise dbt.exceptions.InternalException("Got None for a doc name!") + raise dbt.exceptions.DbtInternalError("Got None for a doc name!") return f"{DOCS_PREFIX}{name}" @@ -228,7 +228,7 @@ def deep_map_render(func: Callable[[Any, Tuple[Union[str, int], ...]], Any], val return _deep_map_render(func, value, ()) except RuntimeError as exc: if "maximum recursion depth exceeded" in str(exc): - raise dbt.exceptions.RecursionException("Cycle detected in deep_map_render") + raise dbt.exceptions.RecursionError("Cycle detected in deep_map_render") raise @@ -365,7 +365,7 @@ def translate_mapping(self, kwargs: Mapping[str, Any]) -> Dict[str, Any]: for key, value in
kwargs.items(): canonical_key = self.aliases.get(key, key) if canonical_key in result: - raise DuplicateAlias(kwargs, self.aliases, canonical_key) + raise DuplicateAliasError(kwargs, self.aliases, canonical_key) result[canonical_key] = self.translate_value(value) return result @@ -385,7 +385,7 @@ def translate(self, value: Mapping[str, Any]) -> Dict[str, Any]: return self.translate_mapping(value) except RuntimeError as exc: if "maximum recursion depth exceeded" in str(exc): - raise dbt.exceptions.RecursionException( + raise dbt.exceptions.RecursionError( "Cycle detected in a value passed to translate!" ) raise @@ -403,7 +403,7 @@ def translate_aliases( :returns: A dict containing all the values in kwargs referenced by their canonical key. - :raises: `AliasException`, if a canonical key is defined more than once. + :raises: `AliasError`, if a canonical key is defined more than once. """ translator = Translator(aliases, recurse) return translator.translate(kwargs) @@ -624,7 +624,7 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0): time.sleep(1) return _connection_exception_retry(fn, max_attempts, attempt + 1) else: - raise ConnectionException("External connection exception occurred: " + str(exc)) + raise ConnectionError("External connection exception occurred: " + str(exc)) # This is used to serialize the args in the run_results and in the logs. 
diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py index df24b0f9118..afa74a46339 100644 --- a/plugins/postgres/dbt/adapters/postgres/connections.py +++ b/plugins/postgres/dbt/adapters/postgres/connections.py @@ -73,19 +73,19 @@ def exception_handler(self, sql): logger.debug("Failed to release connection!") pass - raise dbt.exceptions.DatabaseException(str(e).strip()) from e + raise dbt.exceptions.DbtDatabaseError(str(e).strip()) from e except Exception as e: logger.debug("Error running SQL: {}", sql) logger.debug("Rolling back transaction.") self.rollback_if_open() - if isinstance(e, dbt.exceptions.RuntimeException): + if isinstance(e, dbt.exceptions.DbtRuntimeError): # during a sql query, an internal to dbt exception was raised. # this sounds a lot like a signal handler and probably has # useful information, so raise it without modification. raise - raise dbt.exceptions.RuntimeException(e) from e + raise dbt.exceptions.DbtRuntimeError(e) from e @classmethod def open(cls, connection): diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py index 78b86234eae..9a5d5d3f8f6 100644 --- a/plugins/postgres/dbt/adapters/postgres/impl.py +++ b/plugins/postgres/dbt/adapters/postgres/impl.py @@ -9,11 +9,11 @@ from dbt.adapters.postgres import PostgresRelation from dbt.dataclass_schema import dbtClassMixin, ValidationError from dbt.exceptions import ( - CrossDbReferenceProhibited, - IndexConfigNotDict, - InvalidIndexConfig, - RuntimeException, - UnexpectedDbReference, + CrossDbReferenceProhibitedError, + IndexConfigNotDictError, + IndexConfigError, + DbtRuntimeError, + UnexpectedDbReferenceError, ) import dbt.utils @@ -46,9 +46,9 @@ def parse(cls, raw_index) -> Optional["PostgresIndexConfig"]: cls.validate(raw_index) return cls.from_dict(raw_index) except ValidationError as exc: - raise InvalidIndexConfig(exc) + raise IndexConfigError(exc) except 
TypeError: - raise IndexConfigNotDict(raw_index) + raise IndexConfigNotDictError(raw_index) @dataclass @@ -74,7 +74,7 @@ def verify_database(self, database): database = database.strip('"') expected = self.config.credentials.database if database.lower() != expected.lower(): - raise UnexpectedDbReference(self.type(), database, expected) + raise UnexpectedDbReferenceError(self.type(), database, expected) # return an empty string on success so macros can call this return "" @@ -107,8 +107,8 @@ def _get_catalog_schemas(self, manifest): schemas = super()._get_catalog_schemas(manifest) try: return schemas.flatten() - except RuntimeException as exc: - raise CrossDbReferenceProhibited(self.type(), exc.msg) + except DbtRuntimeError as exc: + raise CrossDbReferenceProhibitedError(self.type(), exc.msg) def _link_cached_relations(self, manifest): schemas: Set[str] = set() diff --git a/plugins/postgres/dbt/adapters/postgres/relation.py b/plugins/postgres/dbt/adapters/postgres/relation.py index 0f3296c1818..43c8c724a74 100644 --- a/plugins/postgres/dbt/adapters/postgres/relation.py +++ b/plugins/postgres/dbt/adapters/postgres/relation.py @@ -1,7 +1,7 @@ from dbt.adapters.base import Column from dataclasses import dataclass from dbt.adapters.base.relation import BaseRelation -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError @dataclass(frozen=True, eq=False, repr=False) @@ -14,7 +14,7 @@ def __post_init__(self): and self.type is not None and len(self.identifier) > self.relation_max_name_length() ): - raise RuntimeException( + raise DbtRuntimeError( f"Relation name '{self.identifier}' " f"is longer than {self.relation_max_name_length()} characters" ) diff --git a/test/integration/035_docs_blocks_tests/test_docs_blocks.py b/test/integration/035_docs_blocks_tests/test_docs_blocks.py index dacddf394f9..f37c8e677ac 100644 --- a/test/integration/035_docs_blocks_tests/test_docs_blocks.py +++ 
b/test/integration/035_docs_blocks_tests/test_docs_blocks.py @@ -122,7 +122,7 @@ def test_postgres_alternative_docs_path(self): @use_profile('postgres') def test_postgres_alternative_docs_path_missing(self): self.use_default_project({"docs-paths": [self.dir("not-docs")]}) - with self.assertRaises(dbt.exceptions.CompilationException): + with self.assertRaises(dbt.exceptions.CompilationError): self.run_dbt() @@ -142,7 +142,7 @@ def models(self): @use_profile('postgres') def test_postgres_missing_doc_ref(self): # The run should fail since we could not find the docs reference. - with self.assertRaises(dbt.exceptions.CompilationException): + with self.assertRaises(dbt.exceptions.CompilationError): self.run_dbt() @@ -162,7 +162,7 @@ def models(self): @use_profile('postgres') def test_postgres_invalid_doc_ref(self): # The run should fail since we could not find the docs reference. - with self.assertRaises(dbt.exceptions.CompilationException): + with self.assertRaises(dbt.exceptions.CompilationError): self.run_dbt(expect_pass=False) class TestDuplicateDocsBlock(DBTIntegrationTest): @@ -180,5 +180,5 @@ def models(self): @use_profile('postgres') def test_postgres_duplicate_doc_ref(self): - with self.assertRaises(dbt.exceptions.CompilationException): + with self.assertRaises(dbt.exceptions.CompilationError): self.run_dbt(expect_pass=False) diff --git a/test/integration/062_defer_state_tests/test_defer_state.py b/test/integration/062_defer_state_tests/test_defer_state.py index d48d84aae46..593dc034036 100644 --- a/test/integration/062_defer_state_tests/test_defer_state.py +++ b/test/integration/062_defer_state_tests/test_defer_state.py @@ -80,7 +80,7 @@ def run_and_snapshot_defer(self): results = self.run_dbt(['snapshot']) # no state, snapshot fails - with pytest.raises(dbt.exceptions.RuntimeException): + with pytest.raises(dbt.exceptions.DbtRuntimeError): results = self.run_dbt(['snapshot', '--state', 'state', '--defer']) # copy files diff --git 
a/test/integration/062_defer_state_tests/test_modified_state.py b/test/integration/062_defer_state_tests/test_modified_state.py index 5f64cd66ae1..085faf11d5b 100644 --- a/test/integration/062_defer_state_tests/test_modified_state.py +++ b/test/integration/062_defer_state_tests/test_modified_state.py @@ -6,7 +6,7 @@ import pytest -from dbt.exceptions import CompilationException, IncompatibleSchemaException +from dbt.exceptions import CompilationError, IncompatibleSchemaError class TestModifiedState(DBTIntegrationTest): @@ -95,7 +95,7 @@ def test_postgres_changed_seed_contents_state(self): assert len(results) == 1 assert results[0] == 'test.seed' - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: self.run_dbt(['--warn-error', 'ls', '--resource-type', 'seed', '--select', 'state:modified', '--state', './state']) assert '>1MB' in str(exc.value) @@ -206,6 +206,6 @@ def test_postgres_changed_exposure(self): @use_profile('postgres') def test_postgres_previous_version_manifest(self): # This tests that a different schema version in the file throws an error - with self.assertRaises(IncompatibleSchemaException) as exc: + with self.assertRaises(IncompatibleSchemaError) as exc: results = self.run_dbt(['ls', '-s', 'state:modified', '--state', './previous_state']) self.assertEqual(exc.CODE, 10014) diff --git a/test/integration/062_defer_state_tests/test_run_results_state.py b/test/integration/062_defer_state_tests/test_run_results_state.py index 4f59c6faa75..58215009ad7 100644 --- a/test/integration/062_defer_state_tests/test_run_results_state.py +++ b/test/integration/062_defer_state_tests/test_run_results_state.py @@ -6,8 +6,6 @@ import pytest -from dbt.exceptions import CompilationException - class TestRunResultsState(DBTIntegrationTest): @property diff --git a/test/integration/068_partial_parsing_tests/test_partial_parsing.py b/test/integration/068_partial_parsing_tests/test_partial_parsing.py index fce32b42cf1..d411a738602 
100644 --- a/test/integration/068_partial_parsing_tests/test_partial_parsing.py +++ b/test/integration/068_partial_parsing_tests/test_partial_parsing.py @@ -1,4 +1,4 @@ -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError from dbt.contracts.graph.manifest import Manifest from dbt.contracts.files import ParseFileType from dbt.contracts.results import TestStatus @@ -144,7 +144,7 @@ def test_postgres_pp_models(self): # referred to in schema file self.copy_file('test-files/models-schema2.yml', 'models/schema.yml') self.rm_file('models/model_three.sql') - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): results = self.run_dbt(["--partial-parse", "--warn-error", "run"]) # Put model back again @@ -212,7 +212,7 @@ def test_postgres_pp_models(self): # Remove the macro self.rm_file('macros/my_macro.sql') - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): results = self.run_dbt(["--partial-parse", "--warn-error", "run"]) # put back macro file, got back to schema file with no macro @@ -310,7 +310,7 @@ def test_postgres_pp_sources(self): # remove sources schema file self.rm_file(normalize('models/sources.yml')) - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): results = self.run_dbt(["--partial-parse", "run"]) # put back sources and add an exposures file @@ -319,7 +319,7 @@ def test_postgres_pp_sources(self): # remove seed referenced in exposures file self.rm_file(normalize('seeds/raw_customers.csv')) - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): results = self.run_dbt(["--partial-parse", "run"]) # put back seed and remove depends_on from exposure @@ -333,7 +333,7 @@ def test_postgres_pp_sources(self): # Change seed name to wrong name self.copy_file('test-files/schema-sources5.yml', 'models/sources.yml') - with self.assertRaises(CompilationException): + 
with self.assertRaises(CompilationError): results = self.run_dbt(["--partial-parse", "--warn-error", "run"]) # Put back seed name to right name diff --git a/test/integration/068_partial_parsing_tests/test_pp_metrics.py b/test/integration/068_partial_parsing_tests/test_pp_metrics.py index b9cbc69e3aa..5debe6d2b85 100644 --- a/test/integration/068_partial_parsing_tests/test_pp_metrics.py +++ b/test/integration/068_partial_parsing_tests/test_pp_metrics.py @@ -1,4 +1,4 @@ -from dbt.exceptions import CompilationException, UndefinedMacroException +from dbt.exceptions import CompilationError from dbt.contracts.graph.manifest import Manifest from dbt.contracts.files import ParseFileType from dbt.contracts.results import TestStatus @@ -99,8 +99,8 @@ def test_postgres_metrics(self): # Then delete a metric self.copy_file('test-files/people_metrics3.yml', 'models/people_metrics.yml') - with self.assertRaises(CompilationException): - # We use "parse" here and not "run" because we're checking that the CompilationException + with self.assertRaises(CompilationError): + # We use "parse" here and not "run" because we're checking that the CompilationError # occurs at parse time, not compilation results = self.run_dbt(["parse"]) diff --git a/test/integration/068_partial_parsing_tests/test_pp_vars.py b/test/integration/068_partial_parsing_tests/test_pp_vars.py index e5f0752f6a9..a73bfc43fa3 100644 --- a/test/integration/068_partial_parsing_tests/test_pp_vars.py +++ b/test/integration/068_partial_parsing_tests/test_pp_vars.py @@ -1,4 +1,4 @@ -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import ParsingError from dbt.constants import SECRET_ENV_PREFIX from dbt.contracts.graph.manifest import Manifest from dbt.contracts.files import ParseFileType @@ -58,7 +58,7 @@ def test_postgres_env_vars_models(self): # copy a file with an env_var call without an env_var self.copy_file('test-files/env_var_model.sql', 'models/env_var_model.sql') - with 
self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): results = self.run_dbt(["--partial-parse", "run"]) # set the env var @@ -84,7 +84,7 @@ def test_postgres_env_vars_models(self): # set an env_var in a schema file self.copy_file('test-files/env_var_schema.yml', 'models/schema.yml') self.copy_file('test-files/env_var_model_one.sql', 'models/model_one.sql') - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): results = self.run_dbt(["--partial-parse", "run"]) # actually set the env_var @@ -139,7 +139,7 @@ def test_postgres_env_vars_models(self): # Delete database env var del os.environ['ENV_VAR_DATABASE'] - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): results = self.run_dbt(["--partial-parse", "run"]) os.environ['ENV_VAR_DATABASE'] = 'test_dbt' @@ -149,7 +149,7 @@ def test_postgres_env_vars_models(self): results = self.run_dbt(["--partial-parse", "run"]) # Add source test using test_color and an env_var for color self.copy_file('test-files/env_var_schema2.yml', 'models/schema.yml') - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): results = self.run_dbt(["--partial-parse", "run"]) os.environ['ENV_VAR_COLOR'] = 'green' results = self.run_dbt(["--partial-parse", "run"]) diff --git a/test/unit/test_adapter_connection_manager.py b/test/unit/test_adapter_connection_manager.py index 47db6b67ab0..b270f6a5d19 100644 --- a/test/unit/test_adapter_connection_manager.py +++ b/test/unit/test_adapter_connection_manager.py @@ -64,7 +64,7 @@ def test_retry_connection_fails_unhandled(self): * The Connection state should be "fail" and the handle None. * The resulting attempt count should be 1 as we are not explicitly configured to handle a ValueError. - * retry_connection should raise a FailedToConnectException with the Exception message. + * retry_connection should raise a FailedToConnectError with the Exception message. 
""" conn = self.postgres_connection attempts = 0 @@ -75,7 +75,7 @@ def connect(): raise ValueError("Something went horribly wrong") with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, "Something went horribly wrong" + dbt.exceptions.FailedToConnectError, "Something went horribly wrong" ): BaseConnectionManager.retry_connection( @@ -99,7 +99,7 @@ def test_retry_connection_fails_handled(self): As a result: * The Connection state should be "fail" and the handle None. * The resulting attempt count should be 2 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectException with the Exception message. + * retry_connection should raise a FailedToConnectError with the Exception message. """ conn = self.postgres_connection attempts = 0 @@ -110,7 +110,7 @@ def connect(): raise ValueError("Something went horribly wrong") with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, "Something went horribly wrong" + dbt.exceptions.FailedToConnectError, "Something went horribly wrong" ): BaseConnectionManager.retry_connection( @@ -173,7 +173,7 @@ def test_retry_connection_attempts(self): * The Connection state should be "fail" and the handle None, as connect never returns. * The resulting attempt count should be 11 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectException with the Exception message. + * retry_connection should raise a FailedToConnectError with the Exception message. 
""" conn = self.postgres_connection attempts = 0 @@ -185,7 +185,7 @@ def connect(): raise ValueError("Something went horribly wrong") with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, "Something went horribly wrong" + dbt.exceptions.FailedToConnectError, "Something went horribly wrong" ): BaseConnectionManager.retry_connection( conn, @@ -208,7 +208,7 @@ def test_retry_connection_fails_handling_all_exceptions(self): * The Connection state should be "fail" and the handle None, as connect never returns. * The resulting attempt count should be 11 as we are configured to handle all Exceptions. - * retry_connection should raise a FailedToConnectException with the Exception message. + * retry_connection should raise a FailedToConnectError with the Exception message. """ conn = self.postgres_connection attempts = 0 @@ -220,7 +220,7 @@ def connect(): raise TypeError("An unhandled thing went horribly wrong") with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, "An unhandled thing went horribly wrong" + dbt.exceptions.FailedToConnectError, "An unhandled thing went horribly wrong" ): BaseConnectionManager.retry_connection( conn, @@ -338,7 +338,7 @@ def connect(): return True with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, "retry_limit cannot be negative" + dbt.exceptions.FailedToConnectError, "retry_limit cannot be negative" ): BaseConnectionManager.retry_connection( conn, @@ -365,7 +365,7 @@ def connect(): for retry_timeout in [-10, -2.5, lambda _: -100, lambda _: -10.1]: with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, + dbt.exceptions.FailedToConnectError, "retry_timeout cannot be negative or return a negative time", ): BaseConnectionManager.retry_connection( @@ -392,7 +392,7 @@ def connect(): return True with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectException, + dbt.exceptions.FailedToConnectError, "retry_limit cannot be negative", ): 
BaseConnectionManager.retry_connection( diff --git a/test/unit/test_cache.py b/test/unit/test_cache.py index f69b4783ee1..3f9c6e4f6bf 100644 --- a/test/unit/test_cache.py +++ b/test/unit/test_cache.py @@ -121,7 +121,7 @@ def test_dest_exists_error(self): self.cache.add(bar) self.assert_relations_exist('DBT', 'schema', 'foo', 'bar') - with self.assertRaises(dbt.exceptions.InternalException): + with self.assertRaises(dbt.exceptions.DbtInternalError): self.cache.rename(foo, bar) self.assert_relations_exist('DBT', 'schema', 'foo', 'bar') diff --git a/test/unit/test_config.py b/test/unit/test_config.py index 9cdc248b7ed..4c1707d28b9 100644 --- a/test/unit/test_config.py +++ b/test/unit/test_config.py @@ -928,7 +928,7 @@ def test_run_operation_task(self): def test_run_operation_task_with_bad_path(self): self.args.project_dir = 'bad_path' - with self.assertRaises(dbt.exceptions.RuntimeException): + with self.assertRaises(dbt.exceptions.DbtRuntimeError): new_task = RunOperationTask.from_args(self.args) diff --git a/test/unit/test_context.py b/test/unit/test_context.py index a567e032f55..34c8562402f 100644 --- a/test/unit/test_context.py +++ b/test/unit/test_context.py @@ -89,7 +89,7 @@ def test_var_not_defined(self): var = providers.RuntimeVar(self.context, self.config, self.model) self.assertEqual(var("foo", "bar"), "bar") - with self.assertRaises(dbt.exceptions.CompilationException): + with self.assertRaises(dbt.exceptions.CompilationError): var("foo") def test_parser_var_default_something(self): @@ -464,7 +464,7 @@ def test_macro_namespace_duplicates(config_postgres, manifest_fx): mn.add_macros(manifest_fx.macros.values(), {}) # same pkg, same name: error - with pytest.raises(dbt.exceptions.CompilationException): + with pytest.raises(dbt.exceptions.CompilationError): mn.add_macro(mock_macro("macro_a", "root"), {}) # different pkg, same name: no error diff --git a/test/unit/test_core_dbt_utils.py b/test/unit/test_core_dbt_utils.py index 1deb8a77552..546e4f6ca00 100644 
--- a/test/unit/test_core_dbt_utils.py +++ b/test/unit/test_core_dbt_utils.py @@ -2,7 +2,7 @@ import tarfile import unittest -from dbt.exceptions import ConnectionException +from dbt.exceptions import ConnectionError from dbt.utils import _connection_exception_retry as connection_exception_retry @@ -19,7 +19,7 @@ def test_connection_exception_retry_success_requests_exception(self): def test_connection_exception_retry_max(self): Counter._reset() - with self.assertRaises(ConnectionException): + with self.assertRaises(ConnectionError): connection_exception_retry(lambda: Counter._add_with_exception(), 5) self.assertEqual(6, counter) # 6 = original attempt plus 5 retries diff --git a/test/unit/test_deps.py b/test/unit/test_deps.py index 650722ef6f4..27c6f66e015 100644 --- a/test/unit/test_deps.py +++ b/test/unit/test_deps.py @@ -133,7 +133,7 @@ def test_resolve_fail(self): self.assertEqual(c.git, 'http://example.com') self.assertEqual(c.revisions, ['0.0.1', '0.0.2']) - with self.assertRaises(dbt.exceptions.DependencyException): + with self.assertRaises(dbt.exceptions.DependencyError): c.resolved() def test_default_revision(self): @@ -264,7 +264,7 @@ def test_resolve_missing_package(self): package='dbt-labs-test/b', version='0.1.2' )) - with self.assertRaises(dbt.exceptions.DependencyException) as exc: + with self.assertRaises(dbt.exceptions.DependencyError) as exc: a.resolved() msg = 'Package dbt-labs-test/b was not found in the package index' @@ -276,7 +276,7 @@ def test_resolve_missing_version(self): version='0.1.4' )) - with self.assertRaises(dbt.exceptions.DependencyException) as exc: + with self.assertRaises(dbt.exceptions.DependencyError) as exc: a.resolved() msg = ( "Could not find a matching compatible version for package " @@ -298,7 +298,7 @@ def test_resolve_conflict(self): b = RegistryUnpinnedPackage.from_contract(b_contract) c = a.incorporate(b) - with self.assertRaises(dbt.exceptions.DependencyException) as exc: + with 
self.assertRaises(dbt.exceptions.DependencyError) as exc: c.resolved() msg = ( "Version error for package dbt-labs-test/a: Could not " diff --git a/test/unit/test_exceptions.py b/test/unit/test_exceptions.py index 6a47255e13c..e66e913b1a6 100644 --- a/test/unit/test_exceptions.py +++ b/test/unit/test_exceptions.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import raise_duplicate_macro_name, CompilationException +from dbt.exceptions import raise_duplicate_macro_name, CompilationError from .utils import MockMacro @@ -8,7 +8,7 @@ def test_raise_duplicate_macros_different_package(): macro_1 = MockMacro(package='dbt', name='some_macro') macro_2 = MockMacro(package='dbt-myadapter', name='some_macro') - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: raise_duplicate_macro_name( node_1=macro_1, node_2=macro_2, @@ -24,7 +24,7 @@ def test_raise_duplicate_macros_same_package(): macro_1 = MockMacro(package='dbt', name='some_macro') macro_2 = MockMacro(package='dbt', name='some_macro') - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: raise_duplicate_macro_name( node_1=macro_1, node_2=macro_2, diff --git a/test/unit/test_graph_selection.py b/test/unit/test_graph_selection.py index a0da5b490e9..4c40c1dff82 100644 --- a/test/unit/test_graph_selection.py +++ b/test/unit/test_graph_selection.py @@ -200,5 +200,5 @@ def test_parse_specs(spec, parents, parents_depth, children, children_depth, fil @pytest.mark.parametrize('invalid', invalid_specs, ids=lambda k: str(k)) def test_invalid_specs(invalid): - with pytest.raises(dbt.exceptions.RuntimeException): + with pytest.raises(dbt.exceptions.DbtRuntimeError): graph_selector.SelectionCriteria.from_single_spec(invalid) diff --git a/test/unit/test_graph_selector_methods.py b/test/unit/test_graph_selector_methods.py index 7532302784f..769199e841f 100644 --- a/test/unit/test_graph_selector_methods.py +++ 
b/test/unit/test_graph_selector_methods.py @@ -898,11 +898,11 @@ def test_select_state_no_change(manifest, previous_state): def test_select_state_nothing(manifest, previous_state): previous_state.manifest = None method = statemethod(manifest, previous_state) - with pytest.raises(dbt.exceptions.RuntimeException) as exc: + with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, 'modified') assert 'no comparison manifest' in str(exc.value) - with pytest.raises(dbt.exceptions.RuntimeException) as exc: + with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, 'new') assert 'no comparison manifest' in str(exc.value) diff --git a/test/unit/test_graph_selector_spec.py b/test/unit/test_graph_selector_spec.py index 68c8611ccac..d72325affc2 100644 --- a/test/unit/test_graph_selector_spec.py +++ b/test/unit/test_graph_selector_spec.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError from dbt.graph.selector_spec import ( SelectionCriteria, SelectionIntersection, @@ -111,10 +111,10 @@ def test_raw_parse_weird(): def test_raw_parse_invalid(): - with pytest.raises(RuntimeException): + with pytest.raises(DbtRuntimeError): SelectionCriteria.from_single_spec('invalid_method:something') - with pytest.raises(RuntimeException): + with pytest.raises(DbtRuntimeError): SelectionCriteria.from_single_spec('@foo+') diff --git a/test/unit/test_jinja.py b/test/unit/test_jinja.py index 6b8c939de64..5213f8d7d8c 100644 --- a/test/unit/test_jinja.py +++ b/test/unit/test_jinja.py @@ -6,7 +6,7 @@ from dbt.clients.jinja import get_rendered from dbt.clients.jinja import get_template from dbt.clients.jinja import extract_toplevel_blocks -from dbt.exceptions import CompilationException, JinjaRenderingException +from dbt.exceptions import CompilationError, JinjaRenderingError @contextmanager @@ -55,12 +55,12 @@ def expected_id(arg): ( 
'''foo: "{{ 'bar' | as_bool }}"''', returns('bar'), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ 'bar' | as_number }}"''', returns('bar'), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ 'bar' | as_native }}"''', @@ -116,7 +116,7 @@ def expected_id(arg): ( '''foo: "{{ 1 | as_bool }}"''', returns('1'), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ 1 | as_number }}"''', @@ -136,7 +136,7 @@ def expected_id(arg): ( '''foo: "{{ '1' | as_bool }}"''', returns('1'), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ '1' | as_number }}"''', @@ -171,7 +171,7 @@ def expected_id(arg): ( '''foo: "{{ True | as_number }}"''', returns('True'), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ True | as_native }}"''', @@ -197,7 +197,7 @@ def expected_id(arg): ( '''foo: "{{ true | as_number }}"''', returns("True"), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ true | as_native }}"''', @@ -254,7 +254,7 @@ def expected_id(arg): ( '''foo: "{{ True | as_number }}"''', returns("True"), - raises(JinjaRenderingException), + raises(JinjaRenderingError), ), ( '''foo: "{{ True | as_native }}"''', @@ -552,24 +552,24 @@ def test_materialization_parse(self): def test_nested_not_ok(self): # we don't allow nesting same blocks body = '{% myblock a %} {% myblock b %} {% endmyblock %} {% endmyblock %}' - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body, allowed_blocks={'myblock'}) def test_incomplete_block_failure(self): fullbody = '{% myblock foo %} {% endmyblock %}' for length in range(len('{% myblock foo %}'), len(fullbody)-1): body = fullbody[:length] - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body, allowed_blocks={'myblock'}) def 
test_wrong_end_failure(self): body = '{% myblock foo %} {% endotherblock %}' - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body, allowed_blocks={'myblock', 'otherblock'}) def test_comment_no_end_failure(self): body = '{# ' - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body) def test_comment_only(self): @@ -698,7 +698,7 @@ def test_unclosed_model_quotes(self): def test_if(self): # if you conditionally define your macros/models, don't body = '{% if true %}{% macro my_macro() %} adsf {% endmacro %}{% endif %}' - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body) def test_if_innocuous(self): @@ -710,7 +710,7 @@ def test_if_innocuous(self): def test_for(self): # no for-loops over macros. body = '{% for x in range(10) %}{% macro my_macro() %} adsf {% endmacro %}{% endfor %}' - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): extract_toplevel_blocks(body) def test_for_innocuous(self): @@ -722,19 +722,19 @@ def test_for_innocuous(self): def test_endif(self): body = '{% snapshot foo %}select * from thing{% endsnapshot%}{% endif %}' - with self.assertRaises(CompilationException) as err: + with self.assertRaises(CompilationError) as err: extract_toplevel_blocks(body) self.assertIn('Got an unexpected control flow end tag, got endif but never saw a preceeding if (@ 1:53)', str(err.exception)) def test_if_endfor(self): body = '{% if x %}...{% endfor %}{% endif %}' - with self.assertRaises(CompilationException) as err: + with self.assertRaises(CompilationError) as err: extract_toplevel_blocks(body) self.assertIn('Got an unexpected control flow end tag, got endfor but expected endif next (@ 1:13)', str(err.exception)) def test_if_endfor_newlines(self): body = '{% if x %}\n ...\n {% endfor %}\n{% endif %}' - with 
self.assertRaises(CompilationException) as err: + with self.assertRaises(CompilationError) as err: extract_toplevel_blocks(body) self.assertIn('Got an unexpected control flow end tag, got endfor but expected endif next (@ 3:4)', str(err.exception)) diff --git a/test/unit/test_parser.py b/test/unit/test_parser.py index 38e439a696f..0699253417b 100644 --- a/test/unit/test_parser.py +++ b/test/unit/test_parser.py @@ -18,7 +18,7 @@ ModelNode, Macro, DependsOn, SingularTestNode, SnapshotNode, AnalysisNode, UnpatchedSourceDefinition ) -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError, ParsingError from dbt.node_types import NodeType from dbt.parser import ( ModelParser, MacroParser, SingularTestParser, GenericTestParser, @@ -664,7 +664,7 @@ def test_basic(self): def test_sql_model_parse_error(self): block = self.file_block_for(sql_model_parse_error, 'nested/model_1.sql') - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): self.parser.parse_file(block) def test_python_model_parse(self): @@ -724,31 +724,31 @@ def test_python_model_config_with_defaults(self): def test_python_model_single_argument(self): block = self.file_block_for(python_model_single_argument, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_no_argument(self): block = self.file_block_for(python_model_no_argument, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_incorrect_argument_name(self): block = self.file_block_for(python_model_incorrect_argument_name, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with 
self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_multiple_models(self): block = self.file_block_for(python_model_multiple_models, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_incorrect_function_name(self): block = self.file_block_for(python_model_incorrect_function_name, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_empty_file(self): @@ -759,13 +759,13 @@ def test_python_model_empty_file(self): def test_python_model_multiple_returns(self): block = self.file_block_for(python_model_multiple_returns, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_no_return(self): block = self.file_block_for(python_model_no_return, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_single_return(self): @@ -776,7 +776,7 @@ def test_python_model_single_return(self): def test_python_model_incorrect_ref(self): block = self.file_block_for(python_model_incorrect_ref, 'nested/py_model.py') self.parser.manifest.files[block.file.file_id] = block.file - with self.assertRaises(ParsingException): + with self.assertRaises(ParsingError): self.parser.parse_file(block) def test_python_model_default_materialization(self): @@ -1027,7 +1027,7 @@ def file_block_for(self, data, filename): def test_parse_error(self): block = self.file_block_for('{% snapshot 
foo %}select 1 as id{%snapshot bar %}{% endsnapshot %}', 'nested/snap_1.sql') - with self.assertRaises(CompilationException): + with self.assertRaises(CompilationError): self.parser.parse_file(block) def test_single_block(self): diff --git a/test/unit/test_postgres_adapter.py b/test/unit/test_postgres_adapter.py index 06a2ed7c497..0d56ff9ff63 100644 --- a/test/unit/test_postgres_adapter.py +++ b/test/unit/test_postgres_adapter.py @@ -12,7 +12,7 @@ from dbt.contracts.files import FileHash from dbt.contracts.graph.manifest import ManifestStateCheck from dbt.clients import agate_helper -from dbt.exceptions import ValidationException, DbtConfigError +from dbt.exceptions import DbtValidationError, DbtConfigError from psycopg2 import extensions as psycopg2_extensions from psycopg2 import DatabaseError @@ -58,8 +58,8 @@ def adapter(self): def test_acquire_connection_validations(self, psycopg2): try: connection = self.adapter.acquire_connection('dummy') - except ValidationException as e: - self.fail('got ValidationException: {}'.format(str(e))) + except DbtValidationError as e: + self.fail('got DbtValidationError: {}'.format(str(e))) except BaseException as e: self.fail('acquiring connection failed with unknown exception: {}' .format(str(e))) diff --git a/test/unit/test_registry_get_request_exception.py b/test/unit/test_registry_get_request_exception.py index 44033fe0546..3029971cad4 100644 --- a/test/unit/test_registry_get_request_exception.py +++ b/test/unit/test_registry_get_request_exception.py @@ -1,9 +1,9 @@ import unittest -from dbt.exceptions import ConnectionException +from dbt.exceptions import ConnectionError from dbt.clients.registry import _get_with_retries class testRegistryGetRequestException(unittest.TestCase): def test_registry_request_error_catching(self): # using non routable IP to test connection error logic in the _get_with_retries function - self.assertRaises(ConnectionException, _get_with_retries, '', 'http://0.0.0.0') + 
self.assertRaises(ConnectionError, _get_with_retries, '', 'http://0.0.0.0') diff --git a/test/unit/test_semver.py b/test/unit/test_semver.py index eff7603a2f6..b36c403e3a7 100644 --- a/test/unit/test_semver.py +++ b/test/unit/test_semver.py @@ -2,7 +2,7 @@ import itertools from typing import List -from dbt.exceptions import VersionsNotCompatibleException +from dbt.exceptions import VersionsNotCompatibleError from dbt.semver import VersionSpecifier, UnboundedVersionSpecifier, \ VersionRange, reduce_versions, versions_compatible, \ resolve_to_specific_version, filter_installable @@ -40,7 +40,7 @@ def assertVersionSetResult(self, inputs, output_range): def assertInvalidVersionSet(self, inputs): for permutation in itertools.permutations(inputs): - with self.assertRaises(VersionsNotCompatibleException): + with self.assertRaises(VersionsNotCompatibleError): reduce_versions(*permutation) def test__versions_compatible(self): diff --git a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py b/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py index b764568fe16..053fcc506c8 100644 --- a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py +++ b/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py @@ -1,6 +1,6 @@ import pytest import json -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError from dbt.version import __version__ as dbt_version from dbt.tests.util import run_dbt_and_capture from dbt.tests.adapter.query_comment.fixtures import MACROS__MACRO_SQL, MODELS__X_SQL @@ -77,7 +77,7 @@ def project_config_update(self): return {"query-comment": "{{ invalid_query_header() }}"} def run_assert_comments(self): - with pytest.raises(RuntimeException): + with pytest.raises(DbtRuntimeError): self.run_get_json(expect_pass=False) diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py index 46a037bdcc5..a7b689a3670 100644 --- 
a/tests/functional/artifacts/test_override.py +++ b/tests/functional/artifacts/test_override.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError model_sql = """ select 1 as id @@ -30,6 +30,6 @@ def test_override_used( results = run_dbt(["run"]) assert len(results) == 1 # this should pick up our failure macro and raise a compilation exception - with pytest.raises(CompilationException) as excinfo: + with pytest.raises(CompilationError) as excinfo: run_dbt(["--warn-error", "docs", "generate"]) assert "rejected: no catalogs for you" in str(excinfo.value) diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index a7a7ed5417c..84fd8bab360 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -2,7 +2,7 @@ import os import shutil from dbt.tests.util import run_dbt -from dbt.exceptions import IncompatibleSchemaException +from dbt.exceptions import IncompatibleSchemaError from dbt.contracts.graph.manifest import WritableManifest # This is a *very* simple project, with just one model in it. 
@@ -84,7 +84,7 @@ def compare_previous_state( results = run_dbt(cli_args, expect_pass=expect_pass) assert len(results) == 0 else: - with pytest.raises(IncompatibleSchemaException): + with pytest.raises(IncompatibleSchemaError): run_dbt(cli_args, expect_pass=expect_pass) def test_compare_state_current(self, project): diff --git a/tests/functional/basic/test_invalid_reference.py b/tests/functional/basic/test_invalid_reference.py index 8a516027940..1c54d1b906a 100644 --- a/tests/functional/basic/test_invalid_reference.py +++ b/tests/functional/basic/test_invalid_reference.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError descendant_sql = """ @@ -24,5 +24,5 @@ def models(): def test_undefined_value(project): # Tests that a project with an invalid reference fails - with pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["compile"]) diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py index 489b60fbbb1..97e29362d4b 100644 --- a/tests/functional/configs/test_configs.py +++ b/tests/functional/configs/test_configs.py @@ -3,7 +3,7 @@ import pytest import os -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from dbt.tests.util import run_dbt, update_config_file, write_file, check_relations_equal from tests.functional.configs.fixtures import BaseConfigProject, simple_snapshot @@ -109,7 +109,7 @@ def test_snapshots_materialization_proj_config(self, project): snapshots_dir = os.path.join(project.project_root, "snapshots") write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt() diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py index 0d702615474..a04b9ed43aa 100644 --- 
a/tests/functional/configs/test_configs_in_schema_files.py +++ b/tests/functional/configs/test_configs_in_schema_files.py @@ -2,7 +2,7 @@ from dbt.tests.util import run_dbt, get_manifest, check_relations_equal, write_file -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError, ParsingError models_alt__schema_yml = """ version: 2 @@ -242,11 +242,11 @@ def test_config_layering( # copy a schema file with multiple metas # shutil.copyfile('extra-alt/untagged.yml', 'models-alt/untagged.yml') write_file(extra_alt__untagged_yml, project.project_root, "models", "untagged.yml") - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) # copy a schema file with config key in top-level of test and in config dict # shutil.copyfile('extra-alt/untagged2.yml', 'models-alt/untagged.yml') write_file(extra_alt__untagged2_yml, project.project_root, "models", "untagged.yml") - with pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["run"]) diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py index 5ca56512e14..4b6e74adffd 100644 --- a/tests/functional/configs/test_disabled_model.py +++ b/tests/functional/configs/test_disabled_model.py @@ -2,7 +2,7 @@ from hologram import ValidationError from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError, ParsingError from tests.functional.configs.fixtures import ( schema_all_disabled_yml, @@ -47,7 +47,7 @@ def models(self): } def test_disabled_config(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "which is disabled" @@ -209,7 +209,7 @@ def models(self): } def test_disabled_config(self, project): - 
with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "Found 3 matching disabled nodes for model 'my_model_2'" diff --git a/tests/functional/configs/test_unused_configs.py b/tests/functional/configs/test_unused_configs.py index 7796472fea9..1bc887b03f1 100644 --- a/tests/functional/configs/test_unused_configs.py +++ b/tests/functional/configs/test_unused_configs.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError seeds__seed_csv = """id,value 4,2 @@ -41,7 +41,7 @@ def test_warn_unused_configuration_paths( self, project, ): - with pytest.raises(CompilationException) as excinfo: + with pytest.raises(CompilationError) as excinfo: run_dbt(["--warn-error", "seed"]) assert "Configuration paths exist" in str(excinfo.value) diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index 529087c851a..1e741a2b283 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -3,7 +3,7 @@ import os from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError macros__validate_set_sql = """ {% macro validate_set() %} @@ -142,9 +142,9 @@ class TestContextBuiltinExceptions: # Assert compilation errors are raised with _strict equivalents def test_builtin_function_exception(self, project): write_file(models__set_exception_sql, project.project_root, "models", "raise.sql") - with pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["compile"]) write_file(models__zip_exception_sql, project.project_root, "models", "raise.sql") - with 
pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["compile"]) diff --git a/tests/functional/context_methods/test_cli_vars.py b/tests/functional/context_methods/test_cli_vars.py index 3e548b6f402..353d96d777b 100644 --- a/tests/functional/context_methods/test_cli_vars.py +++ b/tests/functional/context_methods/test_cli_vars.py @@ -5,7 +5,7 @@ from dbt.tests.util import run_dbt, get_artifact, write_config_file from dbt.tests.fixtures.project import write_project_files -from dbt.exceptions import RuntimeException, CompilationException +from dbt.exceptions import DbtRuntimeError, CompilationError models_complex__schema_yml = """ @@ -114,7 +114,7 @@ def test_cli_vars_in_profile(self, project, dbt_profile_data): profile = dbt_profile_data profile["test"]["outputs"]["default"]["host"] = "{{ var('db_host') }}" write_config_file(profile, project.profiles_dir, "profiles.yml") - with pytest.raises(RuntimeException): + with pytest.raises(DbtRuntimeError): results = run_dbt(["run"]) results = run_dbt(["run", "--vars", "db_host: localhost"]) assert len(results) == 1 @@ -148,7 +148,7 @@ def test_cli_vars_in_packages(self, project, packages_config): write_config_file(packages, project.project_root, "packages.yml") # Without vars args deps fails - with pytest.raises(RuntimeException): + with pytest.raises(DbtRuntimeError): run_dbt(["deps"]) # With vars arg deps succeeds @@ -200,7 +200,7 @@ def test_vars_in_selectors(self, project): # Update the selectors.yml file to have a var write_config_file(var_selectors_yml, project.project_root, "selectors.yml") - with pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["run"]) # Var in cli_vars works diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py index 9cd4c2eacac..710c104f551 100644 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ 
b/tests/functional/context_methods/test_secret_env_vars.py @@ -2,7 +2,7 @@ import os from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingException, InternalException +from dbt.exceptions import ParsingError, DbtInternalError from tests.functional.context_methods.first_dependency import FirstDependencyProject from dbt.tests.util import run_dbt, run_dbt_and_capture @@ -30,7 +30,7 @@ def models(self): return {"context.sql": secret_bad__context_sql} def test_disallow_secret(self, project): - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["compile"]) @@ -130,7 +130,7 @@ def packages(self): } def test_fail_clone_with_scrubbing(self, project): - with pytest.raises(InternalException) as excinfo: + with pytest.raises(DbtInternalError) as excinfo: _, log_output = run_dbt_and_capture(["deps"]) assert "abc123" not in str(excinfo.value) @@ -149,7 +149,7 @@ def packages(self): } def test_fail_clone_with_scrubbing(self, project): - with pytest.raises(InternalException) as excinfo: + with pytest.raises(DbtInternalError) as excinfo: _, log_output = run_dbt_and_capture(["deps"]) # we should not see any manipulated form of the secret value (abc123) here diff --git a/tests/functional/context_methods/test_var_in_generate_name.py b/tests/functional/context_methods/test_var_in_generate_name.py index 5025cb8fede..2bbba457e58 100644 --- a/tests/functional/context_methods/test_var_in_generate_name.py +++ b/tests/functional/context_methods/test_var_in_generate_name.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, update_config_file -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError model_sql = """ select 1 as id @@ -27,7 +27,7 @@ def models(self): def test_generate_schema_name_var(self, project): # var isn't set, so generate_name macro fails - with pytest.raises(CompilationException) as excinfo: + with pytest.raises(CompilationError) as excinfo: run_dbt(["compile"]) 
assert "Required var 'somevar' not found in config" in str(excinfo.value) diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py index 3e0bc5efdb7..13605028519 100644 --- a/tests/functional/dependencies/test_local_dependency.py +++ b/tests/functional/dependencies/test_local_dependency.py @@ -184,7 +184,7 @@ def models(self): def test_missing_dependency(self, project): # dbt should raise a runtime exception - with pytest.raises(dbt.exceptions.RuntimeException): + with pytest.raises(dbt.exceptions.DbtRuntimeError): run_dbt(["compile"]) @@ -335,12 +335,12 @@ def prepare_dependencies(self, project): ) def test_local_dependency_same_name(self, prepare_dependencies, project): - with pytest.raises(dbt.exceptions.DependencyException): + with pytest.raises(dbt.exceptions.DependencyError): run_dbt(["deps"], expect_pass=False) def test_local_dependency_same_name_sneaky(self, prepare_dependencies, project): shutil.copytree("duplicate_dependency", "./dbt_packages/duplicate_dependency") - with pytest.raises(dbt.exceptions.CompilationException): + with pytest.raises(dbt.exceptions.CompilationError): run_dbt(["compile"]) # needed to avoid compilation errors from duplicate package names in test autocleanup diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py index fc76289b2ee..a70b3687c69 100644 --- a/tests/functional/deprecations/test_deprecations.py +++ b/tests/functional/deprecations/test_deprecations.py @@ -63,7 +63,7 @@ def test_data_path(self, project): def test_data_path_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "debug"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "The `data-paths` config has 
been renamed" @@ -107,7 +107,7 @@ def test_package_path(self, project): def test_package_path_not_set(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "clean"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "path has changed from `dbt_modules` to `dbt_packages`." @@ -134,7 +134,7 @@ def test_package_redirect(self, project): def test_package_redirect_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "deps"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "The `fishtown-analytics/dbt_utils` package is deprecated in favor of `dbt-labs/dbt_utils`" @@ -159,7 +159,7 @@ def test_metric_handle_rename(self, project): def test_metric_handle_rename_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: # turn off partial parsing to ensure that the metric is re-parsed run_dbt(["--warn-error", "--no-partial-parse", "parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace @@ -182,7 +182,7 @@ def test_exposure_name(self, project): def test_exposure_name_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "--no-partial-parse", "parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace 
expected_msg = "Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores." diff --git a/tests/functional/duplicates/test_duplicate_analysis.py b/tests/functional/duplicates/test_duplicate_analysis.py index e9050860ad9..44dc4c6f167 100644 --- a/tests/functional/duplicates/test_duplicate_analysis.py +++ b/tests/functional/duplicates/test_duplicate_analysis.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt @@ -27,7 +27,7 @@ def analyses(self): def test_duplicate_model_enabled(self, project): message = "dbt found two analyses with the name" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace assert message in exc_str diff --git a/tests/functional/duplicates/test_duplicate_exposure.py b/tests/functional/duplicates/test_duplicate_exposure.py index 6035da7c110..140db21cd07 100644 --- a/tests/functional/duplicates/test_duplicate_exposure.py +++ b/tests/functional/duplicates/test_duplicate_exposure.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt @@ -26,6 +26,6 @@ def models(self): def test_duplicate_exposure(self, project): message = "dbt found two exposures with the name" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_macro.py b/tests/functional/duplicates/test_duplicate_macro.py index 1fc7282808f..35b843f5891 100644 --- a/tests/functional/duplicates/test_duplicate_macro.py +++ b/tests/functional/duplicates/test_duplicate_macro.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException +from 
dbt.exceptions import CompilationError from dbt.tests.util import run_dbt @@ -43,7 +43,7 @@ def macros(self): def test_duplicate_macros(self, project): message = 'dbt found two macros named "some_macro" in the project' - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace assert message in exc_str @@ -64,7 +64,7 @@ def macros(self): def test_duplicate_macros(self, project): message = 'dbt found two macros named "some_macro" in the project' - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace assert message in exc_str diff --git a/tests/functional/duplicates/test_duplicate_metric.py b/tests/functional/duplicates/test_duplicate_metric.py index e40295278b9..f8beca39c24 100644 --- a/tests/functional/duplicates/test_duplicate_metric.py +++ b/tests/functional/duplicates/test_duplicate_metric.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt @@ -46,6 +46,6 @@ def models(self): def test_duplicate_metric(self, project): message = "dbt found two metrics with the name" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_model.py b/tests/functional/duplicates/test_duplicate_model.py index fbcd1b79671..7a53fd6de63 100644 --- a/tests/functional/duplicates/test_duplicate_model.py +++ b/tests/functional/duplicates/test_duplicate_model.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException, DuplicateResourceName +from dbt.exceptions import CompilationError, DuplicateResourceNameError from dbt.tests.fixtures.project import 
write_project_files from dbt.tests.util import run_dbt, get_manifest @@ -54,7 +54,7 @@ def models(self): def test_duplicate_model_enabled(self, project): message = "dbt found two models with the name" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace assert message in exc_str @@ -108,7 +108,7 @@ def packages(self): def test_duplicate_model_enabled_across_packages(self, project): run_dbt(["deps"]) message = "dbt found two models with the name" - with pytest.raises(DuplicateResourceName) as exc: + with pytest.raises(DuplicateResourceNameError) as exc: run_dbt(["run"]) assert message in str(exc.value) diff --git a/tests/functional/duplicates/test_duplicate_source.py b/tests/functional/duplicates/test_duplicate_source.py index 181aaf5d18e..1100345aabc 100644 --- a/tests/functional/duplicates/test_duplicate_source.py +++ b/tests/functional/duplicates/test_duplicate_source.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt @@ -22,6 +22,6 @@ def models(self): def test_duplicate_source_enabled(self, project): message = "dbt found two sources with the name" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) assert message in str(exc.value) diff --git a/tests/functional/fail_fast/test_fail_fast_run.py b/tests/functional/fail_fast/test_fail_fast_run.py index 3ea3c4bc0f0..5c0c8cf849d 100644 --- a/tests/functional/fail_fast/test_fail_fast_run.py +++ b/tests/functional/fail_fast/test_fail_fast_run.py @@ -2,7 +2,7 @@ from dbt.tests.util import run_dbt from tests.functional.fail_fast.fixtures import models, project_files # noqa: F401 -from dbt.exceptions import FailFastException +from dbt.exceptions import FailFastError def check_audit_table(project, count=1): @@ -43,7 
+43,7 @@ def test_fail_fast_run( self, project, ): - with pytest.raises(FailFastException): + with pytest.raises(FailFastError): run_dbt(["run", "--threads", "1", "--fail-fast"]) check_audit_table(project) @@ -62,6 +62,6 @@ def test_fail_fast_run_user_config( self, project, ): - with pytest.raises(FailFastException): + with pytest.raises(FailFastError): run_dbt(["run", "--threads", "1"]) check_audit_table(project) diff --git a/tests/functional/hooks/test_model_hooks.py b/tests/functional/hooks/test_model_hooks.py index 79f3632bd8e..99a05c9c895 100644 --- a/tests/functional/hooks/test_model_hooks.py +++ b/tests/functional/hooks/test_model_hooks.py @@ -2,7 +2,7 @@ from pathlib import Path -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import ( run_dbt, @@ -422,7 +422,7 @@ def models(self): return {"hooks.sql": models__hooks_error} def test_run_duplicate_hook_defs(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "pre_hook" in str(exc.value) assert "pre-hook" in str(exc.value) diff --git a/tests/functional/invalid_model_tests/test_invalid_models.py b/tests/functional/invalid_model_tests/test_invalid_models.py index 29739dcac20..09db17bc325 100644 --- a/tests/functional/invalid_model_tests/test_invalid_models.py +++ b/tests/functional/invalid_model_tests/test_invalid_models.py @@ -1,6 +1,6 @@ import pytest -from dbt.exceptions import CompilationException, ParsingException +from dbt.exceptions import CompilationError, ParsingError from dbt.tests.util import ( run_dbt, @@ -129,7 +129,7 @@ def models(self): } def test_view_disabled(self, project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["seed"]) assert "enabled" in str(exc.value) @@ -146,7 +146,7 @@ def models(self): } def test_referencing_disabled_model(self, project): - with pytest.raises(CompilationException) as 
exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "which is disabled" in str(exc.value) @@ -160,7 +160,7 @@ def models(self): return {"models__dependent_on_view.sql": models__dependent_on_view} def test_models_not_found(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "which was not found" in str(exc.value) @@ -176,7 +176,7 @@ def models(self): return {"models__with_bad_macro.sql": models__with_bad_macro} def test_with_invalid_macro_call(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) assert "macro 'dbt_macro__some_macro' takes no keyword argument 'invalid'" in str( @@ -207,7 +207,7 @@ def project_config_update(self): } def test_postgres_source_disabled(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "which is disabled" in str(exc.value) @@ -221,7 +221,7 @@ def models(self): return {"models__referencing_disabled_source.sql": models__referencing_disabled_source} def test_source_missing(self, project): - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "which was not found" in str(exc.value) diff --git a/tests/functional/macros/test_macros.py b/tests/functional/macros/test_macros.py index 899be2453b1..e7f25acab3a 100644 --- a/tests/functional/macros/test_macros.py +++ b/tests/functional/macros/test_macros.py @@ -97,7 +97,7 @@ def macros(self): return {"my_macros.sql": macros__no_default_macros} def test_invalid_macro(self, project): - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt() assert "In dispatch: No macro named 'dispatch_to_nowhere' found" in str(exc.value) @@ -213,7 +213,7 @@ def macros(self): return {"macro.sql": 
macros__deprecated_adapter_macro} def test_invalid_macro(self, project): - with pytest.raises(dbt.exceptions.CompilationException) as exc: + with pytest.raises(dbt.exceptions.CompilationError) as exc: run_dbt() assert 'The "adapter_macro" macro has been deprecated' in str(exc.value) diff --git a/tests/functional/materializations/test_incremental.py b/tests/functional/materializations/test_incremental.py index f6ec8b2a3e9..7e8df9ea6f1 100644 --- a/tests/functional/materializations/test_incremental.py +++ b/tests/functional/materializations/test_incremental.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import RuntimeException +from dbt.exceptions import DbtRuntimeError from dbt.context.providers import generate_runtime_model_context @@ -43,10 +43,10 @@ def test_basic(project): assert type(macro_func).__name__ == "MacroGenerator" # These two incremental strategies are not valid for Postgres - with pytest.raises(RuntimeException) as excinfo: + with pytest.raises(DbtRuntimeError) as excinfo: macro_func = project.adapter.get_incremental_strategy_macro(context, "merge") assert "merge" in str(excinfo.value) - with pytest.raises(RuntimeException) as excinfo: + with pytest.raises(DbtRuntimeError) as excinfo: macro_func = project.adapter.get_incremental_strategy_macro(context, "insert_overwrite") assert "insert_overwrite" in str(excinfo.value) diff --git a/tests/functional/metrics/test_metric_configs.py b/tests/functional/metrics/test_metric_configs.py index 88c39e0537d..d81c97f79a6 100644 --- a/tests/functional/metrics/test_metric_configs.py +++ b/tests/functional/metrics/test_metric_configs.py @@ -1,7 +1,7 @@ import pytest from hologram import ValidationError from dbt.contracts.graph.model_config import MetricConfig -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt, update_config_file, get_manifest @@ -106,7 +106,7 @@ def 
test_metrics_all_configs(self, project): assert config_test_table == pytest.expected_config -# Test CompilationException if a model references a disabled metric +# Test CompilationError if a model references a disabled metric class TestDisabledMetricRef(MetricConfigTests): @pytest.fixture(scope="class") def models(self): @@ -134,7 +134,7 @@ def test_disabled_metric_ref_model(self, project): } update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") - with pytest.raises(CompilationException): + with pytest.raises(CompilationError): run_dbt(["parse"]) diff --git a/tests/functional/metrics/test_metrics.py b/tests/functional/metrics/test_metrics.py index de8c022f3d3..10e34770cf1 100644 --- a/tests/functional/metrics/test_metrics.py +++ b/tests/functional/metrics/test_metrics.py @@ -1,7 +1,7 @@ import pytest from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from tests.functional.metrics.fixtures import ( @@ -85,14 +85,14 @@ def models(self): "people.sql": models_people_sql, } - # tests that we get a ParsingException with an invalid model ref, where + # tests that we get a ParsingError with an invalid model ref, where # the model name does not have quotes def test_simple_metric( self, project, ): # initial run - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) @@ -104,14 +104,14 @@ def models(self): "people.sql": models_people_sql, } - # tests that we get a ParsingException with an invalid model ref, where + # tests that we get a ParsingError with an invalid model ref, where # the model name does not have quotes def test_simple_metric( self, project, ): # initial run - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) @@ -123,13 +123,13 @@ def models(self): "people.sql": models_people_sql, } - # tests that we get a ParsingException with a missing expression + # tests that we get a 
ParsingError with a missing expression def test_simple_metric( self, project, ): # initial run - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) @@ -142,7 +142,7 @@ def models(self): } def test_names_with_spaces(self, project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["run"]) assert "cannot contain spaces" in str(exc.value) @@ -156,7 +156,7 @@ def models(self): } def test_names_with_special_char(self, project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["run"]) assert "must contain only letters, numbers and underscores" in str(exc.value) @@ -170,7 +170,7 @@ def models(self): } def test_names_with_leading_number(self, project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["run"]) assert "must begin with a letter" in str(exc.value) @@ -184,7 +184,7 @@ def models(self): } def test_long_name(self, project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["run"]) assert "cannot contain more than 250 characters" in str(exc.value) @@ -198,7 +198,7 @@ def models(self): } def test_invalid_derived_metrics(self, project): - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) @@ -294,14 +294,14 @@ def models(self): "people.sql": models_people_sql, } - # Tests that we get a ParsingException with an invalid metric definition. + # Tests that we get a ParsingError with an invalid metric definition. # This metric definition is missing timestamp but HAS a time_grains property def test_simple_metric( self, project, ): # initial run - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) @@ -313,12 +313,12 @@ def models(self): "people.sql": models_people_sql, } - # Tests that we get a ParsingException with an invalid metric definition. 
+ # Tests that we get a ParsingError with an invalid metric definition. # This metric definition is missing timestamp but HAS a window property def test_simple_metric( self, project, ): # initial run - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt(["run"]) diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index 44a6696931b..36495fd7020 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -95,7 +95,7 @@ alt_local_utils__macros__type_timestamp_sql, all_quotes_schema__schema_yml, ) -from dbt.exceptions import ParsingException, CompilationException, DuplicateResourceName +from dbt.exceptions import ParsingError, CompilationError, DuplicateResourceNameError from dbt.contracts.results import TestStatus @@ -410,7 +410,7 @@ def test_malformed_schema_will_break_run( self, project, ): - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): run_dbt() @@ -904,7 +904,7 @@ def test_generic_test_collision( project, ): """These tests collide, since only the configs differ""" - with pytest.raises(DuplicateResourceName) as exc: + with pytest.raises(DuplicateResourceNameError) as exc: run_dbt() assert "dbt found two tests with the name" in str(exc.value) @@ -922,7 +922,7 @@ def test_generic_test_config_custom_macros( project, ): """This test has a reference to a custom macro its configs""" - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt() assert "Invalid generic test configuration" in str(exc) @@ -987,7 +987,7 @@ def test_invalid_schema_file( self, project, ): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt() assert re.search(r"'models' is not a list", str(exc)) diff --git a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py 
b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py index 33e6b61aebc..dfb51f7992e 100644 --- a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py +++ b/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py @@ -1,6 +1,6 @@ import pytest from dbt.tests.util import run_dbt -from dbt.exceptions import ParsingException +from dbt.exceptions import ParsingError from tests.functional.simple_snapshot.fixtures import ( models__schema_yml, models__ref_snapshot_sql, @@ -43,7 +43,7 @@ def macros(): def test_missing_strategy(project): - with pytest.raises(ParsingException) as exc: + with pytest.raises(ParsingError) as exc: run_dbt(["compile"], expect_pass=False) assert "Snapshots must be configured with a 'strategy'" in str(exc.value) diff --git a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py index cd35fd6f7c2..e3cdebe4794 100644 --- a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py +++ b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py @@ -1,5 +1,5 @@ import os -from dbt.exceptions import CompilationException +from dbt.exceptions import CompilationError import pytest from dbt.tests.util import run_dbt @@ -56,7 +56,7 @@ def project_config_update(self): def test_source_duplicate_overrides(self, project): run_dbt(["deps"]) - with pytest.raises(CompilationException) as exc: + with pytest.raises(CompilationError) as exc: run_dbt(["compile"]) assert "dbt found two schema.yml entries for the same source named" in str(exc.value) diff --git a/tests/functional/sources/test_simple_source.py b/tests/functional/sources/test_simple_source.py index 0c69f859b6b..cd08647f367 100644 --- a/tests/functional/sources/test_simple_source.py +++ b/tests/functional/sources/test_simple_source.py @@ -1,7 +1,7 @@ import os import pytest import yaml -from dbt.exceptions import ParsingException +from 
dbt.exceptions import ParsingError from dbt.tests.util import ( run_dbt, @@ -164,7 +164,7 @@ def models(self): } def test_malformed_schema_will_break_run(self, project): - with pytest.raises(ParsingException): + with pytest.raises(ParsingError): self.run_dbt_with_vars(project, ["seed"]) diff --git a/tests/functional/sources/test_source_fresher_state.py b/tests/functional/sources/test_source_fresher_state.py index 362f9a816c0..a97694a9c5a 100644 --- a/tests/functional/sources/test_source_fresher_state.py +++ b/tests/functional/sources/test_source_fresher_state.py @@ -4,7 +4,7 @@ import pytest from datetime import datetime, timedelta -from dbt.exceptions import InternalException +from dbt.exceptions import DbtInternalError from dbt.tests.util import AnyStringWith, AnyFloat @@ -619,7 +619,7 @@ class TestSourceFresherNoPreviousState(SuccessfulSourceFreshnessTest): def test_intentional_failure_no_previous_state(self, project): self.run_dbt_with_vars(project, ["run"]) # TODO add the current and previous but with previous as null - with pytest.raises(InternalException) as excinfo: + with pytest.raises(DbtInternalError) as excinfo: self.run_dbt_with_vars( project, ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], @@ -641,7 +641,7 @@ def test_intentional_failure_no_previous_state(self, project): copy_to_previous_state() assert previous_state_results[0].max_loaded_at is not None - with pytest.raises(InternalException) as excinfo: + with pytest.raises(DbtInternalError) as excinfo: self.run_dbt_with_vars( project, ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], diff --git a/tests/unit/test_connection_retries.py b/tests/unit/test_connection_retries.py index 8b031ce5ab4..9076adb7ef9 100644 --- a/tests/unit/test_connection_retries.py +++ b/tests/unit/test_connection_retries.py @@ -1,7 +1,7 @@ import functools import pytest from requests.exceptions import RequestException -from dbt.exceptions import ConnectionException 
+from dbt.exceptions import ConnectionError from dbt.utils import _connection_exception_retry @@ -28,7 +28,7 @@ class TestMaxRetries: def test_no_retry(self): fn_to_retry = functools.partial(no_success_fn) - with pytest.raises(ConnectionException): + with pytest.raises(ConnectionError): _connection_exception_retry(fn_to_retry, 3) diff --git a/tests/unit/test_deprecations.py b/tests/unit/test_deprecations.py index df7a43c867a..3f03e3e35a5 100644 --- a/tests/unit/test_deprecations.py +++ b/tests/unit/test_deprecations.py @@ -12,7 +12,7 @@ def to_be_decorated(): # simpletest that the return value is not modified -def test_deprecated(): +def test_deprecated_func(): assert(hasattr(to_be_decorated, '__wrapped__')) assert(to_be_decorated() == 5) @@ -36,7 +36,7 @@ def is_deprecated(self, func): def test_missing_config(self): func = dbt.exceptions.missing_config - exception = dbt.exceptions.MissingConfig + exception = dbt.exceptions.MissingConfigError model = argparse.Namespace() model.unique_id = '' name = "" @@ -49,7 +49,7 @@ def test_missing_config(self): def test_missing_materialization(self): func = dbt.exceptions.missing_materialization - exception = dbt.exceptions.MissingMaterialization + exception = dbt.exceptions.MissingMaterializationError model = argparse.Namespace() model.config = argparse.Namespace() model.config.materialized = '' @@ -63,7 +63,7 @@ def test_missing_materialization(self): def test_missing_relation(self): func = dbt.exceptions.missing_relation - exception = dbt.exceptions.MissingRelation + exception = dbt.exceptions.MissingRelationError relation = "" self.is_deprecated(func) @@ -74,7 +74,7 @@ def test_missing_relation(self): def test_raise_ambiguous_alias(self): func = dbt.exceptions.raise_ambiguous_alias - exception = dbt.exceptions.AmbiguousAlias + exception = dbt.exceptions.AmbiguousAliasError node_1 = argparse.Namespace() node_1.unique_id = "" node_1.original_file_path = "" @@ -91,7 +91,7 @@ def test_raise_ambiguous_alias(self): def 
test_raise_ambiguous_catalog_match(self): func = dbt.exceptions.raise_ambiguous_catalog_match - exception = dbt.exceptions.AmbiguousCatalogMatch + exception = dbt.exceptions.AmbiguousCatalogMatchError unique_id = "" match_1 = {"metadata": {"schema": ""}} match_2 = {"metadata": {"schema": ""}} @@ -104,7 +104,7 @@ def test_raise_ambiguous_catalog_match(self): def test_raise_cache_inconsistent(self): func = dbt.exceptions.raise_cache_inconsistent - exception = dbt.exceptions.CacheInconsistency + exception = dbt.exceptions.CacheInconsistencyError msg = "" self.is_deprecated(func) @@ -115,7 +115,7 @@ def test_raise_cache_inconsistent(self): def test_raise_dataclass_not_dict(self): func = dbt.exceptions.raise_dataclass_not_dict - exception = dbt.exceptions.DataclassNotDict + exception = dbt.exceptions.DataclassNotDictError obj = "" self.is_deprecated(func) @@ -126,7 +126,7 @@ def test_raise_dataclass_not_dict(self): def test_raise_compiler_error(self): func = dbt.exceptions.raise_compiler_error - exception = dbt.exceptions.CompilationException + exception = dbt.exceptions.CompilationError msg = "" self.is_deprecated(func) @@ -137,7 +137,7 @@ def test_raise_compiler_error(self): def test_raise_database_error(self): func = dbt.exceptions.raise_database_error - exception = dbt.exceptions.DatabaseException + exception = dbt.exceptions.DbtDatabaseError msg = "" self.is_deprecated(func) @@ -148,7 +148,7 @@ def test_raise_database_error(self): def test_raise_dep_not_found(self): func = dbt.exceptions.raise_dep_not_found - exception = dbt.exceptions.DependencyNotFound + exception = dbt.exceptions.DependencyNotFoundError node = "" node_description = "" required_pkg = "" @@ -161,7 +161,7 @@ def test_raise_dep_not_found(self): def test_raise_dependency_error(self): func = dbt.exceptions.raise_dependency_error - exception = dbt.exceptions.DependencyException + exception = dbt.exceptions.DependencyError msg = "" self.is_deprecated(func) @@ -172,7 +172,7 @@ def 
test_raise_dependency_error(self): def test_raise_duplicate_patch_name(self): func = dbt.exceptions.raise_duplicate_patch_name - exception = dbt.exceptions.DuplicatePatchPath + exception = dbt.exceptions.DuplicatePatchPathError patch_1 = argparse.Namespace() patch_1.name = "" patch_1.original_file_path = "" @@ -186,7 +186,7 @@ def test_raise_duplicate_patch_name(self): def test_raise_duplicate_resource_name(self): func = dbt.exceptions.raise_duplicate_resource_name - exception = dbt.exceptions.DuplicateResourceName + exception = dbt.exceptions.DuplicateResourceNameError node_1 = argparse.Namespace() node_1.name = "" node_1.resource_type = NodeType('model') @@ -207,7 +207,7 @@ def test_raise_duplicate_resource_name(self): def test_raise_invalid_property_yml_version(self): func = dbt.exceptions.raise_invalid_property_yml_version - exception = dbt.exceptions.InvalidPropertyYML + exception = dbt.exceptions.PropertyYMLError path = "" issue = "" @@ -219,7 +219,7 @@ def test_raise_invalid_property_yml_version(self): def test_raise_not_implemented(self): func = dbt.exceptions.raise_not_implemented - exception = dbt.exceptions.NotImplementedException + exception = dbt.exceptions.NotImplementedError msg = "" self.is_deprecated(func) @@ -230,7 +230,7 @@ def test_raise_not_implemented(self): def test_relation_wrong_type(self): func = dbt.exceptions.relation_wrong_type - exception = dbt.exceptions.RelationWrongType + exception = dbt.exceptions.RelationWrongTypeError relation = argparse.Namespace() relation.type = "" @@ -244,7 +244,7 @@ def test_relation_wrong_type(self): def test_raise_duplicate_alias(self): func = dbt.exceptions.raise_duplicate_alias - exception = dbt.exceptions.DuplicateAlias + exception = dbt.exceptions.DuplicateAliasError kwargs = {"": ""} aliases = {"": ""} canonical_key = "" @@ -257,7 +257,7 @@ def test_raise_duplicate_alias(self): def test_raise_duplicate_source_patch_name(self): func = dbt.exceptions.raise_duplicate_source_patch_name - exception = 
dbt.exceptions.DuplicateSourcePatchName + exception = dbt.exceptions.DuplicateSourcePatchNameError patch_1 = argparse.Namespace() patch_1.name = "" patch_1.path = "" @@ -273,7 +273,7 @@ def test_raise_duplicate_source_patch_name(self): def test_raise_duplicate_macro_patch_name(self): func = dbt.exceptions.raise_duplicate_macro_patch_name - exception = dbt.exceptions.DuplicateMacroPatchName + exception = dbt.exceptions.DuplicateMacroPatchNameError patch_1 = argparse.Namespace() patch_1.package_name = "" patch_1.name = "" @@ -288,7 +288,7 @@ def test_raise_duplicate_macro_patch_name(self): def test_raise_duplicate_macro_name(self): func = dbt.exceptions.raise_duplicate_macro_name - exception = dbt.exceptions.DuplicateMacroName + exception = dbt.exceptions.DuplicateMacroNameError node_1 = argparse.Namespace() node_1.name = "" node_1.package_name = "" @@ -308,7 +308,7 @@ def test_raise_duplicate_macro_name(self): def test_approximate_relation_match(self): func = dbt.exceptions.approximate_relation_match - exception = dbt.exceptions.ApproximateMatch + exception = dbt.exceptions.ApproximateMatchError target = "" relation = "" @@ -320,7 +320,7 @@ def test_approximate_relation_match(self): def test_get_relation_returned_multiple_results(self): func = dbt.exceptions.get_relation_returned_multiple_results - exception = dbt.exceptions.RelationReturnedMultipleResults + exception = dbt.exceptions.RelationReturnedMultipleResultsError kwargs = {} matches = [] @@ -332,7 +332,7 @@ def test_get_relation_returned_multiple_results(self): def test_system_error(self): func = dbt.exceptions.system_error - exception = dbt.exceptions.OperationException + exception = dbt.exceptions.OperationError operation_name = "" self.is_deprecated(func) @@ -343,7 +343,7 @@ def test_system_error(self): def test_invalid_materialization_argument(self): func = dbt.exceptions.invalid_materialization_argument - exception = dbt.exceptions.InvalidMaterializationArg + exception = 
dbt.exceptions.MaterializationArgError name = "" argument = "" @@ -386,7 +386,7 @@ def test_bad_package_spec(self): def test_raise_git_cloning_problem(self): func = dbt.exceptions.raise_git_cloning_problem - exception = dbt.exceptions.GitCloningProblem + exception = dbt.exceptions.UnknownGitCloningProblemError repo = "" self.is_deprecated(func) @@ -397,7 +397,7 @@ def test_raise_git_cloning_problem(self): def test_macro_invalid_dispatch_arg(self): func = dbt.exceptions.macro_invalid_dispatch_arg - exception = dbt.exceptions.MacroInvalidDispatchArg + exception = dbt.exceptions.MacroDispatchArgError macro_name = "" self.is_deprecated(func) @@ -408,7 +408,7 @@ def test_macro_invalid_dispatch_arg(self): def test_dependency_not_found(self): func = dbt.exceptions.dependency_not_found - exception = dbt.exceptions.GraphDependencyNotFound + exception = dbt.exceptions.GraphDependencyNotFoundError node = argparse.Namespace() node.unique_id = "" dependency = "" @@ -421,7 +421,7 @@ def test_dependency_not_found(self): def test_target_not_found(self): func = dbt.exceptions.target_not_found - exception = dbt.exceptions.TargetNotFound + exception = dbt.exceptions.TargetNotFoundError node = argparse.Namespace() node.unique_id = "" node.original_file_path = "" @@ -437,7 +437,7 @@ def test_target_not_found(self): def test_doc_target_not_found(self): func = dbt.exceptions.doc_target_not_found - exception = dbt.exceptions.DocTargetNotFound + exception = dbt.exceptions.DocTargetNotFoundError model = argparse.Namespace() model.unique_id = "" target_doc_name = "" @@ -451,7 +451,7 @@ def test_doc_target_not_found(self): def test_ref_bad_context(self): func = dbt.exceptions.ref_bad_context - exception = dbt.exceptions.RefBadContext + exception = dbt.exceptions.RefBadContextError model = argparse.Namespace() model.name = "" args = [] @@ -464,7 +464,7 @@ def test_ref_bad_context(self): def test_metric_invalid_args(self): func = dbt.exceptions.metric_invalid_args - exception = 
dbt.exceptions.MetricInvalidArgs + exception = dbt.exceptions.MetricArgsError model = argparse.Namespace() model.unique_id = "" args = [] @@ -477,7 +477,7 @@ def test_metric_invalid_args(self): def test_ref_invalid_args(self): func = dbt.exceptions.ref_invalid_args - exception = dbt.exceptions.RefInvalidArgs + exception = dbt.exceptions.RefArgsError model = argparse.Namespace() model.unique_id = "" args = [] @@ -490,7 +490,7 @@ def test_ref_invalid_args(self): def test_invalid_bool_error(self): func = dbt.exceptions.invalid_bool_error - exception = dbt.exceptions.InvalidBoolean + exception = dbt.exceptions.BooleanError return_value = "" macro_name = "" @@ -502,7 +502,7 @@ def test_invalid_bool_error(self): def test_invalid_type_error(self): func = dbt.exceptions.invalid_type_error - exception = dbt.exceptions.InvalidMacroArgType + exception = dbt.exceptions.MacroArgTypeError method_name = "" arg_name = "" got_value = "" @@ -516,7 +516,7 @@ def test_invalid_type_error(self): def test_disallow_secret_env_var(self): func = dbt.exceptions.disallow_secret_env_var - exception = dbt.exceptions.DisallowSecretEnvVar + exception = dbt.exceptions.SecretEnvVarLocationError env_var_name = "" self.is_deprecated(func) @@ -527,7 +527,7 @@ def test_disallow_secret_env_var(self): def test_raise_parsing_error(self): func = dbt.exceptions.raise_parsing_error - exception = dbt.exceptions.ParsingException + exception = dbt.exceptions.ParsingError msg = "" self.is_deprecated(func) @@ -538,7 +538,7 @@ def test_raise_parsing_error(self): def test_raise_unrecognized_credentials_type(self): func = dbt.exceptions.raise_unrecognized_credentials_type - exception = dbt.exceptions.UnrecognizedCredentialType + exception = dbt.exceptions.UnrecognizedCredentialTypeError typename = "" supported_types = [] @@ -550,7 +550,7 @@ def test_raise_unrecognized_credentials_type(self): def test_raise_patch_targets_not_found(self): func = dbt.exceptions.raise_patch_targets_not_found - exception = 
dbt.exceptions.PatchTargetNotFound + exception = dbt.exceptions.PatchTargetNotFoundError node = argparse.Namespace() node.name = "" node.original_file_path = "" @@ -564,7 +564,7 @@ def test_raise_patch_targets_not_found(self): def test_multiple_matching_relations(self): func = dbt.exceptions.multiple_matching_relations - exception = dbt.exceptions.RelationReturnedMultipleResults + exception = dbt.exceptions.RelationReturnedMultipleResultsError kwargs = {} matches = [] @@ -576,7 +576,7 @@ def test_multiple_matching_relations(self): def test_materialization_not_available(self): func = dbt.exceptions.materialization_not_available - exception = dbt.exceptions.MaterializationNotAvailable + exception = dbt.exceptions.MaterializationNotAvailableError model = argparse.Namespace() model.config = argparse.Namespace() model.config.materialized = "" @@ -590,7 +590,7 @@ def test_materialization_not_available(self): def test_macro_not_found(self): func = dbt.exceptions.macro_not_found - exception = dbt.exceptions.MacroNotFound + exception = dbt.exceptions.MacroNotFoundError model = argparse.Namespace() model.unique_id = "" target_macro_id = "" diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index a7056a729b2..c7d0260a93b 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -133,7 +133,7 @@ def test_event_codes(self): AdapterDeprecationWarning(old_name="", new_name=""), MetricAttributesRenamed(metric_name=""), ExposureNameDeprecation(exposure=""), - FunctionDeprecated(function_name="", reason="", suggested_action="", version=""), + InternalDeprecation(name="", reason="", suggested_action="", version=""), # E - DB Adapter ====================== AdapterEventDebug(), @@ -183,9 +183,9 @@ def test_event_codes(self): ParseCmdOut(msg="testing"), GenericTestFileParse(path=""), MacroFileParse(path=""), - PartialParsingExceptionProcessingFile(file=""), + PartialParsingErrorProcessingFile(file=""), PartialParsingFile(file_id=""), - 
PartialParsingException(exc_info={}), + PartialParsingError(exc_info={}), PartialParsingSkipParsing(), UnableToPartialParse(reason="something went wrong"), PartialParsingNotEnabled(), @@ -344,7 +344,7 @@ def test_event_codes(self): # W - Node testing ====================== CatchableExceptionOnRun(exc=""), - InternalExceptionOnRun(build_path="", exc=""), + InternalErrorOnRun(build_path="", exc=""), GenericExceptionOnRun(build_path="", unique_id="", exc=""), NodeConnectionReleaseError(node_name="", exc=""), FoundStats(stat_line=""),