diff --git a/website/docs/docs/build/saved-queries.md b/website/docs/docs/build/saved-queries.md index fdedbbb7f8f..77553b288ee 100644 --- a/website/docs/docs/build/saved-queries.md +++ b/website/docs/docs/build/saved-queries.md @@ -36,23 +36,23 @@ saved_queries: cache: enabled: true # Or false if you want it disabled by default query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" exports: - - name: my_export - config: - alias: my_export_alias - export_as: table - schema: my_export_schema_name + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name ``` - + @@ -65,20 +65,21 @@ saved_queries: description: "{{ doc('saved_query_description') }}" label: Test saved query query_params: - metrics: - - simple_metric - group_by: - - "Dimension('user__ds')" - where: - - "{{ Dimension('user__ds', 'DAY') }} <= now()" - - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" + metrics: + - simple_metric + group_by: + - "Dimension('user__ds')" + where: + - "{{ Dimension('user__ds', 'DAY') }} <= now()" + - "{{ Dimension('user__ds', 'DAY') }} >= '2023-01-01'" exports: - - name: my_export - config: - alias: my_export_alias - export_as: table - schema: my_export_schema_name + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name ``` + diff --git a/website/docs/docs/build/unit-tests.md b/website/docs/docs/build/unit-tests.md index d8eb195008b..00e80d5531a 100644 --- a/website/docs/docs/build/unit-tests.md +++ b/website/docs/docs/build/unit-tests.md @@ -40,6 +40,12 @@ You should unit test a model: - Prior to refactoring the transformation logic (especially if the refactor is significant). - Models with high "criticality" (public, contracted models or models directly upstream of an exposure). +### When to run unit tests + +dbt Labs strongly recommends only running unit tests in development or CI environments. Since the inputs of the unit tests are static, there's no need to use additional compute cycles running them in production. Use them in development for a test-driven approach and CI to ensure changes don't break them. + +Use the [resource type](/reference/global-configs/resource-type) flag `--exclude-resource-type` or the `DBT_EXCLUDE_RESOURCE_TYPE` environment variable to exclude unit tests from your production builds and save compute. + ## Unit testing a model This example creates a new `dim_customers` model with a field `is_valid_email_address` that calculates whether or not the customer’s email is valid: @@ -293,7 +299,16 @@ unit_tests: - {id: 1, first_name: emily} ``` -## Known limitations + +## Unit test exit codes + +Unit test successes and failures are represented by two exit codes: +- Pass (0) +- Fail (1) + +Exit codes differ from data test success and failure outputs because they don't directly reflect failing data tests. Data tests are queries designed to check specific conditions in your data, and they return one row per failed test case (for example, the number of values with duplicates for the `unique` test). dbt reports the number of failing records as failures. 
Whereas, each unit test represents one 'test case', so results are always 0 (pass) or 1 (fail) regardless of how many records failed within that test case. + +Learn about [exit codes](/reference/exit-codes) for more information. ## Additional resources diff --git a/website/docs/docs/cloud/cloud-cli-installation.md b/website/docs/docs/cloud/cloud-cli-installation.md index 206f8d68a99..c67216ce386 100644 --- a/website/docs/docs/cloud/cloud-cli-installation.md +++ b/website/docs/docs/cloud/cloud-cli-installation.md @@ -19,9 +19,9 @@ dbt commands are run against dbt Cloud's infrastructure and benefit from: ## Prerequisites -The dbt Cloud CLI is available in all [deployment regions](/docs/cloud/about-cloud/access-regions-ip-addresses) and for both multi-tenant and single-tenant accounts (Azure single-tenant not supported at this time). +The dbt Cloud CLI is available in all [deployment regions](/docs/cloud/about-cloud/access-regions-ip-addresses) and for both multi-tenant and single-tenant accounts. -- Ensure you are using dbt version 1.5 or higher. Refer to [dbt Cloud versions](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to upgrade. +- You are on dbt version 1.5 or higher. Alternatively, set it to [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version) to always use the latest version. ## Install dbt Cloud CLI diff --git a/website/docs/docs/cloud/dbt-cloud-ide/keyboard-shortcuts.md b/website/docs/docs/cloud/dbt-cloud-ide/keyboard-shortcuts.md index 61fe47a235a..2b1abbe9c28 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/keyboard-shortcuts.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/keyboard-shortcuts.md @@ -11,16 +11,16 @@ Use this dbt Cloud IDE page to help you quickly reference some common operation | Shortcut description | macOS | Windows | |--------|----------------|------------------| -| View a full list of editor shortcuts | Fn-F1 | Fn-F1 | -| Select a file to open | Command-O | Control-O | -| Close currently active editor tab | Option-W | Alt-W | -| Preview code | Command-Enter | Control-Enter | -| Compile code | Command-Shift-Enter | Control-Shift-Enter | -| Reveal a list of dbt functions in the editor | Enter two underscores `__` | Enter two underscores `__` | -| Open the command palette to invoke dbt commands and actions | Command-P / Command-Shift-P | Control-P / Control-Shift-P | -| Multi-edit in the editor by selecting multiple lines | Option-Click / Shift-Option-Command / Shift-Option-Click | Hold Alt and Click | +| View the full list of editor shortcuts to help your development, such as adding a line comment, changing tab display size, building modified models, changing editor font size, and more. | Fn-F1 | Fn-F1 | +| Select a file to open. | Command-O | Control-O | +| Close the currently active editor tab. | Option-W | Alt-W | +| Preview code. | Command-Enter | Control-Enter | +| Compile code. | Command-Shift-Enter | Control-Shift-Enter | +| Reveal a list of dbt functions in the editor. | Enter two underscores `__` | Enter two underscores `__` | +| Open the command palette to invoke dbt commands and actions. | Command-P / Command-Shift-P | Control-P / Control-Shift-P | +| Multi-edit in the editor by selecting multiple lines. | Option-Click / Shift-Option-Command / Shift-Option-Click | Hold Alt and Click | | Open the [**Invocation History Drawer**](/docs/cloud/dbt-cloud-ide/ide-user-interface#invocation-history) located at the bottom of the IDE. 
| Control-backtick (or Control + `) | Control-backtick (or Ctrl + `) | -| Add a block comment to the selected code. SQL files will use the Jinja syntax `({# #})` rather than the SQL one `(/* */)`. Markdown files will use the Markdown syntax `()` | Command-Option-/ | Control-Alt-/ | +| Add a block comment to the selected code. SQL files will use the Jinja syntax `({# #})` rather than the SQL one `(/* */)`.
Markdown files will use the Markdown syntax `()`. | Command-Option-/ | Control-Alt-/ | ## Related docs diff --git a/website/docs/docs/dbt-versions/core-upgrade/01-upgrading-to-v1.8.md b/website/docs/docs/dbt-versions/core-upgrade/01-upgrading-to-v1.8.md index f9e3b95a97c..a6a39da45ca 100644 --- a/website/docs/docs/dbt-versions/core-upgrade/01-upgrading-to-v1.8.md +++ b/website/docs/docs/dbt-versions/core-upgrade/01-upgrading-to-v1.8.md @@ -1,5 +1,5 @@ --- -title: "Upgrading to v1.8" +title: "Upgrading to v1.8 (latest)" id: upgrading-to-v1.8 description: New features and changes in dbt Core v1.8 displayed_sidebar: "docs" @@ -78,6 +78,20 @@ models: The [`run`](/reference/commands/run#the-`--empty`-flag) and [`build`](/reference/commands/build#the---empty-flag) commands now support the `--empty` flag for building schema-only dry runs. The `--empty` flag limits the refs and sources to zero rows. dbt will still execute the model SQL against the target data warehouse but will avoid expensive reads of input data. This validates dependencies and ensures your models will build properly. +### Deprecated functionality + +The ability for installed packages to override built-in materializations without explicit opt-in from the user is being deprecated. + +- Overriding a built-in materialization from an installed package raises a deprecation warning. +- Using a custom materialization from an installed package does not raise a deprecation warning. +- Using a built-in materialization package override from the root project via a wrapping materialization is still supported. For example: + + ```sql + {% materialization view, default %} + {{ return(my_cool_package.materialization_view_default()) }} + {% endmaterialization %} + ``` + ### Managing changes to legacy behaviors dbt Core v1.8 has introduced flags for [managing changes to legacy behaviors](/reference/global-configs/legacy-behaviors). You may opt into recently introduced changes (disabled by default), or opt out of mature changes (enabled by default), by setting `True` / `False` values, respectively, for `flags` in `dbt_project.yml`. diff --git a/website/docs/docs/dbt-versions/core-upgrade/02-upgrading-to-v1.7.md b/website/docs/docs/dbt-versions/core-upgrade/02-upgrading-to-v1.7.md index bf66673817f..fb6375e3744 100644 --- a/website/docs/docs/dbt-versions/core-upgrade/02-upgrading-to-v1.7.md +++ b/website/docs/docs/dbt-versions/core-upgrade/02-upgrading-to-v1.7.md @@ -1,5 +1,5 @@ --- -title: "Upgrading to v1.7 (latest)" +title: "Upgrading to v1.7" id: upgrading-to-v1.7 description: New features and changes in dbt Core v1.7 displayed_sidebar: "docs" @@ -30,6 +30,7 @@ This is a relatively small behavior change, but worth calling out in case you no Beginning with v1.7, running [`dbt deps`](/reference/commands/deps) creates or updates the `package-lock.yml` file in the _project_root_ where `packages.yml` is recorded. The `package-lock.yml` file contains a record of all packages installed and, if subsequent `dbt deps` runs contain no updated packages in `dependencies.yml` or `packages.yml`, dbt-core installs from `package-lock.yml`. + ## New and changed features and functionality - [`dbt docs generate`](/reference/commands/cmd-docs) now supports `--select` to generate [catalog metadata](/reference/artifacts/catalog-json) for a subset of your project. Currently available for Snowflake and Postgres only, but other adapters are coming soon. 
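For example, a minimal invocation of the new `--select` support for `dbt docs generate` might look like the sketch below; the `orders` model name is hypothetical and stands in for whichever subset of your project you want catalog metadata for.

```shell
# Generate catalog metadata only for the orders model and its downstream dependents
dbt docs generate --select "orders+"
```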
@@ -76,6 +77,20 @@ The run_results.json now includes three attributes related to the `applied` stat - `compiled_code`: Rendered string of the code that was compiled (empty after parsing, but full string after compiling). - `relation_name`: The fully-qualified name of the object that was (or will be) created/updated within the database. +### Deprecated functionality + +The ability for installed packages to override built-in materializations without explicit opt-in from the user is being deprecated. + +- Overriding a built-in materialization from an installed package raises a deprecation warning. +- Using a custom materialization from an installed package does not raise a deprecation warning. +- Using a built-in materialization package override from the root project via a wrapping materialization is still supported. For example: + + ``` + {% materialization view, default %} + {{ return(my_cool_package.materialization_view_default()) }} + {% endmaterialization %} + ``` + ### Quick hits diff --git a/website/docs/docs/dbt-versions/core-upgrade/03-upgrading-to-v1.6.md b/website/docs/docs/dbt-versions/core-upgrade/03-upgrading-to-v1.6.md index 01426d1b6ff..a989217b3cf 100644 --- a/website/docs/docs/dbt-versions/core-upgrade/03-upgrading-to-v1.6.md +++ b/website/docs/docs/dbt-versions/core-upgrade/03-upgrading-to-v1.6.md @@ -85,6 +85,20 @@ More consistency and flexibility around packages. Resources defined in a package [**Project dependencies**](/docs/collaborate/govern/project-dependencies): Introduces `dependencies.yml` and dependent `projects` as a feature of dbt Cloud Enterprise. Allows enforcing model access (public vs. protected/private) across project/package boundaries. Enables cross-project `ref` of public models, without requiring the installation of upstream source code. ::: +### Deprecated functionality + +The ability for installed packages to override built-in materializations without explicit opt-in from the user is being deprecated. + +- Overriding a built-in materialization from an installed package raises a deprecation warning. +- Using a custom materialization from an installed package does not raise a deprecation warning. +- Using a built-in materialization package override from the root project via a wrapping materialization is still supported. For example: + + ``` + {% materialization view, default %} + {{ return(my_cool_package.materialization_view_default()) }} + {% endmaterialization %} + ``` + ### Quick hits - [`state:unmodified` and `state:old`](/reference/node-selection/methods#the-state-method) for [MECE](https://en.wikipedia.org/wiki/MECE_principle) stateful selection diff --git a/website/docs/docs/dbt-versions/release-notes.md b/website/docs/docs/dbt-versions/release-notes.md index 2c03202c15a..432a1f28944 100644 --- a/website/docs/docs/dbt-versions/release-notes.md +++ b/website/docs/docs/dbt-versions/release-notes.md @@ -18,6 +18,10 @@ Release notes are grouped by month. For customers using dbt Virtual Private Clou ## May 2024 + +- **Update**: The [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) is now available for Azure single tenant and is accessible in all [deployment regions](/docs/cloud/about-cloud/access-regions-ip-addresses) for both multi-tenant and single-tenant accounts. +- **New**: The [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl) introduces [declarative caching](/docs/use-dbt-semantic-layer/sl-cache), allowing you to cache common queries to speed up performance and reduce query compute costs. 
Available for dbt Cloud Team or Enterprise accounts. + - The **Keep on latest version** setting is now Generally Available (previously Public Preview). diff --git a/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md b/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md index 21b15d518f1..55ca01323df 100644 --- a/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md +++ b/website/docs/docs/use-dbt-semantic-layer/dbt-sl.md @@ -47,6 +47,12 @@ plan="dbt Cloud Team or Enterprise" link="/docs/use-dbt-semantic-layer/setup-sl" icon="dbt-bit"/> + + + + + + 📹 Check out this video demo to see how declarative caching works! + +This video demonstrates the concept of declarative caching, how to run it using the dbt Cloud scheduler, and how fast your dashboards load as a result. + + + + + +How declarative caching works: +- Make sure your saved queries YAML configuration file has [exports](/docs/use-dbt-semantic-layer/exports) defined. +- Running a saved query triggers the dbt Semantic Layer to: + - Build a cached table from a saved query, with exports defined, into your data platform. + - Make sure any query requests that match the saved query's inputs use the cache, returning data more quickly. + - Automatically invalidates the cache when it detects new and fresh data in any upstream models related to the metrics in your cached table. + - Refreshes (or rebuilds) the cache the next time you run the saved query. + +Refer to the following diagram, which illustrates what happens when the dbt Semantic Layer receives a query request: + + + +### Declarative caching setup + +To populate the cache, you need to configure an export in your saved query YAML file configuration _and_ set the `cache config` to `true`. You can't cache a saved query without an export defined. + + + +```yaml +saved_queries: + - name: my_saved_query + ... # Rest of the saved queries configuration. + config: + cache: + enabled: true # Set to true to enable, defaults to false. + exports: + - name: order_data_key_metrics + config: + export_as: table +``` + + +To enable saved queries at the project level, you can set the `saved-queries` configuration in the [`dbt_project.yml` file](/reference/dbt_project.yml). This saves you time in configuring saved queries in each file: + + + +```yaml +saved-queries: + my_saved_query: + config: + +cache: + enabled: true +``` + + +### Run your declarative cache + +After setting up declarative caching in your YAML configuration, you can now run [exports](/docs/use-dbt-semantic-layer/exports) with the dbt Cloud job scheduler to build a cached table from a saved query into your data platform. + +- Use [exports to set up a job](/docs/use-dbt-semantic-layer/exports) to run a saved query dbt Cloud. +- The dbt Semantic Layer builds a cache table in your data platform in a dedicated `dbt_sl_cache` schema. +- The cache schema and tables are created using your deployment credentials. You need to grant read access to this schema for your Semantic Layer user. +- The cache refreshes (or rebuilds) on the same schedule as the saved query job. + + + +After a successful job run, you can go back to your dashboard to experience the speed and benefits of declarative caching. + +## Cache management + +dbt Cloud uses the metadata from your dbt model runs to intelligently manage cache invalidation. When you start a dbt job, it keeps track of the last model runtime and checks the freshness of the metrics upstream of your cache. 
+ +If an upstream model has data in it that was created after the cache was created, dbt Cloud invalidates the cache. This means queries won't use outdated cases and will instead query directly from the source data. Stale, outdated cache tables are periodically dropped and dbt Cloud will write a new cache the next time your saved query runs. + +You can manually invalidate the cache through the [dbt Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) using the `InvalidateCacheResult` field. + +## Related docs +- [Saved queries](/docs/build/saved-queries) +- [dbt Semantic Layer FAQs](/docs/use-dbt-semantic-layer/sl-faqs) diff --git a/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md b/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md index e9e33356aa2..79825084709 100644 --- a/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md +++ b/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md @@ -124,6 +124,25 @@ The dbt Semantic Layer dynamically computes the metric using the underlying data The dbt Semantic Layer does not store a physical copy of your data. It uses underlying tables to construct or compute the requested output. + + +The dbt Semantic Layer is part of dbt Cloud. It allows data teams to define metrics once, centrally, and access them from any integrated analytics tool, ensuring consistent answers across diverse datasets. In providing this service, dbt Labs permits clients to access Semantic Layer metrics. Client data passes through the Semantic Layer on the way back from the data warehouse. + +dbt Labs handles this in a secure way using encryption and authentication from the client’s data warehouse. In certain cases, such data may be cached on dbt Labs system ephemerally (data is not persistently stored). + +dbt Labs employees cannot access cached data during normal business operations and must have a business need and/or direct manager approval for access to the underlying infrastructure. Access would only be when necessary for providing a client services and never with the purpose of enriching dbt Labs. + +No client warehouse data is retained on dbt Labs's systems. We offer a caching solution to optimize query performance. The caching feature uses client data warehouse storage rather than being stored on dbt Labs’s systems. In addition, this feature is activated only through a client opt-in. Therefore, caching is always in client hands and at client discretion + + + + + +Yes it does. + + + + MetricFlow is hosted in dbt Cloud. Requests from the [Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) are routed from our API gateway to MetricFlow, which generates the SQL to compute what's requested by the user. MetricFlow hands the SQL back to our gateway, which then executes it against the data platform. @@ -212,7 +231,7 @@ Yes, while [entities](/docs/build/entities) must be defined under “entities, -There are a number of data applications have integrations with the dbt Semantic Layer, including Tableau, Google Sheets, Hex, and Mode, among others. +There are a number of data applications that have integrations with the dbt Semantic Layer, including Tableau, Google Sheets, Hex, and Mode, among others. Refer to [Available integrations](/docs/use-dbt-semantic-layer/avail-sl-integrations) for more information. 
diff --git a/website/docs/reference/dbt-jinja-functions/cross-database-macros.md b/website/docs/reference/dbt-jinja-functions/cross-database-macros.md index e13dba0bdec..0c7e93bf296 100644 --- a/website/docs/reference/dbt-jinja-functions/cross-database-macros.md +++ b/website/docs/reference/dbt-jinja-functions/cross-database-macros.md @@ -58,6 +58,7 @@ Please make sure to take a look at the [SQL expressions section](#sql-expression - [cast\_bool\_to\_text](#cast_bool_to_text) - [safe\_cast](#safe_cast) - [Date and time functions](#date-and-time-functions) + - [date](#date) - [dateadd](#dateadd) - [datediff](#datediff) - [date\_trunc](#date_trunc) @@ -106,6 +107,7 @@ Please make sure to take a look at the [SQL expressions section](#sql-expression - [cast\_bool\_to\_text](#cast_bool_to_text) - [safe\_cast](#safe_cast) - [Date and time functions](#date-and-time-functions) + - [date](#date) - [dateadd](#dateadd) - [datediff](#datediff) - [date\_trunc](#date_trunc) @@ -176,6 +178,7 @@ Please make sure to take a look at the [SQL expressions section](#sql-expression - [safe_cast](#safe_cast) [**Date and time functions**](#date-and-time-functions) +- [date](#date) - [dateadd](#dateadd) - [datediff](#datediff) - [date_trunc](#date_trunc) @@ -884,6 +887,35 @@ For databases that support it, this macro will return `NULL` when the cast fails ## Date and time functions +### date + +**Availability**: +dbt v1.8 or later. For more information, select the version from the documentation navigation menu. + + + +__Args__: + + * `year`: an integer + * `month`: an integer + * `day`: an integer + +This macro converts the `year`, `month`, and `day` into an SQL `DATE` type. + +**Usage**: + +```sql +{{ dbt.date(2023, 10, 4) }} +``` + +**Sample output (PostgreSQL)**: + +```sql +to_date('2023-10-04', 'YYYY-MM-DD') +``` + + + ### dateadd __Args__: diff --git a/website/docs/reference/global-configs/warnings.md b/website/docs/reference/global-configs/warnings.md index 967f2209d44..0cb4add5f0d 100644 --- a/website/docs/reference/global-configs/warnings.md +++ b/website/docs/reference/global-configs/warnings.md @@ -12,20 +12,55 @@ Turning on the `WARN_ERROR` config will convert dbt warnings into errors. Any ti dbt --warn-error run ... ``` + - -Converting any and all warnings to errors may suit your needs perfectly, but there may be some warnings you just don't care about, and some you care about a lot. +Converting any warnings to errors may suit your needs perfectly, but there may be some warnings you just don't care about, and some you care about a lot. The `WARN_ERROR_OPTIONS` config gives you more granular control over _exactly which types of warnings_ are treated as errors. + + + +Warnings that should be treated as errors can be specified through `include` and/or `exclude` parameters. Warning names can be found in [dbt-core's types.py file](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/events/types.py), where each class name that inherits from `WarnLevel` corresponds to a warning name (e.g. `AdapterDeprecationWarning`, `NoNodesForSelectionCriteria`). + +The `include` parameter can be set to `"all"` or `"*"` to treat all warnings as exceptions, or to a list of specific warning names to treat as exceptions. When `include` is set to `"all"` or `"*"`, the optional `exclude` parameter can be set to exclude specific warnings from being treated as exceptions. + + + + + +- Warnings that should be treated as errors can be specified through `error` and/or `warn` parameters. 
Warning names can be found in [dbt-core's types.py file](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/events/types.py), where each class name that inherits from `WarnLevel` corresponds to a warning name (e.g. `AdapterDeprecationWarning`, `NoNodesForSelectionCriteria`). + +- The `error` parameter can be set to `"all"` or `"*"` to treat all warnings as exceptions, or to a list of specific warning names to treat as exceptions. When `error` is set to `"all"` or `"*"`, the optional `warn` parameter can be set to exclude specific warnings from being treated as exceptions. + +- Use the `silence` parameter to ignore warnings through project flags, without needing to re-specify the silence list every time. For example, to silence deprecation warnings or certain warnings you want to ignore across your project, you can specify them in the `silence` parameter. This is useful in large projects where certain warnings aren't critical and can be ignored to keep the noise low and logs clean. + + + + +```yaml +name: "my_dbt_project" +tests: + +enabled: True +flags: + warn_error_options: + error: # Previously called "include" + warn: # Previously called "exclude" + silence: # To silence or ignore warnings + - TestsConfigDeprecation + - NoNodesForSelectionCriteria +``` + + -The `WARN_ERROR_OPTIONS` config gives you more granular control over _exactly which types of warnings_ are treated as errors. Warnings that should be treated as errors can be specified through `include` and/or `exclude` parameters. Warning names can be found in [dbt-core's types.py file](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/events/types.py), where each class name that inherits from `WarnLevel` corresponds to a warning name (e.g. `AdapterDeprecationWarning`, `NoNodesForSelectionCriteria`). + -The `include` parameter can set to `"all"` or `"*"` to treat all warnings as exceptions, or to a list of specific warning names to treat as exceptions. When include is set to `"all"` or `"*"`, the optional `exclude` parameter can be set to exclude specifc warnings from being treated as exceptions. :::info `WARN_ERROR` and `WARN_ERROR_OPTIONS` are mutually exclusive -`WARN_ERROR` and `WARN_ERROR_OPTIONS` are mutually exclusive. You can only specify one, even when you're specifying the config in multiple places (e.g. env var + CLI flag), otherwise you'll see a usage error. +`WARN_ERROR` and `WARN_ERROR_OPTIONS` are mutually exclusive. You can only specify one, even when you're specifying the config in multiple places (e.g. env var + CLI flag), otherwise, you'll see a usage error. ::: + + ```text dbt --warn-error-options '{"include": "all"}' run ... @@ -47,17 +82,58 @@ DBT_WARN_ERROR_OPTIONS='{"include": ["NoNodesForSelectionCriteria"]}' dbt run ... ``` + + ```yaml - config: warn_error_options: include: all exclude: - NoNodesForSelectionCriteria +``` + + + + + + +```text +dbt --warn-error-options '{"error": "all"}' run +... +``` + +```text +dbt --warn-error-options '{"error": "all", "warn": ["NoNodesForSelectionCriteria"]}' run +... ``` + + +```text +dbt --warn-error-options '{"error": ["NoNodesForSelectionCriteria"]}' run +... +``` + +```text +DBT_WARN_ERROR_OPTIONS='{"error": ["NoNodesForSelectionCriteria"]}' dbt run +... 
+``` + + + +```yaml +config: + warn_error_options: + error: # Previously called "include" + warn: # Previously called "exclude" + - NoNodesForSelectionCriteria + silence: # Silence or ignore warnings + - TestsConfigDeprecation + - NoNodesForSelectionCriteria +``` + diff --git a/website/docs/reference/resource-properties/constraints.md b/website/docs/reference/resource-properties/constraints.md index 9b569f8a94d..b8111ef0adb 100644 --- a/website/docs/reference/resource-properties/constraints.md +++ b/website/docs/reference/resource-properties/constraints.md @@ -37,12 +37,12 @@ models: # model-level constraints constraints: - type: primary_key - columns: FIRST_COLUMN, SECOND_COLUMN, ... + columns: [FIRST_COLUMN, SECOND_COLUMN, ...] - type: FOREIGN_KEY # multi_column - columns: FIRST_COLUMN, SECOND_COLUMN, ... + columns: [FIRST_COLUMN, SECOND_COLUMN, ...] expression: "OTHER_MODEL_SCHEMA.OTHER_MODEL_NAME (OTHER_MODEL_FIRST_COLUMN, OTHER_MODEL_SECOND_COLUMN, ...)" - type: check - columns: FIRST_COLUMN, SECOND_COLUMN, ... + columns: [FIRST_COLUMN, SECOND_COLUMN, ...] expression: "FIRST_COLUMN != SECOND_COLUMN" name: HUMAN_FRIENDLY_NAME - type: ... diff --git a/website/sidebars.js b/website/sidebars.js index 837161ec2d6..0af7ea50eee 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -518,8 +518,9 @@ const sidebarSettings = { href: `/guides/sl-snowflake-qs`, }, "docs/use-dbt-semantic-layer/setup-sl", - "docs/use-dbt-semantic-layer/exports", "docs/use-dbt-semantic-layer/sl-architecture", + "docs/use-dbt-semantic-layer/exports", + "docs/use-dbt-semantic-layer/sl-cache", "docs/use-dbt-semantic-layer/sl-faqs", { type: "category", diff --git a/website/snippets/core-versions-table.md b/website/snippets/core-versions-table.md index 5f46762922e..6ef2bf3ba4e 100644 --- a/website/snippets/core-versions-table.md +++ b/website/snippets/core-versions-table.md @@ -2,8 +2,8 @@ | dbt Core | Initial release | Support level and end date | |:-------------------------------------------------------------:|:---------------:|:-------------------------------------:| -| [**v1.8**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.8) | May 2024 | Release Candidate | -| [**v1.7**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.7) | Nov 2, 2023 | Active — Nov 1, 2024 | +| [**v1.8**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.8) | May 9 2024 | Active — May 8, 2025 | +| [**v1.7**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.7) | Nov 2, 2023 | Critical — Nov 1, 2024 | | [**v1.6**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.6) | Jul 31, 2023 | Critical — Jul 30, 2024 | | [**v1.5**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.5) | Apr 27, 2023 | End of Life* ⚠️ | | [**v1.4**](/docs/dbt-versions/core-upgrade/upgrading-to-v1.4) | Jan 25, 2023 | End of Life* ⚠️ | diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/cache-creation-flow.jpg b/website/static/img/docs/dbt-cloud/semantic-layer/cache-creation-flow.jpg new file mode 100644 index 00000000000..0e4bfef0a7c Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/cache-creation-flow.jpg differ diff --git a/website/static/img/docs/dbt-cloud/semantic-layer/declarative-cache-query-flow.jpg b/website/static/img/docs/dbt-cloud/semantic-layer/declarative-cache-query-flow.jpg new file mode 100644 index 00000000000..4e71a3b22d4 Binary files /dev/null and b/website/static/img/docs/dbt-cloud/semantic-layer/declarative-cache-query-flow.jpg differ
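As a worked reference for the `constraints.md` spec updated above (where `columns` now takes a YAML list), here is a filled-in sketch using hypothetical model and column names, assuming the model has an enforced contract.

```yaml
models:
  - name: dim_customers            # hypothetical model name
    config:
      contract:
        enforced: true             # constraints apply only when the contract is enforced
    constraints:
      - type: primary_key
        columns: [customer_id, source_system]          # multi-column key written as a YAML list
      - type: check
        columns: [signup_date, first_order_date]
        expression: "signup_date <= first_order_date"
        name: valid_signup_order
    columns:
      - name: customer_id
        data_type: int
      - name: source_system
        data_type: text
      - name: signup_date
        data_type: date
      - name: first_order_date
        data_type: date
```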