From 2a1df5b7fd1549605ee6ec110ada6ebe75b9ee41 Mon Sep 17 00:00:00 2001 From: Jason Karlavige Date: Thu, 23 May 2024 08:46:37 -0400 Subject: [PATCH 01/12] adjust lightbox css to center-align images --- website/src/components/lightbox/styles.module.css | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/website/src/components/lightbox/styles.module.css b/website/src/components/lightbox/styles.module.css index 1f50a2f0427..d47ea990e0d 100644 --- a/website/src/components/lightbox/styles.module.css +++ b/website/src/components/lightbox/styles.module.css @@ -8,9 +8,12 @@ :local(.docImage) { filter: drop-shadow(4px 4px 6px #aaaaaa33); margin: 10px auto; - padding-right: 10px; display: block; - max-width: 80%; +} + +.docImage img { + margin: 10px auto; + display: block; } :local(.collapsed) { @@ -18,10 +21,10 @@ padding: 0 5px; } -.leftAlignLightbox { +.docImage.leftAlignLightbox img { margin: 10px 0; } -.rightAlignLightbox { +.docImage.rightAlignLightbox img { margin: 10px 0 10px auto; } From 7b646c9ecdd2745091b62623fa7eb23a266db406 Mon Sep 17 00:00:00 2001 From: Joey Gault <145610486+joeygaultdbt@users.noreply.github.com> Date: Thu, 23 May 2024 11:58:11 -0400 Subject: [PATCH 02/12] Update vercel.json Updated redirect for '/quickstarts' to '/docs/get-started-dbt' --- website/vercel.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/vercel.json b/website/vercel.json index 4c528570f8f..e7ed6848205 100644 --- a/website/vercel.json +++ b/website/vercel.json @@ -4874,7 +4874,7 @@ }, { "source": "/quickstarts", - "destination": "/guides", + "destination": "/docs/get-started-dbt", "permanent": true }, { From 344ea60bb8c5db2bff841889fe132d7086b9950c Mon Sep 17 00:00:00 2001 From: Cameron Afzal Date: Thu, 23 May 2024 12:15:54 -0700 Subject: [PATCH 03/12] Clarify CLL caveats --- website/docs/docs/collaborate/column-level-lineage.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/website/docs/docs/collaborate/column-level-lineage.md b/website/docs/docs/collaborate/column-level-lineage.md index 864487a168d..3dfeca7be6f 100644 --- a/website/docs/docs/collaborate/column-level-lineage.md +++ b/website/docs/docs/collaborate/column-level-lineage.md @@ -49,7 +49,8 @@ When exploring your data products, navigating column lineage allows analytics en ## Caveats -Following are the CLL caveats/limitations. +### Column usage +Column-level lineage reflects the lineage from `select` statements in your models' SQL code. It won't reflect other usage like joins and filters. 
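+
+For example, in a hypothetical model like the one below, only `order_id` and `customer_name` would be traced in the column-level lineage; `customer_id` and `status` appear only in the join and filter, so their usage isn't reflected:
+
+```sql
+select
+    orders.order_id,            -- traced: selected directly
+    customers.customer_name     -- traced: selected from the joined model
+from {{ ref('orders') }} as orders
+join {{ ref('customers') }} as customers
+    on orders.customer_id = customers.customer_id   -- not traced: join-only usage
+where orders.status = 'completed'                   -- not traced: filter-only usage
+```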
### SQL parsing From 7488c829c1526220a4c02eec3766e8ae649022ab Mon Sep 17 00:00:00 2001 From: mirnawong1 Date: Thu, 23 May 2024 22:14:25 +0100 Subject: [PATCH 04/12] update expandables component --- website/docs/docs/build/custom-schemas.md | 4 +- .../docs/docs/build/fill-nulls-advanced.md | 4 +- .../docs/docs/build/metricflow-commands.md | 4 +- .../docs/docs/cloud/configure-cloud-cli.md | 4 +- .../dbt-cloud-ide/develop-in-the-cloud.md | 4 +- .../docs/collaborate/dbt-explorer-faqs.md | 96 +++++------ .../docs/docs/collaborate/explore-projects.md | 32 ++-- .../docs/dbt-versions/2023-release-notes.md | 152 +++++++++--------- .../docs/docs/dbt-versions/release-notes.md | 44 ++--- .../docs/use-dbt-semantic-layer/sl-faqs.md | 144 ++++++++--------- website/docs/guides/core-cloud-2.md | 24 +-- .../dbt-jinja-functions/statement-blocks.md | 8 +- website/snippets/_config-dbt-version-check.md | 8 +- website/src/components/expandable/index.js | 89 +++++++--- .../components/expandable/styles.module.css | 74 ++++++++- website/src/theme/MDXComponents/index.js | 4 +- website/static/js/headerLinkCopy.js | 2 +- 17 files changed, 396 insertions(+), 301 deletions(-) diff --git a/website/docs/docs/build/custom-schemas.md b/website/docs/docs/build/custom-schemas.md index 33d8c088266..846f2d1c341 100644 --- a/website/docs/docs/build/custom-schemas.md +++ b/website/docs/docs/build/custom-schemas.md @@ -98,7 +98,7 @@ To customize this macro, copy the example code in the section [How does dbt gene Be careful. dbt will ignore any custom `generate_schema_name` macros included in installed packages. - + If you're modifying how dbt generates schema names, don't just replace ```{{ default_schema }}_{{ custom_schema_name | trim }}``` with ```{{ custom_schema_name | trim }}``` in the ```generate_schema_name``` macro. @@ -124,7 +124,7 @@ If you remove ```{{ default_schema }}```, it causes developers to override each ``` - + ### generate_schema_name arguments diff --git a/website/docs/docs/build/fill-nulls-advanced.md b/website/docs/docs/build/fill-nulls-advanced.md index 16595257da0..3aff6ba7e82 100644 --- a/website/docs/docs/build/fill-nulls-advanced.md +++ b/website/docs/docs/build/fill-nulls-advanced.md @@ -126,9 +126,9 @@ Now, if you combine the metrics in a `derived` metric, there will be a zero valu ## FAQs - + For additional examples and discussion on how to handle null values in derived metrics that use data from multiple tables, check out [MetricFlow issue #1031](https://github.com/dbt-labs/metricflow/issues/1031). - + diff --git a/website/docs/docs/build/metricflow-commands.md b/website/docs/docs/build/metricflow-commands.md index 035eb5fcf53..3b1ed92fbff 100644 --- a/website/docs/docs/build/metricflow-commands.md +++ b/website/docs/docs/build/metricflow-commands.md @@ -88,13 +88,13 @@ You can use the `dbt sl` prefix before the command name to execute them in the d When you make changes to metrics, make sure to run `dbt parse` at a minimum to update the dbt Semantic Layer. This updates the `semantic_manifest.json` file, reflecting your changes when querying metrics. By running `dbt parse`, you won't need to rebuild all the models. 
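For example, after changing a metric definition, a minimal loop in the dbt Cloud CLI might look like this (the metric name is illustrative):

```shell
dbt parse                                                  # refresh semantic_manifest.json without rebuilding models
dbt sl query --metrics order_total --group-by metric_time  # preview the updated metric
```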
::: - + Check out the following video for a short video demo of how to query or preview metrics with the dbt Cloud CLI: - + diff --git a/website/docs/docs/cloud/configure-cloud-cli.md b/website/docs/docs/cloud/configure-cloud-cli.md index 2b41f16b709..a396579b7f3 100644 --- a/website/docs/docs/cloud/configure-cloud-cli.md +++ b/website/docs/docs/cloud/configure-cloud-cli.md @@ -111,7 +111,7 @@ As a tip, most command-line tools have a `--help` flag to show available command ::: ## FAQs - + If you've never had a `.dbt` directory, you should perform the following recommended steps to create one. If you already have a `.dbt` directory, move the `dbt_cloud.yml` file into it. @@ -152,4 +152,4 @@ move %USERPROFILE%\Downloads\dbt_cloud.yml %USERPROFILE%\.dbt\dbt_cloud.yml This command moves the `dbt_cloud.yml` from the `Downloads` folder to the `.dbt` folder. If your `dbt_cloud.yml` file is located elsewhere, adjust the path accordingly. - + diff --git a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md index e73a3f30e15..1e561b379b4 100644 --- a/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md +++ b/website/docs/docs/cloud/dbt-cloud-ide/develop-in-the-cloud.md @@ -102,7 +102,7 @@ Nice job, you're ready to start developing and building models 🎉! ### Considerations - To improve your experience using dbt Cloud, we suggest that you turn off ad blockers. This is because some project file names, such as `google_adwords.sql`, might resemble ad traffic and trigger ad blockers. - To preserve performance, there's a file size limitation for repositories over 6 GB. If you have a repo over 6 GB, please contact [dbt Support](mailto:support@getdbt.com) before running dbt Cloud. -- +- ### Start-up process There are three start-up states when using or launching the Cloud IDE: @@ -120,7 +120,7 @@ Nice job, you're ready to start developing and building models 🎉! - **Saved but uncommitted code —** When you save a file, the data gets stored in durable, long-term storage, but isn't synced back to git. To switch branches using the **Change branch** option, you must "Commit and sync" or "Revert" changes. Changing branches isn't available for saved-but-uncommitted code. This is to ensure your uncommitted changes don't get lost. - **Committed code —** This is stored in the branch with your git provider and you can check out other (remote) branches. - + ## Build and document your projects diff --git a/website/docs/docs/collaborate/dbt-explorer-faqs.md b/website/docs/docs/collaborate/dbt-explorer-faqs.md index b470c7c7074..c4203bdd031 100644 --- a/website/docs/docs/collaborate/dbt-explorer-faqs.md +++ b/website/docs/docs/collaborate/dbt-explorer-faqs.md @@ -9,102 +9,102 @@ pagination_next: null ## Overview - + dbt Explorer makes it easy and intuitive to understand your entire lineage — from data source to the reporting layer — so you can troubleshoot, improve, and optimize your pipelines. With built-in features like project recommendations and model performance analysis, you can be sure you have appropriate test and documentation coverage across your estate and quickly spot and remediate slow-running models. With column-level lineage, you can quickly identify the potential downstream impacts of table changes or work backwards to quickly understand the root cause of an incident. dbt Explorer gives teams the insights they need to improve data quality proactively, ensuring pipelines stay performant and data trust remains solid. 
- + - + dbt Explorer is generally available to all regions and deployment types on the dbt Cloud [Enterprise and Team plans](https://www.getdbt.com/). Certain features within dbt Explorer, such as multi-project lineage and column-level lineage, are only available on the Enterprise plan. dbt Explorer can be accessed by users with developer and read-only seats. - + - + dbt Explorer is the default documentation experience for dbt Cloud customers. dbt Docs is still available but doesn't offer the same speed, metadata, or visibility as dbt Explorer and will become a legacy feature. - + ## How dbt Explorer works - + No. dbt Explorer and all of its features are only available as a dbt Cloud user experience. dbt Explorer reflects the metadata from your dbt Cloud project(s) and their runs. - + - + dbt Explorer supports a production or staging [deployment environment](/docs/deploy/deploy-environments) for each project you want to explore. It defaults to the latest production or staging state of a project. Users can only assign one production and one staging environment per dbt Cloud project. Support for development (dbt Cloud CLI and dbt Cloud IDE) environments is coming soon. - + - + Simply select **Explore** from the dbt Cloud top navigation bar. dbt Explorer automatically updates after each dbt Cloud run in the given project’s environment (production, by default). The dbt commands you run within the environment will generate and update the metadata in dbt Explorer, so make sure to run the correct combination of commands within the jobs of the environment; for more details, refer to [Generate metadata](/docs/collaborate/explore-projects#generate-metadata). - + - + Yes. The lineage that powers dbt Explorer is also available through the Discovery API. - + - + dbt Explorer reflects all the lineage defined within the dbt project. Our vision for dbt Explorer is to incorporate additional metadata from external tools like data loaders (sources) and BI/analytics tools (exposures) integrated with dbt Cloud, all seamlessly incorporated into the lineage of the dbt Cloud project. - + ## Key features - + Yes. Refer to [Explore multiple projects](/docs/collaborate/explore-multiple-projects) to learn more. - + - + Resource search capabilities include using keywords, partial strings (fuzzy search), and set operators like `OR`. Meanwhile, lineage search supports using dbt selectors. For details, refer to [Keyword search](/docs/collaborate/explore-projects#search-resources). - + - + dbt Cloud updates the performance charts and metrics after a job run. - + - + A chart of models built by month is available in the dbt Cloud dashboard. - + - + Yes. Today, you can edit descriptions in the dbt Cloud IDE or CLI by changing the YAML files within the dbt project. In the future, dbt Explorer will support more ways of editing descriptions. - + - + Recommendations largely mirror the best practice rules from the `dbt_project_evaluator` package. At this time, recommendations can’t be customized. In the future, dbt Explorer will likely support recommendation customization capabilities (for example, in project code). 
- + ## Column-level lineage - + Column-level lineage in dbt Explorer can be used to improve many data development workflows, including: @@ -113,61 +113,61 @@ Column-level lineage in dbt Explorer can be used to improve many data developmen - **Impact analysis** — Trace transformations and usage to avoid introducing issues for consumers - **Efficiency** — Prune unnecessary columns to reduce costs and data team overhead - + - + Yes. Column-level lineage can handle name changes across instances of the column in the dbt project. - + - + No. Cross-project column lineage is supported in the sense of viewing how a public model is used across projects, but not on a column-level. - + - + Yes, a reused column, labeled as passthrough or rename, inherits its description from source and upstream model columns. In other words, source and upstream model columns propagate their descriptions downstream whenever they are not transformed, meaning you don’t need to manually define the description. Refer to [Inherited column descriptions](/docs/collaborate/column-level-lineage#inherited-column-descriptions) for more info. - + - + Not currently, but we plan to incorporate column-level awareness across features in dbt Cloud in the future. - + ## Availability, access, and permissions - + Read-only users can consume metadata in dbt Explorer. More bespoke experiences and exploration avenues for analysts and less-technical contributors will be provided in the future. - + - + dbt Explorer is available on the dbt Cloud Team and Enterprise plans. Certain features within dbt Explorer, like multi-project lineage and column-level lineage, are only available on the Enterprise plan. - + - + No. dbt Explorer is a dbt Cloud-only product experience. - + - + Yes, users with read-only access can use the dbt Explorer. Specific feature availability within dbt Explorer will depend on your dbt Cloud plan. - + - + The ability to embed and share views is being evaluated as a potential future capability. - + diff --git a/website/docs/docs/collaborate/explore-projects.md b/website/docs/docs/collaborate/explore-projects.md index ec49659e1a5..b5fe6382320 100644 --- a/website/docs/docs/collaborate/explore-projects.md +++ b/website/docs/docs/collaborate/explore-projects.md @@ -45,7 +45,7 @@ By default, dbt Explorer shows the project's [applied state](/docs/dbt-cloud-api To explore the lineage graphs of tests and macros, view [their resource details pages](#view-resource-details). By default, dbt Explorer excludes these resources from the full lineage graph unless a search query returns them as results. - + - Hover over any item in the graph to display the resource’s name and type. - Zoom in and out on the graph by mouse-scrolling. @@ -71,7 +71,7 @@ To explore the lineage graphs of tests and macros, view [their resource details - [View resource details](#view-resource-details) by selecting a node (double-clicking) in the graph. - Click **Lenses** (lower right corner of the graph) to use Explorer's [lenses](#lenses) feature. - + ### Example of full lineage graph @@ -87,7 +87,7 @@ When you apply a lens, tags become visible on the nodes in the lineage graph, in Lenses are helpful to analyze a subset of the DAG if you're zoomed in, or to find models/issues from a larger vantage point. - + A resource in your project is characterized by resource type, materialization type, or model layer, as well as its latest run or latest test status. 
Lenses are available for the following metadata: @@ -100,7 +100,7 @@ A resource in your project is characterized by resource type, materialization ty - **Staging** — A model with the prefix `stg_`. Or, a model that lives in the `/staging/` subdirectory. - **Test Status**: The status from the latest execution of the tests that ran again this resource. In the case that a model has multiple tests with different results, the lens reflects the 'worst case' status. - + ### Example of lenses @@ -116,7 +116,7 @@ Example of applying the **Tests Status** _lens_, where each model name displays You can locate resources in your project by performing a keyword search in the search bar. All resource names, column names, resource descriptions, warehouse relations, and code matching your search criteria will be displayed as a list on the main (center) section of the page. When searching for an exact column name, the results show all relational nodes containing that column in their schemas. If there's a match, a notice in the search result indicates the resource contains the specified column. Also, you can apply filters to further refine your search results. - + - **Partial keyword search** — This is also referred to as fuzzy search. - **Exclude keywords** — Prepend a minus sign (-) to the keyword you want to exclude from search results. For example, `-user` will exclude all matches of that keyword from search results. @@ -124,9 +124,9 @@ You can locate resources in your project by performing a keyword search in the s - **Phrase search** — Surround a string of keywords with double quotation marks to search for that exact phrase (for example, `"stg users"`). To learn more, refer to [Phrase search](https://en.wikipedia.org/wiki/Phrase_search) on Wikipedia. - **SQL keyword search** — Use SQL keywords in your search. For example, the search results `int github users joined` will include matches that contain that specific string of keywords (similar to phrase searching). - + - + The **Filters** side panel becomes available after you perform a keyword search. Use this panel to further refine the results from your keyword search. By default, Explorer searches across all resources in the project. You can filter on: @@ -138,7 +138,7 @@ The **Filters** side panel becomes available after you perform a keyword search. Under the the **Models** option, you can filter on model properties (access or materialization type). Also available are **Advanced** options, where you can limit the search results to column name, model code, and more. - + ### Example of keyword search Example of results from searching on the keyword `item` and applying the filters models, description, and code: @@ -177,7 +177,7 @@ In the upper right corner of the resource details page, you can: - Click the [Open in IDE](#open-in-ide) icon to examine the resource using the [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud). - Click the Share icon to copy the page's link to your clipboard. - + - **Status bar** (below the page title) — Information on the last time the model ran, whether the run was successful, how the data is materialized, number of rows, and the size of the model. - **General** tab includes: @@ -190,9 +190,9 @@ In the upper right corner of the resource details page, you can: - **Code** tab — The source code and compiled code for the model. - **Columns** tab — The available columns in the model. This tab also shows tests results (if any) that you can select to view the test's details page. 
A :white_check_mark: denotes a passing test. To filter the columns in the resource, you can use the search bar that's located at the top of the columns view. - + - + - **Status bar** (below the page title) — Information on the last time the exposure was updated. - **General** tab includes: @@ -202,9 +202,9 @@ In the upper right corner of the resource details page, you can: - **Details** section — Details like exposure type, maturity, owner information, and more. - **Relationships** section — The nodes the exposure **Depends On**. - + - + - **Status bar** (below the page title) — Information on the last time the test ran, whether the test passed, test name, test target, and column name. Defaults to all if not specified. - **Test Type** (next to the Status bar) — Information on the different test types available: Unit test or Data test. Defaults to all if not specified. @@ -222,9 +222,9 @@ Example of the Tests view: - + - + - **Status bar** (below the page title) — Information on the last time the source was updated and the number of tables the source uses. - **General** tab includes: @@ -235,7 +235,7 @@ Example of the Tests view: - **Relationships** section — A table that lists all the sources used with their freshness status, the timestamp of when freshness was last checked, and the timestamp of when the source was last loaded. - **Columns** tab — The available columns in the source. This tab also shows tests results (if any) that you can select to view the test's details page. A :white_check_mark: denotes a passing test. - + ### Example of model details diff --git a/website/docs/docs/dbt-versions/2023-release-notes.md b/website/docs/docs/dbt-versions/2023-release-notes.md index 2185d2c8d95..3ffbebbd161 100644 --- a/website/docs/docs/dbt-versions/2023-release-notes.md +++ b/website/docs/docs/dbt-versions/2023-release-notes.md @@ -11,7 +11,7 @@ Archived release notes for dbt Cloud from 2023 ## December 2023 -- +- The dbt Labs team continues to work on adding new features, fixing bugs, and increasing reliability for the dbt Semantic Layer. The following list explains the updates and fixes for December 2023 in more detail. @@ -27,9 +27,9 @@ Archived release notes for dbt Cloud from 2023 - Deprecation — We deprecated dbt Metrics and the legacy dbt Semantic Layer, both supported on dbt version 1.5 or lower. This change came into effect on December 15th, 2023. - Improved dbt converter tool — The [dbt converter tool](https://github.com/dbt-labs/dbt-converter) can now help automate some of the work in converting from LookML (Looker's modeling language) for those who are migrating. Previously this wasn’t available. - + -- +- The extended attributes feature in dbt Cloud is now GA! It allows for an environment level override on any YAML attribute that a dbt adapter accepts in its `profiles.yml`. You can provide a YAML snippet to add or replace any [profile](/docs/core/connect-data-platform/profiles.yml) value. @@ -39,9 +39,9 @@ Archived release notes for dbt Cloud from 2023 - + -- +- dbt Labs has deprecated dbt Metrics and the legacy dbt Semantic Layer, both supported on dbt version 1.5 or lower. This change starts on December 15th, 2023. 
@@ -74,11 +74,11 @@ Archived release notes for dbt Cloud from 2023 - [Upgrade version in dbt Cloud](/docs/dbt-versions/upgrade-dbt-version-in-cloud) - [Version migration guides](/docs/dbt-versions/core-upgrade) - + ## November 2023 -- +- There are new quality-of-life improvements in dbt Cloud for email and Slack notifications about your jobs: @@ -90,9 +90,9 @@ Archived release notes for dbt Cloud from 2023 To learn more, check out [Job notifications](/docs/deploy/job-notifications). - + -- +- There are new quality-of-life improvements in dbt Cloud for email and Slack notifications about your jobs: @@ -104,9 +104,9 @@ Archived release notes for dbt Cloud from 2023 To learn more, check out [Job notifications](/docs/deploy/job-notifications). - + -- +- Now available for dbt Cloud Enterprise plans is a new option to enable Git repository caching for your job runs. When enabled, dbt Cloud caches your dbt project's Git repository and uses the cached copy instead if there's an outage with the Git provider. This feature improves the reliability and stability of your job runs. @@ -114,11 +114,11 @@ Archived release notes for dbt Cloud from 2023 - + ## October 2023 -- +- Beginning December 1, 2023, the [Administrative API](/docs/dbt-cloud-apis/admin-cloud-api) v2 and v3 will expect you to limit all "list" or `GET` API methods to 100 results per API request. This limit enhances the efficiency and stability of our services. If you need to handle more than 100 results, then use the `limit` and `offset` query parameters to paginate those results; otherwise, you will receive an error. @@ -126,9 +126,9 @@ Archived release notes for dbt Cloud from 2023 Refer to the [API v3 Pagination](https://docs.getdbt.com/dbt-cloud/api-v3#/) or [API v2 Pagination](https://docs.getdbt.com/dbt-cloud/api-v2#/) sections for more information on how to paginate your API responses. - + -- +- We are excited to announce the dbt Cloud CLI, **unified command line for dbt**, is available in public preview. It’s a local development experience, powered by dbt Cloud. It’s easy to get started: `pip3 install dbt` or `brew install dbt` and you’re ready to go. @@ -153,9 +153,9 @@ Archived release notes for dbt Cloud from 2023 Refer to [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) to learn more. - + -- +- If you don't set a [custom branch](/docs/dbt-cloud-environments#custom-branch-behavior) for your dbt Cloud environment, it now defaults to the default branch of your Git repository (for example, `main`). Previously, [CI jobs](/docs/deploy/ci-jobs) would run for pull requests (PRs) that were opened against _any branch_ or updated with new commits if the **Custom Branch** option wasn't set. @@ -163,9 +163,9 @@ Archived release notes for dbt Cloud from 2023 Your Git pull requests (PRs) might not trigger against your default branch if you're using Azure DevOps and the default branch isn't `main` or `master`. To resolve this, [set up a custom branch](/faqs/Environments/custom-branch-settings) with the branch you want to target. - + -- +- The dbt Cloud IDE and dbt Cloud CLI now automatically installs `dbt deps` when your environment starts or when necessary. Previously, it would prompt you to run `dbt deps` during initialization. @@ -177,9 +177,9 @@ Archived release notes for dbt Cloud from 2023 - When you update the package version in the `packages.yml` or `dependencies.yml` file. - If you edit the `dependencies.yml` file and the number of packages remains the same, run `dbt deps`. 
(Note that this is a known bug dbt Labs will fix in the future.) - + -- +- Previously in dbt Cloud, you could only rerun an errored job from start but now you can also rerun it from its point of failure. @@ -187,9 +187,9 @@ Archived release notes for dbt Cloud from 2023 - + -- +- Hello from the dbt Docs team: @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun! First, we’d like to thank the 15 new community contributors to docs.getdbt.com. We merged [107 PRs](https://github.com/dbt-labs/docs.getdbt.com/pulls?q=is%3Apr+merged%3A2023-09-01..2023-09-31) in September. @@ -219,9 +219,9 @@ Archived release notes for dbt Cloud from 2023 Stay tuned for a flurry of releases in October and a filterable guides section that will make guides easier to find! - + -- +- If you're using the legacy Semantic Layer, we _highly_ recommend you [upgrade your dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to dbt v1.6 or higher and [migrate](/guides/sl-migration) to the latest Semantic Layer. @@ -240,11 +240,11 @@ Archived release notes for dbt Cloud from 2023 - Team and Enterprise customers can use 1,000 Queried Metrics per month for no additional cost on a limited trial basis, subject to reasonable use limitations. Refer to [Billing](/docs/cloud/billing#what-counts-as-a-queried-metric) for more information. - dbt Cloud Developer plans and dbt Core users can define metrics but won't be able to query them with integrated tools. - + ## September 2023 -- +- dbt Cloud now has two distinct job types: [deploy jobs](/docs/deploy/deploy-jobs) for building production data assets, and [continuous integration (CI) jobs](/docs/deploy/ci-jobs) for checking code changes. These jobs perform fundamentally different tasks so dbt Labs improved the setup experience with better defaults for each. @@ -290,9 +290,9 @@ Archived release notes for dbt Cloud from 2023 - Your project overrides the [generate_schema_name macro](/docs/build/custom-schemas) but it doesn't contain the required prefix `dbt_cloud_pr_`. For details, refer to [Troubleshooting](/docs/deploy/ci-jobs#troubleshooting). - You're using a [non-native Git integration](/docs/deploy/ci-jobs#trigger-a-ci-job-with-the-api). This is because automatic deletion relies on incoming webhooks from Git providers, which is only available through the native integrations. - + -- +- Hello from dbt's Product Documentation team (the stewards of the docs.getdbt.com site): @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun. What a busy summer! We merged 256 PRs between July 1st and August 31. @@ -329,9 +329,9 @@ Archived release notes for dbt Cloud from 2023 * Blog posts published this summer include [Optimizing Materialized Views with dbt](/blog/announcing-materialized-views), [Data Vault 2.0 with dbt Cloud](/blog/data-vault-with-dbt-cloud), and [Create dbt Documentation and Tests 10x faster with ChatGPT](/blog/create-dbt-documentation-10x-faster-with-chatgpt) - We now have two new best practice guides: [How we build our metrics](/best-practices/how-we-build-our-metrics/semantic-layer-1-intro) and [Set up Continuous Integration](/guides/set-up-ci). - + -- +- Previously, when dbt Labs released a new [version](/docs/dbt-versions/core#how-dbt-core-uses-semantic-versioning) in dbt Cloud, the older patch _prerelease_ version and the _latest_ version remained as options in the dropdown menu available in the **Environment settings**. 
Now, when the _latest_ version is released, the _prerelease_ version will be removed and all customers remaining on it will be migrated seamlessly. There will be no interruptions to service when this migration occurs. @@ -339,11 +339,11 @@ Archived release notes for dbt Cloud from 2023 - + ## August 2023 -- +- dbt Labs has deprecated and will be deprecating certain query patterns and replacing them with new conventions to enhance the performance of the dbt Cloud [Discovery API](/docs/dbt-cloud-apis/discovery-api). @@ -469,9 +469,9 @@ Archived release notes for dbt Cloud from 2023 ``` - + -- +- We're excited to announce that we replaced the backend service that powers the Cloud IDE with a more reliable server -- dbt-server. Because this release contains foundational changes, IDE v1.2 requires dbt v1.6 or higher. This significant update follows the rebuild of the IDE frontend last year. We're committed to improving the IDE to provide you with a better experience. @@ -503,11 +503,11 @@ Archived release notes for dbt Cloud from 2023 - `{{this}}` function does not display properly in preview/compile with dbt-server - + ## July 2023 -- +- We’ve introduced significant improvements to the dbt Cloud Scheduler, offering improved performance, durability, and scalability. @@ -539,11 +539,11 @@ Archived release notes for dbt Cloud from 2023 - Legacy Team accounts have a fixed number of run slots. - Both Team and Developer plans are limited to one project each. For larger-scale needs, our [Enterprise plan](https://www.getdbt.com/pricing/) offers features such as audit logging, unlimited job concurrency and projects, and more. - + ## June 2023 -- +- dbt Labs is excited to announce you can now lint and format your dbt code in the dbt Cloud IDE. This is an enhanced development workflow which empowers you to effortlessly prioritize code quality. @@ -563,9 +563,9 @@ Archived release notes for dbt Cloud from 2023 - + -- +- dbt Cloud CI is a critical part of the analytics engineering workflow. Large teams rely on process to ensure code quality is high, and they look to dbt Cloud CI to automate testing code changes in an efficient way, enabling speed while keep the bar high. With status checks directly posted to their dbt PRs, developers gain the confidence that their code changes will work as expected in production, and once you’ve grown accustomed to seeing that green status check in your PR, you won’t be able to work any other way. @@ -583,9 +583,9 @@ Archived release notes for dbt Cloud from 2023 To learn more, refer to [Continuous integration](/docs/deploy/continuous-integration) and [CI jobs](/docs/deploy/ci-jobs). - + -- +- dbt Labs updated the docs for the [dbt Cloud Administrative API](/docs/dbt-cloud-apis/admin-cloud-api) and they are now available for both [v2](/dbt-cloud/api-v2#/) and [v3](/dbt-cloud/api-v3#/). @@ -595,9 +595,9 @@ Archived release notes for dbt Cloud from 2023 - You can now test endpoints directly from within the API docs. And, you can choose which [regional server](/docs/cloud/about-cloud/access-regions-ip-addresses) to use (North America, APAC, or EMEA). - With the new UI, you can more easily generate code for any endpoint. - + -- +- Hello from the dbt Docs team: @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun! 
First, we’d like to thank the 17 new community contributors to docs.getdbt.com — ✨ @aaronbini, @sjaureguimodo, @aranke, @eiof, @tlochner95, @mani-dbt, @iamtodor, @monilondo, @vrfn, @raginjason, @AndrewRTsao, @MitchellBarker, @ajaythomas, @smitsrr, @leoguyaux, @GideonShils, @michaelmherrera! @@ -624,11 +624,11 @@ Archived release notes for dbt Cloud from 2023 - Add an Azure DevOps example in the [Customizing CI/CD with custom pipelines](/guides/custom-cicd-pipelines) guide. - + ## May 2023 -- +- To continue improving your [Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) development experience, the dbt Labs team continues to work on adding new features, fixing bugs, and increasing reliability ✨. @@ -669,9 +669,9 @@ Archived release notes for dbt Cloud from 2023 - Tooltip for tab name with a long file name is no longer cut off - Lint button should no longer available in main branch - + -- +- New usability and design improvements to the **Run History** dashboard in dbt Cloud are now available. These updates allow people to discover the information they need more easily by reducing the number of clicks, surfacing more relevant information, keeping people in flow state, and designing the look and feel to be more intuitive to use. @@ -697,9 +697,9 @@ Archived release notes for dbt Cloud from 2023 We will retain older run history in cold storage and can make it available to customers who reach out to our Support team. To request older run history info, contact the Support team at [support@getdbt.com](mailto:support@getdbt.com) or use the dbt Cloud application chat by clicking the `?` icon in the dbt Cloud UI. - + -- +- New usability and design improvements to the run details and logs in dbt Cloud are now available. The ability to triage errors in logs is a big benefit of using dbt Cloud's job and scheduler functionality. The updates help make the process of finding the root cause much easier. @@ -713,9 +713,9 @@ Archived release notes for dbt Cloud from 2023 - + -- +- Hello from the dbt Docs team: @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun! First, we’d like to thank the 13 new community contributors to docs.getdbt.com! @@ -751,11 +751,11 @@ Archived release notes for dbt Cloud from 2023 - Added an [FAQ](/faqs/Warehouse/db-connection-dbt-compile) to clarify the common question users have on *Why does dbt compile needs to connect to the database?* - Published a [discourse article](https://discourse.getdbt.com/t/how-to-configure-external-user-email-notifications-in-dbt-cloud/8393) about configuring job notifications for non-dbt Cloud users - + ## April 2023 -- +- ## New features @@ -790,9 +790,9 @@ Archived release notes for dbt Cloud from 2023 * Radio buttons for **Summary** and **Details** in the logs section now consistently update to show the accurate tab selection * IDE no longer throws the console error `Error: Illegal argument` and redirects to the `Something went wrong` page - + -- +- Starting May 15, 2023, we will support only the following `order_by` functionality for the List Runs endpoint: @@ -810,9 +810,9 @@ Archived release notes for dbt Cloud from 2023 For more info, refer to our [documentation](/dbt-cloud/api-v2-legacy#tag/Runs/operation/listRunsForAccount). - + -- +- The dbt Cloud Scheduler now prevents queue clog by canceling unnecessary runs of over-scheduled jobs. 
@@ -822,15 +822,15 @@ Archived release notes for dbt Cloud from 2023 Now, the dbt Cloud scheduler detects when a scheduled job is set to run too frequently and appropriately cancels runs that don’t need to be processed. Specifically, scheduled jobs can only ever have one run of the job in the queue, and if a more recent run gets queued, the early queued run will get canceled with a helpful error message. Users will still need to either refactor the job so it runs faster or change the job schedule to run less often if the job often gets into an over-scheduled state. - + -- +- The Starburst (Trino compatible) connection is now generally available in dbt Cloud. This means you can now use dbt Cloud to connect with Starburst Galaxy, Starburst Enterprise, and self-hosted Trino. This feature is powered by the [`dbt-trino`](https://github.com/starburstdata/dbt-trino) adapter. To learn more, check out our Quickstart guide for [dbt Cloud and Starburst Galaxy](https://docs.getdbt.com/guides/starburst-galaxy). - + -- +- Hello from the dbt Docs team: @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun! We want to share some highlights introduced to docs.getdbt.com in the last month: @@ -864,11 +864,11 @@ Archived release notes for dbt Cloud from 2023 - [dbt Squared: Leveraging dbt Core and dbt Cloud together at scale](/blog/dbt-squared) - [Audit_helper in dbt: Bringing data auditing to a higher level](/blog/audit-helper-for-migration) - + ## March 2023 -- +- dbt Cloud now requires dbt version 1.0 or later. As of March 1, 2023, we removed all instances of older dbt versions from dbt Cloud. @@ -882,9 +882,9 @@ Archived release notes for dbt Cloud from 2023 - [Upgrade Q&A on breaking changes](/docs/dbt-versions/upgrade-dbt-version-in-cloud#upgrading-legacy-versions-under-10) - [Version migration guides](/docs/dbt-versions/core-upgrade) - + -- +- To continue improving your [Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) development experience, the dbt Labs team continue to work on adding new features, fixing bugs, and increasing reliability ✨. @@ -928,9 +928,9 @@ Archived release notes for dbt Cloud from 2023 - Reverting single files now reloads the file contents in the tab. Previously, it didn't reload. - The file tree no longer collapses on the first click when there is a project subdirectory defined. - + -- +- To make the API more scalable and reliable, we've implemented a maximum limit of `100` for all API requests to our `list` endpoints. If API requests exceed the maximum limit parameter of `100`, a user will receive an API error message. @@ -938,19 +938,19 @@ Archived release notes for dbt Cloud from 2023 Refer to the [Pagination](https://docs.getdbt.com/dbt-cloud/api-v2-legacy#section/Pagination) section for more information on this change. - + ## Feb 2023 -- +- You can now use the `--no-partial-parse` flag to disable partial parsing in your dbt Cloud job commands.  Previously, the [`--no-partial-parse` global config](/reference/global-configs/parsing) was only available in dbt Core. For more information, refer to [partial parsing](/reference/parsing#partial-parsing). - + -- +- To continue improving our [Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) experience, the dbt Labs team worked on fixing bugs, increasing reliability, and adding new features ✨. @@ -986,11 +986,11 @@ Archived release notes for dbt Cloud from 2023 - You can now use the `--no-partial-parse` flag to disable partial parsing in your dbt Cloud job commands.  
- Previously, the [`--no-partial-parse` global config](/reference/global-configs/parsing) was only available in dbt Core. For more information, refer to [partial parsing](/reference/parsing#partial-parsing). - + ## January 2023 -- +- In the spirit of continuing to improve our [Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) experience, the dbt Labs team worked on fixing bugs, increasing reliability, and adding new features ✨. @@ -1022,4 +1022,4 @@ Archived release notes for dbt Cloud from 2023 - You can now see repository status in the IDE, and the IDE finds the SSH folder - Scroll bars and download CSV no longer flicker within the preview pane - + diff --git a/website/docs/docs/dbt-versions/release-notes.md b/website/docs/docs/dbt-versions/release-notes.md index 1c924ea7b4d..404147eda96 100644 --- a/website/docs/docs/dbt-versions/release-notes.md +++ b/website/docs/docs/dbt-versions/release-notes.md @@ -36,7 +36,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - **New:** [Unit tests](/docs/build/unit-tests) are now GA in dbt Cloud. Unit tests enable you to test your SQL model logic against a set of static inputs. -- +- Native support in dbt Cloud for Azure Synapse Analytics is now available in [Public Preview](/docs/dbt-versions/product-lifecycles#dbt-cloud)! @@ -51,7 +51,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - Documenting your models. - Scheduling a job to run. - + - **New:** MetricFlow enables you to now add metrics as dimensions to your metric filters to create more complex metrics and gain more insights. Available for all dbt Cloud Semantic Layer users. @@ -59,7 +59,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - **New:** Oauth login support via [Databricks](/docs/cloud/manage-access/set-up-databricks-oauth) is now GA to Enterprise customers. -- +- dbt Explorer's current capabilities — including column-level lineage, model performance analysis, and project recommendations — are now Generally Available for dbt Cloud Enterprise and Teams plans. With Explorer, you can more easily navigate your dbt Cloud project – including models, sources, and their columns – to gain a better understanding of its latest production or staging state. @@ -71,7 +71,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - [Model performance](/docs/collaborate/model-performance) - [Project recommendations](/docs/collaborate/project-recommendations) - + - **New:** Native support for Microsoft Fabric in dbt Cloud is now GA. This feature is powered by the [dbt-fabric](https://github.com/Microsoft/dbt-fabric) adapter. To learn more, refer to [Connect Microsoft Fabric](/docs/cloud/connect-data-platform/connect-microsoft-fabric) and [Microsoft Fabric DWH configurations](/reference/resource-configs/fabric-configs). There's also a [quickstart guide](https://docs.getdbt.com/guides/microsoft-fabric?step=1) to help you get started. @@ -87,7 +87,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - **New**: The [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl) introduces [declarative caching](/docs/use-dbt-semantic-layer/sl-cache), allowing you to cache common queries to speed up performance and reduce query compute costs. Available for dbt Cloud Team or Enterprise accounts. -- +- The **Keep on latest version** setting is now Generally Available (previously Public Preview). 
@@ -97,13 +97,13 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + - **Behavior change:** Introduced the `require_resource_names_without_spaces` flag, opt-in and disabled by default. If set to `True`, dbt will raise an exception if it finds a resource name containing a space in your project or an installed package. This will become the default in a future version of dbt. Read [No spaces in resource names](/reference/global-configs/legacy-behaviors#no-spaces-in-resource-names) for more information. ## April 2024 -- +- @@ -111,7 +111,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + - **Behavior change:** Introduced the `require_explicit_package_overrides_for_builtin_materializations` flag, opt-in and disabled by default. If set to `True`, dbt will only use built-in materializations defined in the root project or within dbt, rather than implementations in packages. This will become the default in May 2024 (dbt Core v1.8 and dbt Cloud "Keep on latest version"). Read [Package override for built-in materialization](/reference/global-configs/legacy-behaviors#package-override-for-built-in-materialization) for more information. @@ -144,7 +144,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - **New:** You can access a list of your [exports](/docs/use-dbt-semantic-layer/exports) with the new list saved-queries command by adding `--show-exports` - **New:** The dbt Semantic Layer and [Tableau Connector](/docs/use-dbt-semantic-layer/tableau) now supports relative date filters in Tableau. -- +- You can now use the [exports](/docs/use-dbt-semantic-layer/exports) feature with [dbt Semantic Layer](/docs/use-dbt-semantic-layer/dbt-sl), allowing you to query reliable metrics and fast data reporting. Exports enhance the saved queries feature, allowing you to write commonly used queries directly within your data platform using dbt Cloud's job scheduler. @@ -154,9 +154,9 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + -- +- @@ -168,9 +168,9 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + -- +- @@ -182,10 +182,10 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + -- +- You can now [override the dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#override-dbt-version) that's configured for the development environment within your project and use a different version — affecting only your user account. This lets you test new dbt features without impacting other people working on the same project. And when you're satisfied with the test results, you can safely upgrade the dbt version for your project(s). @@ -193,9 +193,9 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + -- +- You can now edit, format, or lint files and execute dbt commands directly in your primary git branch in the [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud). This enhancement is available across various repositories, including native integrations, imported git URLs, and managed repos. @@ -217,7 +217,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + - **Enhancement:** The dbt Semantic Layer [Google Sheets integration](/docs/use-dbt-semantic-layer/gsheets) now exposes a note on the cell where the data was requested, indicating clearer data requests. 
The integration also now exposes a new **Time Range** option, which allows you to quickly select date ranges. - **Enhancement:** The [GraphQL API](/docs/dbt-cloud-apis/sl-graphql) includes a `requiresMetricTime` parameter to better handle metrics that must be grouped by time. (Certain metrics defined in MetricFlow can't be looked at without a time dimension). @@ -238,7 +238,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show ## January 2024 -- +- Hello from the dbt Docs team: @mirnawong1, @matthewshaver, @nghi-ly, and @runleonarun! First, we’d like to thank the 10 new community contributors to docs.getdbt.com :pray: What a busy start to the year! We merged 110 PRs in January. @@ -269,13 +269,13 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - Added saved queries to [Metricflow commands](https://docs.getdbt.com/docs/build/metricflow-commands#list-saved-queries) - Removed [as_text docs](https://github.com/dbt-labs/docs.getdbt.com/pull/4726) that were wildly outdated - + - **New:** New metric type that allows you to measure conversion events. For example, users who viewed a web page and then filled out a form. For more details, refer to [Conversion metrics](/docs/build/conversion). - **New:** Instead of specifying the fully qualified dimension name (for example, `order__user__country`) in the group by or filter expression, you now only need to provide the primary entity and dimensions name, like `user__county`. - **New:** You can now query the [saved queries](/docs/build/saved-queries) you've defined in the dbt Semantic Layer using [Tableau](/docs/use-dbt-semantic-layer/tableau), [GraphQL API](/docs/dbt-cloud-apis/sl-graphql), [JDBC API](docs/dbt-cloud-apis/sl-jdbc), and the [dbt Cloud CLI](/docs/cloud/cloud-cli-installation). -- +- By default, dbt parses all the files in your project at the beginning of every dbt invocation. Depending on the size of your project, this operation can take a long time to complete. With the new partial parsing feature in dbt Cloud, you can reduce the time it takes for dbt to parse your project. When enabled, dbt Cloud parses only the changed files in your project instead of parsing all the project files. As a result, your dbt invocations will take less time to run. @@ -283,7 +283,7 @@ The following features are new or enhanced as part of our [dbt Cloud Launch Show - + - **Enhancement:** The YAML spec parameter `label` is now available for Semantic Layer metrics in [JDBC and GraphQL APIs](/docs/dbt-cloud-apis/sl-api-overview). This means you can conveniently use `label` as a display name for your metrics when exposing them. - **Enhancement:** Added support for `create_metric: true` for a measure, which is a shorthand to quickly create metrics. This is useful in cases when metrics are only used to build other metrics. diff --git a/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md b/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md index 79825084709..b0233857a5e 100644 --- a/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md +++ b/website/docs/docs/use-dbt-semantic-layer/sl-faqs.md @@ -19,7 +19,7 @@ The dbt Semantic Layer is powered by MetricFlow, which is a source-available com ## Overview of the dbt Semantic Layer - + The primary value of the dbt Semantic Layer is to centralize and bring consistency to your metrics across your organization. 
Additionally, it allows you to:

- **Avoid code duplication** by writing the metric once and reusing it across different analyses.
- **Simplify your code** by not duplicating metric logic and allowing MetricFlow to perform complex calculations for you.
- **Empower stakeholders** with rich context and flexible, yet governed experiences.

-

+

-

+

dbt Metrics is the now-deprecated dbt package that was used to define metrics within dbt. dbt Metrics has been replaced with [MetricFlow](/docs/build/about-metricflow), a more flexible and powerful engine, which powers the foundation of the dbt Semantic Layer today.

MetricFlow introduces SQL generation to the dbt Semantic Layer and offers more advanced capabilities than dbt Metrics, such as:

- **Query construction** — MetricFlow iteratively constructs queries using a dataflow plan, our internal DAG for generating SQL. By comparison, dbt Metrics relied on templated Jinja to construct SQL.
- **Joins** — MetricFlow also has a sophisticated way of handling joins, which dbt Metrics did not support. With MetricFlow, you can effortlessly access all valid dimensions for your metrics on the fly, even when they are defined in different semantic models.

-

+

-

+

Yes, absolutely! Join the [dbt Slack community](https://app.slack.com/client/T0VLPD22H) and the [#dbt-cloud-semantic-layer](https://getdbt.slack.com/archives/C046L0VTVR6) Slack channel for all things related to the dbt Semantic Layer.

-

+

-

+

The dbt Semantic Layer is flexible enough to work with many common modeling approaches. It references dbt models, which means how you configure your Semantic Layer will mirror the modeling approach you've taken with the underlying data.

The primary consideration is the flexibility and performance of the underlying queries.

- A fully denormalized data model is simpler and will be materialized to a specific grain, but it won't be able to join to other tables. While the dbt Semantic Layer will work for both cases, it's best to allow MetricFlow to handle some level of denormalization for you in order to provide more flexibility to metric consumers.

-

+

-

+

The dbt Semantic Layer measures usage in distinct 'Queried Metrics'. Refer to [Billing](/docs/cloud/billing#what-counts-as-a-queried-metric) to learn more about pricing.

-

+

## Availability

-

+

The dbt Semantic Layer supports the following data platforms:

- Snowflake
- BigQuery
- Databricks
- Redshift

Support for other data platforms, such as Fabric and Trino, isn't available at this time. If you're interested in using the dbt Semantic Layer with a data platform not on the list, please [contact us](https://www.getdbt.com/get-started).

-

+

-

+

Yes, the dbt Semantic Layer is compatible with [dbt v1.6 or higher](/docs/dbt-versions/upgrade-dbt-version-in-cloud).

-

+

-

+

Yes, dbt Cloud [Enterprise or Team](https://www.getdbt.com/pricing) plan customers can access the dbt Semantic Layer.

-

+

-

+

The dbt Semantic Layer is proprietary to dbt Cloud; however, some components of it are open source.

dbt Core users can use MetricFlow features, like defining metrics in their projects, without a dbt Cloud plan. dbt Core users can also query their semantic layer locally using the command line. However, they won't be able to use the [APIs](/docs/dbt-cloud-apis/sl-api-overview) or [available integrations](/docs/use-dbt-semantic-layer/avail-sl-integrations) to access metrics dynamically.
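For example, a dbt Core user can preview a metric locally with the MetricFlow CLI (the metric name is illustrative):

```shell
mf query --metrics order_total --group-by metric_time
```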
- + - + If you're interested in the this type of implementation, please reach out to us [here](https://www.getdbt.com/get-started). - + ## How does the dbt Semantic Layer work? - + You can use tables and dbt models to calculate metrics as an option, but it's a static approach that is rigid and cumbersome to maintain. That’s because metrics are seldom useful on their own: they usually need dimensions, grains, and attributes for business users to analyze (or slice and dice) data effectively. If you create a table with a metric, you’ll need to create numerous other tables derived from that table to show the desired metric cut by the desired dimension or time grain. Mature data models have thousands of dimensions, so you can see how this will quickly result in unnecessary duplication, maintenance, and costs. It's also incredibly hard to predict all the slices of data that a user is going to need ahead of time. With the dbt Semantic Layer, you don’t need to pre-join or build any tables; rather, you can simply add a few lines of code to your semantic model, and that data will only be computed upon request. - + - + No, you don't. When querying the dbt Semantic Layer through the [Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview), you're not materializing any data by default. The dbt Semantic Layer dynamically computes the metric using the underlying data tables. Then it returns the output to the end user. - + - + The dbt Semantic Layer does not store a physical copy of your data. It uses underlying tables to construct or compute the requested output. - + - + The dbt Semantic Layer is part of dbt Cloud. It allows data teams to define metrics once, centrally, and access them from any integrated analytics tool, ensuring consistent answers across diverse datasets. In providing this service, dbt Labs permits clients to access Semantic Layer metrics. Client data passes through the Semantic Layer on the way back from the data warehouse. @@ -134,21 +134,21 @@ dbt Labs employees cannot access cached data during normal business operations a No client warehouse data is retained on dbt Labs's systems. We offer a caching solution to optimize query performance. The caching feature uses client data warehouse storage rather than being stored on dbt Labs’s systems. In addition, this feature is activated only through a client opt-in. Therefore, caching is always in client hands and at client discretion - + - + Yes it does. - + - + MetricFlow is hosted in dbt Cloud. Requests from the [Semantic Layer APIs](/docs/dbt-cloud-apis/sl-api-overview) are routed from our API gateway to MetricFlow, which generates the SQL to compute what's requested by the user. MetricFlow hands the SQL back to our gateway, which then executes it against the data platform. - + - + 1. You define [semantic models](/docs/build/semantic-models) in YAML files that describe your data, including entities (for joins), measures (with aggregation types as a building block to your metrics), and dimensions (to slice and dice your metrics). @@ -157,133 +157,133 @@ MetricFlow is hosted in dbt Cloud. Requests from the [Semantic Layer APIs](/docs Read our [dbt Semantic Layer quickstart](/guides/sl-snowflake-qs) guide for more information. - + - + Beginning in March 2024, the dbt Semantic Layer will offer two layers of caching: - The result cache, which caches query results in the data platform so that subsequent runs of the same query are faster. - A declarative cache which also lives in your data platform. 
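As a rough sketch (the saved query name and metric are hypothetical), you opt into the declarative cache per saved query in YAML:

```yaml
saved_queries:
  - name: monthly_revenue
    query_params:
      metrics: [revenue]
      group_by: [TimeDimension('metric_time', 'month')]
    config:
      cache:
        enabled: true  # opts this saved query into the declarative cache
```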
-
+
 
-
+
 
 No, the dbt Semantic Layer is flexible enough to work with many data modeling approaches, including snowflake schemas, star schemas, data vaults, or other normalized tables.
 
-
+
 
-
+
 
 MetricFlow always tries to generate SQL in the most performant way, while ensuring the metric value is correct. It generates SQL in a way that allows us to add optimizations, like predicate pushdown, to ensure we don’t perform full table scans.
 
-
+
 
-
+
 
 The latency of query runtimes is low, on the order of milliseconds.
 
-
+
 
-
+
 
 If the underlying metric aggregation is different, then these would be different metrics. However, if teams have different definitions because they're using specific filters or dimensions, it's still the same metric. They're just using it in different ways. This can be managed by adjusting how the metric is viewed in downstream tools or setting up [saved queries](/docs/build/saved-queries) to handle the various permutations of it.
 
-
+
 
 ## Build metrics and semantic models
 
-
+
 
 MetricFlow does not currently support custom aggregations on measures. You can find supported aggregation types [here](/docs/build/measures#aggregation).
 
-
+
 
-
+
 
 [Joins](/docs/build/join-logic) are identified through [entities](/docs/build/entities) defined in a [semantic model](/docs/build/semantic-models). These are the keys in your dataset. You can specify `foreign`, `unique`, `primary`, or `natural` joins.
 
 With multiple semantic models and the entities within them, MetricFlow creates a graph using the semantic models as nodes and the join paths as edges to perform joins automatically. MetricFlow chooses the appropriate join type and avoids fan-out or chasm joins with other tables based on the entity types. You can find supported join types [here](/docs/build/join-logic#types-of-joins).
 
-
+
 
-
+
 
 Expr (short for “expression”) allows you to put any arbitrary SQL supported by your data platform in any definition of a measure, entity, or dimension. This is useful if you want the object name in the semantic model to be different from what it’s called in the database, or if you want to include logic in the definition of the component you're creating.
 
 The MetricFlow spec is deliberately opinionated, and we offer “expr” as an escape hatch to allow developers to be more expressive.
 
-
+
 
-
+
 
 Yes, we approach this by specifying a [dimension](/docs/build/dimensions) that a metric cannot be aggregated across (such as `time`). You can learn how to configure semi-additive dimensions [here](/docs/build/measures#non-additive-dimensions).
 
-
+
 
-
+
 
 Yes, while [entities](/docs/build/entities) must be defined under “entities,” they can be queried like dimensions in downstream tools. Additionally, if the entity isn't used to perform joins across your semantic models, you may optionally define it as a dimension.
 
-
+
 
 ## Available integrations
 
-
+
 
 There are a number of data applications that have integrations with the dbt Semantic Layer, including Tableau, Google Sheets, Hex, and Mode, among others. Refer to [Available integrations](/docs/use-dbt-semantic-layer/avail-sl-integrations) for more information.
 
-
+
 
-
+
 
 You can use [exports](/docs/use-dbt-semantic-layer/exports) to materialize your metrics into a table or view in your data platform. From there, you can connect your visualization tool to your data platform.
 
 Although this approach doesn't provide the dynamic benefits of the dbt Semantic Layer, you still benefit from centralized metrics and from using MetricFlow configurations to define, generate, and compute SQL for your metrics.
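 For illustration, a hypothetical export configured on a saved query might look like the following sketch; the names are placeholders, `export_as` accepts `table` or `view`, and the schema override is optional:

 ```yaml
 saved_queries:
   - name: weekly_order_metrics # hypothetical names throughout
     query_params:
       metrics:
         - order_total
       group_by:
         - TimeDimension('metric_time', 'week')
     exports:
       - name: weekly_order_metrics
         config:
           export_as: table # materialize as a table; `view` is also supported
           schema: analytics # optional schema override
 ```

 Running the export then writes the computed metric values into that table, which any visualization tool can read directly.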
-
+
 
-
+
 
 Creating an [export](/docs/use-dbt-semantic-layer/exports) allows you to bring your governed metric definitions into your data platform as a table or view. This means your metric logic is managed centrally in dbt, instead of as a view in your data platform, and ensures that metric values remain consistent across all interfaces.
 
-
+
 
-
+
 
 Yes, all of our interfaces or APIs expose metric descriptions, which you can surface in downstream tools.
 
-
+
 
 ## Permissions and access
 
-
+
 
 Currently, the credentials you configure when setting up the dbt Semantic Layer are used for every request. Any physical access policies you have tied to your credentials will be respected.
 
 We are currently working on introducing more fine-grained access controls, including user-level access and group credentials, that enable flexible granular permissions.
 
-
+
 
 ## Implementation
 
-
+
 
 We recommend building your semantic layer on top of the [marts layer](/best-practices/how-we-structure/5-semantic-layer-marts), which represents the clean and transformed data from your dbt models.
 
-
+
 
-
+
 
 Semantic layer credentials are different from the credentials you use to run dbt models. Specifically, we recommend a less privileged set of credentials since consumers are only reading data.
 
-
+
 
-
+
 
 Currently, semantic models can be created from dbt models that live across projects ([dbt Mesh](/best-practices/how-we-mesh/mesh-1-intro)). In the future, users will also be able to use mesh concepts on semantic objects and define metrics across dbt projects.
 
-
+
 
-
+
 
 If you're using the legacy Semantic Layer, we highly recommend you [upgrade your dbt version](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to dbt v1.6 or higher to use the latest dbt Semantic Layer. Refer to the dedicated [migration guide](/guides/sl-migration) for more info.
 
-
+
 
diff --git a/website/docs/guides/core-cloud-2.md b/website/docs/guides/core-cloud-2.md
index fe2d084af03..3c7c21663c4 100644
--- a/website/docs/guides/core-cloud-2.md
+++ b/website/docs/guides/core-cloud-2.md
@@ -64,31 +64,31 @@ If you have rolled out your own dbt Core deployment, you have probably come up w
 
 As you plan your move, consider your workflow and team layout to ensure a smooth transition. Here are some key considerations to keep in mind:
 
-
+
 
 You don’t need to move every team and every developer’s workflow all at once. Many customers with large dbt deployments start by moving one team and one project. Once the benefits of a consolidated platform are clear, move the rest of your teams and workflows. While long-term ‘hybrid’ deployments can be challenging, they may make sense as a temporary on-ramp.
 
-
+
 
-
+
 
 Assess the users or personas involved in the pre-move, during the move, and post-move.
 
 - **Administrators**: Plan for new [access controls](/docs/cloud/manage-access/about-user-access) in dbt Cloud, such as deciding what teams can manage themselves and what should be standardized. Determine who will be responsible for setting up and maintaining projects, data platform connections, and environments.
 - **Data developers** (data analysts, data engineers, analytics engineers, business analysts): Determine onboarding order, workflow adaptation in dbt Cloud, training on [dbt Cloud CLI](/docs/cloud/cloud-cli-installation) or [dbt Cloud IDE](/docs/cloud/dbt-cloud-ide/develop-in-the-cloud) usage, and role changes.
- **Data consumers:** Discover data insights by using [dbt Explorer](/docs/collaborate/explore-projects) to view your project's resources (such as models, tests, and metrics) and their lineage to gain a better understanding of its latest production state.
 
-
+
 
-
+
 
 If you have multiple teams of dbt developers, think about how to start your onboarding sequence for dbt Cloud:
 
 - Start with downstream teams (like business-embedded teams) who may benefit from the dbt Cloud IDE as a dev experience (less technical users) and sharing features (like auto-deferral and dbt Explorer) for their stakeholders, moving to more technical teams later.
 - Consider setting up a [CI job](/docs/deploy/ci-jobs) in dbt Cloud (even before development or production jobs) to streamline development workflows. This is especially beneficial if there's no existing CI process.
 
-
+
 
-
+
 
 Discover how dbt Cloud can help simplify development, orchestration, and testing:
 
 - **Development**: Develop dbt models, allowing you to build, test, run, and version control your dbt projects using the dbt Cloud CLI (command line interface or code editor) or dbt Cloud IDE (browser-based).
@@ -100,23 +100,23 @@ Discover how dbt Cloud can help simplify development, orchestration, and testing
   - For many teams, dbt Cloud CI represents a major improvement compared to their previous development workflows.
 - **How are you defining tests today?**: While testing production data is important, it’s not the most efficient way to catch logical errors introduced by developers. You can use [unit testing](/docs/build/unit-tests) to validate your SQL modeling logic on a small set of static inputs *before* you materialize your full model in production.
 
-
+
 
-
+
 
 Transition to dbt Cloud's [access control](/docs/cloud/manage-access/about-user-access) mechanisms to ensure security and proper access management. dbt Cloud administrators can use dbt Cloud's permission model to control user-level access in a dbt Cloud account:
 
 - **License-based access controls:** Users are configured with account-wide license types. These licenses control the things a user can do within the application: view project metadata, develop changes within those projects, or administer access to those projects.
 - **Role-based Access Control (RBAC):** Users are assigned to *groups* with specific permissions on specific projects or all projects in the account. A user may be a member of multiple groups, and those groups may have permissions on multiple projects.
 
-
+
 
-
+
 
 If you require isolation between production and pre-production data environments due to sensitive data, dbt Cloud can support Development, Staging, and Production data [environments](/docs/dbt-cloud-environments). This provides developers with the benefits of an enhanced workflow while ensuring isolation between Staging and Production data, and locking down permissions on Prod.
 
-
+
 
 ## Move to dbt Cloud
 
diff --git a/website/docs/reference/dbt-jinja-functions/statement-blocks.md b/website/docs/reference/dbt-jinja-functions/statement-blocks.md
index a5e40b1a7f0..3be706fafb7 100644
--- a/website/docs/reference/dbt-jinja-functions/statement-blocks.md
+++ b/website/docs/reference/dbt-jinja-functions/statement-blocks.md
@@ -35,7 +35,7 @@ statement(name=None, fetch_result=False, auto_begin=True)
 
 When executing a `statement`, dbt needs to understand how to resolve references to other dbt models or resources.
If you are already `ref`ing the model outside of the statement block, the dependency will be automatically inferred, but otherwise you will need to [force the dependency](/reference/dbt-jinja-functions/ref#forcing-dependencies) with `-- depends_on`. - + ```sql -- depends_on: {{ ref('users') }} @@ -49,9 +49,9 @@ When executing a `statement`, dbt needs to understand how to resolve references */ {%- endcall %} ``` - + - + ```sql @@ -67,7 +67,7 @@ When executing a `statement`, dbt needs to understand how to resolve references select id * 2 from {{ ref('users') }} ``` - + __Args__: - `name` (string): The name for the result set returned by this statement diff --git a/website/snippets/_config-dbt-version-check.md b/website/snippets/_config-dbt-version-check.md index 01ad895f6e2..5c21c0fa63f 100644 --- a/website/snippets/_config-dbt-version-check.md +++ b/website/snippets/_config-dbt-version-check.md @@ -3,12 +3,12 @@ Starting in 2024, when you select **Keep on latest version** in dbt Cloud, dbt w dbt Labs is committed to zero breaking changes for code in dbt projects, with ongoing releases to dbt Cloud and new versions of dbt Core. We also recommend these best practices: - + If you install dbt packages for use in your project, whether the package is maintained by your colleagues or a member of the open source dbt community, we recommend pinning the package to a specific revision or `version` boundary. Since v1.7, dbt manages this out-of-the-box by _locking_ the version/revision of packages in development in order to guarantee predictable builds in production. To learn more, refer to [Predictable package installs](/reference/commands/deps#predictable-package-installs). - - + + If you maintain dbt packages, whether on behalf of your colleagues or members of the open source community, we recommend writing defensive code that checks to verify that other required packages and global macros are available. 
For example, if your package depends on the availability of a `date_spine` macro in the global `dbt` namespace, you can write: @@ -28,4 +28,4 @@ If you maintain dbt packages, whether on behalf of your colleagues or members of - \ No newline at end of file + diff --git a/website/src/components/expandable/index.js b/website/src/components/expandable/index.js index eb1dc966ad1..1e971c1e182 100644 --- a/website/src/components/expandable/index.js +++ b/website/src/components/expandable/index.js @@ -1,23 +1,22 @@ /* eslint-disable */ -import React, { useState } from 'react'; +import React, { useState, useEffect } from 'react'; import styles from './styles.module.css'; function slugify(text) { return text.toString().toLowerCase() - .normalize('NFD') // Normalize to NFD Unicode form - .replace(/[\u0300-\u036f]/g, '') // Remove diacritics - .replace(/\s+/g, '-') // Replace spaces with - - .replace(/[^\w\-]+/g, '') // Remove all non-word chars - .replace(/\-\-+/g, '-') // Replace multiple - with single - - .replace(/^-+/, '') // Trim - from start - .replace(/-+$/, ''); // Trim - from end + .normalize('NFD') // normalize to nfd unicode form + .replace(/[\u0300-\u036f]/g, '') // remove diacritics + .replace(/\s+/g, '-') // replace spaces with - + .replace(/[^\w\-]+/g, '') // remove all non-word chars + .replace(/\-\-+/g, '-') // replace multipl - with a single - + .replace(/^-+/, '') // trim - from the start + .replace(/-+$/, ''); // trim - from the end } -function expandable({ children, alt_header = null }) { - if(!alt_header) { return null; } +function Expandable({ children, alt_header = null }) { + if (!alt_header) { return null; } const [isOn, setOn] = useState(false); - // generate a slug from the alt_header const anchorId = slugify(alt_header); const handleToggleClick = (event) => { @@ -25,27 +24,65 @@ function expandable({ children, alt_header = null }) { setOn(current => !current); }; + const handleCopyClick = (event) => { + event.preventDefault(); + event.stopPropagation(); + const url = `${window.location.href.split('#')[0]}#${anchorId}`; + navigator.clipboard.writeText(url).then(() => { + showCopyPopup(); + }); + }; + + const showCopyPopup = () => { + const popup = document.createElement('div'); + popup.classList.add('copy-popup'); + popup.innerText = 'Link copied!'; + + // Add close button ('x') + const closeButton = document.createElement('span'); + closeButton.classList.add('close-button'); + closeButton.innerHTML = ' ×'; // '×' symbol for 'x' + closeButton.addEventListener('click', () => { + if (document.body.contains(popup)) { + document.body.removeChild(popup); + } + }); + popup.appendChild(closeButton); + + document.body.appendChild(popup); + + setTimeout(() => { + if (document.body.contains(popup)) { + document.body.removeChild(popup); + } + }, 3000); +}; + +useEffect(() => { + if (window.location.hash === `#${anchorId}`) { + setOn(true); + const element = document.getElementById(anchorId); + if (element) { + element.scrollIntoView({ behavior: 'smooth' }); + } + } +}, [anchorId]); + return ( -
- +
+
  - {alt_header} - -
+ + {alt_header} + + +
+
{children}
); } -export default expandable; +export default Expandable; diff --git a/website/src/components/expandable/styles.module.css b/website/src/components/expandable/styles.module.css index 9345b7986e3..394418e41ee 100644 --- a/website/src/components/expandable/styles.module.css +++ b/website/src/components/expandable/styles.module.css @@ -4,14 +4,49 @@ text-decoration: none; transition: text-decoration 0.3s; /* Smooth transition */ font-weight: 600; - margin-bottom: 20px; + margin-bottom: 10px; } -:local(.link:hover) :local(.headerText), -:local(.link:focus) :local(.headerText) { - text-decoration: underline; +:local(.header) { + display: flex; + align-items: center; cursor: pointer; -} + color: rgba(18, 12, 12, 0.862); + font-weight: 550; + margin-bottom: 10px; + text-decoration: none; + } + + + :local(.copyIcon) { + width: 12px; + height: 12px; + background-image: url('/img/copy.png'); + background-size: contain; + background-repeat: no-repeat; + opacity: 0; + transition: opacity 0.3s ease-in-out; + margin-left: 4px; + vertical-align: middle; + cursor: pointer; + } + + :local(.header:hover) :local(.copyIcon) { + opacity: 1; + } + + .copy-popup { + position: fixed; + top: 10px; + left: 50%; + transform: translateX(-50%); + background-color: #047377; + color: rgb(236, 236, 236); + padding: 10px; + border-radius: 5px; + z-index: 9999; + } + :local(.toggle)::before { content: ''; @@ -46,10 +81,29 @@ border-color: rgb(253, 153, 83); } +:local(.copyPopup) { + position: fixed; + top: 20px; + left: 50%; + transform: translateX(-50%); + background-color: #000; + color: #fff; + padding: 10px; + border-radius: 5px; + box-shadow: 0 0 10px rgba(0, 0, 0, 0.1); + z-index: 1000; + opacity: 0; + transition: opacity 0.3s ease-in-out; + } + + :local(.copyPopup.show) { + opacity: 1; + } + .expandableContainer :local(.body) { - margin-top: 10px; + margin-top: 2px; margin-left: .5em; - padding: 10px; + padding: 5px; background-color: transparent; } @@ -58,6 +112,10 @@ color: rgba(255, 255, 255, 0.801); /* White text in dark mode */ } +:local(html[data-theme='dark'] details .headerText) { + color: rgba(18, 12, 12, 0.862); /* this adds black text inside details in dark mode */ + } + :local(.body > p:last-child) { margin-bottom: 0px; @@ -81,6 +139,6 @@ } .expandableContainer { - margin-bottom: 10px; /* Adjust this value as needed to create space */ + margin-bottom: 5px; /* Adjust this value as needed to create space */ } \ No newline at end of file diff --git a/website/src/theme/MDXComponents/index.js b/website/src/theme/MDXComponents/index.js index 4104632d28c..e284fb2653f 100644 --- a/website/src/theme/MDXComponents/index.js +++ b/website/src/theme/MDXComponents/index.js @@ -44,7 +44,7 @@ import dbtEditor from '@site/src/components/dbt-editor'; import Icon from '@site/src/components/icon'; import Lifecycle from '@site/src/components/lifeCycle'; import detailsToggle from '@site/src/components/detailsToggle'; -import expandable from '@site/src/components/expandable'; +import Expandable from '@site/src/components/expandable'; import ConfettiTrigger from '@site/src/components/confetti/'; const MDXComponents = { @@ -96,7 +96,7 @@ const MDXComponents = { Icon: Icon, Lifecycle: Lifecycle, detailsToggle: detailsToggle, - expandable: expandable, + Expandable: Expandable, ConfettiTrigger: ConfettiTrigger, }; export default MDXComponents; diff --git a/website/static/js/headerLinkCopy.js b/website/static/js/headerLinkCopy.js index 3f7b33b59fb..59a6140eb0f 100644 --- a/website/static/js/headerLinkCopy.js +++ 
b/website/static/js/headerLinkCopy.js @@ -8,7 +8,7 @@ window.addEventListener("load", copyHeader); // separating function from eventlistener to understand they are two separate things function copyHeader () { - const headers = document.querySelectorAll("h2.anchor, h3.anchor, .expandable-anchor.anchor"); + const headers = document.querySelectorAll("h2.anchor, h3.anchor, h4.anchor"); headers.forEach((header) => { header.style.cursor = "pointer"; From e7486102770b25222b74cd0011f7ba8a9bf57c1b Mon Sep 17 00:00:00 2001 From: Mirna Wong <89008547+mirnawong1@users.noreply.github.com> Date: Fri, 24 May 2024 10:33:57 +0100 Subject: [PATCH 05/12] Update column-level-lineage.md --- website/docs/docs/collaborate/column-level-lineage.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/website/docs/docs/collaborate/column-level-lineage.md b/website/docs/docs/collaborate/column-level-lineage.md index 3dfeca7be6f..bcd77dc83fe 100644 --- a/website/docs/docs/collaborate/column-level-lineage.md +++ b/website/docs/docs/collaborate/column-level-lineage.md @@ -49,6 +49,8 @@ When exploring your data products, navigating column lineage allows analytics en ## Caveats +Refer to the following CLL caveats or limitations as you navigate dbt Explorer. + ### Column usage Column-level lineage reflects the lineage from `select` statements in your models' SQL code. It won't reflect other usage like joins and filters. From cc8b28d90521b030853f319305200dc2d0b751cc Mon Sep 17 00:00:00 2001 From: Mirna Wong <89008547+mirnawong1@users.noreply.github.com> Date: Fri, 24 May 2024 10:35:08 +0100 Subject: [PATCH 06/12] Update website/docs/docs/collaborate/column-level-lineage.md --- website/docs/docs/collaborate/column-level-lineage.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/collaborate/column-level-lineage.md b/website/docs/docs/collaborate/column-level-lineage.md index bcd77dc83fe..bdd2aa3247f 100644 --- a/website/docs/docs/collaborate/column-level-lineage.md +++ b/website/docs/docs/collaborate/column-level-lineage.md @@ -52,7 +52,7 @@ When exploring your data products, navigating column lineage allows analytics en Refer to the following CLL caveats or limitations as you navigate dbt Explorer. ### Column usage -Column-level lineage reflects the lineage from `select` statements in your models' SQL code. It won't reflect other usage like joins and filters. +Column-level lineage reflects the lineage from `select` statements in your models' SQL code. It doesn't reflect other usage like joins and filters. 
### SQL parsing

From 9ee03eca3fd878af6415fb3254576654820d4423 Mon Sep 17 00:00:00 2001
From: mirnawong1
Date: Fri, 24 May 2024 11:47:04 +0100
Subject: [PATCH 07/12] remove outdated callout

---
 website/docs/docs/dbt-cloud-apis/discovery-api.md | 8 ++------
 website/docs/docs/dbt-cloud-apis/discovery-querying.md | 1 -
 .../docs/dbt-cloud-apis/schema-discovery-environment.mdx | 2 --
 website/snippets/discovery-public-preview-banner.md | 3 ---
 website/snippets/metadata-api-prerequisites.md | 3 +--
 5 files changed, 3 insertions(+), 14 deletions(-)
 delete mode 100644 website/snippets/discovery-public-preview-banner.md

diff --git a/website/docs/docs/dbt-cloud-apis/discovery-api.md b/website/docs/docs/dbt-cloud-apis/discovery-api.md
index 747128cf7bc..438cf431060 100644
--- a/website/docs/docs/dbt-cloud-apis/discovery-api.md
+++ b/website/docs/docs/dbt-cloud-apis/discovery-api.md
@@ -11,20 +11,16 @@ You can access the Discovery API through [ad hoc queries](/docs/dbt-cloud-apis/d
 - You can query the dbt Cloud metadata:
   - At the [environment](/docs/environments-in-dbt) level for both the latest state (use the `environment` endpoint) and historical run results (use `modelByEnvironment`) of a dbt Cloud project in production.
   - At the job level for results on a specific dbt Cloud job run for a given resource type, like `models` or `test`.
-:::tip Public Preview
 
-The Discovery API is currently available in Public Preview for dbt Cloud accounts on a Team or Enterprise plan. It’s available to all multi-tenant and to only select single-tenant accounts (please ask your account team to confirm). Preview features are stable and can be considered for production deployments, but there might still be some planned additions and modifications to product behavior before moving to General Availability. For details, refer to [dbt Product lifecycles](/docs/dbt-versions/product-lifecycles).
 
-:::
+
 
 ## What you can use the Discovery API for
 
-Click the tabs below to learn more about the API's use cases, the analysis you can do, and the results you can achieve by integrating with it.
+Click the following tabs to learn more about the API's use cases, the analysis you can do, and the results you can achieve by integrating with it.
 
 To use the API directly or integrate your tool with it, refer to [Use cases and examples](/docs/dbt-cloud-apis/discovery-use-cases-and-examples) for detailed information.
 
diff --git a/website/docs/docs/dbt-cloud-apis/discovery-querying.md b/website/docs/docs/dbt-cloud-apis/discovery-querying.md
index bd5527273ce..4344d2de4e8 100644
--- a/website/docs/docs/dbt-cloud-apis/discovery-querying.md
+++ b/website/docs/docs/dbt-cloud-apis/discovery-querying.md
@@ -27,7 +27,6 @@ Once you've created a token, you can use it in the Authorization header of reque
 
 3. For specific query points, refer to the [schema documentation](/docs/dbt-cloud-apis/discovery-schema-job).
- ## Run queries using HTTP requests You can run queries by sending a `POST` request to the Discovery API, making sure to replace: diff --git a/website/docs/docs/dbt-cloud-apis/schema-discovery-environment.mdx b/website/docs/docs/dbt-cloud-apis/schema-discovery-environment.mdx index a89d8f31962..c28cc17e3c4 100644 --- a/website/docs/docs/dbt-cloud-apis/schema-discovery-environment.mdx +++ b/website/docs/docs/dbt-cloud-apis/schema-discovery-environment.mdx @@ -6,8 +6,6 @@ id: "discovery-schema-environment" import { QueryArgsTable, SchemaTable } from "./schema"; - - The environment object allows you to query information about a particular model based on `environmentId`. The [Example queries](#example-queries) illustrate a few fields you can query with this `environment` object. Refer to [Fields](#fields) to view the entire schema, which provides all possible fields you can query. diff --git a/website/snippets/discovery-public-preview-banner.md b/website/snippets/discovery-public-preview-banner.md deleted file mode 100644 index b5ac16cdf9c..00000000000 --- a/website/snippets/discovery-public-preview-banner.md +++ /dev/null @@ -1,3 +0,0 @@ -:::info Public Preview -This feature is currently in Public Preview and subject to change. If you want to provide feedback, please [contact us](mailto:metadata@dbtlabs.com). -::: diff --git a/website/snippets/metadata-api-prerequisites.md b/website/snippets/metadata-api-prerequisites.md index 6e2d1550223..eb66b80bc2a 100644 --- a/website/snippets/metadata-api-prerequisites.md +++ b/website/snippets/metadata-api-prerequisites.md @@ -2,5 +2,4 @@ - dbt Cloud [multi-tenant](/docs/cloud/about-cloud/tenancy#multi-tenant) or [single tenant](/docs/cloud/about-cloud/tenancy#single-tenant) account - You must be on a [Team or Enterprise plan](https://www.getdbt.com/pricing/) -- Your projects must be on dbt version 1.0 or higher. Refer to [Version migration guides](/docs/dbt-versions/core-upgrade) to upgrade - +- Your projects must be on dbt version 1.0 or higher. Refer to [Upgrade dbt version in Cloud](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to upgrade. From fedd9ad6f2ded2c0265de42569690ab28ede744c Mon Sep 17 00:00:00 2001 From: Dave Haworth Date: Fri, 24 May 2024 08:57:10 -0500 Subject: [PATCH 08/12] Add reference to VPCE ID network policies for SF OAuth docs --- website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md index 192ff51cb90..3f4b5668cf3 100644 --- a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md +++ b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md @@ -103,7 +103,7 @@ This error might be because of a configuration issue in the Snowflake OAuth flow * In the Snowflake OAuth flow, `role` in the profile config is not optional, as it does not inherit from the project connection config. So each user must supply their role, regardless of whether it is provided in the project connection. #### Server error 500 -If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses) on a Snowflake account level. 
+If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses), or [VPC Endpoint ID when connecting over PrivateLink](https://docs.getdbt.com/docs/cloud/secure/snowflake-privatelink#configuring-network-policies), on a Snowflake account level. Enterprise customers who have single-tenant deployments will have a different range of IP addresses (network CIDR ranges) to allow list. From 890835212955f3a120ad3357f00a91cd1a53c22c Mon Sep 17 00:00:00 2001 From: Mirna Wong <89008547+mirnawong1@users.noreply.github.com> Date: Fri, 24 May 2024 15:46:19 +0100 Subject: [PATCH 09/12] Update website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md --- website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md index 3f4b5668cf3..460934e10c4 100644 --- a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md +++ b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md @@ -103,7 +103,7 @@ This error might be because of a configuration issue in the Snowflake OAuth flow * In the Snowflake OAuth flow, `role` in the profile config is not optional, as it does not inherit from the project connection config. So each user must supply their role, regardless of whether it is provided in the project connection. #### Server error 500 -If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses), or [VPC Endpoint ID when connecting over PrivateLink](https://docs.getdbt.com/docs/cloud/secure/snowflake-privatelink#configuring-network-policies), on a Snowflake account level. +If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses), or [VPC Endpoint ID when connecting over PrivateLink](/docs/cloud/secure/snowflake-privatelink#configuring-network-policies), on a Snowflake account level. Enterprise customers who have single-tenant deployments will have a different range of IP addresses (network CIDR ranges) to allow list. From 20cfffd938325b7b2a718c64ff683e9b41e71760 Mon Sep 17 00:00:00 2001 From: Mirna Wong <89008547+mirnawong1@users.noreply.github.com> Date: Fri, 24 May 2024 15:50:16 +0100 Subject: [PATCH 10/12] Update website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md --- website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md index 460934e10c4..1cd24c16481 100644 --- a/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md +++ b/website/docs/docs/cloud/manage-access/set-up-snowflake-oauth.md @@ -103,7 +103,7 @@ This error might be because of a configuration issue in the Snowflake OAuth flow * In the Snowflake OAuth flow, `role` in the profile config is not optional, as it does not inherit from the project connection config. So each user must supply their role, regardless of whether it is provided in the project connection. 
#### Server error 500 -If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses), or [VPC Endpoint ID when connecting over PrivateLink](/docs/cloud/secure/snowflake-privatelink#configuring-network-policies), on a Snowflake account level. +If you experience a 500 server error when redirected from Snowflake to dbt Cloud, double-check that you have allow-listed [dbt Cloud's IP addresses](/docs/cloud/about-cloud/access-regions-ip-addresses), or [VPC Endpoint ID (for PrivateLink connections)](/docs/cloud/secure/snowflake-privatelink#configuring-network-policies), on a Snowflake account level. Enterprise customers who have single-tenant deployments will have a different range of IP addresses (network CIDR ranges) to allow list. From 38616aa55507496494962c3f2b7dcfd75b279e03 Mon Sep 17 00:00:00 2001 From: Ly Nguyen <107218380+nghi-ly@users.noreply.github.com> Date: Fri, 24 May 2024 08:54:15 -0700 Subject: [PATCH 11/12] Update website/snippets/metadata-api-prerequisites.md --- website/snippets/metadata-api-prerequisites.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/snippets/metadata-api-prerequisites.md b/website/snippets/metadata-api-prerequisites.md index eb66b80bc2a..de0938ea13b 100644 --- a/website/snippets/metadata-api-prerequisites.md +++ b/website/snippets/metadata-api-prerequisites.md @@ -2,4 +2,4 @@ - dbt Cloud [multi-tenant](/docs/cloud/about-cloud/tenancy#multi-tenant) or [single tenant](/docs/cloud/about-cloud/tenancy#single-tenant) account - You must be on a [Team or Enterprise plan](https://www.getdbt.com/pricing/) -- Your projects must be on dbt version 1.0 or higher. Refer to [Upgrade dbt version in Cloud](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to upgrade. +- Your projects must be on dbt version 1.0 or later. Refer to [Upgrade dbt version in Cloud](/docs/dbt-versions/upgrade-dbt-version-in-cloud) to upgrade. From c47c9e2ca7ddc285801fae895eadd1c235b00379 Mon Sep 17 00:00:00 2001 From: Jordan Stein Date: Fri, 24 May 2024 11:53:30 -0700 Subject: [PATCH 12/12] update version requirment for caching 1.6 --> 1.8 --- website/docs/docs/use-dbt-semantic-layer/sl-cache.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/docs/docs/use-dbt-semantic-layer/sl-cache.md b/website/docs/docs/use-dbt-semantic-layer/sl-cache.md index 91da24d9e2c..e88c753ca82 100644 --- a/website/docs/docs/use-dbt-semantic-layer/sl-cache.md +++ b/website/docs/docs/use-dbt-semantic-layer/sl-cache.md @@ -18,7 +18,7 @@ While you can use caching to speed up your queries and reduce compute time, know ## Prerequisites - dbt Cloud [Team or Enterprise](https://www.getdbt.com/) plan. -- dbt Cloud environments on dbt version 1.6 or higher. Or select [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version). +- dbt Cloud environments on dbt version 1.8 or higher. Or select [Keep on latest version](/docs/dbt-versions/upgrade-dbt-version-in-cloud#keep-on-latest-version). - A successful job run and [production environment](/docs/deploy/deploy-environments#set-as-production-environment). - For declarative caching, you need to have [exports](/docs/use-dbt-semantic-layer/exports) defined in your [saved queries](/docs/build/saved-queries) YAML configuration file.
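 For reference, a saved query that would satisfy the declarative caching prerequisite above might look like this sketch; all names are illustrative, and it simply combines an export with the cache opt-in:

 ```yaml
 saved_queries:
   - name: cached_weekly_orders # illustrative name
     query_params:
       metrics:
         - order_total
       group_by:
         - TimeDimension('metric_time', 'week')
     exports: # an export is required for declarative caching
       - name: cached_weekly_orders
         config:
           export_as: table
     config:
       cache:
         enabled: true
 ```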