
Commit

Merge pull request #165 from Snowflake-Labs/jsummer/add-dbt-model-instructions

Jsummer/add dbt model instructions
sfc-gh-jsummer authored Sep 25, 2024
2 parents 3d22da3 + bda3f0c commit e9ff486
Showing 4 changed files with 64 additions and 18 deletions.
9 changes: 7 additions & 2 deletions admin_apps/partner/cortex.py
@@ -3,7 +3,10 @@
 import pandas as pd
 import streamlit as st
 
-from semantic_model_generator.data_processing.proto_utils import proto_to_dict
+from semantic_model_generator.data_processing.proto_utils import (
+    proto_to_dict,
+    yaml_to_semantic_model,
+)
 
 
 class CortexDimension:
@@ -134,7 +137,9 @@ def create_comparison_df(self) -> pd.DataFrame:
 
     @staticmethod
     def create_cortex_table_list() -> None:
-        cortex_semantic = proto_to_dict(st.session_state["semantic_model"])
+        cortex_semantic = proto_to_dict(
+            yaml_to_semantic_model(st.session_state["last_saved_yaml"])
+        )
         # Need to replace table details in current entire yaml
         st.session_state["current_yaml_as_dict"] = cortex_semantic
         tables = []
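
The change above stops reading the in-memory proto (`st.session_state["semantic_model"]`) and instead rebuilds the dict from the last saved YAML. A minimal sketch of that round-trip, assuming `yaml_to_semantic_model` parses a semantic-model YAML string into the proto message and `proto_to_dict` flattens it into a plain dict (signatures inferred only from the imports above):

```python
# Illustrative sketch, not part of this commit: rebuild the Cortex semantic
# model dict from the YAML the user last saved rather than the live proto.
import streamlit as st

from semantic_model_generator.data_processing.proto_utils import (
    proto_to_dict,
    yaml_to_semantic_model,
)


def cortex_semantic_from_saved_yaml() -> dict:
    # "last_saved_yaml" is the session-state key used in the diff above.
    semantic_proto = yaml_to_semantic_model(st.session_state["last_saved_yaml"])
    return proto_to_dict(semantic_proto)
```

Presumably this keeps the partner comparison in sync with what the user actually saved, rather than with an intermediate in-memory state.
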
16 changes: 14 additions & 2 deletions admin_apps/partner/dbt.py
@@ -8,8 +8,20 @@
 
 # Partner semantic support instructions
 DBT_IMAGE = "admin_apps/images/dbt-signature_tm_black.png"
-DBT_INSTRUCTIONS = """
-We extract metadata from your **DBT** semantic yaml file(s) and merge it with a generated Cortex Analyst semantic file.
+DBT_MODEL_INSTRUCTIONS = """
+### [SQL Model](https://docs.getdbt.com/docs/build/sql-models)
+Materialize your SQL model(s) as Snowflake table(s) and generate a Cortex Analyst semantic file for them directly.
+> Steps:
+> 1) Update dbt model(s) to be [materialized](https://docs.getdbt.com/docs/build/materializations) in Snowflake.
+> 2) Update dbt model(s) to [persist docs](https://docs.getdbt.com/reference/resource-configs/persist_docs) to capture table/column descriptions.
+> 3) Run dbt model(s) to materialize in Snowflake.
+> 4) Select **🛠 Create a new semantic model** on the homepage and select the materialized Snowflake table(s).
+"""
+DBT_SEMANTIC_INSTRUCTIONS = """
+### [Semantic Model](https://docs.getdbt.com/docs/build/semantic-models)
+We extract metadata from your dbt semantic yaml file(s) and merge it with a generated Cortex Analyst semantic file.
 **Note**: The DBT semantic layer must be sourced from tables/views in Snowflake.
 > Steps:
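
Step 2 of the SQL-model instructions matters because `persist_docs` is what writes dbt model and column descriptions into Snowflake as comments, which the semantic generator can then pick up. A hypothetical spot-check with the Snowflake Python connector (not part of this commit; every identifier and credential below is a placeholder):

```python
# Hypothetical check that persisted dbt docs landed as Snowflake column
# comments; all identifiers and credentials below are placeholders.
import snowflake.connector

conn = snowflake.connector.connect(
    account="my_account", user="my_user", password="my_password",
    warehouse="my_wh", database="MY_DB", schema="MY_SCHEMA",
)
cur = conn.cursor()
cur.execute(
    "SELECT column_name, comment "
    "FROM MY_DB.INFORMATION_SCHEMA.COLUMNS "
    "WHERE table_schema = 'MY_SCHEMA' AND table_name = 'MY_DBT_MODEL'"
)
for column_name, comment in cur.fetchall():
    print(column_name, comment or "<no description persisted>")
cur.close()
conn.close()
```
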
2 changes: 1 addition & 1 deletion admin_apps/partner/looker.py
@@ -42,7 +42,7 @@
 # Partner semantic support instructions
 LOOKER_IMAGE = "admin_apps/images/looker.png"
 LOOKER_INSTRUCTIONS = """
-We materialize your Explore dataset in Looker as Snowflake table(s) and generate a Cortex Analyst semantic file.
+We materialize your [Explore](https://cloud.google.com/looker/docs/reference/param-explore-explore) dataset in Looker as Snowflake table(s) and generate a Cortex Analyst semantic file.
 Metadata from your Explore fields can be merged with the generated Cortex Analyst semantic file.
 **Note**: Views referenced in the Looker Explores must be tables/views in Snowflake. Looker SDK credentials are required.
55 changes: 42 additions & 13 deletions admin_apps/partner/partner_utils.py
@@ -1,5 +1,6 @@
 import json
 import time
+from enum import Enum
 from typing import Any
 
 import numpy as np
@@ -17,20 +18,32 @@
 from semantic_model_generator.data_processing.proto_utils import yaml_to_semantic_model
 
 
+class PartnerTool(Enum):
+    DBT_SQL_MODEL = "dbt - SQL Model"
+    DBT_SEMANTIC_MODEL = "dbt - Semantic Model"
+    LOOKER_EXPLORE = "Looker - Explore"
+
+
 def set_partner_instructions() -> None:
     """
     Sets instructions and partner logo in session_state based on selected partner.
     Returns: None
     """
 
     if st.session_state.get("partner_tool", None):
-        if st.session_state["partner_tool"] == "dbt":
-            from admin_apps.partner.dbt import DBT_IMAGE, DBT_INSTRUCTIONS
+        if st.session_state["partner_tool"] == PartnerTool.DBT_SQL_MODEL.value:
+            from admin_apps.partner.dbt import DBT_IMAGE, DBT_MODEL_INSTRUCTIONS
+
+            instructions = DBT_MODEL_INSTRUCTIONS
+            image = DBT_IMAGE
+            image_size = (72, 32)
+        elif st.session_state["partner_tool"] == PartnerTool.DBT_SEMANTIC_MODEL.value:
+            from admin_apps.partner.dbt import DBT_IMAGE, DBT_SEMANTIC_INSTRUCTIONS
 
-            instructions = DBT_INSTRUCTIONS
+            instructions = DBT_SEMANTIC_INSTRUCTIONS
             image = DBT_IMAGE
             image_size = (72, 32)
-        elif st.session_state["partner_tool"] == "looker":
+        elif st.session_state["partner_tool"] == PartnerTool.LOOKER_EXPLORE.value:
             from admin_apps.partner.looker import LOOKER_IMAGE, LOOKER_INSTRUCTIONS
 
             instructions = LOOKER_INSTRUCTIONS
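
A short sketch (not from this commit) of why the branches compare against `.value`: the selectbox stores the chosen option as a plain string in session state, and the enum's string values double as the display labels:

```python
# Sketch of the enum/selectbox interplay introduced above; assumes the same
# "partner_tool" widget key as the diff.
from enum import Enum

import streamlit as st


class PartnerTool(Enum):
    DBT_SQL_MODEL = "dbt - SQL Model"
    DBT_SEMANTIC_MODEL = "dbt - Semantic Model"
    LOOKER_EXPLORE = "Looker - Explore"


st.selectbox(
    "Select the partner tool",
    [tool.value for tool in PartnerTool],  # options are the members' string values
    key="partner_tool",
)

# Streamlit stores the selected string, so comparisons go through .value:
if st.session_state.get("partner_tool") == PartnerTool.DBT_SQL_MODEL.value:
    st.write("dbt SQL model workflow selected")
```
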
@@ -47,7 +60,7 @@ def configure_partner_semantic() -> None:
     Returns: None
     """
 
-    partners = [None, "dbt", "looker"]
+    partners = [tool.value for tool in PartnerTool]
 
     st.selectbox(
         "Select the partner tool",
@@ -58,7 +71,7 @@ def configure_partner_semantic() -> None:
     )
     if st.session_state.get("partner_tool", None):
         with st.expander(
-            f"{st.session_state.get('partner_tool', '').title()} Instructions",
+            "Instructions",
             expanded=True,
         ):
             render_image(
@@ -71,12 +84,14 @@ def configure_partner_semantic() -> None:
     if st.session_state.get("partner_tool", None):
         st.session_state["selected_partner"] = st.session_state["partner_tool"]
 
-        if st.session_state["partner_tool"] == "dbt":
+        if st.session_state["partner_tool"] == PartnerTool.DBT_SEMANTIC_MODEL.value:
             upload_dbt_semantic()
-        if st.session_state["partner_tool"] == "looker":
+        if st.session_state["partner_tool"] == PartnerTool.LOOKER_EXPLORE.value:
             from admin_apps.partner.looker import set_looker_semantic
 
             set_looker_semantic()
+        if st.session_state["partner_tool"] == PartnerTool.DBT_SQL_MODEL.value:
+            st.session_state["partner_setup"] = False
 
 
 class PartnerCompareRow:
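
The expander retitled to plain "Instructions" presumably shows the logo and markdown stored by `set_partner_instructions`; the exact rendering lines are collapsed in this diff, so the session-state keys and the `st.markdown` call below are assumptions, not the repository's code:

```python
# Hedged sketch of the instructions expander; "partner_image" and
# "partner_instructions" are assumed session-state keys, and st.image stands
# in for the repo's render_image helper.
import streamlit as st


def show_partner_instructions() -> None:
    if st.session_state.get("partner_tool"):
        with st.expander("Instructions", expanded=True):
            st.image(st.session_state["partner_image"], width=72)
            st.markdown(st.session_state["partner_instructions"])
```
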
@@ -231,9 +246,11 @@ def integrate_partner_semantics() -> None:
     """
 
     st.write(
-        "Specify how to merge semantic metadata from partner tools with Cortex Analyst's semantic model."
+        "Specify how to merge semantic metadata from your selected partner tool with Cortex Analyst's semantic model."
     )
 
+    st.write(f"Partner: **{st.session_state.get('selected_partner', None)}**")
+
     COMPARE_SEMANTICS_HELP = """Which semantic file should be checked first for necessary metadata.
     Where metadata is missing, the other semantic file will be checked."""
 
@@ -253,11 +270,17 @@ def integrate_partner_semantics() -> None:
     # Execute pre-processing behind the scenes based on vendor tool
     CortexSemanticTable.create_cortex_table_list()
 
-    if st.session_state.get("selected_partner", None) == "looker":
+    if (
+        st.session_state.get("selected_partner", None)
+        == PartnerTool.LOOKER_EXPLORE.value
+    ):
         from admin_apps.partner.looker import LookerSemanticTable
 
         LookerSemanticTable.create_cortex_table_list()
-    elif st.session_state.get("selected_partner", None) == "dbt":
+    elif (
+        st.session_state.get("selected_partner", None)
+        == PartnerTool.DBT_SEMANTIC_MODEL.value
+    ):
         pass
     else:
         st.error("Selected partner tool not available.")
@@ -303,13 +326,19 @@ def integrate_partner_semantics() -> None:
             semantic_cortex_tbl
         )
 
-        if st.session_state.get("selected_partner", None) == "looker":
+        if (
+            st.session_state.get("selected_partner", None)
+            == PartnerTool.LOOKER_EXPLORE.value
+        ):
             from admin_apps.partner.looker import LookerSemanticTable
 
             partner_fields_df = LookerSemanticTable.retrieve_df_by_name(
                 semantic_partner_tbl
             )
-        if st.session_state.get("selected_partner", None) == "dbt":
+        if (
+            st.session_state.get("selected_partner", None)
+            == PartnerTool.DBT_SEMANTIC_MODEL.value
+        ):
             partner_fields_df = DBTSemanticModel.retrieve_df_by_name(
                 semantic_partner_tbl
             )
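
Since `selected_partner` holds the display string, it can be mapped back to a `PartnerTool` member by value; a possible helper (not part of this change) that the `== PartnerTool.X.value` comparisons above could lean on:

```python
# Hypothetical helper, not in this PR: convert the stored display string back
# into its PartnerTool member so callers can dispatch on the enum directly.
from typing import Optional

import streamlit as st

from admin_apps.partner.partner_utils import PartnerTool


def selected_partner_tool() -> Optional[PartnerTool]:
    try:
        # Enum lookup by value, e.g. PartnerTool("Looker - Explore")
        return PartnerTool(st.session_state.get("selected_partner"))
    except ValueError:
        return None
```

With such a helper, the branches in `integrate_partner_semantics` could compare members directly (e.g. `tool is PartnerTool.LOOKER_EXPLORE`) instead of raw strings.
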
