Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add: sqlmesh key_metrics model #2584

Draft
wants to merge 7 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 45 additions & 0 deletions warehouse/metrics_mesh/macros/oso_union.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from functools import reduce

from sqlglot import expressions as exp
from sqlmesh import macro
from sqlmesh.core.macros import MacroEvaluator, union
from sqlmesh.utils.errors import SQLMeshError


@macro()
def oso_union(
    evaluator: MacroEvaluator,
    kind: exp.Literal,
    type_: exp.Literal,
    *sources: exp.Expression,
) -> exp.Query:
    """
    Returns a UNION of the given CTEs or tables, selecting all columns.
    SQLMesh does not support UNIONs of CTEs, so this macro is a workaround to allow that.

    Args:
        evaluator: MacroEvaluator instance
        kind: Either 'CTE' or 'TABLE' to specify the source type.
        type_: Either 'ALL' or 'DISTINCT' for the UNION type.
        *sources: CTEs or tables to union. At least one is required.

    Raises:
        SQLMeshError: if `kind` or `type_` has an unexpected value, or if no
            sources are given (previously this surfaced as an opaque
            ``TypeError`` from ``reduce`` on an empty sequence).
    """

    union_type = type_.name.upper()
    if union_type not in ("ALL", "DISTINCT"):
        raise SQLMeshError(
            f"Invalid UNION type '{type_}'. Expected 'ALL' or 'DISTINCT'."
        )

    source_kind = kind.name.upper()
    if source_kind not in ("CTE", "TABLE"):
        raise SQLMeshError(f"Invalid kind '{kind}'. Expected 'CTE' or 'TABLE'.")

    # Guard: reduce() with no initial value raises TypeError on an empty
    # sequence; fail early with a clear, catchable SQLMesh error instead.
    if not sources:
        raise SQLMeshError("oso_union requires at least one source to union.")

    if source_kind == "CTE":
        # For CTEs, wrap each name in a `SELECT * FROM <cte>` so the union
        # operates on plain selects (SQLMesh cannot union CTEs directly).
        selects = [exp.select("*").from_(source.this) for source in sources]

        return reduce(
            lambda acc, sel: acc.union(sel, distinct=union_type == "DISTINCT"),
            selects,
        )

    # Plain tables: defer to SQLMesh's built-in union macro.
    return union(evaluator, type_, *sources)
24 changes: 24 additions & 0 deletions warehouse/metrics_mesh/models/metrics_factories.py
Original file line number Diff line number Diff line change
Expand Up @@ -296,6 +296,30 @@
),
entity_types=["artifact", "project", "collection"],
),
"key_commit_count": MetricQueryDef(
ref="key_commit_count.sql",
entity_types=["artifact"],
),
"key_developer_count": MetricQueryDef(
ref="key_developer_count.sql",
entity_types=["artifact"],
),
"key_first_commit": MetricQueryDef(
ref="key_first_commit.sql",
entity_types=["artifact"],
),
"key_fork_count": MetricQueryDef(
ref="key_fork_count.sql",
entity_types=["artifact"],
),
"key_last_commit": MetricQueryDef(
ref="key_last_commit.sql",
entity_types=["artifact"],
),
"key_star_count": MetricQueryDef(
ref="key_star_count.sql",
entity_types=["artifact"],
),
},
default_dialect="clickhouse",
)
14 changes: 14 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_commit_count.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Key (point-in-time) metric: lifetime count of COMMIT_CODE events per artifact.
-- metrics_sample_date is the query run time (now()) because this metric covers
-- the whole history rather than a time series bucket.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'TOTAL_COMMITS' as metric,
count(*) as amount,
'COUNT' as unit
from metrics.events_daily_to_artifact as events
where event_type = 'COMMIT_CODE'
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
14 changes: 14 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_developer_count.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Key (point-in-time) metric: lifetime count of distinct contributing
-- artifacts (developers) per target artifact, counting anyone who committed
-- code or opened a pull request.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'DEVELOPER_COUNT' as metric,
count(distinct from_artifact_id) as amount,
'COUNT' as unit
from metrics.events_daily_to_artifact as events
where event_type in ('COMMIT_CODE', 'PULL_REQUEST_OPENED')
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
18 changes: 18 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_first_commit.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-- Key (point-in-time) metric: date of the first COMMIT_CODE event per
-- artifact, reported as a unix timestamp.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'FIRST_COMMIT' as metric,
-- Take the date portion of the earliest bucket_day (split on the space
-- separating date from time) and convert it to a unix timestamp.
@str_to_unix_timestamp(
split_part(
cast(min(bucket_day) as string), ' ', 1
),
-- NOTE(review): trailing comma inside the macro call — key_last_commit.sql
-- omits it; confirm the macro parser accepts both forms.
) as amount,
'UNIX_TIMESTAMP' as unit
from metrics.events_daily_to_artifact as events
where event_type = 'COMMIT_CODE'
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
14 changes: 14 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_fork_count.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Key (point-in-time) metric: lifetime count of FORKED events per artifact.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'FORK_COUNT' as metric,
count(*) as amount,
'COUNT' as unit
from metrics.events_daily_to_artifact as events
where event_type = 'FORKED'
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
18 changes: 18 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_last_commit.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
-- Key (point-in-time) metric: date of the most recent COMMIT_CODE event per
-- artifact, reported as a unix timestamp.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'LAST_COMMIT' as metric,
-- Take the date portion of the latest bucket_day (split on the space
-- separating date from time) and convert it to a unix timestamp.
@str_to_unix_timestamp(
split_part(
cast(max(bucket_day) as string), ' ', 1
)
) as amount,
'UNIX_TIMESTAMP' as unit
from metrics.events_daily_to_artifact as events
where event_type = 'COMMIT_CODE'
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
14 changes: 14 additions & 0 deletions warehouse/metrics_mesh/oso_metrics/key_star_count.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
-- Key (point-in-time) metric: lifetime count of STARRED events per artifact.
select distinct
now() as metrics_sample_date,
events.event_source,
-- SQLMesh macro: expands to the `to_{entity_type}_id` column for the entity
-- type this model is generated for, qualified with the `events` alias.
@metrics_entity_type_col(
'to_{entity_type}_id',
table_alias := events,
),
'' as from_artifact_id,
'STAR_COUNT' as metric,
count(*) as amount,
'COUNT' as unit
from metrics.events_daily_to_artifact as events
where event_type = 'STARRED'
-- Ordinal group by: 2 = event_source, 3 = the macro-expanded entity id column.
group by 2, 3
16 changes: 14 additions & 2 deletions warehouse/metrics_tools/definition.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,6 +433,17 @@ def generate_dependency_refs_for_name(self, name: str):
time_aggregation=time_aggregation,
)
)
# if there is no _source.time_aggregations or _source.rolling
# means it is a point in time metric aka key metric over the
# whole time period for the specific entity type
if not self._source.time_aggregations and not self._source.rolling:
refs.append(
PeerMetricDependencyRef(
name=name,
entity_type=entity,
time_aggregation="over_all_time",
)
)
return refs

@property
Expand All @@ -449,8 +460,9 @@ def metric_type(self):
return "time_aggregation"
elif self._source.rolling is not None:
return "rolling"
# This _shouldn't_ happen
raise Exception("unknown metric type")
# If neither time_aggregations or rolling is set then it is a point in
# time metric
return "over_all_time"

def generate_query_ref(
self,
Expand Down
36 changes: 36 additions & 0 deletions warehouse/metrics_tools/factory/factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -393,6 +393,10 @@ def generate_model_for_rendered_query(
self.generate_time_aggregation_model_for_rendered_query(
calling_file, query_config, dependencies
)
case "over_all_time":
self.generate_point_in_time_model_for_rendered_query(
calling_file, query_config, dependencies
)

def generate_rolling_python_model_for_rendered_query(
self,
Expand Down Expand Up @@ -546,6 +550,38 @@ def generate_time_aggregation_model_for_rendered_query(
partitioned_by=partitioned_by,
)

def generate_point_in_time_model_for_rendered_query(
    self,
    calling_file: str,
    query_config: MetricQueryConfig,
    dependencies: t.Set[str],
):
    """Generate model for point in time models.

    Point-in-time ("key") metrics cover the entire event history rather than
    a time bucket, so the model is materialized with ModelKindName.FULL,
    i.e. fully rebuilt on each run instead of incrementally.

    Args:
        calling_file: Path of the factory file, used as the generated
            model's entrypoint.
        query_config: Rendered metric query configuration; its "ref" and
            "table_name" entries are read here.
        dependencies: Set of upstream dependency names.
            NOTE(review): accepted for signature parity with the sibling
            generators but unused in this body — confirm intentional.
    """
    config = self.serializable_config(query_config)
    ref = query_config["ref"]
    # Output column schema is keyed by the metric's entity type
    # (e.g. artifact / project / collection).
    columns = METRICS_COLUMNS_BY_ENTITY[ref["entity_type"]]

    # Grain: the column set that uniquely identifies a row of this model.
    grain = [
        "metric",
        f"to_{ref['entity_type']}_id",
        "from_artifact_id",
        "event_source",
        "metrics_sample_date",
    ]

    GeneratedModel.create(
        func=generated_query,
        entrypoint_path=calling_file,
        config=config,
        name=f"{self.catalog}.{query_config['table_name']}",
        # FULL kind: recomputed from scratch every run (no incremental state).
        kind=ModelKindName.FULL,
        dialect="clickhouse",
        columns=columns,
        grain=grain,
        # Effectively "all history" — presumably no event data predates 1970.
        start="1970-01-01",
        additional_macros=self.generated_model_additional_macros,
    )

def serializable_config(self, query_config: MetricQueryConfig):
# Use a simple python sql model to generate the time_aggregation model
config: t.Dict[str, t.Any] = t.cast(dict, query_config.copy())
Expand Down
Loading