Commit

fix
tommydangerous committed May 14, 2024
1 parent a61bead commit 9b2e2ec
Showing 4 changed files with 94 additions and 87 deletions.
4 changes: 3 additions & 1 deletion mlops/unit_2_training/custom/dashboard_data_source.py
@@ -2,6 +2,7 @@
 
 from pandas import Series
 from scipy.sparse import csr_matrix
+from sklearn.base import BaseEstimator
 from xgboost import Booster
 
 if 'custom' not in globals():
@@ -10,14 +11,15 @@
 
 @custom
 def source(
-    model: Booster,
+    training_results: Tuple[Booster, BaseEstimator],
     settings: Tuple[
         Dict[str, Union[bool, float, int, str]],
         csr_matrix,
         Series,
     ],
     **kwargs,
 ) -> Tuple[Booster, csr_matrix, csr_matrix]:
+    model, _ = training_results
     _, X_train, y_train = settings
 
     return model, X_train, y_train
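
For reference, a sketch of the whole block as it reads after this change, assembled from the two hunks above. The first line of the file and the body of the `if 'custom'` guard are collapsed in the diff, so the typing import and the Mage decorator import shown here are assumptions based on the usual Mage block boilerplate:

    from typing import Dict, Tuple, Union  # assumed: collapsed in the diff

    from pandas import Series
    from scipy.sparse import csr_matrix
    from sklearn.base import BaseEstimator
    from xgboost import Booster

    if 'custom' not in globals():
        # assumed: standard Mage import hidden in the collapsed lines
        from mage_ai.data_preparation.decorators import custom


    @custom
    def source(
        training_results: Tuple[Booster, BaseEstimator],
        settings: Tuple[
            Dict[str, Union[bool, float, int, str]],
            csr_matrix,
            Series,
        ],
        **kwargs,
    ) -> Tuple[Booster, csr_matrix, csr_matrix]:
        # The parameter is now the (Booster, BaseEstimator) tuple from the
        # upstream exporter; only the booster is used by this block.
        model, _ = training_results
        # Keep the training matrix and labels, drop the hyperparameters dict.
        # (y_train is a pandas Series even though the annotation says csr_matrix.)
        _, X_train, y_train = settings

        return model, X_train, y_train

The same signature change is applied to the unit_3_observability and unit_4_triggering copies below.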
4 changes: 3 additions & 1 deletion mlops/unit_3_observability/custom/dashboard_data_source.py
@@ -2,6 +2,7 @@
 
 from pandas import Series
 from scipy.sparse import csr_matrix
+from sklearn.base import BaseEstimator
 from xgboost import Booster
 
 if 'custom' not in globals():
@@ -10,14 +11,15 @@
 
 @custom
 def source(
-    model: Booster,
+    training_results: Tuple[Booster, BaseEstimator],
     settings: Tuple[
         Dict[str, Union[bool, float, int, str]],
         csr_matrix,
         Series,
     ],
     **kwargs,
 ) -> Tuple[Booster, csr_matrix, csr_matrix]:
+    model, _ = training_results
     _, X_train, y_train = settings
 
     return model, X_train, y_train
6 changes: 4 additions & 2 deletions mlops/unit_4_triggering/custom/dashboard_data_source.py
@@ -2,6 +2,7 @@
 
 from pandas import Series
 from scipy.sparse import csr_matrix
+from sklearn.base import BaseEstimator
 from xgboost import Booster
 
 if 'custom' not in globals():
@@ -10,14 +11,15 @@
 
 @custom
 def source(
-    model: Booster,
+    training_results: Tuple[Booster, BaseEstimator],
     settings: Tuple[
         Dict[str, Union[bool, float, int, str]],
         csr_matrix,
         Series,
     ],
     **kwargs,
 ) -> Tuple[Booster, csr_matrix, csr_matrix]:
+    model, _ = training_results
     _, X_train, y_train = settings
 
-    return model, X_train, y_train
+    return model, X_train, y_train
167 changes: 84 additions & 83 deletions mlops/unit_4_triggering/pipelines/xgboost_training/metadata.yaml
@@ -1,92 +1,92 @@
 blocks:
-- all_upstream_blocks_executed: true
-  color: null
-  configuration:
-    global_data_product:
-      uuid: training_set
-  downstream_blocks:
-  - xgboost
-  executor_config: null
-  executor_type: local_python
-  has_callback: false
-  language: python
-  name: Training set
-  retry_config: null
-  status: executed
-  timeout: null
-  type: global_data_product
-  upstream_blocks: []
-  uuid: training_set
-- all_upstream_blocks_executed: true
-  color: null
-  configuration:
-    file_path: transformers/hyperparameter_tuning/xgboost.py
-    file_source:
-      path: unit_2_training/transformers/hyperparameter_tuning/xgboost.py
-  downstream_blocks:
-  - xgboost
-  - dashboard_data_source
-  executor_config: null
-  executor_type: local_python
-  has_callback: false
-  language: python
-  name: xgboost Hyperparameter tuning
-  retry_config: null
-  status: executed
-  timeout: null
-  type: transformer
-  upstream_blocks:
-  - training_set
-  uuid: hyperparameter_tuning/xgboost
-- all_upstream_blocks_executed: true
-  color: null
-  configuration:
-    file_path: data_exporters/xgboost.py
-    file_source:
-      path: unit_2_training/data_exporters/xgboost.py
-  downstream_blocks:
-  - dashboard_data_source
-  executor_config: null
-  executor_type: local_python
-  has_callback: false
-  language: python
-  name: XGBoost
-  retry_config: null
-  status: updated
-  timeout: null
-  type: data_exporter
-  upstream_blocks:
-  - training_set
-  - hyperparameter_tuning/xgboost
-  uuid: xgboost
-- all_upstream_blocks_executed: false
-  color: pink
-  configuration:
-    file_path: custom/dashboard_data_source.py
-    file_source:
-      path: unit_2_training/custom/dashboard_data_source.py
-  downstream_blocks: []
-  executor_config: null
-  executor_type: local_python
-  has_callback: false
-  language: python
-  name: Dashboard data source
-  retry_config: null
-  status: executed
-  timeout: null
-  type: custom
-  upstream_blocks:
-  - xgboost
-  - hyperparameter_tuning/xgboost
-  uuid: dashboard_data_source
+- all_upstream_blocks_executed: true
+  color: null
+  configuration:
+    global_data_product:
+      uuid: training_set
+  downstream_blocks:
+  - xgboost
+  executor_config: null
+  executor_type: local_python
+  has_callback: false
+  language: python
+  name: Training set
+  retry_config: null
+  status: executed
+  timeout: null
+  type: global_data_product
+  upstream_blocks: []
+  uuid: training_set
+- all_upstream_blocks_executed: true
+  color: null
+  configuration:
+    file_path: transformers/hyperparameter_tuning/xgboost.py
+    file_source:
+      path: unit_2_training/transformers/hyperparameter_tuning/xgboost.py
+  downstream_blocks:
+  - xgboost
+  - dashboard_data_source
+  executor_config: null
+  executor_type: local_python
+  has_callback: false
+  language: python
+  name: xgboost Hyperparameter tuning
+  retry_config: null
+  status: executed
+  timeout: null
+  type: transformer
+  upstream_blocks:
+  - training_set
+  uuid: hyperparameter_tuning/xgboost
+- all_upstream_blocks_executed: true
+  color: null
+  configuration:
+    file_path: data_exporters/xgboost.py
+    file_source:
+      path: unit_2_training/data_exporters/xgboost.py
+  downstream_blocks:
+  - dashboard_data_source
+  executor_config: null
+  executor_type: local_python
+  has_callback: false
+  language: python
+  name: XGBoost
+  retry_config: null
+  status: updated
+  timeout: null
+  type: data_exporter
+  upstream_blocks:
+  - training_set
+  - hyperparameter_tuning/xgboost
+  uuid: xgboost
+- all_upstream_blocks_executed: false
+  color: pink
+  configuration:
+    file_path: custom/dashboard_data_source.py
+    file_source:
+      path: unit_2_training/custom/dashboard_data_source.py
+  downstream_blocks: []
+  executor_config: null
+  executor_type: local_python
+  has_callback: false
+  language: python
+  name: Dashboard data source
+  retry_config: null
+  status: executed
+  timeout: null
+  type: custom
+  upstream_blocks:
+  - xgboost
+  - hyperparameter_tuning/xgboost
+  uuid: dashboard_data_source
 cache_block_output_in_memory: false
 callbacks: []
 concurrency_config: {}
 conditionals: []
-created_at: "2024-05-07 18:27:34.902705+00:00"
+created_at: '2024-05-07 18:27:34.902705+00:00'
 data_integration: null
-description: "XGBoost is a scalable and efficient implementation of gradient boosted
-  decision trees, a powerful ensemble machine learning technique. "
+description: 'XGBoost is a scalable and efficient implementation of gradient boosted
+  decision trees, a powerful ensemble machine learning technique. '
 executor_config: {}
 executor_count: 1
 executor_type: null
@@ -106,4 +106,5 @@ variables:
   early_stopping_rounds: 1
   max_depth: 1
   max_evaluations: 1
+variables_dir: /root/.mage_data/unit_4_triggering
 widgets: []
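
To see how the pipeline wiring above ties back to the code change, here is a small, self-contained sketch with toy data (names and shapes are assumptions, not from the repo) of the two tuples the dashboard_data_source block receives, assuming Mage passes each upstream block's output as a positional argument in upstream_blocks order (xgboost, then hyperparameter_tuning/xgboost):

    import numpy as np
    import xgboost as xgb
    from pandas import Series
    from scipy.sparse import csr_matrix
    from sklearn.preprocessing import StandardScaler

    # Toy stand-ins shaped like the type hints in dashboard_data_source.py.
    X_train = csr_matrix(np.random.rand(8, 3))
    y_train = Series(np.random.rand(8))
    booster = xgb.train({'max_depth': 1}, xgb.DMatrix(X_train, label=y_train), num_boost_round=2)

    training_results = (booster, StandardScaler())     # what the `xgboost` exporter block would hand over (assumed)
    settings = ({'max_depth': 1}, X_train, y_train)    # what `hyperparameter_tuning/xgboost` would hand over (assumed)

    # The unpacking the patched block performs before returning (model, X_train, y_train).
    model, _ = training_results
    _, X, y = settings
    print(type(model), X.shape, len(y))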
