Merge pull request #12 from zenml-io/2024.11.28
Removing the deprecated `log_xxx_metadata` calls
bcdurak authored Dec 2, 2024
2 parents 82f7c12 + ece1d46 commit f9ff2ba
Showing 3 changed files with 13 additions and 7 deletions.
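Both step files switch from the deprecated `log_model_metadata` helper to the unified `log_metadata` API (the CI workflow change just points `ref-zenml` at a related feature branch). A minimal sketch of the two replacement forms, using only the call signatures visible in the diffs below; the step name and metadata values are illustrative:

```python
from zenml import get_step_context, log_metadata, step


@step(enable_cache=False)
def log_example_metadata() -> None:
    """Illustrative step; only the log_metadata calls mirror this PR."""
    metadata = {"example_key": {"accuracy": 0.95}}

    # Form 1: attach metadata to an explicit model version taken from the
    # step context, guarded in case the step runs without a model configured.
    context = get_step_context()
    if context.model:
        log_metadata(
            metadata=metadata,
            model_name=context.model.name,
            model_version=context.model.version,
        )

    # Form 2: pass no model identifiers and let ZenML infer the target model
    # from the current step context instead.
    log_metadata(metadata=metadata, infer_model=True)
```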
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -55,5 +55,5 @@ jobs:
         uses: ./.github/actions/llm_finetuning_template_test
         with:
           python-version: ${{ matrix.python-version }}
-          ref-zenml: ${{ inputs.ref-zenml || 'develop' }}
+          ref-zenml: ${{ inputs.ref-zenml || 'feature/followup-run-metadata' }}
           ref-template: ${{ inputs.ref-template || github.ref }}
9 changes: 7 additions & 2 deletions template/steps/log_metadata.py
@@ -2,7 +2,7 @@

 from typing import Any, Dict

-from zenml import log_model_metadata, step, get_step_context
+from zenml import log_metadata, step, get_step_context


 @step(enable_cache=False)
@@ -24,4 +24,9 @@ def log_metadata_from_step_artifact(

     metadata = {artifact_name: metadata_dict}

-    log_model_metadata(metadata)
+    if context.model:
+        log_metadata(
+            metadata=metadata,
+            model_name=context.model.name,
+            model_version=context.model.version,
+        )
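The replacement is also guarded with `if context.model:`, so this step now only logs model metadata when a model is actually configured in its step context, whereas the removed `log_model_metadata(metadata)` call relied on one being present.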
9 changes: 5 additions & 4 deletions template/steps/prepare_datasets.py
@@ -6,7 +6,7 @@
 from materializers.directory_materializer import DirectoryMaterializer
 from typing_extensions import Annotated
 from utils.tokenizer import generate_and_tokenize_prompt, load_tokenizer
-from zenml import log_model_metadata, step
+from zenml import log_metadata, step
 from zenml.materializers import BuiltInMaterializer
 from zenml.utils.cuda_utils import cleanup_gpu_memory

@@ -33,11 +33,12 @@ def prepare_data(

     cleanup_gpu_memory(force=True)

-    log_model_metadata(
-        {
+    log_metadata(
+        metadata={
             "system_prompt": system_prompt,
             "base_model_id": base_model_id,
-        }
+        },
+        infer_model=True,
     )

     tokenizer = load_tokenizer(base_model_id, False, use_fast)
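In this step no model name or version is passed; `infer_model=True` tells `log_metadata` to attach the metadata to the model inferred from the current step context, which is what the removed `log_model_metadata` call did implicitly.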