diff --git a/google_cloud_automlops/deployments/cloudbuild/builder.py b/google_cloud_automlops/deployments/cloudbuild/builder.py index e5aedc9..9374348 100644 --- a/google_cloud_automlops/deployments/cloudbuild/builder.py +++ b/google_cloud_automlops/deployments/cloudbuild/builder.py @@ -22,9 +22,10 @@ # Try backported to PY<37 `importlib_resources` from importlib_resources import files as import_files -from jinja2 import Template - -from google_cloud_automlops.utils.utils import write_file +from google_cloud_automlops.utils.utils import ( + render_jinja, + write_file +) from google_cloud_automlops.utils.constants import ( BASE_DIR, CLOUDBUILD_TEMPLATES_PATH, @@ -48,46 +49,18 @@ def build(config: CloudBuildConfig): config.use_ci: Flag that determines whether to use Cloud CI/CD. """ # Write cloud build config - write_file(GENERATED_CLOUDBUILD_FILE, create_cloudbuild_jinja( - config.artifact_repo_location, - config.artifact_repo_name, - config.naming_prefix, - config.project_id, - config.pubsub_topic_name, - config.use_ci), 'w') - -def create_cloudbuild_jinja( - artifact_repo_location: str, - artifact_repo_name: str, - naming_prefix: str, - project_id: str, - pubsub_topic_name: str, - use_ci: bool) -> str: - """Generates content for the cloudbuild.yaml, to be written to the base_dir. - This file contains the ci/cd manifest for AutoMLOps. - - Args: - artifact_repo_location: Region of the artifact repo (default use with Artifact Registry). - artifact_repo_name: Artifact repo name where components are stored (default use with Artifact Registry). - naming_prefix: Unique value used to differentiate pipelines and services across AutoMLOps runs. - project_id: The project ID. - pubsub_topic_name: The name of the pubsub topic to publish to. - use_ci: Flag that determines whether to use Cloud CI/CD. - - Returns: - str: Contents of cloudbuild.yaml. 
- """ - component_base_relative_path = COMPONENT_BASE_RELATIVE_PATH if use_ci else f'{BASE_DIR}{COMPONENT_BASE_RELATIVE_PATH}' - template_file = import_files(CLOUDBUILD_TEMPLATES_PATH) / 'cloudbuild.yaml.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_location=artifact_repo_location, - artifact_repo_name=artifact_repo_name, + component_base_relative_path = COMPONENT_BASE_RELATIVE_PATH if config.use_ci else f'{BASE_DIR}{COMPONENT_BASE_RELATIVE_PATH}' + write_file( + filepath=GENERATED_CLOUDBUILD_FILE, + text=render_jinja( + template_path=import_files(CLOUDBUILD_TEMPLATES_PATH) / 'cloudbuild.yaml.j2', + artifact_repo_location=config.artifact_repo_location, + artifact_repo_name=config.artifact_repo_name, component_base_relative_path=component_base_relative_path, generated_license=GENERATED_LICENSE, generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, - naming_prefix=naming_prefix, - project_id=project_id, - pubsub_topic_name=pubsub_topic_name, - use_ci=use_ci) + naming_prefix=config.naming_prefix, + project_id=config.project_id, + pubsub_topic_name=config.pubsub_topic_name, + use_ci=config.use_ci), + mode='w') diff --git a/google_cloud_automlops/deployments/github_actions/builder.py b/google_cloud_automlops/deployments/github_actions/builder.py index 56d3288..58f756b 100644 --- a/google_cloud_automlops/deployments/github_actions/builder.py +++ b/google_cloud_automlops/deployments/github_actions/builder.py @@ -22,9 +22,11 @@ # Try backported to PY<37 `importlib_resources` from importlib_resources import files as import_files -from jinja2 import Template +from google_cloud_automlops.utils.utils import ( + render_jinja, + write_file +) -from google_cloud_automlops.utils.utils import write_file from google_cloud_automlops.utils.constants import ( GENERATED_GITHUB_ACTIONS_FILE, COMPONENT_BASE_RELATIVE_PATH, @@ -52,65 +54,23 @@ def build(config: GitHubActionsConfig): 
config.workload_identity_service_account: Service account for workload identity federation. """ # Write github actions config - write_file(GENERATED_GITHUB_ACTIONS_FILE, create_github_actions_jinja( - config.artifact_repo_location, - config.artifact_repo_name, - config.naming_prefix, - config.project_id, - config.project_number, - config.pubsub_topic_name, - config.source_repo_branch, - config.use_ci, - config.workload_identity_pool, - config.workload_identity_provider, - config.workload_identity_service_account), 'w') - -def create_github_actions_jinja( - artifact_repo_location: str, - artifact_repo_name: str, - naming_prefix: str, - project_id: str, - project_number: str, - pubsub_topic_name: str, - source_repo_branch: str, - use_ci: bool, - workload_identity_pool: str, - workload_identity_provider: str, - workload_identity_service_account: str) -> str: - """Generates content for the github_actions.yaml, to be written to the .github/workflows directory. - This file contains the ci/cd manifest for AutoMLOps. - - Args: - artifact_repo_location: Region of the artifact repo (default use with Artifact Registry). - artifact_repo_name: Artifact repo name where components are stored (default use with Artifact Registry). - naming_prefix: Unique value used to differentiate pipelines and services across AutoMLOps runs. - project_id: The project ID. - project_number: The project number. - pubsub_topic_name: The name of the pubsub topic to publish to. - source_repo_branch: The branch to use in the source repository. - use_ci: Flag that determines whether to use Cloud CI/CD. - workload_identity_pool: Pool for workload identity federation. - workload_identity_provider: Provider for workload identity federation. - workload_identity_service_account: Service account for workload identity federation. - - Returns: - str: Contents of github_actions.yaml. 
- """ - template_file = import_files(GITHUB_ACTIONS_TEMPLATES_PATH) / 'github_actions.yaml.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_location=artifact_repo_location, - artifact_repo_name=artifact_repo_name, + write_file( + filepath=GENERATED_GITHUB_ACTIONS_FILE, + text=render_jinja( + template_path=import_files(GITHUB_ACTIONS_TEMPLATES_PATH) / 'github_actions.yaml.j2', + artifact_repo_location=config.artifact_repo_location, + artifact_repo_name=config.artifact_repo_name, component_base_relative_path=COMPONENT_BASE_RELATIVE_PATH, generated_license=GENERATED_LICENSE, generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, - naming_prefix=naming_prefix, - project_id=project_id, - project_number=project_number, - pubsub_topic_name=pubsub_topic_name, - source_repo_branch=source_repo_branch, - use_ci=use_ci, - workload_identity_pool=workload_identity_pool, - workload_identity_provider=workload_identity_provider, - workload_identity_service_account=workload_identity_service_account) + naming_prefix=config.naming_prefix, + project_id=config.project_id, + project_number=config.project_number, + pubsub_topic_name=config.pubsub_topic_name, + source_repo_branch=config.source_repo_branch, + use_ci=config.use_ci, + workload_identity_pool=config.workload_identity_pool, + workload_identity_provider=config.workload_identity_provider, + workload_identity_service_account=config.workload_identity_service_account + ), + mode='w') diff --git a/google_cloud_automlops/deployments/gitops/git_utils.py b/google_cloud_automlops/deployments/gitops/git_utils.py index 9fb547c..e124f85 100644 --- a/google_cloud_automlops/deployments/gitops/git_utils.py +++ b/google_cloud_automlops/deployments/gitops/git_utils.py @@ -28,8 +28,6 @@ import os import subprocess -from jinja2 import Template - from google_cloud_automlops.utils.constants import ( BASE_DIR, GENERATED_DEFAULTS_FILE, @@ -39,6 +37,7 @@ from 
google_cloud_automlops.utils.utils import ( execute_process, read_yaml_file, + render_jinja, write_file ) from google_cloud_automlops.deployments.enums import ( @@ -76,7 +75,11 @@ def git_workflow(): has_remote_branch = subprocess.check_output( [f'''git -C {BASE_DIR} ls-remote origin {defaults['gcp']['source_repository_branch']}'''], shell=True, stderr=subprocess.STDOUT) - write_file(f'{BASE_DIR}.gitignore', _create_gitignore_jinja(), 'w') + write_file( + f'{BASE_DIR}.gitignore', + render_jinja(template_path=import_files(GITOPS_TEMPLATES_PATH) / 'gitignore.j2'), + 'w') + # This will initialize the branch, a second push will be required to trigger the cloudbuild job after initializing if not has_remote_branch: execute_process(f'git -C {BASE_DIR} add .gitignore', to_null=False) @@ -102,15 +105,3 @@ def git_workflow(): if deployment_framework == Deployer.CLOUDBUILD.value: logging.info( f'''Cloud Build job running at: https://console.cloud.google.com/cloud-build/builds;region={defaults['gcp']['build_trigger_location']}''') - - -def _create_gitignore_jinja() -> str: - """Generates code for .gitignore file. - - Returns: - str: .gitignore file. 
- """ - template_file = import_files(GITOPS_TEMPLATES_PATH) / 'gitignore.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render() diff --git a/google_cloud_automlops/orchestration/kfp/builder.py b/google_cloud_automlops/orchestration/kfp/builder.py index 127325f..662e313 100644 --- a/google_cloud_automlops/orchestration/kfp/builder.py +++ b/google_cloud_automlops/orchestration/kfp/builder.py @@ -25,14 +25,13 @@ import re import textwrap -from jinja2 import Template - from google_cloud_automlops.utils.utils import ( execute_process, get_components_list, make_dirs, read_file, read_yaml_file, + render_jinja, is_using_kfp_spec, write_and_chmod, write_file, @@ -74,16 +73,76 @@ def build(config: KfpConfig): """ # Write scripts for building pipeline, building components, running pipeline, and running all files - write_and_chmod(GENERATED_PIPELINE_SPEC_SH_FILE, build_pipeline_spec_jinja()) - write_and_chmod(GENERATED_BUILD_COMPONENTS_SH_FILE, build_components_jinja()) - write_and_chmod(GENERATED_RUN_PIPELINE_SH_FILE, run_pipeline_jinja()) - write_and_chmod(GENERATED_RUN_ALL_SH_FILE, run_all_jinja()) + scripts_path = import_files(KFP_TEMPLATES_PATH + '.scripts') + + # Write script for building pipeline + write_and_chmod( + GENERATED_PIPELINE_SPEC_SH_FILE, + render_jinja( + template_path=scripts_path / 'build_pipeline_spec.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR)) + + # Write script for building components + write_and_chmod( + GENERATED_BUILD_COMPONENTS_SH_FILE, + render_jinja( + template_path=scripts_path / 'build_components.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR)) + + # Write script for running pipeline + write_and_chmod( + GENERATED_RUN_PIPELINE_SH_FILE, + render_jinja( + template_path=scripts_path / 'run_pipeline.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR)) + + # Write script for running all files + write_and_chmod( + 
GENERATED_RUN_ALL_SH_FILE, + render_jinja( + template_path=scripts_path / 'run_all.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR)) + + # If using CI, write script for publishing to pubsub topic if config.use_ci: - write_and_chmod(GENERATED_PUBLISH_TO_TOPIC_FILE, publish_to_topic_jinja(pubsub_topic_name=config.pubsub_topic_name)) + write_and_chmod( + GENERATED_PUBLISH_TO_TOPIC_FILE, + render_jinja( + template_path=scripts_path / 'publish_to_topic.sh.j2', + base_dir=BASE_DIR, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, + pubsub_topic_name=config.pubsub_topic_name)) + + # If using model monitoring, write corresponding scripts to model_monitoring directory if config.setup_model_monitoring: - write_and_chmod(GENERATED_MODEL_MONITORING_SH_FILE, create_model_monitoring_job_jinja()) - write_file(GENERATED_MODEL_MONITORING_MONITOR_PY_FILE, model_monitoring_monitor_jinja(), 'w') - write_file(GENERATED_MODEL_MONITORING_REQUIREMENTS_FILE, model_monitoring_requirements_jinja(), 'w') + # Writes script create_model_monitoring_job.sh which creates a Vertex AI model monitoring job + write_and_chmod( + filepath=GENERATED_MODEL_MONITORING_SH_FILE, + text=render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'create_model_monitoring_job.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + )) + + # Writes monitor.py to create or update a model monitoring job in Vertex AI for a deployed model endpoint + write_file( + filepath=GENERATED_MODEL_MONITORING_MONITOR_PY_FILE, + text=render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.model_monitoring') / 'monitor.py.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') + + # Writes a requirements.txt to the model_monitoring directory + write_file( + filepath=GENERATED_MODEL_MONITORING_REQUIREMENTS_FILE, + text=render_jinja(template_path=import_files(KFP_TEMPLATES_PATH + '.model_monitoring') / 
'requirements.txt.j2'), + mode='w') # Create components and pipelines components_path_list = get_components_list(full_path=True) @@ -95,10 +154,22 @@ def build(config: KfpConfig): write_file(f'{BASE_DIR}scripts/pipeline_spec/.gitkeep', '', 'w') # Write readme.md to description the contents of the directory - write_file(f'{BASE_DIR}README.md', readme_jinja(config.setup_model_monitoring, config.use_ci), 'w') + write_file( + f'{BASE_DIR}README.md', + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH) / 'README.md.j2', + setup_model_monitoring=config.setup_model_monitoring, + use_ci=config.use_ci), + 'w') # Write dockerfile to the component base directory - write_file(f'{GENERATED_COMPONENT_BASE}/Dockerfile', component_base_dockerfile_jinja(config.base_image), 'w') + write_file( + f'{GENERATED_COMPONENT_BASE}/Dockerfile', + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.components.component_base') / 'Dockerfile.j2', + base_image=config.base_image, + generated_license=GENERATED_LICENSE), + 'w') # Write requirements.txt to the component base directory write_file(f'{GENERATED_COMPONENT_BASE}/requirements.txt', create_component_base_requirements(), 'w') @@ -145,7 +216,14 @@ def build_component(component_path: str): + '.py') # Write task script to component base - write_file(task_filepath, component_base_task_file_jinja(custom_code_contents, kfp_spec_bool), 'w') + write_file( + task_filepath, + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.components.component_base.src') / 'task.py.j2', + custom_code_contents=custom_code_contents, + generated_license=GENERATED_LICENSE, + kfp_spec_bool=kfp_spec_bool), + 'w') # Update component_spec to include correct image and startup command component_spec['implementation']['container']['image'] = compspec_image @@ -171,25 +249,48 @@ def build_pipeline(custom_training_job_specs: list, pipeline_parameter_values: Dictionary of runtime parameters for the PipelineJob. 
""" defaults = read_yaml_file(GENERATED_DEFAULTS_FILE) + # Get the names of the components components_list = get_components_list(full_path=False) + # Read pipeline definition pipeline_scaffold_contents = read_file(PIPELINE_CACHE_FILE) + # Add indentation pipeline_scaffold_contents = textwrap.indent(pipeline_scaffold_contents, 4 * ' ') + # Construct pipeline.py project_id = defaults['gcp']['project_id'] - write_file(GENERATED_PIPELINE_FILE, pipeline_jinja( - components_list, - custom_training_job_specs, - pipeline_scaffold_contents, - project_id=project_id), 'w') + write_file( + GENERATED_PIPELINE_FILE, + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline.py.j2', + components_list=components_list, + custom_training_job_specs=custom_training_job_specs, + generated_license=GENERATED_LICENSE, + pipeline_scaffold_contents=pipeline_scaffold_contents, + project_id=project_id), + 'w') + # Construct pipeline_runner.py - write_file(GENERATED_PIPELINE_RUNNER_FILE, pipeline_runner_jinja(), 'w') + write_file( + GENERATED_PIPELINE_RUNNER_FILE, + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline_runner.py.j2', + generated_license=GENERATED_LICENSE), + 'w') + # Construct requirements.txt - write_file(GENERATED_PIPELINE_REQUIREMENTS_FILE, pipeline_requirements_jinja(), 'w') + write_file( + GENERATED_PIPELINE_REQUIREMENTS_FILE, + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'requirements.txt.j2', + pinned_kfp_version=PINNED_KFP_VERSION), + 'w') + # Add pipeline_spec_path to dict pipeline_parameter_values['gs_pipeline_spec_path'] = defaults['pipelines']['gs_pipeline_job_spec_path'] + # Construct pipeline_parameter_values.json serialized_params = json.dumps(pipeline_parameter_values, indent=4) write_file(BASE_DIR + GENERATED_PARAMETER_VALUES_PATH, serialized_params, 'w') @@ -206,20 +307,35 @@ def build_services(): submission_service_base = BASE_DIR + 
'services/submission_service' # Write cloud run dockerfile - write_file(f'{submission_service_base}/Dockerfile', submission_service_dockerfile_jinja(), 'w') + write_file( + f'{submission_service_base}/Dockerfile', + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'Dockerfile.j2', + base_dir=BASE_DIR, + generated_license=GENERATED_LICENSE), + 'w') # Write requirements files for cloud run base and queueing svc - write_file(f'{submission_service_base}/requirements.txt', submission_service_requirements_jinja( - pipeline_job_submission_service_type=defaults['gcp']['pipeline_job_submission_service_type']), 'w') + write_file( + f'{submission_service_base}/requirements.txt', + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'requirements.txt.j2', + pinned_kfp_version=PINNED_KFP_VERSION, + pipeline_job_submission_service_type=defaults['gcp']['pipeline_job_submission_service_type']), + 'w') # Write main code files for cloud run base and queueing svc - write_file(f'{submission_service_base}/main.py', submission_service_main_jinja( - naming_prefix=defaults['gcp']['naming_prefix'], - pipeline_root=defaults['pipelines']['pipeline_storage_path'], - pipeline_job_runner_service_account=defaults['gcp']['pipeline_job_runner_service_account'], - pipeline_job_submission_service_type=defaults['gcp']['pipeline_job_submission_service_type'], - project_id=defaults['gcp']['project_id'], - setup_model_monitoring=defaults['gcp']['setup_model_monitoring']), 'w') + write_file( + f'{submission_service_base}/main.py', + render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'main.py.j2', + generated_license=GENERATED_LICENSE, + pipeline_root=defaults['pipelines']['pipeline_storage_path'], + pipeline_job_runner_service_account=defaults['gcp']['pipeline_job_runner_service_account'], + 
pipeline_job_submission_service_type=defaults['gcp']['pipeline_job_submission_service_type'], + project_id=defaults['gcp']['project_id'], + setup_model_monitoring=defaults['gcp']['setup_model_monitoring']), + 'w') def create_component_base_requirements(): @@ -291,291 +407,3 @@ def create_component_base_requirements(): # Stringify and sort reqs_str = ''.join(r+'\n' for r in sorted(set_of_requirements)) return reqs_str - - -def build_pipeline_spec_jinja() -> str: - """Generates code for build_pipeline_spec.sh which builds the pipeline specs. - - Returns: - str: build_pipeline_spec.sh script. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'build_pipeline_spec.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - base_dir=BASE_DIR) - - -def build_components_jinja() -> str: - """Generates code for build_components.sh which builds the components. - - Returns: - str: build_components.sh script. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'build_components.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - base_dir=BASE_DIR) - - -def run_pipeline_jinja() -> str: - """Generates code for run_pipeline.sh which runs the pipeline locally. - - Returns: - str: run_pipeline.sh script. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'run_pipeline.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - base_dir=BASE_DIR) - - -def run_all_jinja() -> str: - """Generates code for run_all.sh which builds runs all other shell scripts. - - Returns: - str: run_all.sh script. 
- """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'run_all.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - base_dir=BASE_DIR) - - -def create_model_monitoring_job_jinja() -> str: - """Generates code for create_model_monitoring_job.sh which creates a Vertex AI - model monitoring job. - - Returns: - str: create_model_monitoring_job.sh script. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'create_model_monitoring_job.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - base_dir=BASE_DIR) - - -def publish_to_topic_jinja(pubsub_topic_name: str) -> str: - """Generates code for publish_to_topic.sh which submits a message to the - pipeline job submission service. - - Args: - pubsub_topic_name: The name of the pubsub topic to publish to. - - Returns: - str: publish_to_topic.sh script. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.scripts') / 'publish_to_topic.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - base_dir=BASE_DIR, - generated_license=GENERATED_LICENSE, - generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, - pubsub_topic_name=pubsub_topic_name) - - -def readme_jinja(setup_model_monitoring: bool, use_ci: str) -> str: - """Generates code for readme.md which is a readme markdown file to describe the contents of the - generated AutoMLOps code repo. - - Args: - setup_model_monitoring: Boolean parameter which specifies whether to set up a Vertex AI Model Monitoring Job. - use_ci: Flag that determines whether to use Cloud CI/CD. - - Returns: - str: README.md file. 
- """ - template_file = import_files(KFP_TEMPLATES_PATH) / 'README.md.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render(setup_model_monitoring=setup_model_monitoring, use_ci=use_ci) - - -def component_base_dockerfile_jinja(base_image: str) -> str: - """Generates code for a Dockerfile to be written to the component_base directory. - - Args: - base_image: The image to use in the component base dockerfile. - - Returns: - str: Dockerfile file. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.components.component_base') / 'Dockerfile.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - base_image=base_image, - generated_license=GENERATED_LICENSE) - - -def component_base_task_file_jinja(custom_code_contents: str, kfp_spec_bool: str) -> str: - """Generates code for the task.py file to be written to the component_base/src directory. - - Args: - custom_code_contents: Code inside of the component, specified by the user. - kfp_spec_bool: Boolean that specifies whether components are defined using kfp. - - Returns: - str: Contents of the task.py file. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.components.component_base.src') / 'task.py.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - custom_code_contents=custom_code_contents, - generated_license=GENERATED_LICENSE, - kfp_spec_bool=kfp_spec_bool) - - -def pipeline_runner_jinja() -> str: - """Generates code for the pipeline_runner.py file to be written to the pipelines directory. - - Returns: - str: pipeline_runner.py file. 
- """ - template_file = import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline_runner.py.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render(generated_license=GENERATED_LICENSE) - - -def pipeline_jinja( - components_list: list, - custom_training_job_specs: list, - pipeline_scaffold_contents: str, - project_id: str) -> str: - """Generates code for the pipeline.py file to be written to the pipelines directory. - - Args: - components_list: Contains the names or paths of all component yamls in the dir. - custom_training_job_specs: Specifies the specs to run the training job with. - pipeline_scaffold_contents: The contents of the pipeline scaffold file, - which can be found at PIPELINE_CACHE_FILE. - project_id: The project ID. - - Returns: - str: pipeline.py file. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline.py.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - components_list=components_list, - custom_training_job_specs=custom_training_job_specs, - generated_license=GENERATED_LICENSE, - pipeline_scaffold_contents=pipeline_scaffold_contents, - project_id=project_id) - - -def pipeline_requirements_jinja() -> str: - """Generates code for a requirements.txt to be written to the pipelines directory. - - Returns: - str: requirements.txt file for pipelines. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'requirements.txt.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render(pinned_kfp_version=PINNED_KFP_VERSION) - - -def submission_service_dockerfile_jinja() -> str: - """Generates code for a Dockerfile to be written to the serivces/submission_service directory. - - Returns: - str: Dockerfile file. 
- """ - template_file = import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'Dockerfile.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - base_dir=BASE_DIR, - generated_license=GENERATED_LICENSE) - - -def submission_service_requirements_jinja(pipeline_job_submission_service_type: str) -> str: - """Generates code for a requirements.txt to be written to the serivces/submission_service directory. - - Args: - pipeline_job_submission_service_type: The tool to host for the cloud submission service (e.g. cloud run, cloud functions). - - Returns: - str: requirements.txt file for submission_service. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'requirements.txt.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - pinned_kfp_version=PINNED_KFP_VERSION, - pipeline_job_submission_service_type=pipeline_job_submission_service_type) - - -def submission_service_main_jinja( - naming_prefix: str, - pipeline_root: str, - pipeline_job_runner_service_account: str, - pipeline_job_submission_service_type: str, - project_id: str, - setup_model_monitoring: str) -> str: - """Generates content for main.py to be written to the serivces/submission_service directory. - This file contains code for running a flask service that will act as a pipeline job submission service. - - Args: - naming_prefix: Unique value used to differentiate pipelines and services across AutoMLOps runs. - pipeline_root: GS location where to store metadata from pipeline runs. - pipeline_job_runner_service_account: Service Account to runner PipelineJobs. - pipeline_job_submission_service_type: The tool to host for the cloud submission service (e.g. cloud run, cloud functions). - project_id: The project ID. - setup_model_monitoring: Boolean parameter which specifies whether to set up a Vertex AI Model Monitoring Job. 
- - Returns: - str: Content of serivces/submission_service main.py. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'main.py.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - naming_prefix=naming_prefix, - pipeline_root=pipeline_root, - pipeline_job_runner_service_account=pipeline_job_runner_service_account, - pipeline_job_submission_service_type=pipeline_job_submission_service_type, - project_id=project_id, - setup_model_monitoring=setup_model_monitoring) - - -def model_monitoring_monitor_jinja() -> str: - """Generates content for monitor.py to be written to the model_monitoring directory. - This file contains code for creating or updating a Model Monitoring Job in Vertex AI - for a deployed model endpoint. - - Returns: - str: Content of model_monitoring/monitor.py. - """ - template_file = import_files(KFP_TEMPLATES_PATH + '.model_monitoring') / 'monitor.py.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE) - - -def model_monitoring_requirements_jinja() -> str: - """Generates code for a requirements.txt to be written to the model_monitoring directory. - - Returns: - str: requirements.txt file for model_monitoring. 
- """ - template_file = import_files(KFP_TEMPLATES_PATH + '.model_monitoring') / 'requirements.txt.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render() diff --git a/google_cloud_automlops/provisioning/gcloud/builder.py b/google_cloud_automlops/provisioning/gcloud/builder.py index 34dcfa6..929bee8 100644 --- a/google_cloud_automlops/provisioning/gcloud/builder.py +++ b/google_cloud_automlops/provisioning/gcloud/builder.py @@ -29,6 +29,7 @@ from google_cloud_automlops.utils.utils import ( get_required_apis, read_yaml_file, + render_jinja, write_and_chmod ) from google_cloud_automlops.utils.constants import ( @@ -75,118 +76,35 @@ def build( defaults = read_yaml_file(GENERATED_DEFAULTS_FILE) required_apis = get_required_apis(defaults) # create provision_resources.sh - write_and_chmod(GENERATED_RESOURCES_SH_FILE, provision_resources_script_jinja( - artifact_repo_location=config.artifact_repo_location, - artifact_repo_name=config.artifact_repo_name, - artifact_repo_type=config.artifact_repo_type, - build_trigger_location=config.build_trigger_location, - build_trigger_name=config.build_trigger_name, - deployment_framework=config.deployment_framework, - naming_prefix=config.naming_prefix, - pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, - pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, - pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, - pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, - project_id=project_id, - pubsub_topic_name=config.pubsub_topic_name, - required_apis=required_apis, - schedule_location=config.schedule_location, - schedule_name=config.schedule_name, - schedule_pattern=config.schedule_pattern, - source_repo_branch=config.source_repo_branch, - source_repo_name=config.source_repo_name, - source_repo_type=config.source_repo_type, - 
storage_bucket_location=config.storage_bucket_location, - storage_bucket_name=config.storage_bucket_name, - use_ci=config.use_ci, - vpc_connector=config.vpc_connector)) - - -def provision_resources_script_jinja( - artifact_repo_location: str, - artifact_repo_name: str, - artifact_repo_type: str, - build_trigger_location: str, - build_trigger_name: str, - deployment_framework: str, - naming_prefix: str, - pipeline_job_runner_service_account: str, - pipeline_job_submission_service_location: str, - pipeline_job_submission_service_name: str, - pipeline_job_submission_service_type: str, - project_id: str, - pubsub_topic_name: str, - required_apis: list, - schedule_location: str, - schedule_name: str, - schedule_pattern: str, - source_repo_branch: str, - source_repo_name: str, - source_repo_type: str, - storage_bucket_location: str, - storage_bucket_name: str, - use_ci: bool, - vpc_connector: str) -> str: - """Generates code for provision_resources.sh which sets up the project's environment. - - Args: - artifact_repo_location: Region of the artifact repo (default use with Artifact Registry). - artifact_repo_name: Artifact repo name where components are stored (default use with Artifact Registry). - artifact_repo_type: The type of artifact repository to use (e.g. Artifact Registry, JFrog, etc.) - build_trigger_location: The location of the build trigger (for cloud build). - build_trigger_name: The name of the build trigger (for cloud build). - deployment_framework: The CI tool to use (e.g. cloud build, github actions, etc.) - naming_prefix: Unique value used to differentiate pipelines and services across AutoMLOps runs. - pipeline_job_runner_service_account: Service Account to run PipelineJobs. - pipeline_job_submission_service_location: The location of the cloud submission service. - pipeline_job_submission_service_name: The name of the cloud submission service. - pipeline_job_submission_service_type: The tool to host for the cloud submission service (e.g. 
cloud run, cloud functions). - project_id: The project ID. - pubsub_topic_name: The name of the pubsub topic to publish to. - required_apis: List of APIs that are required to run the service. - schedule_location: The location of the scheduler resource. - schedule_name: The name of the scheduler resource. - schedule_pattern: Cron formatted value used to create a Scheduled retrain job. - source_repo_branch: The branch to use in the source repository. - source_repo_name: The name of the source repository to use. - source_repo_type: The type of source repository to use (e.g. gitlab, github, etc.) - storage_bucket_location: Region of the GS bucket. - storage_bucket_name: GS bucket name where pipeline run metadata is stored. - use_ci: Flag that determines whether to use Cloud CI/CD. - vpc_connector: The name of the vpc connector to use. - - Returns: - str: provision_resources.sh shell script. - """ - template_file = import_files(GCLOUD_TEMPLATES_PATH) / 'provision_resources.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_location=artifact_repo_location, - artifact_repo_name=artifact_repo_name, - artifact_repo_type=artifact_repo_type, + write_and_chmod( + GENERATED_RESOURCES_SH_FILE, + render_jinja( + template_path=import_files(GCLOUD_TEMPLATES_PATH) / 'provision_resources.sh.j2', + artifact_repo_location=config.artifact_repo_location, + artifact_repo_name=config.artifact_repo_name, + artifact_repo_type=config.artifact_repo_type, base_dir=BASE_DIR, - build_trigger_location=build_trigger_location, - build_trigger_name=build_trigger_name, - deployment_framework=deployment_framework, + build_trigger_location=config.build_trigger_location, + build_trigger_name=config.build_trigger_name, + deployment_framework=config.deployment_framework, generated_license=GENERATED_LICENSE, generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, - naming_prefix=naming_prefix, - 
pipeline_job_runner_service_account=pipeline_job_runner_service_account, - pipeline_job_submission_service_location=pipeline_job_submission_service_location, - pipeline_job_submission_service_name=pipeline_job_submission_service_name, - pipeline_job_submission_service_type=pipeline_job_submission_service_type, + naming_prefix=config.naming_prefix, + pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, + pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, + pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, + pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, project_id=project_id, - pubsub_topic_name=pubsub_topic_name, + pubsub_topic_name=config.pubsub_topic_name, required_apis=required_apis, required_iam_roles=IAM_ROLES_RUNNER_SA, - schedule_location=schedule_location, - schedule_name=schedule_name, - schedule_pattern=schedule_pattern, - source_repo_branch=source_repo_branch, - source_repo_name=source_repo_name, - source_repo_type=source_repo_type, - storage_bucket_location=storage_bucket_location, - storage_bucket_name=storage_bucket_name, - use_ci=use_ci, - vpc_connector=vpc_connector) + schedule_location=config.schedule_location, + schedule_name=config.schedule_name, + schedule_pattern=config.schedule_pattern, + source_repo_branch=config.source_repo_branch, + source_repo_name=config.source_repo_name, + source_repo_type=config.source_repo_type, + storage_bucket_location=config.storage_bucket_location, + storage_bucket_name=config.storage_bucket_name, + use_ci=config.use_ci, + vpc_connector=config.vpc_connector)) diff --git a/google_cloud_automlops/provisioning/pulumi/builder.py b/google_cloud_automlops/provisioning/pulumi/builder.py index a8b1040..840016a 100644 --- a/google_cloud_automlops/provisioning/pulumi/builder.py +++ b/google_cloud_automlops/provisioning/pulumi/builder.py @@ -18,10 +18,9 @@ # pylint: disable=line-too-long # 
pylint: disable=unused-import -from jinja2 import Template - from google_cloud_automlops.utils.utils import ( write_file, + render_jinja, make_dirs, ) @@ -77,105 +76,39 @@ def build( pulumi_folder = pipeline_model_name + '/' # create Pulumi.yaml - write_file(pulumi_folder + 'Pulumi.yaml', _create_pulumi_yaml_jinja( - pipeline_model_name=pipeline_model_name, - pulumi_runtime=config.pulumi_runtime), 'w' - ) - - # create Pulumi.dev.yaml - write_file(pulumi_folder + 'Pulumi.dev.yaml', _create_pulumi_dev_yaml_jinja( - project_id=project_id, - pipeline_model_name=pipeline_model_name, - region=config.region, - gcs_bucket_name=gcs_bucket_name), 'w' - ) - - # create python __main__.py - if config.pulumi_runtime == PulumiRuntime.PYTHON: - write_file(pulumi_folder + '__main__.py', _create_main_python_jinja( - artifact_repo_name=artifact_repo_name, - source_repo_name=source_repo_name, - cloudtasks_queue_name=cloudtasks_queue_name, - cloud_build_trigger_name=cloud_build_trigger_name), 'w' - ) - - -def _create_pulumi_yaml_jinja( - pipeline_model_name: str, - pulumi_runtime: str, -) -> str: - """Generates code for Pulumi.yaml, the pulumi script that contains details to deploy project's GCP environment. - - Args: - config.pipeline_model_name: Name of the model being deployed. - config.pulumi_runtime: The pulumi runtime option (default: PulumiRuntime.PYTHON). - - Returns: - str: Pulumi.yaml config script. 
- """ - - with open(PULUMI_TEMPLATES_PATH / 'Pulumi.yaml.jinja', 'r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + write_file( + pulumi_folder + 'Pulumi.yaml', + render_jinja( + template_path=PULUMI_TEMPLATES_PATH / 'Pulumi.yaml.jinja', generated_license=GENERATED_LICENSE, pipeline_model_name=pipeline_model_name, - pulumi_runtime=pulumi_runtime.value, - ) - - -def _create_pulumi_dev_yaml_jinja( - project_id: str, - pipeline_model_name: str, - region: str, - gcs_bucket_name: str, -) -> str: - """Generates code for Pulumi.dev.yaml, the pulumi script that contains details to deploy dev environment config. - - Args: - project_id: The project ID. - config.pipeline_model_name: Name of the model being deployed. - config.region: region used in gcs infrastructure config. - config.gcs_bucket_name: gcs bucket name to use as part of the model infrastructure. - - Returns: - str: Pulumi.dev.yaml config script. - """ + pulumi_runtime=config.pulumi_runtime.value), + 'w' + ) - with open(PULUMI_TEMPLATES_PATH / 'Pulumi.dev.yaml.jinja', 'r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + # create Pulumi.dev.yaml + write_file( + pulumi_folder + 'Pulumi.dev.yaml', + render_jinja( + template_path=PULUMI_TEMPLATES_PATH / 'Pulumi.dev.yaml.jinja', generated_license=GENERATED_LICENSE, project_id=project_id, pipeline_model_name=pipeline_model_name, - region=region, - gcs_bucket_name=gcs_bucket_name, - ) - - -def _create_main_python_jinja( - artifact_repo_name, - source_repo_name, - cloudtasks_queue_name, - cloud_build_trigger_name, -) -> str: - """Generates code for __main__.py, the pulumi script that creates the primary resources. - - Args: - artifact_repo_name: name of the artifact registry for the model infrastructure. - source_repo_name: source repository used as part of the the model infra. - cloudtasks_queue_name: name of the task queue used for model scheduling. 
- cloud_build_trigger_name: name of the cloud build trigger for the model infra. - - Returns: - str: Main pulumi script. - """ + region=config.region, + gcs_bucket_name=gcs_bucket_name), + 'w' + ) - with open(PULUMI_TEMPLATES_PATH / 'python/__main__.py.jinja', 'r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE, - artifact_repo_name=artifact_repo_name, - source_repo_name=source_repo_name, - cloudtasks_queue_name=cloudtasks_queue_name, - cloud_build_trigger_name=cloud_build_trigger_name, + # create python __main__.py + if config.pulumi_runtime == PulumiRuntime.PYTHON: + write_file( + pulumi_folder + '__main__.py', + render_jinja( + template_path=PULUMI_TEMPLATES_PATH / 'python/__main__.py.jinja', + generated_license=GENERATED_LICENSE, + artifact_repo_name=artifact_repo_name, + source_repo_name=source_repo_name, + cloudtasks_queue_name=cloudtasks_queue_name, + cloud_build_trigger_name=cloud_build_trigger_name), + 'w' ) diff --git a/google_cloud_automlops/provisioning/terraform/builder.py b/google_cloud_automlops/provisioning/terraform/builder.py index 4d6b874..3994413 100644 --- a/google_cloud_automlops/provisioning/terraform/builder.py +++ b/google_cloud_automlops/provisioning/terraform/builder.py @@ -23,11 +23,10 @@ # Try backported to PY<37 `importlib_resources` from importlib_resources import files as import_files -from jinja2 import Template - from google_cloud_automlops.utils.utils import ( get_required_apis, read_yaml_file, + render_jinja, write_and_chmod, write_file ) @@ -81,368 +80,180 @@ def build( """ defaults = read_yaml_file(GENERATED_DEFAULTS_FILE) required_apis = get_required_apis(defaults) - # create environment/data.tf - write_file(f'{BASE_DIR}provision/environment/data.tf', create_environment_data_tf_jinja( - required_apis=required_apis, - use_ci=config.use_ci,), 'w') - # create environment/iam.tf - write_file(f'{BASE_DIR}provision/environment/iam.tf', 
create_environment_iam_tf_jinja(), 'w') - # create environment/main.tf - write_file(f'{BASE_DIR}provision/environment/main.tf', create_environment_main_tf_jinja( - artifact_repo_type=config.artifact_repo_type, - deployment_framework=config.deployment_framework, - naming_prefix=config.naming_prefix, - pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, - schedule_pattern=config.schedule_pattern, - source_repo_type=config.source_repo_type, - use_ci=config.use_ci, - vpc_connector=config.vpc_connector), 'w') - # create environment/outputs.tf - write_file(f'{BASE_DIR}provision/environment/outputs.tf', create_environment_outputs_tf_jinja( - artifact_repo_type=config.artifact_repo_type, - deployment_framework=config.deployment_framework, - pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, - schedule_pattern=config.schedule_pattern, - source_repo_type=config.source_repo_type, - use_ci=config.use_ci), 'w') - # create environment/provider.tf - write_file(f'{BASE_DIR}provision/environment/provider.tf', create_environment_provider_tf_jinja(), 'w') - # create environment/variables.tf - write_file(f'{BASE_DIR}provision/environment/variables.tf', create_environment_variables_tf_jinja(), 'w') - # create environment/variables.auto.tfvars - if config.deployment_framework == Deployer.CLOUDBUILD.value: - write_file(f'{BASE_DIR}provision/environment/variables.auto.tfvars', create_environment_variables_auto_tfvars_jinja( - artifact_repo_location=config.artifact_repo_location, - artifact_repo_name=config.artifact_repo_name, - build_trigger_location=config.build_trigger_location, - build_trigger_name=config.build_trigger_name, - pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, - pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, - pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, - project_id=project_id, - 
provision_credentials_key=config.provision_credentials_key, - pubsub_topic_name=config.pubsub_topic_name, - schedule_location=config.schedule_location, - schedule_name=config.schedule_name, - schedule_pattern=config.schedule_pattern, - source_repo_branch=config.source_repo_branch, - source_repo_name=config.source_repo_name, - storage_bucket_location=config.storage_bucket_location, - storage_bucket_name=config.storage_bucket_name, - vpc_connector=config.vpc_connector), 'w') - #TODO: implement workload identity as optional - if config.deployment_framework == Deployer.GITHUB_ACTIONS.value: - write_file(f'{BASE_DIR}provision/environment/variables.auto.tfvars', create_environment_variables_auto_tfvars_jinja( - artifact_repo_location=config.artifact_repo_location, - artifact_repo_name=config.artifact_repo_name, - build_trigger_location=config.build_trigger_location, - build_trigger_name=config.build_trigger_name, - pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, - pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, - pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, - project_id=project_id, - provision_credentials_key=config.provision_credentials_key, - pubsub_topic_name=config.pubsub_topic_name, - schedule_location=config.schedule_location, - schedule_name=config.schedule_name, - schedule_pattern=config.schedule_pattern, - source_repo_branch=config.source_repo_branch, - source_repo_name=config.source_repo_name, - storage_bucket_location=config.storage_bucket_location, - storage_bucket_name=config.storage_bucket_name, - vpc_connector=config.vpc_connector), 'w') - # create environment/versions.tf - write_file(f'{BASE_DIR}provision/environment/versions.tf', create_environment_versions_tf_jinja(storage_bucket_name=config.storage_bucket_name), 'w') - # create provision_resources.sh - write_and_chmod(GENERATED_RESOURCES_SH_FILE, create_provision_resources_script_jinja()) - # 
create state_bucket/main.tf - write_file(f'{BASE_DIR}provision/state_bucket/main.tf', create_state_bucket_main_tf_jinja(), 'w') - # create state_bucket/variables.tf - write_file(f'{BASE_DIR}provision/state_bucket/variables.tf', create_state_bucket_variables_tf_jinja(), 'w') - # create state_bucket/variables.auto.tfvars - write_file(f'{BASE_DIR}provision/state_bucket/variables.auto.tfvars', create_state_bucket_variables_auto_tfvars_jinja( - project_id=project_id, - storage_bucket_location=config.storage_bucket_location, - storage_bucket_name=config.storage_bucket_name), 'w') - -def create_environment_data_tf_jinja( - required_apis: list, - use_ci: bool) -> str: - """Generates code for environment/data.tf, the terraform hcl script that contains terraform remote backend and org project details. - - Args: - required_apis: List of APIs that are required to run the service. - use_ci: Flag that determines whether to use Cloud CI/CD. - - Returns: - str: environment/data.tf file. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'data.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + # create environment/data.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/data.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'data.tf.j2', generated_license=GENERATED_LICENSE, required_apis=required_apis, required_iam_roles=IAM_ROLES_RUNNER_SA, - use_ci=use_ci) - - -def create_environment_iam_tf_jinja() -> str: - """Generates code for environment/iam.tf, the terraform hcl script that contains service accounts iam bindings for project's environment. - - Returns: - str: environment/iam.tf file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'iam.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render(generated_license=GENERATED_LICENSE) - - -def create_environment_main_tf_jinja( - artifact_repo_type: str, - deployment_framework: str, - naming_prefix: str, - pipeline_job_submission_service_type: str, - schedule_pattern: str, - source_repo_type: str, - use_ci: bool, - vpc_connector: str) -> str: - """Generates code for environment/main.tf, the terraform hcl script that contains terraform resources configs to deploy resources in the project. + use_ci=config.use_ci + ), + mode='w') - Args: - artifact_repo_type: The type of artifact repository to use (e.g. Artifact Registry, JFrog, etc.) - deployment_framework: The CI tool to use (e.g. cloud build, github actions, etc.) - naming_prefix: Unique value used to differentiate pipelines and services across AutoMLOps runs. - pipeline_job_submission_service_type: The tool to host for the cloud submission service (e.g. cloud run, cloud functions). - schedule_pattern: Cron formatted value used to create a Scheduled retrain job. - source_repo_type: The type of source repository to use (e.g. gitlab, github, etc.) - use_ci: Flag that determines whether to use Cloud CI/CD. - vpc_connector: The name of the vpc connector to use. + # create environment/iam.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/iam.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'iam.tf.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') - Returns: - str: environment/main.tf file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'main.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_type=artifact_repo_type, + # create environment/main.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/main.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'main.tf.j2', + artifact_repo_type=config.artifact_repo_type, base_dir=BASE_DIR, - deployment_framework=deployment_framework, + deployment_framework=config.deployment_framework, generated_license=GENERATED_LICENSE, generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, - naming_prefix=naming_prefix, - pipeline_job_submission_service_type=pipeline_job_submission_service_type, - schedule_pattern=schedule_pattern, - source_repo_type=source_repo_type, - use_ci=use_ci, - vpc_connector=vpc_connector) - - -def create_environment_outputs_tf_jinja( - artifact_repo_type: str, - deployment_framework: str, - pipeline_job_submission_service_type: str, - schedule_pattern: str, - source_repo_type: str, - use_ci: bool) -> str: - """Generates code for environment/outputs.tf, the terraform hcl script that contains outputs from project's environment. - - Args: - artifact_repo_type: The type of artifact repository to use (e.g. Artifact Registry, JFrog, etc.) - deployment_framework: The CI tool to use (e.g. cloud build, github actions, etc.) - pipeline_job_submission_service_type: The tool to host for the cloud submission service (e.g. cloud run, cloud functions). - schedule_pattern: Cron formatted value used to create a Scheduled retrain job. - source_repo_type: The type of source repository to use (e.g. gitlab, github, etc.) - use_ci: Flag that determines whether to use Cloud CI/CD. 
+ naming_prefix=config.naming_prefix, + pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, + schedule_pattern=config.schedule_pattern, + source_repo_type=config.source_repo_type, + use_ci=config.use_ci, + vpc_connector=config.vpc_connector + ), + mode='w') - Returns: - str: environment/outputs.tf file. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'outputs.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_type=artifact_repo_type, - deployment_framework=deployment_framework, + # create environment/outputs.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/outputs.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'outputs.tf.j2', + artifact_repo_type=config.artifact_repo_type, + deployment_framework=config.deployment_framework, generated_license=GENERATED_LICENSE, - pipeline_job_submission_service_type=pipeline_job_submission_service_type, - schedule_pattern=schedule_pattern, - source_repo_type=source_repo_type, - use_ci=use_ci) - - -def create_environment_provider_tf_jinja() -> str: - """Generates code for environment/provider.tf, the terraform hcl script that contains teraform providers used to deploy project's environment. - - Returns: - str: environment/provider.tf file. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'provider.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE) - - -def create_environment_variables_tf_jinja() -> str: - """Generates code for environment/variables.tf, the terraform hcl script that contains variables used to deploy project's environment. - - Returns: - str: environment/variables.tf file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE) - - -def create_environment_variables_auto_tfvars_jinja( - artifact_repo_location: str, - artifact_repo_name: str, - build_trigger_location: str, - build_trigger_name: str, - pipeline_job_runner_service_account: str, - pipeline_job_submission_service_location: str, - pipeline_job_submission_service_name: str, - project_id: str, - provision_credentials_key: str, - pubsub_topic_name: str, - schedule_location: str, - schedule_name: str, - schedule_pattern: str, - source_repo_branch: str, - source_repo_name: str, - storage_bucket_location: str, - storage_bucket_name: str, - vpc_connector: str) -> str: - """Generates code for environment/variables.auto.tfvars, the terraform hcl script that contains teraform arguments for variables used to deploy project's environment. + pipeline_job_submission_service_type=config.pipeline_job_submission_service_type, + schedule_pattern=config.schedule_pattern, + source_repo_type=config.source_repo_type, + use_ci=config.use_ci + ), + mode='w') - Args: - artifact_repo_location: Region of the artifact repo (default use with Artifact Registry). - artifact_repo_name: Artifact repo name where components are stored (default use with Artifact Registry). - build_trigger_location: The location of the build trigger (for cloud build). - build_trigger_name: The name of the build trigger (for cloud build). - pipeline_job_runner_service_account: Service Account to run PipelineJobs. - pipeline_job_submission_service_location: The location of the cloud submission service. - pipeline_job_submission_service_name: The name of the cloud submission service. - project_id: The project ID. - pubsub_topic_name: The name of the pubsub topic to publish to. - schedule_location: The location of the scheduler resource. 
- schedule_name: The name of the scheduler resource. - schedule_pattern: Cron formatted value used to create a Scheduled retrain job. - source_repo_branch: The branch to use in the source repository. - source_repo_name: The name of the source repository to use. - storage_bucket_location: Region of the GS bucket. - storage_bucket_name: GS bucket name where pipeline run metadata is stored. - vpc_connector: The name of the vpc connector to use. + # create environment/provider.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/provider.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'provider.tf.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') - Returns: - str: environment/variables.auto.tfvars file. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.auto.tfvars.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - artifact_repo_location=artifact_repo_location, - artifact_repo_name=artifact_repo_name, - build_trigger_location=build_trigger_location, - build_trigger_name=build_trigger_name, - generated_license=GENERATED_LICENSE, - pipeline_job_runner_service_account=pipeline_job_runner_service_account, - pipeline_job_submission_service_location=pipeline_job_submission_service_location, - pipeline_job_submission_service_name=pipeline_job_submission_service_name, - project_id=project_id, - provision_credentials_key=provision_credentials_key, - pubsub_topic_name=pubsub_topic_name, - schedule_location=schedule_location, - schedule_name=schedule_name, - schedule_pattern=schedule_pattern, - source_repo_branch=source_repo_branch, - source_repo_name=source_repo_name, - storage_bucket_location=storage_bucket_location, - storage_bucket_name=storage_bucket_name, - vpc_connector=vpc_connector) + # create environment/variables.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/variables.tf', + 
text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.tf.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') + # create environment/variables.auto.tfvars + if config.deployment_framework == Deployer.CLOUDBUILD.value: + write_file( + filepath=f'{BASE_DIR}provision/environment/variables.auto.tfvars', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.auto.tfvars.j2', + artifact_repo_location=config.artifact_repo_location, + artifact_repo_name=config.artifact_repo_name, + build_trigger_location=config.build_trigger_location, + build_trigger_name=config.build_trigger_name, + generated_license=GENERATED_LICENSE, + pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, + pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, + pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, + project_id=project_id, + provision_credentials_key=config.provision_credentials_key, + pubsub_topic_name=config.pubsub_topic_name, + schedule_location=config.schedule_location, + schedule_name=config.schedule_name, + schedule_pattern=config.schedule_pattern, + source_repo_branch=config.source_repo_branch, + source_repo_name=config.source_repo_name, + storage_bucket_location=config.storage_bucket_location, + storage_bucket_name=config.storage_bucket_name, + vpc_connector=config.vpc_connector + ), + mode='w') -def create_environment_versions_tf_jinja(storage_bucket_name: str) -> str: - """Generates code for environment/versions.tf, the terraform hcl script that contains teraform version information. - Args: - storage_bucket_name: GS bucket name where pipeline run metadata is stored. 
+ #TODO: implement workload identity as optional + if config.deployment_framework == Deployer.GITHUB_ACTIONS.value: + write_file( + filepath=f'{BASE_DIR}provision/environment/variables.auto.tfvars', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.auto.tfvars.j2', + artifact_repo_location=config.artifact_repo_location, + artifact_repo_name=config.artifact_repo_name, + build_trigger_location=config.build_trigger_location, + build_trigger_name=config.build_trigger_name, + generated_license=GENERATED_LICENSE, + pipeline_job_runner_service_account=config.pipeline_job_runner_service_account, + pipeline_job_submission_service_location=config.pipeline_job_submission_service_location, + pipeline_job_submission_service_name=config.pipeline_job_submission_service_name, + project_id=project_id, + provision_credentials_key=config.provision_credentials_key, + pubsub_topic_name=config.pubsub_topic_name, + schedule_location=config.schedule_location, + schedule_name=config.schedule_name, + schedule_pattern=config.schedule_pattern, + source_repo_branch=config.source_repo_branch, + source_repo_name=config.source_repo_name, + storage_bucket_location=config.storage_bucket_location, + storage_bucket_name=config.storage_bucket_name, + vpc_connector=config.vpc_connector + ), + mode='w') - Returns: - str: environment/versions.tf file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'versions.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + # create environment/versions.tf + write_file( + filepath=f'{BASE_DIR}provision/environment/versions.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'versions.tf.j2', generated_license=GENERATED_LICENSE, - storage_bucket_name=storage_bucket_name) - + storage_bucket_name=config.storage_bucket_name + ), + mode='w') -def create_provision_resources_script_jinja() -> str: - """Generates code for provision_resources.sh which sets up the project's environment using terraform. - - Returns: - str: provision_resources.sh shell script. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH) / 'provision_resources.sh.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + # create provision_resources.sh + write_and_chmod( + filepath=GENERATED_RESOURCES_SH_FILE, + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH) / 'provision_resources.sh.j2', base_dir=BASE_DIR, - generated_license=GENERATED_LICENSE) - - -def create_state_bucket_variables_tf_jinja() -> str: - """Generates code for state_bucket/variables.tf, the terraform hcl script that contains variables used for the state_bucket. - - Returns: - str: state_bucket/variables.tf file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'variables.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE) - + generated_license=GENERATED_LICENSE + )) -def create_state_bucket_variables_auto_tfvars_jinja( - project_id: str, - storage_bucket_location: str, - storage_bucket_name: str) -> str: - """Generates code for state_bucket/variables.auto.tfvars, the terraform hcl script that contains teraform arguments for variables used for the state_bucket. - Uses the string f'{storage_bucket_name}-bucket-tfstate' for the name of the storage state bucket. + # create state_bucket/main.tf + write_file( + filepath=f'{BASE_DIR}provision/state_bucket/main.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'main.tf.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') - Args: - project_id: The project ID. - storage_bucket_location: Region of the GS bucket. - storage_bucket_name: GS bucket name where pipeline run metadata is stored. + # create state_bucket/variables.tf + write_file( + filepath=f'{BASE_DIR}provision/state_bucket/variables.tf', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'variables.tf.j2', + generated_license=GENERATED_LICENSE + ), + mode='w') - Returns: - str: environment/variables.auto.tfvars file. 
- """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'variables.auto.tfvars.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( + # create state_bucket/variables.auto.tfvars + write_file( + filepath=f'{BASE_DIR}provision/state_bucket/variables.auto.tfvars', + text=render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'variables.auto.tfvars.j2', project_id=project_id, - storage_bucket_location=storage_bucket_location, - storage_bucket_name=storage_bucket_name) - - -def create_state_bucket_main_tf_jinja() -> str: - """Generates code for state_bucket/main.tf, the terraform hcl script that contains terraform resources configs to create the state_bucket. - - Returns: - str: state_bucket/main.tf file. - """ - template_file = import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'main.tf.j2' - with template_file.open('r', encoding='utf-8') as f: - template = Template(f.read()) - return template.render( - generated_license=GENERATED_LICENSE) + storage_bucket_location=config.storage_bucket_location, + storage_bucket_name=config.storage_bucket_name + ), + mode='w') diff --git a/google_cloud_automlops/utils/utils.py b/google_cloud_automlops/utils/utils.py index 642188f..28f4a7b 100644 --- a/google_cloud_automlops/utils/utils.py +++ b/google_cloud_automlops/utils/utils.py @@ -30,6 +30,7 @@ from packaging import version import yaml +from jinja2 import Template from googleapiclient import discovery import google.auth @@ -1014,3 +1015,18 @@ def resources_generation_manifest(defaults: dict): if defaults['gcp']['schedule_pattern'] != DEFAULT_SCHEDULE_PATTERN: logging.info( 'Cloud Scheduler Job: https://console.cloud.google.com/cloudscheduler') + +def render_jinja(template_path, **template_vars): + """Renders a Jinja2 template with provided variables. + + Args: + template_path (str): The path to the Jinja2 template file. 
+ **template_vars: Keyword arguments representing variables to substitute + in the template. + + Returns: + str: The rendered template as a string. + """ + with open(template_path, 'r', encoding='utf-8') as f: + template = Template(f.read()) + return template.render(**template_vars) diff --git a/tests/unit/deployments/cloudbuild/builder_test.py b/tests/unit/deployments/cloudbuild/builder_test.py index 833d6bd..27331f5 100644 --- a/tests/unit/deployments/cloudbuild/builder_test.py +++ b/tests/unit/deployments/cloudbuild/builder_test.py @@ -15,11 +15,24 @@ # pylint: disable=line-too-long # pylint: disable=missing-module-docstring +try: + from importlib.resources import files as import_files +except ImportError: + # Try backported to PY<37 `importlib_resources` + from importlib_resources import files as import_files + from typing import List import pytest -from google_cloud_automlops.deployments.cloudbuild.builder import create_cloudbuild_jinja +from google_cloud_automlops.utils.constants import ( + BASE_DIR, + CLOUDBUILD_TEMPLATES_PATH, + COMPONENT_BASE_RELATIVE_PATH, + GENERATED_LICENSE, + GENERATED_PARAMETER_VALUES_PATH +) +from google_cloud_automlops.utils.utils import render_jinja @pytest.mark.parametrize( '''artifact_repo_location, artifact_repo_name, naming_prefix,''' @@ -81,13 +94,21 @@ def test_create_cloudbuild_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - cloudbuild_config = create_cloudbuild_jinja( - artifact_repo_location, - artifact_repo_name, - naming_prefix, - project_id, - pubsub_topic_name, - use_ci) + component_base_relative_path = COMPONENT_BASE_RELATIVE_PATH if use_ci else f'{BASE_DIR}{COMPONENT_BASE_RELATIVE_PATH}' + template_file = import_files(CLOUDBUILD_TEMPLATES_PATH) / 'cloudbuild.yaml.j2' + + cloudbuild_config = render_jinja( + template_path=template_file, + artifact_repo_location=artifact_repo_location, + artifact_repo_name=artifact_repo_name, + component_base_relative_path=component_base_relative_path, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, + naming_prefix=naming_prefix, + project_id=project_id, + pubsub_topic_name=pubsub_topic_name, + use_ci=use_ci + ) for snippet in expected_output_snippets: if is_included: diff --git a/tests/unit/deployments/github_actions/builder_test.py b/tests/unit/deployments/github_actions/builder_test.py index 4c99f04..a7ed5f6 100644 --- a/tests/unit/deployments/github_actions/builder_test.py +++ b/tests/unit/deployments/github_actions/builder_test.py @@ -16,11 +16,23 @@ # pylint: disable=missing-function-docstring # pylint: disable=missing-module-docstring +try: + from importlib.resources import files as import_files +except ImportError: + # Try backported to PY<37 `importlib_resources` + from importlib_resources import files as import_files + from typing import List import pytest -from google_cloud_automlops.deployments.github_actions.builder import create_github_actions_jinja +from google_cloud_automlops.utils.constants import ( + COMPONENT_BASE_RELATIVE_PATH, + GENERATED_LICENSE, + GENERATED_PARAMETER_VALUES_PATH, + GITHUB_ACTIONS_TEMPLATES_PATH +) +from google_cloud_automlops.utils.utils import render_jinja @pytest.mark.parametrize( @@ -95,18 +107,24 @@ def test_create_github_actions_jinja( expected_output_snippets: Strings that are expected to be included (or not) based on the is_included
boolean. """ - github_actions_config = create_github_actions_jinja( - artifact_repo_location, - artifact_repo_name, - naming_prefix, - project_id, - project_number, - pubsub_topic_name, - source_repo_branch, - use_ci, - workload_identity_pool, - workload_identity_provider, - workload_identity_service_account) + template_file = import_files(GITHUB_ACTIONS_TEMPLATES_PATH) / 'github_actions.yaml.j2' + github_actions_config = render_jinja( + template_path=template_file, + artifact_repo_location=artifact_repo_location, + artifact_repo_name=artifact_repo_name, + component_base_relative_path=COMPONENT_BASE_RELATIVE_PATH, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, + naming_prefix=naming_prefix, + project_id=project_id, + project_number=project_number, + pubsub_topic_name=pubsub_topic_name, + source_repo_branch=source_repo_branch, + use_ci=use_ci, + workload_identity_pool=workload_identity_pool, + workload_identity_provider=workload_identity_provider, + workload_identity_service_account=workload_identity_service_account + ) for snippet in expected_output_snippets: if is_included: diff --git a/tests/unit/orchestration/kfp/builder_test.py b/tests/unit/orchestration/kfp/builder_test.py index c8839d8..372e078 100644 --- a/tests/unit/orchestration/kfp/builder_test.py +++ b/tests/unit/orchestration/kfp/builder_test.py @@ -17,6 +17,11 @@ # pylint: disable=missing-module-docstring import json +try: + from importlib.resources import files as import_files +except ImportError: + # Try backported to PY<37 `importlib_resources` + from importlib_resources import files as import_files import os from typing import List @@ -24,37 +29,27 @@ import pytest_mock from google_cloud_automlops.utils.constants import ( + BASE_DIR, GENERATED_LICENSE, - PINNED_KFP_VERSION + GENERATED_PARAMETER_VALUES_PATH, + KFP_TEMPLATES_PATH, + PINNED_KFP_VERSION, ) import google_cloud_automlops.orchestration.kfp.builder from 
google_cloud_automlops.orchestration.kfp.builder import ( build_component, build_pipeline, build_services, - build_pipeline_spec_jinja, - build_components_jinja, - create_model_monitoring_job_jinja, - run_pipeline_jinja, - run_all_jinja, - publish_to_topic_jinja, - readme_jinja, - component_base_dockerfile_jinja, - component_base_task_file_jinja, - pipeline_runner_jinja, - pipeline_jinja, - pipeline_requirements_jinja, - submission_service_dockerfile_jinja, - submission_service_requirements_jinja, - submission_service_main_jinja ) import google_cloud_automlops.utils.utils from google_cloud_automlops.utils.utils import ( make_dirs, read_yaml_file, + render_jinja, write_yaml_file ) + DEFAULTS = { 'gcp': { 'artifact_repo_location': 'us-central1', @@ -364,7 +359,11 @@ def test_build_pipeline_spec_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - build_pipeline_spec_script = build_pipeline_spec_jinja() + build_pipeline_spec_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'build_pipeline_spec.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + ) for snippet in expected_output_snippets: if is_included: @@ -388,7 +387,11 @@ def test_build_components_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - build_components_script = build_components_jinja() + build_components_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'build_components.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + ) for snippet in expected_output_snippets: if is_included: @@ -412,7 +415,11 @@ def test_run_pipeline_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - run_pipeline_script = run_pipeline_jinja() + run_pipeline_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'run_pipeline.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + ) for snippet in expected_output_snippets: if is_included: @@ -437,7 +444,11 @@ def test_run_all_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - run_all_script = run_all_jinja() + run_all_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'run_all.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + ) for snippet in expected_output_snippets: if is_included: @@ -463,7 +474,13 @@ def test_publish_to_topic_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - publish_to_topic_script = publish_to_topic_jinja(pubsub_topic_name=pubsub_topic_name) + publish_to_topic_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'publish_to_topic.sh.j2', + base_dir=BASE_DIR, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, + pubsub_topic_name=pubsub_topic_name + ) for snippet in expected_output_snippets: if is_included: @@ -488,7 +505,11 @@ def test_create_model_monitoring_job_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - create_model_monitoring_job_script = create_model_monitoring_job_jinja() + create_model_monitoring_job_script = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.scripts') / 'create_model_monitoring_job.sh.j2', + generated_license=GENERATED_LICENSE, + base_dir=BASE_DIR + ) for snippet in expected_output_snippets: if is_included: @@ -552,7 +573,11 @@ def test_readme_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - readme_str = readme_jinja(setup_model_monitoring=setup_model_monitoring, use_ci=use_ci) + readme_str = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH) / 'README.md.j2', + setup_model_monitoring=setup_model_monitoring, + use_ci=use_ci + ) for snippet in expected_output_snippets: if is_included: @@ -579,7 +604,11 @@ def test_component_base_dockerfile_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - component_base_dockerfile = component_base_dockerfile_jinja(base_image) + component_base_dockerfile = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.components.component_base') / 'Dockerfile.j2', + base_image=base_image, + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -630,7 +659,11 @@ def test_component_base_task_file_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - component_base_task_file = component_base_task_file_jinja(custom_code_contents, kfp_spec_bool) + component_base_task_file = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.components.component_base.src') / 'task.py.j2', + custom_code_contents=custom_code_contents, + generated_license=GENERATED_LICENSE, + kfp_spec_bool=kfp_spec_bool) for snippet in expected_output_snippets: if is_included: @@ -654,7 +687,10 @@ def test_pipeline_runner_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - pipeline_runner_py = pipeline_runner_jinja() + pipeline_runner_py = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline_runner.py.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -729,11 +765,13 @@ def test_pipeline_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - pipeline_py = pipeline_jinja( - components_list, - custom_training_job_specs, - pipeline_scaffold_contents, - project_id) + pipeline_py = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'pipeline.py.j2', + components_list=components_list, + custom_training_job_specs=custom_training_job_specs, + generated_license=GENERATED_LICENSE, + pipeline_scaffold_contents=pipeline_scaffold_contents, + project_id=project_id) for snippet in expected_output_snippets: if is_included: @@ -757,7 +795,10 @@ def test_pipeline_requirements_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - pipeline_requirements_py = pipeline_requirements_jinja() + pipeline_requirements_py = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.pipelines') / 'requirements.txt.j2', + pinned_kfp_version=PINNED_KFP_VERSION + ) for snippet in expected_output_snippets: if is_included: @@ -782,7 +823,11 @@ def test_submission_service_dockerfile_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - submission_service_dockerfile = submission_service_dockerfile_jinja() + submission_service_dockerfile = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'Dockerfile.j2', + base_dir=BASE_DIR, + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -815,7 +860,11 @@ def test_submission_service_requirements_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. 
expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - submission_service_requirements = submission_service_requirements_jinja(pipeline_job_submission_service_type=pipeline_job_submission_service_type) + submission_service_requirements = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'requirements.txt.j2', + pinned_kfp_version=PINNED_KFP_VERSION, + pipeline_job_submission_service_type=pipeline_job_submission_service_type + ) for snippet in expected_output_snippets: if is_included: @@ -912,7 +961,9 @@ def test_submission_service_main_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - submission_service_main_py = submission_service_main_jinja( + submission_service_main_py = render_jinja( + template_path=import_files(KFP_TEMPLATES_PATH + '.services.submission_service') / 'main.py.j2', + generated_license=GENERATED_LICENSE, naming_prefix=naming_prefix, pipeline_root=pipeline_root, pipeline_job_runner_service_account=pipeline_job_runner_service_account, diff --git a/tests/unit/provisioning/gcloud/builder_test.py b/tests/unit/provisioning/gcloud/builder_test.py index 37e837d..0e7f48f 100644 --- a/tests/unit/provisioning/gcloud/builder_test.py +++ b/tests/unit/provisioning/gcloud/builder_test.py @@ -16,11 +16,25 @@ # pylint: disable=missing-function-docstring # pylint: disable=missing-module-docstring +try: + from importlib.resources import files as import_files +except ImportError: + # Try backported to PY<37 `importlib_resources` + from importlib_resources import files as import_files + from typing import List import pytest -from google_cloud_automlops.provisioning.gcloud.builder import provision_resources_script_jinja +from google_cloud_automlops.utils.utils import 
render_jinja + +from google_cloud_automlops.utils.constants import ( + BASE_DIR, + GCLOUD_TEMPLATES_PATH, + GENERATED_LICENSE, + GENERATED_PARAMETER_VALUES_PATH, + IAM_ROLES_RUNNER_SA, +) @pytest.mark.parametrize( '''artifact_repo_location, artifact_repo_name, artifact_repo_type, build_trigger_location,''' @@ -187,31 +201,36 @@ def test_provision_resources_script_jinja( use_ci: Flag that determines whether to use Cloud CI/CD. vpc_connector: The name of the vpc connector to use. """ - provision_resources_script = provision_resources_script_jinja( - artifact_repo_location=artifact_repo_location, - artifact_repo_name=artifact_repo_name, - artifact_repo_type=artifact_repo_type, - build_trigger_location=build_trigger_location, - build_trigger_name=build_trigger_name, - deployment_framework=deployment_framework, - naming_prefix=naming_prefix, - pipeline_job_runner_service_account=pipeline_job_runner_service_account, - pipeline_job_submission_service_location=pipeline_job_submission_service_location, - pipeline_job_submission_service_name=pipeline_job_submission_service_name, - pipeline_job_submission_service_type=pipeline_job_submission_service_type, - project_id=project_id, - pubsub_topic_name=pubsub_topic_name, - required_apis=required_apis, - schedule_location=schedule_location, - schedule_name=schedule_name, - schedule_pattern=schedule_pattern, - source_repo_branch=source_repo_branch, - source_repo_name=source_repo_name, - source_repo_type=source_repo_type, - storage_bucket_location=storage_bucket_location, - storage_bucket_name=storage_bucket_name, - use_ci=use_ci, - vpc_connector=vpc_connector) + provision_resources_script = render_jinja( + template_path=import_files(GCLOUD_TEMPLATES_PATH) / 'provision_resources.sh.j2', + artifact_repo_location=artifact_repo_location, + artifact_repo_name=artifact_repo_name, + artifact_repo_type=artifact_repo_type, + base_dir=BASE_DIR, + build_trigger_location=build_trigger_location, + build_trigger_name=build_trigger_name, + 
deployment_framework=deployment_framework, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, + naming_prefix=naming_prefix, + pipeline_job_runner_service_account=pipeline_job_runner_service_account, + pipeline_job_submission_service_location=pipeline_job_submission_service_location, + pipeline_job_submission_service_name=pipeline_job_submission_service_name, + pipeline_job_submission_service_type=pipeline_job_submission_service_type, + project_id=project_id, + pubsub_topic_name=pubsub_topic_name, + required_apis=required_apis, + required_iam_roles=IAM_ROLES_RUNNER_SA, + schedule_location=schedule_location, + schedule_name=schedule_name, + schedule_pattern=schedule_pattern, + source_repo_branch=source_repo_branch, + source_repo_name=source_repo_name, + source_repo_type=source_repo_type, + storage_bucket_location=storage_bucket_location, + storage_bucket_name=storage_bucket_name, + use_ci=use_ci, + vpc_connector=vpc_connector) for snippet in expected_output_snippets: if is_included: diff --git a/tests/unit/provisioning/terraform/builder_test.py b/tests/unit/provisioning/terraform/builder_test.py index 03e5c72..744791e 100644 --- a/tests/unit/provisioning/terraform/builder_test.py +++ b/tests/unit/provisioning/terraform/builder_test.py @@ -16,22 +16,23 @@ # pylint: disable=missing-function-docstring # pylint: disable=missing-module-docstring +try: + from importlib.resources import files as import_files +except ImportError: + # Try backported to PY<37 `importlib_resources` + from importlib_resources import files as import_files from typing import List import pytest -from google_cloud_automlops.utils.constants import GENERATED_LICENSE -from google_cloud_automlops.provisioning.terraform.builder import ( - create_environment_data_tf_jinja, - create_environment_iam_tf_jinja, - create_environment_main_tf_jinja, - create_environment_outputs_tf_jinja, - create_environment_provider_tf_jinja, - 
create_environment_variables_tf_jinja, - create_environment_versions_tf_jinja, - create_provision_resources_script_jinja, - create_state_bucket_variables_tf_jinja, - create_state_bucket_main_tf_jinja +from google_cloud_automlops.utils.utils import render_jinja + +from google_cloud_automlops.utils.constants import ( + BASE_DIR, + GENERATED_LICENSE, + GENERATED_PARAMETER_VALUES_PATH, + IAM_ROLES_RUNNER_SA, + TERRAFORM_TEMPLATES_PATH ) @@ -70,7 +71,13 @@ def test_create_environment_data_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - data_tf_str = create_environment_data_tf_jinja(required_apis, use_ci) + data_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'data.tf.j2', + generated_license=GENERATED_LICENSE, + required_apis=required_apis, + required_iam_roles=IAM_ROLES_RUNNER_SA, + use_ci=use_ci + ) for snippet in expected_output_snippets: if is_included: @@ -95,7 +102,10 @@ def test_create_environment_iam_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - iam_tf_str = create_environment_iam_tf_jinja() + iam_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'iam.tf.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -220,15 +230,20 @@ def test_create_environment_main_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - main_tf_str = create_environment_main_tf_jinja( + main_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'main.tf.j2', artifact_repo_type=artifact_repo_type, + base_dir=BASE_DIR, deployment_framework=deployment_framework, + generated_license=GENERATED_LICENSE, + generated_parameter_values_path=GENERATED_PARAMETER_VALUES_PATH, naming_prefix=naming_prefix, pipeline_job_submission_service_type=pipeline_job_submission_service_type, schedule_pattern=schedule_pattern, source_repo_type=source_repo_type, use_ci=use_ci, - vpc_connector=vpc_connector) + vpc_connector=vpc_connector + ) for snippet in expected_output_snippets: if is_included: @@ -355,13 +370,16 @@ def test_create_environment_outputs_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - main_tf_str = create_environment_outputs_tf_jinja( + main_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'outputs.tf.j2', artifact_repo_type=artifact_repo_type, deployment_framework=deployment_framework, + generated_license=GENERATED_LICENSE, pipeline_job_submission_service_type=pipeline_job_submission_service_type, schedule_pattern=schedule_pattern, source_repo_type=source_repo_type, - use_ci=use_ci) + use_ci=use_ci + ) for snippet in expected_output_snippets: if is_included: @@ -386,7 +404,10 @@ def test_create_environment_provider_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - provider_tf_str = create_environment_provider_tf_jinja() + provider_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'provider.tf.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -411,7 +432,10 @@ def test_create_environment_variables_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - variables_tf_str = create_environment_variables_tf_jinja() + variables_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'variables.tf.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -438,7 +462,11 @@ def test_create_environment_versions_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - versions_tf_str = create_environment_versions_tf_jinja(storage_bucket_name=storage_bucket_name) + versions_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.environment') / 'versions.tf.j2', + generated_license=GENERATED_LICENSE, + storage_bucket_name=storage_bucket_name + ) for snippet in expected_output_snippets: if is_included: @@ -463,7 +491,11 @@ def test_create_provision_resources_script_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - provision_resources_script = create_provision_resources_script_jinja() + provision_resources_script = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH) / 'provision_resources.sh.j2', + base_dir=BASE_DIR, + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -488,7 +520,10 @@ def test_create_state_bucket_variables_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. """ - variables_tf_str = create_state_bucket_variables_tf_jinja() + variables_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'variables.tf.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: @@ -513,7 +548,10 @@ def test_create_state_bucket_main_tf_jinja( is_included: Boolean that determines whether to check if the expected_output_snippets exist in the string or not. expected_output_snippets: Strings that are expected to be included (or not) based on the is_included boolean. 
""" - main_tf_str = create_state_bucket_main_tf_jinja() + main_tf_str = render_jinja( + template_path=import_files(TERRAFORM_TEMPLATES_PATH + '.state_bucket') / 'main.tf.j2', + generated_license=GENERATED_LICENSE + ) for snippet in expected_output_snippets: if is_included: diff --git a/tests/unit/utils/utils_test.py b/tests/unit/utils/utils_test.py index 110503f..f29b797 100644 --- a/tests/unit/utils/utils_test.py +++ b/tests/unit/utils/utils_test.py @@ -19,6 +19,7 @@ from contextlib import nullcontext as does_not_raise import os +import tempfile from typing import Callable, List import pandas as pd @@ -36,6 +37,7 @@ make_dirs, read_file, read_yaml_file, + render_jinja, stringify_job_spec_list, update_params, validate_use_ci, @@ -529,3 +531,27 @@ def test_stringify_job_spec_list(job_spec_list: List[dict], expected_output: Lis formatted_spec = stringify_job_spec_list(job_spec_list=job_spec_list) assert formatted_spec == expected_output + + +@pytest.mark.parametrize( + 'template_string, template_vars, expected_output', + [ + ('Hello {{ name1 }} my name is {{ name2 }}', {'name1': 'Alice', 'name2': 'John'}, 'Hello Alice my name is John'), + ('The answer is: {{ result }}', {'result': 42}, 'The answer is: 42'), + ] +) +def test_render_jinja(template_string, template_vars, expected_output): + """Tests the render_jinja function using temporary files.""" + + with tempfile.TemporaryDirectory() as tmpdirname: # Creates temp directory + template_path = os.path.join(tmpdirname, 'template.txt.j2') + + # Write the template to the temporary file + with open(template_path, 'w', encoding='utf-8') as f: + f.write(template_string) + + # Call the render_jinja function + result = render_jinja(template_path, **template_vars) + + # Assertion + assert result == expected_output