diff --git a/.github/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml b/.github/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml index f726b8ecc..4b7189629 100644 --- a/.github/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml +++ b/.github/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml @@ -4,7 +4,7 @@ name: MLPerf inference MLCommons C++ ResNet50 on: - pull_request: + pull_request_target: branches: [ "main", "dev", "mlperf-inference" ] paths: - '.github/workflows/test-mlperf-inference-mlcommons-cpp-resnet50.yml' @@ -13,16 +13,18 @@ on: jobs: build: - - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: [ "3.12" ] llvm-version: [ "15.0.6", "16.0.4", "17.0.6" ] + os: [ubuntu-latest, windows-latest, macos-latest] exclude: - - llvm-version: "15.0.6" - - llvm-version: "16.0.4" + - llvm-version: "15.0.6" + - llvm-version: "16.0.4" + - os: windows-latest + - os: macos-latest steps: - uses: actions/checkout@v3 @@ -36,6 +38,25 @@ jobs: cm pull repo --url=${{ github.event.pull_request.head.repo.html_url }} --checkout=${{ github.event.pull_request.head.ref }} cm run script --quiet --tags=get,sys-utils-cm cm run script --quiet --tags=install,prebuilt,llvm --version=${{ matrix.llvm-version }} - - name: Test MLPerf Inference MLCommons C++ ResNet50 + - name: Test MLPerf Inference MLCommons C++ ResNet50 on ${{ matrix.os }} + if: matrix.os == 'windows-latest' + run: | + cmr "app mlperf inference mlcommons cpp" --submitter="MLCommons" --hw_name=gh_${{ matrix.os }} --adr.loadgen.tags=_from-pip --pip_loadgen=yes -v --quiet + - name: Test MLPerf Inference MLCommons C++ ResNet50 on ${{ matrix.os }} + if: matrix.os != 'windows-latest' + run: | + cmr "app mlperf inference mlcommons cpp" --submitter="MLCommons" --hw_name=gh_${{ matrix.os }} -v --quiet + - name: Push Results + if: github.repository_owner == 'gateoverflow' + env: + USER: "GitHub Action" + EMAIL: "admin@gateoverflow.com" + 
GITHUB_TOKEN: ${{ secrets.TEST_RESULTS_GITHUB_TOKEN }} run: | - cmr "app mlperf inference mlcommons cpp" -v --quiet + git config --global user.name "${{ env.USER }}" + git config --global user.email "${{ env.EMAIL }}" + git config --global credential.https://github.com.helper "" + git config --global credential.https://github.com.helper "!gh auth git-credential" + git config --global credential.https://gist.github.com.helper "" + git config --global credential.https://gist.github.com.helper "!gh auth git-credential" + cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/mlperf_inference_test_submissions_v5.0 --repo_branch=main --commit_message="Results from MLCommons C++ ResNet50 GH action on ${{ matrix.os }}" --quiet diff --git a/.github/workflows/test-mlperf-inference-sdxl.yaml b/.github/workflows/test-mlperf-inference-sdxl.yaml index 0f68195d1..2b616e960 100644 --- a/.github/workflows/test-mlperf-inference-sdxl.yaml +++ b/.github/workflows/test-mlperf-inference-sdxl.yaml @@ -6,7 +6,7 @@ on: jobs: build_reference: - if: github.repository_owner == 'gateoverflow_off' + if: github.repository_owner == 'gateoverflow' runs-on: [ self-hosted, linux, x64 ] strategy: fail-fast: false @@ -22,30 +22,5 @@ jobs: export CM_REPOS=$HOME/GH_CM python3 -m pip install cm4mlops cm pull repo - cm run script --tags=run-mlperf,inference,_performance-only,_short --submitter="MLCommons" --docker --model=sdxl --backend=${{ matrix.backend }} --device=cuda --scenario=Offline --test_query_count=1 --precision=${{ matrix.precision }} --target_qps=1 --quiet --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --hw_name=gh_action --docker_dt=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean - cm run script --tags=run-mlperf,inference,_short --model=sdxl --implementation=reference --backend=${{ matrix.backend }} --category=edge --scenario=Offline --execution_mode=test 
--device=${{ matrix.device }} --precision=${{ matrix.precision }} --docker --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --docker_dt=yes --quiet --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --precision=float16 --env.CM_MLPERF_MODEL_SDXL_DOWNLOAD_TO_HOST=yes --clean - cm run script --tags=generate,inference,submission --clean --preprocess_submission=yes --run-checker --tar=yes --env.CM_TAR_OUTFILE=submission.tar.gz --division=open --category=edge --run_style=test --adr.submission-checker.tags=_short-run --quiet --submitter=MLCommons --submission_dir=$HOME/gh_action_submissions --results_dir=$HOME/gh_action_results/test_results + cm run script --tags=run-mlperf,inference,_submission,_short --submitter="MLCommons" --docker --model=sdxl --backend=${{ matrix.backend }} --device=cuda --scenario=Offline --test_query_count=1 --precision=${{ matrix.precision }} --target_qps=1 --quiet --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --hw_name=gh_action --docker_dt=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/cm4mlperf-inference --repo_branch=mlperf-inference-results-scc24 --commit_message="Results from self hosted Github actions - NVIDIARTX4090" --quiet --submission_dir=$HOME/gh_action_submissions - - build_nvidia: - if: github.repository_owner == 'gateoverflow_off' - runs-on: [ self-hosted, linux, x64 ] - strategy: - fail-fast: false - matrix: - python-version: [ "3.12" ] - backend: [ "tensorrt" ] - precision: [ "float16" ] - implementation: [ "nvidia" ] - steps: - - name: Test MLPerf Inference SDXL Nvidia - run: | - source gh_action/bin/deactivate || python3 -m venv gh_action - source gh_action/bin/activate - export CM_REPOS=$HOME/GH_CM - cm pull repo - cm run script --tags=run-mlperf,inference,_performance-only,_short --submitter="MLCommons" 
--docker --model=sdxl --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --device=cuda --scenario=Offline --test_query_count=1 --precision=${{ matrix.precision }} --target_qps=1 --quiet --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --adr.compiler.tags=gcc --hw_name=gh_action --docker_dt=yes --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --clean - cm run script --tags=run-mlperf,inference,_short --model=sdxl --implementation=${{ matrix.implementation }} --backend=${{ matrix.backend }} --category=edge --scenario=Offline --execution_mode=test --device=${{ matrix.device }} --precision=${{ matrix.precision }} --docker --docker_it=no --docker_cm_repo=gateoverflow@cm4mlops --docker_dt=yes --quiet --results_dir=$HOME/gh_action_results --submission_dir=$HOME/gh_action_submissions --precision=float16 --env.CM_MLPERF_MODEL_SDXL_DOWNLOAD_TO_HOST=yes --clean - cm run script --tags=generate,inference,submission --clean --preprocess_submission=yes --run-checker --tar=yes --env.CM_TAR_OUTFILE=submission.tar.gz --division=open --category=edge --run_style=test --adr.submission-checker.tags=_short-run --quiet --submitter=MLCommons --submission_dir=$HOME/gh_action_submissions --results_dir=$HOME/gh_action_results/test_results - cm run script --tags=push,github,mlperf,inference,submission --repo_url=https://github.com/gateoverflow/cm4mlperf-inference --repo_branch=mlperf-inference-results-scc24 --commit_message="Results from self hosted Github actions - NVIDIARTX4090" --quiet --submission_dir=$HOME/gh_action_submissions - diff --git a/.github/workflows/test-qaic-software-kit.yml b/.github/workflows/test-qaic-software-kit.yml index e3a186daa..4b877da00 100644 --- a/.github/workflows/test-qaic-software-kit.yml +++ b/.github/workflows/test-qaic-software-kit.yml @@ -38,4 +38,3 @@ jobs: - name: Test Software Kit for compilation on Ubuntu 20.04 run: | cm run script --tags=get,qaic,software,kit --adr.compiler.tags=${{ 
matrix.compiler }} --adr.compiler.version=${{ matrix.llvm-version }} --quiet - cm run script --tags=get,qaic,software,kit --adr.compiler.tags=${{ matrix.compiler }} --adr.compiler.version=${{ matrix.llvm-version }} --quiet diff --git a/automation/script/module.py b/automation/script/module.py index 9494e7a39..b2b527b21 100644 --- a/automation/script/module.py +++ b/automation/script/module.py @@ -413,10 +413,6 @@ def _run(self, i): ignore_script_error = i.get('ignore_script_error', False) - # Get constant env and state - const = i.get('const',{}) - const_state = i.get('const_state',{}) - # Detect current path and record in env for further use in native scripts current_path = os.path.abspath(os.getcwd()) r = _update_env(env, 'CM_TMP_CURRENT_PATH', current_path) @@ -838,8 +834,8 @@ def _run(self, i): script_artifact_env = meta.get('env',{}) env.update(script_artifact_env) - - + script_artifact_state = meta.get('state',{}) + utils.merge_dicts({'dict1':state, 'dict2':script_artifact_state, 'append_lists':True, 'append_unique':True}) @@ -853,7 +849,7 @@ def _run(self, i): # STEP 700: Overwrite env with keys from the script input (to allow user friendly CLI) - # IT HAS THE PRIORITY OVER meta['default_env'] and meta['env'] + # IT HAS THE PRIORITY OVER meta['default_env'] and meta['env'] but not over the meta from versions/variations # (env OVERWRITE - user enforces it from CLI) # (it becomes const) if input_mapping: @@ -866,7 +862,9 @@ def _run(self, i): # update_env_from_input_mapping(const, i, docker_input_mapping) - + # Update env/state with const + env.update(const) + utils.merge_dicts({'dict1':state, 'dict2':const_state, 'append_lists':True, 'append_unique':True}) @@ -882,7 +880,7 @@ def _run(self, i): variations = script_artifact.meta.get('variations', {}) state['docker'] = meta.get('docker', {}) - r = self._update_state_from_variations(i, meta, variation_tags, variations, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, 
new_state_keys_from_meta, add_deps_recursive, run_state, recursion_spaces, verbose) + r = self._update_state_from_variations(i, meta, variation_tags, variations, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, add_deps_recursive, run_state, recursion_spaces, verbose) if r['return'] > 0: return r @@ -952,7 +950,7 @@ def _run(self, i): if version!='' and version in versions: versions_meta = versions[version] - r = update_state_from_meta(versions_meta, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) + r = update_state_from_meta(versions_meta, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) if r['return']>0: return r adr=get_adr(versions_meta) if adr: @@ -1328,7 +1326,7 @@ def _run(self, i): if default_version in versions: versions_meta = versions[default_version] - r = update_state_from_meta(versions_meta, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) + r = update_state_from_meta(versions_meta, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) if r['return']>0: return r if "add_deps_recursive" in versions_meta: @@ -1374,7 +1372,6 @@ def _run(self, i): r = update_env_with_values(env) if r['return']>0: return r - # Clean some output files clean_tmp_files(clean_files, recursion_spaces) @@ -1451,8 +1448,12 @@ def _run(self, i): elif pip_version_max != '': pip_version_string = '<='+pip_version_max + env.update(const) + utils.merge_dicts({'dict1':state, 'dict2':const_state, 'append_lists':True, 'append_unique':True}) + r = _update_env(env, 'CM_TMP_PIP_VERSION_STRING', pip_version_string) if r['return']>0: return r + if pip_version_string != '': logging.debug(recursion_spaces+' # potential PIP version string 
(if needed): '+pip_version_string) @@ -1462,10 +1463,6 @@ def _run(self, i): logging.debug(recursion_spaces+' - Running preprocess ...') - # Update env and state with const - utils.merge_dicts({'dict1':env, 'dict2':const, 'append_lists':True, 'append_unique':True}) - utils.merge_dicts({'dict1':state, 'dict2':const_state, 'append_lists':True, 'append_unique':True}) - run_script_input['run_state'] = run_state ii = copy.deepcopy(customize_common_input) @@ -1916,7 +1913,7 @@ def _dump_version_info_for_script(self, output_dir = os.getcwd(), quiet = False, return {'return': 0} ###################################################################################### - def _update_state_from_variations(self, i, meta, variation_tags, variations, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, add_deps_recursive, run_state, recursion_spaces, verbose): + def _update_state_from_variations(self, i, meta, variation_tags, variations, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, add_deps_recursive, run_state, recursion_spaces, verbose): # Save current explicit variations import copy @@ -2019,7 +2016,7 @@ def _update_state_from_variations(self, i, meta, variation_tags, variations, env if variation_tag_dynamic_suffix: self._update_variation_meta_with_dynamic_suffix(variation_meta, variation_tag_dynamic_suffix) - r = update_state_from_meta(variation_meta, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) + r = update_state_from_meta(variation_meta, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) if r['return']>0: return r if variation_meta.get('script_name', '')!='': @@ -2050,7 +2047,7 @@ def _update_state_from_variations(self, i, meta, variation_tags, variations, env combined_variation_meta = 
variations[combined_variation] - r = update_state_from_meta(combined_variation_meta, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) + r = update_state_from_meta(combined_variation_meta, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys_from_meta, new_state_keys_from_meta, i) if r['return']>0: return r adr=get_adr(combined_variation_meta) @@ -3012,8 +3009,8 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a 'remembered_selections': remembered_selections, 'env':env, 'state':state, - 'const':const, - 'const_state':const_state, + 'const':copy.deepcopy(const), + 'const_state':copy.deepcopy(const_state), 'add_deps_recursive':add_deps_recursive, 'debug_script_tags':debug_script_tags, 'verbose':verbose, @@ -3040,6 +3037,11 @@ def _run_deps(self, deps, clean_env_keys_deps, env, state, const, const_state, a r = update_env_with_values(env) if r['return']>0: return r + # Update env/state with const + env.update(const) + utils.merge_dicts({'dict1':state, 'dict2':const_state, 'append_lists':True, 'append_unique':True}) + + return {'return': 0} ############################################################################## @@ -4418,7 +4420,7 @@ def update_env_with_values(env, fail_on_not_found=False, extra_env={}): # Check cases such as --env.CM_SKIP_COMPILE if type(value)==bool: - env[key] = str(value) + env[key] = value continue tmp_values = re.findall(r'<<<(.*?)>>>', str(value)) @@ -5110,7 +5112,7 @@ def update_env_from_input_mapping(env, inp, input_mapping): env[input_mapping[key]] = inp[key] ############################################################################## -def update_state_from_meta(meta, env, state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys, new_state_keys, i): +def update_state_from_meta(meta, env, state, const, const_state, deps, post_deps, prehook_deps, posthook_deps, new_env_keys, new_state_keys, i): """ 
Internal: update env and state from meta """ @@ -5118,12 +5120,23 @@ def update_state_from_meta(meta, env, state, deps, post_deps, prehook_deps, post default_env = meta.get('default_env',{}) for key in default_env: env.setdefault(key, default_env[key]) + update_env = meta.get('env', {}) env.update(update_env) + update_const = meta.get('const', {}) + if update_const: + const.update(update_const) + env.update(const) + update_state = meta.get('state', {}) utils.merge_dicts({'dict1':state, 'dict2':update_state, 'append_lists':True, 'append_unique':True}) + update_const_state = meta.get('const_state', {}) + if const_state: + utils.merge_dicts({'dict1':const_state, 'dict2':update_const_state, 'append_lists':True, 'append_unique':True}) + utils.merge_dicts({'dict1':state, 'dict2':const_state, 'append_lists':True, 'append_unique':True}) + new_deps = meta.get('deps', []) if len(new_deps)>0: append_deps(deps, new_deps) diff --git a/automation/script/module_misc.py b/automation/script/module_misc.py index 54ab2dd48..91ce181af 100644 --- a/automation/script/module_misc.py +++ b/automation/script/module_misc.py @@ -1393,6 +1393,8 @@ def dockerfile(i): env=i.get('env', {}) state = i.get('state', {}) + const=i.get('const', {}) + const_state = i.get('const_state', {}) script_automation = i['self_module'] dockerfile_env=i.get('dockerfile_env', {}) @@ -1420,7 +1422,7 @@ def dockerfile(i): state['docker'] = docker_settings add_deps_recursive = i.get('add_deps_recursive', {}) - r = script_automation._update_state_from_variations(i, meta, variation_tags, variations, env, state, deps = [], post_deps = [], prehook_deps = [], posthook_deps = [], new_env_keys_from_meta = [], new_state_keys_from_meta = [], add_deps_recursive = add_deps_recursive, run_state = {}, recursion_spaces='', verbose = False) + r = script_automation._update_state_from_variations(i, meta, variation_tags, variations, env, state, const, const_state, deps = [], post_deps = [], prehook_deps = [], posthook_deps = [], 
new_env_keys_from_meta = [], new_state_keys_from_meta = [], add_deps_recursive = add_deps_recursive, run_state = {}, recursion_spaces='', verbose = False) if r['return'] > 0: return r @@ -1741,6 +1743,8 @@ def docker(i): env['CM_RUN_STATE_DOCKER'] = False script_automation = i['self_module'] state = i.get('state', {}) + const = i.get('const', {}) + const_state = i.get('const_state', {}) tags_split = i.get('tags', '').split(",") variation_tags = [ t[1:] for t in tags_split if t.startswith("_") ] @@ -1793,7 +1797,7 @@ def docker(i): state['docker'] = docker_settings add_deps_recursive = i.get('add_deps_recursive', {}) - r = script_automation._update_state_from_variations(i, meta, variation_tags, variations, env, state, deps = [], post_deps = [], prehook_deps = [], posthook_deps = [], new_env_keys_from_meta = [], new_state_keys_from_meta = [], add_deps_recursive = add_deps_recursive, run_state = {}, recursion_spaces='', verbose = False) + r = script_automation._update_state_from_variations(i, meta, variation_tags, variations, env, state, const, const_state, deps = [], post_deps = [], prehook_deps = [], posthook_deps = [], new_env_keys_from_meta = [], new_state_keys_from_meta = [], add_deps_recursive = add_deps_recursive, run_state = {}, recursion_spaces='', verbose = False) if r['return'] > 0: return r diff --git a/script/app-mlperf-inference/_cm.yaml b/script/app-mlperf-inference/_cm.yaml index f037f5367..704221305 100644 --- a/script/app-mlperf-inference/_cm.yaml +++ b/script/app-mlperf-inference/_cm.yaml @@ -659,6 +659,7 @@ variations: 3d-unet_,reference: docker: + image_name: mlperf-inference-mlcommons-python-implementation-3d-unet deps: - enable_if_env: CM_MLPERF_DATASET_3DUNET_DOWNLOAD_TO_HOST: @@ -698,6 +699,7 @@ variations: sdxl,reference,float16: docker: + image_name: mlperf-inference-mlcommons-python-implementation-sdxl-float16 deps: - enable_if_env: CM_MLPERF_MODEL_SDXL_DOWNLOAD_TO_HOST: @@ -706,6 +708,7 @@ variations: sdxl,reference,bfloat16: docker: + 
image_name: mlperf-inference-mlcommons-python-implementation-sdxl-bfloat16 deps: - enable_if_env: CM_MLPERF_MODEL_SDXL_DOWNLOAD_TO_HOST: @@ -714,6 +717,7 @@ variations: sdxl,reference,float32: docker: + image_name: mlperf-inference-mlcommons-python-implementation-sdxl-float32 deps: - enable_if_env: CM_MLPERF_MODEL_SDXL_DOWNLOAD_TO_HOST: @@ -765,6 +769,7 @@ variations: llama2-70b_,reference: docker: + image_name: mlperf-inference-mlcommons-python-implementation-llama2-70b deps: - enable_if_env: CM_MLPERF_MODEL_LLAMA2_70B_DOWNLOAD_TO_HOST: diff --git a/script/get-cuda/_cm.yaml b/script/get-cuda/_cm.yaml index d68e3fd7f..db5a30b0b 100644 --- a/script/get-cuda/_cm.yaml +++ b/script/get-cuda/_cm.yaml @@ -46,6 +46,7 @@ new_env_keys: - CUDA_PATH - CM_CUDA_* - CM_NVCC_* +- CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX5 - +PATH - +C_INCLUDE_PATH - +CPLUS_INCLUDE_PATH diff --git a/script/get-cuda/customize.py b/script/get-cuda/customize.py index 03e6c7bf7..11de3c6cd 100644 --- a/script/get-cuda/customize.py +++ b/script/get-cuda/customize.py @@ -214,5 +214,6 @@ def postprocess(i): env['+ LDFLAGS'].append("-L"+x) env['CM_CUDA_VERSION_STRING'] = "cu"+env['CM_CUDA_VERSION'].replace(".", "") + env['CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX5'] = env['CM_CUDA_VERSION_STRING'] return {'return':0, 'version': version} diff --git a/script/get-mlperf-inference-sut-configs/customize.py b/script/get-mlperf-inference-sut-configs/customize.py index e4a6a4048..75ef95b94 100644 --- a/script/get-mlperf-inference-sut-configs/customize.py +++ b/script/get-mlperf-inference-sut-configs/customize.py @@ -27,7 +27,7 @@ def postprocess(i): implementation_string = env['CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX'] if env.get('CM_MLPERF_SUT_NAME_IMPLEMENTATION_PREFIX', '') != '' else env.get('CM_MLPERF_IMPLEMENTATION', 'default') run_config = [] - for i in range(1,5): + for i in range(1,6): if env.get(f'CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX{i}', '') != '': 
run_config.append(env.get(f'CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX{i}')) diff --git a/script/run-mlperf-inference-app/_cm.yaml b/script/run-mlperf-inference-app/_cm.yaml index cefdf55d4..1054a1bb6 100644 --- a/script/run-mlperf-inference-app/_cm.yaml +++ b/script/run-mlperf-inference-app/_cm.yaml @@ -140,7 +140,8 @@ deps: - tags: install,pip-package,for-cmind-python,_package.tabulate - tags: get,mlperf,inference,utils -docker: +#We use this script as a command generator to run docker via app-mlperf-inference script +docker_off: mounts: - ${{ INSTALL_DATA_PATH }}:/install_data - ${{ DATA_PATH }}:/data @@ -248,6 +249,7 @@ variations: - short env: CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX4: scc24-base + CM_DOCKER_IMAGE_NAME: scc24 adr: coco2014-preprocessed: tags: _size.50,_with-sample-ids @@ -271,6 +273,7 @@ variations: extra_cache_tags: "scc24-main" env: CM_MLPERF_SUT_NAME_RUN_CONFIG_SUFFIX4: scc24-main + CM_DOCKER_IMAGE_NAME: scc24 deps: - tags: clean,nvidia,scratch,_sdxl,_downloaded-data extra_cache_rm_tags: scc24-base diff --git a/script/run-mlperf-inference-app/customize.py b/script/run-mlperf-inference-app/customize.py index e2361f2de..0aef8551c 100644 --- a/script/run-mlperf-inference-app/customize.py +++ b/script/run-mlperf-inference-app/customize.py @@ -14,6 +14,7 @@ def preprocess(i): os_info = i['os_info'] env = i['env'] + const = i.get('const', {}) inp = i['input'] state = i['state'] @@ -22,6 +23,12 @@ def preprocess(i): if env.get('CM_RUN_DOCKER_CONTAINER', '') == "yes": return {'return':0} + if env.get('CM_DOCKER_IMAGE_NAME', '') == 'scc24': + if env.get("CM_MLPERF_IMPLEMENTATION", "reference") == "reference": + env['CM_DOCKER_IMAGE_NAME'] = "scc24-reference" + elif "nvidia" in env.get("CM_MLPERF_IMPLEMENTATION", "reference"): + env['CM_DOCKER_IMAGE_NAME'] = "scc24-nvidia" + dump_version_info = env.get('CM_DUMP_VERSION_INFO', True) system_meta = state.get('CM_SUT_META', {}) @@ -194,6 +201,9 @@ def preprocess(i): if k.startswith("docker_"): 
docker_extra_input[k] = inp[k] inp = {} + + if env.get('CM_DOCKER_IMAGE_NAME', '') != '': + docker_extra_input['docker_image_name'] = env['CM_DOCKER_IMAGE_NAME'] else: action = "run" @@ -220,14 +230,16 @@ def preprocess(i): env['CM_MLPERF_LOADGEN_MODE'] = mode env_copy = copy.deepcopy(env) + const_copy = copy.deepcopy(const) print(f"\nRunning loadgen scenario: {scenario} and mode: {mode}") ii = {'action':action, 'automation':'script', 'tags': scenario_tags, 'quiet': 'true', - 'env': env_copy, 'input': inp, 'state': state, 'add_deps': copy.deepcopy(add_deps), 'add_deps_recursive': + 'env': env_copy, 'const': const_copy, 'input': inp, 'state': state, 'add_deps': copy.deepcopy(add_deps), 'add_deps_recursive': copy.deepcopy(add_deps_recursive), 'ad': ad, 'adr': copy.deepcopy(adr), 'v': verbose, 'print_env': print_env, 'print_deps': print_deps, 'dump_version_info': dump_version_info} if action == "docker": for k in docker_extra_input: ii[k] = docker_extra_input[k] + r = cm.access(ii) if r['return'] > 0: return r @@ -252,7 +264,7 @@ def preprocess(i): env['CM_MLPERF_LOADGEN_COMPLIANCE_TEST'] = test env['CM_MLPERF_LOADGEN_MODE'] = "compliance" ii = {'action':action, 'automation':'script', 'tags': scenario_tags, 'quiet': 'true', - 'env': copy.deepcopy(env), 'input': inp, 'state': state, 'add_deps': copy.deepcopy(add_deps), 'add_deps_recursive': + 'env': copy.deepcopy(env), 'const': copy.deepcopy(const), 'input': inp, 'state': state, 'add_deps': copy.deepcopy(add_deps), 'add_deps_recursive': copy.deepcopy(add_deps_recursive), 'adr': copy.deepcopy(adr), 'ad': ad, 'v': verbose, 'print_env': print_env, 'print_deps': print_deps, 'dump_version_info': dump_version_info} if action == "docker": for k in docker_extra_input: diff --git a/script/test-cm-scripts/_cm.yaml b/script/test-cm-scripts/_cm.yaml new file mode 100644 index 000000000..b8b7a9c79 --- /dev/null +++ b/script/test-cm-scripts/_cm.yaml @@ -0,0 +1,31 @@ +alias: test-cm-scripts +automation_alias: script +automation_uid: 
5b4e0237da074764 +cache: false +tags: +- test +- test-scripts +- cm-test +env: + CM_VAR1: orig +const: + CM_CVAR1: orig +new_env_keys: + - CM_VAR* + - CM_CVAR* + +uid: 6fbe3884575c4e51 +variations: + v1: + env: + CM_VAR1: v1 + v2: + env: + CM_VAR1: v2 + CM_VAR2: v2 + const: + CM_VAR2: constv2 + v1,v2: + env: + CM_VAR1: combv1v2 + CM_VAR2: combv1v2 diff --git a/script/test-cm-scripts/customize.py b/script/test-cm-scripts/customize.py new file mode 100644 index 000000000..d12f9b3e1 --- /dev/null +++ b/script/test-cm-scripts/customize.py @@ -0,0 +1,22 @@ +from cmind import utils +import os + +def preprocess(i): + + os_info = i['os_info'] + + env = i['env'] + + meta = i['meta'] + + automation = i['automation'] + + quiet = (env.get('CM_QUIET', False) == 'yes') + + return {'return':0} + +def postprocess(i): + + env = i['env'] + + return {'return':0} diff --git a/script/test-cm-scripts/run.bat b/script/test-cm-scripts/run.bat new file mode 100644 index 000000000..648302ca7 --- /dev/null +++ b/script/test-cm-scripts/run.bat @@ -0,0 +1 @@ +rem native script diff --git a/script/test-cm-scripts/run.sh b/script/test-cm-scripts/run.sh new file mode 100644 index 000000000..4c23c380e --- /dev/null +++ b/script/test-cm-scripts/run.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +#CM Script location: ${CM_TMP_CURRENT_SCRIPT_PATH} + +#To export any variable +#echo "VARIABLE_NAME=VARIABLE_VALUE" >>tmp-run-env.out + +#${CM_PYTHON_BIN_WITH_PATH} contains the path to python binary if "get,python" is added as a dependency + +echo "Running: " +echo "${CM_RUN_CMD}" +echo "" + +if [[ ${CM_FAKE_RUN} != "yes" ]]; then + eval "${CM_RUN_CMD}" + test $? 
-eq 0 || exit 1 +fi diff --git a/tests/script/check.py b/tests/script/check.py index 7394406d8..aba7f7831 100644 --- a/tests/script/check.py +++ b/tests/script/check.py @@ -12,3 +12,13 @@ def check_list(r, string, found=True): raise Exception('CM search returned an empty list for ' + string) if len(r['list']) > 0 and not found: raise Exception('CM search returned at lease one entry for ' + string) + +def check_key_value(d, key, value, absent_ok=False): + if not d.get(key): + if absent_ok: + return True + else: + raise Exception(f"{key} is missing. Current values are {d}") + elif d[key] != value: + raise Exception(f"{key} is not having the expected value of {value}. Current value is {d[key]}") + diff --git a/tests/script/test_features.py b/tests/script/test_features.py index d116cbd5b..067909932 100644 --- a/tests/script/test_features.py +++ b/tests/script/test_features.py @@ -18,4 +18,10 @@ checks.check_list(r, "_NHWC") r = cm.access({'action':'search', 'automation': 'cache', 'tags': 'get,dataset,preprocessed,imagenet,-_NHWC'}) -checks.check_list(r, "_NHWC", False) +#checks.check_list(r, "-_NHWC", False) + + +r = cm.access({'action':'run', 'automation': 'script', 'tags': 'test-scripts,_v1,_v2'}) +new_env = r['new_env'] +checks.check_key_value(new_env, "CM_VAR1", "combv1v2") +checks.check_key_value(new_env, "CM_VAR2", "constv2")