Added TEST06 for llama2
arjunsuresh committed Jul 26, 2024
1 parent 5d6d66b · commit d8f465f
Showing 2 changed files with 8 additions and 5 deletions.
script/app-mlperf-inference/customize.py (8 changes: 4 additions & 4 deletions)
@@ -291,20 +291,20 @@ def postprocess(i):
  cmd = ""
  xcmd = ""

- readme_init = "This experiment is generated using the [MLCommons Collective Mind automation framework (CM)](https://github.com/mlcommons/ck).\n\n"
+ readme_init = "This experiment is generated using the [MLCommons Collective Mind automation framework (CM)](https://github.com/mlcommons/cm4mlops).\n\n"

- readme_init+= "*Check [CM MLPerf docs](https://mlcommons.github.io/inference) for more details.*\n\n"
+ readme_init+= "*Check [CM MLPerf docs](https://docs.mlcommons.org/inference) for more details.*\n\n"

  readme_body = "## Host platform\n\n* OS version: {}\n* CPU version: {}\n* Python version: {}\n* MLCommons CM version: {}\n\n".format(platform.platform(),
      platform.processor(), sys.version, cm.__version__)

  x = repo_name
  if repo_hash!='': x+=' --checkout='+str(repo_hash)

- readme_body += "## CM Run Command\n\nSee [CM installation guide](https://github.com/mlcommons/ck/blob/master/docs/installation.md).\n\n"+ \
+ readme_body += "## CM Run Command\n\nSee [CM installation guide](https://docs.mlcommons.org/inference/install/).\n\n"+ \
      "```bash\npip install -U cmind\n\ncm rm cache -f\n\ncm pull repo {}\n\n{}\n```".format(x, xcmd)

- readme_body += "\n*Note that if you want to use the [latest automation recipes](https://access.cknowledge.org/playground/?action=scripts) for MLPerf (CM scripts),\n"+ \
+ readme_body += "\n*Note that if you want to use the [latest automation recipes](https://docs.mlcommons.org/inference) for MLPerf (CM scripts),\n"+ \
      " you should simply reload {} without checkout and clean CM cache as follows:*\n\n".format(repo_name) + \
      "```bash\ncm rm repo {}\ncm pull repo {}\ncm rm cache -f\n\n```".format(repo_name, repo_name)

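For readers who want to see the effect of the URL updates above, the following is a minimal, self-contained sketch (not the repository's exact code) of how `postprocess(i)` assembles the auto-generated README text after this commit. The values of `repo_name`, `repo_hash` and `xcmd` are hypothetical placeholders for what the real function pulls from the CM run state, and `cm.__version__` is left out so the snippet runs without the `cmind` package installed:

```python
import platform
import sys

# Hypothetical placeholder values; the real postprocess(i) derives these
# from the CM repository in use and the recorded run command.
repo_name = "mlcommons@cm4mlops"
repo_hash = "abc1234"
xcmd = "cm run script --tags=run-mlperf,inference ..."

readme_init = ("This experiment is generated using the "
               "[MLCommons Collective Mind automation framework (CM)]"
               "(https://github.com/mlcommons/cm4mlops).\n\n")
readme_init += "*Check [CM MLPerf docs](https://docs.mlcommons.org/inference) for more details.*\n\n"

readme_body = "## Host platform\n\n* OS version: {}\n* CPU version: {}\n* Python version: {}\n\n".format(
    platform.platform(), platform.processor(), sys.version)

# Append the checkout hash to the repo reference when one is recorded.
x = repo_name
if repo_hash != '':
    x += ' --checkout=' + str(repo_hash)

readme_body += ("## CM Run Command\n\nSee [CM installation guide]"
                "(https://docs.mlcommons.org/inference/install/).\n\n"
                "```bash\npip install -U cmind\n\ncm rm cache -f\n\n"
                "cm pull repo {}\n\n{}\n```".format(x, xcmd))

print(readme_init + readme_body)
```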
script/run-mlperf-inference-app/customize.py (5 changes: 4 additions & 1 deletion)
@@ -101,10 +101,13 @@ def preprocess(i):
  test_list = ["TEST01", "TEST05"]
  if env['CM_MODEL'] in ["resnet50"]:
      test_list.append("TEST04")
- if "gpt" in env['CM_MODEL'] or "sdxl" in env['CM_MODEL'] or "llama2-70b" in env['CM_MODEL'] or "mixtral-8x7b" in env['CM_MODEL']:
+ if "gpt" in env['CM_MODEL'] or "llama2-70b" in env['CM_MODEL'] or "mixtral-8x7b" in env['CM_MODEL']:
      test_list.remove("TEST01")
      test_list.remove("TEST05")

+ if "llama2-70b" in env['CM_MODEL'] or "mixtral-8x7b" in env['CM_MODEL']:
+     test_list.append("TEST06")
+
  variation_implementation= "_" + env.get("CM_MLPERF_IMPLEMENTATION", "reference")
  variation_model= ",_" + env["CM_MLPERF_MODEL"]
  variation_backend= ",_" + env["CM_MLPERF_BACKEND"] if env.get("CM_MLPERF_BACKEND","") != "" else ""
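To make the effect of this change concrete, here is a small self-contained sketch of the updated compliance-test selection, with a plain `model` string standing in for `env['CM_MODEL']` and purely illustrative model names in the loop:

```python
def select_compliance_tests(model):
    # Mirrors the updated logic in preprocess(i): start from the default
    # compliance tests and adjust them per model family.
    test_list = ["TEST01", "TEST05"]
    if model in ["resnet50"]:
        test_list.append("TEST04")
    if "gpt" in model or "llama2-70b" in model or "mixtral-8x7b" in model:
        test_list.remove("TEST01")
        test_list.remove("TEST05")
    if "llama2-70b" in model or "mixtral-8x7b" in model:
        test_list.append("TEST06")
    return test_list

for m in ["resnet50", "gptj-99", "llama2-70b-99", "mixtral-8x7b"]:
    print(m, select_compliance_tests(m))
# resnet50      -> ['TEST01', 'TEST05', 'TEST04']
# gptj-99       -> []
# llama2-70b-99 -> ['TEST06']
# mixtral-8x7b  -> ['TEST06']
```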
