
Commit

Small fix
Oscilloscope98 committed Oct 18, 2024
1 parent b95506d commit ea25595
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions .github/workflows/llm_performance_tests.yml
@@ -1044,8 +1044,8 @@ jobs:
         run: |
           # temporarily remove gemma2 for Windows dGPU tests
           if [ ${{ matrix.platform }} == "perf-igpu" ]; then
-            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
-            sed -i "s/- 'google\/gemma-2-9b-it'/# - 'google\/gemma-2-9b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/1024-128_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-9b-it'/# - 'google\/gemma-2-9b-it'/" python/llm/test/benchmark/igpu-perf/1024-128_int4_fp16_443.yaml
           fi
           sed -i 's/{today}_test3/{today}_test4/g' python/llm/dev/benchmark/all-in-one/run.py
           sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/1024-128_int4_fp16_443.yaml
@@ -1231,8 +1231,8 @@ jobs:
         run: |
           # temporarily remove gemma2 for Windows dGPU tests
           if [ ${{ matrix.platform }} == "perf-igpu" ]; then
-            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
-            sed -i "s/- 'google\/gemma-2-9b-it'/# - 'google\/gemma-2-9b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/2048-256_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-9b-it'/# - 'google\/gemma-2-9b-it'/" python/llm/test/benchmark/igpu-perf/2048-256_int4_fp16_443.yaml
           fi
           sed -i 's/{today}_test3/{today}_test4/g' python/llm/dev/benchmark/all-in-one/run.py
           sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/2048-256_int4_fp16_443.yaml
@@ -1431,7 +1431,7 @@ jobs:
         run: |
           # temporarily remove gemma2 for Windows dGPU tests
           if [ ${{ matrix.platform }} == "perf-igpu" ]; then
-            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/3072-384_int4_fp16_443.yaml
           fi
           if [ ${{ matrix.platform }} == "perf-igpu" ]; then
@@ -1612,7 +1612,7 @@ jobs:
         run: |
           # temporarily remove gemma2 for Windows dGPU tests
           if [ ${{ matrix.platform }} == "perf-igpu" ]; then
-            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/32-32_int4_fp16_443.yaml
+            sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" python/llm/test/benchmark/igpu-perf/4096-512_int4_fp16_443.yaml
           fi
           sed -i 's/{today}_test2/{today}_test3/g' python/llm/dev/benchmark/all-in-one/run.py
           sed -i "s/path to your local model hub/$MODEL_HUB_DIR/g" python/llm/test/benchmark/igpu-perf/4096-512_int4_fp16_443.yaml
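For context: each sed call above prefixes a matching model entry in the relevant igpu-perf config with '#', turning it into a YAML comment so that model is skipped; this commit simply retargets those calls from the copied-over 32-32_int4_fp16_443.yaml to the config file each step actually runs (1024-128, 2048-256, 3072-384, 4096-512). Below is a minimal sketch of the substitution pattern, using a made-up example.yaml whose name and contents are illustrative, not taken from the repository:

# Stand-in config file; the real ones live under python/llm/test/benchmark/igpu-perf/.
printf "repo_id:\n  - 'meta-llama/Llama-2-7b-chat-hf'\n  - 'google/gemma-2-2b-it'\n" > example.yaml

# Same substitution used in the workflow (GNU sed): prepend '# ' to the matching list item.
sed -i "s/- 'google\/gemma-2-2b-it'/# - 'google\/gemma-2-2b-it'/" example.yaml

# example.yaml now contains:
#   repo_id:
#     - 'meta-llama/Llama-2-7b-chat-hf'
#     # - 'google/gemma-2-2b-it'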
