diff --git a/python/llm/test/benchmark/arc-perf-test-batch2.yaml b/python/llm/test/benchmark/arc-perf-test-batch2.yaml
index 408cba5b444..88f3c8b5154 100644
--- a/python/llm/test/benchmark/arc-perf-test-batch2.yaml
+++ b/python/llm/test/benchmark/arc-perf-test-batch2.yaml
@@ -8,7 +8,7 @@ repo_id:
   - 'deepseek-ai/deepseek-coder-6.7b-instruct'
   - 'THUDM/glm-4-9b-chat'
   - 'openbmb/MiniCPM-2B-sft-bf16'
-  - 'Qwen/Qwen-VL-Chat'
+  #- 'Qwen/Qwen-VL-Chat'
   #- 'SmerkyG/rwkv-5-world-7b' #this model only fp32 is supported for now, fp16 and bf16 are not supported
   - '01-ai/Yi-6B-Chat'
 local_model_hub: '/mnt/disk1/models'
diff --git a/python/llm/test/benchmark/arc-perf-test-batch4.yaml b/python/llm/test/benchmark/arc-perf-test-batch4.yaml
index 725aca1c817..b382bed9783 100644
--- a/python/llm/test/benchmark/arc-perf-test-batch4.yaml
+++ b/python/llm/test/benchmark/arc-perf-test-batch4.yaml
@@ -8,7 +8,7 @@ repo_id:
   - 'deepseek-ai/deepseek-coder-6.7b-instruct'
   - 'THUDM/glm-4-9b-chat'
   - 'openbmb/MiniCPM-2B-sft-bf16'
-  - 'Qwen/Qwen-VL-Chat'
+  #- 'Qwen/Qwen-VL-Chat'
   #- 'SmerkyG/rwkv-5-world-7b' #this model only fp32 is supported for now, fp16 and bf16 are not supported
   - '01-ai/Yi-6B-Chat'
 local_model_hub: '/mnt/disk1/models'
@@ -29,7 +29,7 @@ exclude:
   - 'baichuan-inc/Baichuan2-7B-Chat:2048'
   - 'baichuan-inc/Baichuan2-13B-Chat-4bit:1024'
   - 'baichuan-inc/Baichuan2-13B-Chat-4bit:2048'
-  - 'Qwen/Qwen-VL-Chat:2048'
+# - 'Qwen/Qwen-VL-Chat:2048'
 # - 'fnlp/moss-moon-003-sft-4bit:1024'
 # - 'fnlp/moss-moon-003-sft-4bit:2048'
 task: 'continuation' # task can be 'continuation', 'QA' and 'summarize'
diff --git a/python/llm/test/benchmark/arc-perf-test.yaml b/python/llm/test/benchmark/arc-perf-test.yaml
index 9ca7aa315b9..09cf7bfde26 100644
--- a/python/llm/test/benchmark/arc-perf-test.yaml
+++ b/python/llm/test/benchmark/arc-perf-test.yaml
@@ -8,7 +8,7 @@ repo_id:
   - 'deepseek-ai/deepseek-coder-6.7b-instruct'
   - 'THUDM/glm-4-9b-chat'
   - 'openbmb/MiniCPM-2B-sft-bf16'
-  - 'Qwen/Qwen-VL-Chat'
+  #- 'Qwen/Qwen-VL-Chat'
   #- 'SmerkyG/rwkv-5-world-7b' #this model only fp32 is supported for now, fp16 and bf16 are not supported
   - '01-ai/Yi-6B-Chat'
 local_model_hub: '/mnt/disk1/models'