[CI/Build] Fix VLM broadcast tests tensor_parallel_size passing (#10161)
Signed-off-by: Isotr0py <2037008807@qq.com>
This commit is contained in:
parent f83feccd7f
commit 47672f38b5
@@ -34,6 +34,7 @@ COMMON_BROADCAST_SETTINGS = {
     "dtype": "half",
     "max_tokens": 5,
     "tensor_parallel_size": 2,
+    "model_kwargs": {"device_map": "auto"},
     "image_size_factors": [(.25, 0.5, 1.0)],
     "distributed_executor_backend": (
         "ray",
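The first hunk adds "model_kwargs": {"device_map": "auto"} to the shared broadcast settings. In harnesses like this, such a model_kwargs dict is typically splatted into the Hugging Face from_pretrained() call so the reference model can be sharded across the GPUs used by the tensor-parallel vLLM run. A minimal sketch of that pattern follows; the load_hf_reference helper and the model id are illustrative assumptions, not the actual test code.

# Sketch only: shows how a "model_kwargs" entry such as {"device_map": "auto"}
# is commonly forwarded to transformers' from_pretrained(); this is not the
# vLLM test harness itself.
from transformers import AutoModelForVision2Seq


def load_hf_reference(model_id: str, model_kwargs: dict):
    # Splat the broadcast settings' model_kwargs into from_pretrained so the
    # reference model is auto-placed ("device_map": "auto") across GPUs.
    return AutoModelForVision2Seq.from_pretrained(model_id, **model_kwargs)


# Usage with the setting added in this hunk (model id is illustrative):
# model = load_hf_reference("llava-hf/llava-1.5-7b-hf",
#                           {"device_map": "auto"})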
@@ -158,6 +158,7 @@ class VLMTestInfo(NamedTuple):
             "max_model_len": self.max_model_len,
             "max_num_seqs": self.max_num_seqs,
             "task": self.task,
+            "tensor_parallel_size": self.tensor_parallel_size,
             "hf_output_post_proc": self.hf_output_post_proc,
             "vllm_output_post_proc": self.vllm_output_post_proc,
             "auto_cls": self.auto_cls,
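The second hunk carries the fix named in the commit title: VLMTestInfo already has a tensor_parallel_size field (hence self.tensor_parallel_size), but the kwargs dict built here did not include it, so the configured value apparently never reached the runner. Below is a minimal sketch of the NamedTuple-to-kwargs pattern; the field defaults and method name are assumptions for illustration, not the real class.

from typing import NamedTuple


# Minimal sketch of the pattern being fixed; defaults and the method name
# are assumptions, not the actual VLMTestInfo definition.
class VLMTestInfoSketch(NamedTuple):
    max_model_len: int = 4096
    max_num_seqs: int = 256
    task: str = "auto"
    tensor_parallel_size: int = 1

    def get_runner_kwargs(self) -> dict:
        return {
            "max_model_len": self.max_model_len,
            "max_num_seqs": self.max_num_seqs,
            "task": self.task,
            # The entry added by this commit: without it, a test configured
            # with tensor_parallel_size=2 silently fell back to the default.
            "tensor_parallel_size": self.tensor_parallel_size,
        }


info = VLMTestInfoSketch(tensor_parallel_size=2)
assert info.get_runner_kwargs()["tensor_parallel_size"] == 2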