[Bugfix] Lower gemma's unloaded_params exception to warning (#7002)

This commit is contained in:
Michael Goin 2024-08-01 15:01:07 -04:00 committed by GitHub
parent fb3db61688
commit f4fd390f5d
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 12 additions and 9 deletions

View File

@@ -404,6 +404,6 @@ class GemmaForCausalLM(nn.Module, SupportsLoRA):
loaded_params.add(name)
unloaded_params = params_dict.keys() - loaded_params
if unloaded_params:
raise RuntimeError(
"Some weights are not initialized from checkpoints: "
f"{unloaded_params}")
logger.warning(
"Some weights are not initialized from checkpoints: %s",
unloaded_params)

View File

@@ -23,6 +23,7 @@ from transformers import Gemma2Config
from vllm.attention import Attention, AttentionMetadata
from vllm.config import CacheConfig, LoRAConfig
from vllm.distributed import get_tensor_model_parallel_world_size
from vllm.logger import init_logger
from vllm.model_executor.layers.activation import GeluAndMul
from vllm.model_executor.layers.layernorm import GemmaRMSNorm
from vllm.model_executor.layers.linear import (MergedColumnParallelLinear,
@@ -41,6 +42,8 @@ from vllm.sequence import IntermediateTensors, SamplerOutput
from .interfaces import SupportsLoRA
logger = init_logger(__name__)
class Gemma2MLP(nn.Module):
@@ -390,6 +393,6 @@ class Gemma2ForCausalLM(nn.Module, SupportsLoRA):
unloaded_params = params_dict.keys() - loaded_params
if unloaded_params:
raise RuntimeError(
"Some weights are not initialized from checkpoints: "
f"{unloaded_params}")
logger.warning(
"Some weights are not initialized from checkpoints: %s",
unloaded_params)

View File

@@ -342,6 +342,6 @@ class PaliGemmaForConditionalGeneration(nn.Module, SupportsVision):
unloaded_params = params_dict.keys() - loaded_params
if unloaded_params:
raise RuntimeError(
"Some weights are not initialized from checkpoints: "
f"{unloaded_params}")
logger.warning(
"Some weights are not initialized from checkpoints: %s",
unloaded_params)