[Bugfix] Fix LoRA with PP (#7292)
parent 48abee9e54
commit 6dffa4b0a6

@@ -25,6 +25,7 @@ from vllm.lora.punica import PunicaWrapper
 from vllm.lora.utils import (from_layer, from_layer_logits_processor,
                              parse_fine_tuned_lora_name, replace_submodule)
 from vllm.model_executor.models.interfaces import SupportsLoRA
+from vllm.model_executor.models.utils import PPMissingLayer
 from vllm.utils import is_pin_memory_available
 
 logger = init_logger(__name__)
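
For context, PPMissingLayer is vLLM's placeholder for transformer layers that are assigned to other pipeline-parallel stages; on the local rank it behaves as a no-op. A rough sketch of its shape (paraphrased from vllm/model_executor/models/utils.py; the real class may carry extra bookkeeping):

import torch

class PPMissingLayer(torch.nn.Identity):
    """Placeholder for a layer hosted on another pipeline-parallel stage."""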

@@ -432,6 +433,8 @@ class LoRAModelManager(AdapterModelManager):
     def _create_lora_modules(self):
         for module_name, module in self.model.named_modules(
                 remove_duplicate=False):
+            if isinstance(module, PPMissingLayer):
+                continue
             if not self._match_target_modules(module_name):
                 continue
             parts = module_name.split(".")[-1]
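
The guard matters because _create_lora_modules walks every submodule returned by named_modules and wraps the matching ones with LoRA layers. Under pipeline parallelism, layers owned by other stages are placeholders with no weights, so attempting to wrap them breaks. A minimal, self-contained sketch of the pattern (ToyPPModel and the local PPMissingLayer stand-in are illustrative, not vLLM's classes):

import torch.nn as nn

class PPMissingLayer(nn.Identity):
    # Stand-in for vLLM's placeholder: a no-op module for layers that
    # live on another pipeline-parallel rank.
    pass

class ToyPPModel(nn.Module):
    def __init__(self):
        super().__init__()
        # This rank owns layer 0; layer 1 is hosted on the next stage.
        self.layers = nn.ModuleList([nn.Linear(8, 8), PPMissingLayer()])

model = ToyPPModel()
for module_name, module in model.named_modules(remove_duplicate=False):
    # The fix: skip placeholders before any LoRA wrapping is attempted,
    # since they have no weights to adapt.
    if isinstance(module, PPMissingLayer):
        continue
    print("candidate for LoRA wrapping:", module_name or "<root>")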