2024-08-23 21:12:44 +08:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
|
|
from vllm import LLM
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_empty_prompt():
    """Generating from an empty string prompt must raise ValueError."""
    engine = LLM(model="gpt2", enforce_eager=True)
    empty_prompts = [""]
    # The engine should reject the request before any generation happens.
    with pytest.raises(ValueError, match='Prompt cannot be empty'):
        engine.generate(empty_prompts)
|
2024-10-30 05:13:20 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_out_of_vocab_token():
    """A prompt token id outside the model vocabulary must raise ValueError."""
    engine = LLM(model="gpt2", enforce_eager=True)
    # 999999 is far beyond GPT-2's vocabulary size, so validation should fail.
    bad_prompt = {"prompt_token_ids": [999999]}
    with pytest.raises(ValueError, match='out of vocabulary'):
        engine.generate(bad_prompt)
|