test_skip_tokenizer_init.py

import pytest

from aphrodite.common.sampling_params import SamplingParams
from aphrodite.endpoints.llm import LLM


@pytest.mark.parametrize("model", ["facebook/opt-125m"])
def test_skip_tokenizer_initialization(model: str):
    # This test checks if the flag skip_tokenizer_init skips the initialization
    # of tokenizer and detokenizer. The generated output is expected to contain
    # token ids.
    llm = LLM(model=model, skip_tokenizer_init=True)
    sampling_params = SamplingParams(prompt_logprobs=True, detokenize=True)
    with pytest.raises(ValueError) as err:
        llm.generate("abc", sampling_params)
    assert "prompts must be None if" in str(err.value)
    outputs = llm.generate({"prompt_token_ids": [1, 2, 3]},
                           sampling_params=sampling_params)
    assert len(outputs) > 0
    completions = outputs[0].outputs
    assert len(completions) > 0
    assert completions[0].text == ""
    assert completions[0].token_ids
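

# Illustrative sketch, not part of the test above: how the flag is typically
# used outside pytest. With skip_tokenizer_init=True the engine builds no
# tokenizer or detokenizer, so text prompts raise ValueError; prompts must be
# supplied as token ids and results are read back as token ids (completion
# .text stays empty). The model name, token ids, and max_tokens below are
# placeholder values, not taken from the original test.
def _demo_generate_with_skipped_tokenizer():
    llm = LLM(model="facebook/opt-125m", skip_tokenizer_init=True)
    params = SamplingParams(max_tokens=8)
    outputs = llm.generate({"prompt_token_ids": [1, 2, 3]},
                           sampling_params=params)
    # Only token ids are available; detokenizing them would require loading
    # the tokenizer separately.
    return outputs[0].outputs[0].token_ids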