# registry.py
  1. import functools
  2. from collections import UserDict
  3. from dataclasses import dataclass
  4. from typing import (TYPE_CHECKING, Callable, Dict, Mapping, Optional, Protocol,
  5. Tuple, Type)
  6. from loguru import logger
  7. from torch import nn
  8. from transformers import PretrainedConfig
  9. from typing_extensions import TypeVar
  10. from .data import LLMInputs
  11. if TYPE_CHECKING:
  12. from aphrodite.common.config import ModelConfig, MultiModalConfig
  13. from aphrodite.common.sequence import SequenceData
  14. from aphrodite.multimodal import MultiModalDataDict, MultiModalRegistry
# TypeVar bound to PretrainedConfig so that get_hf_config() can return the
# exact HF config subclass the caller asked for.
C = TypeVar("C", bound=PretrainedConfig)
  16. @dataclass(frozen=True)
  17. class InputContext:
  18. """
  19. Contains information about the model which may be used to
  20. modify the inputs.
  21. """
  22. model_config: "ModelConfig"
  23. """The configuration of the model."""
  24. def get_multimodal_config(self) -> "MultiModalConfig":
  25. """
  26. Get the multimodal configuration of the model.
  27. Raises:
  28. ValueError: If the model is not multimodal.
  29. """
  30. multimodal_config = self.model_config.multimodal_config
  31. if multimodal_config is None:
  32. raise ValueError("No multimodal config found")
  33. return multimodal_config
  34. def get_hf_config(self, hf_config_type: Type[C]) -> C:
  35. """
  36. Get the HuggingFace configuration
  37. (:class:`transformers.PretrainedConfig`) of the model,
  38. additionally checking its type.
  39. Raises:
  40. ValueError: If the model is not of the specified type.
  41. """
  42. hf_config = self.model_config.hf_config
  43. if not isinstance(hf_config, hf_config_type):
  44. raise TypeError("Invalid type of HuggingFace config. "
  45. f"Expected type: {hf_config_type}, but "
  46. f"found type: {type(hf_config)}")
  47. return hf_config
# TypeVar bound to nn.Module classes; the registration decorators below use
# it so that they return the very same model class they were applied to.
N = TypeVar("N", bound=Type[nn.Module])
class DummyDataFactory(Protocol):
    """Callback protocol for factories that build dummy profiling data."""

    def __call__(
        self,
        ctx: InputContext,
        seq_len: int,
        mm_counts: Mapping[str, int],
    ) -> Tuple["SequenceData", Optional["MultiModalDataDict"]]:
        """
        Create dummy data to be inputted into the model.

        Args:
            ctx: Context about the model the data is created for.
            seq_len: Number of tokens the dummy sequence should span.
            mm_counts: Maximum number of multi-modal items per prompt,
                keyed by plugin name.

        Note:
            :data:`InputProcessor` is not applied to the dummy data.
        """
        ...
  62. class _MultiModalCounts(UserDict):
  63. """
  64. Wraps `mm_counts` for a more informative error message
  65. when attempting to access a plugin that does not exist.
  66. """
  67. def __getitem__(self, key: str) -> int:
  68. try:
  69. return super().__getitem__(key)
  70. except KeyError as exc:
  71. msg = (f"There is no multi-modal plugin with the key: {key}. "
  72. f"Available keys: {set(self.keys())}")
  73. raise KeyError(msg) from exc
InputProcessor = Callable[[InputContext, LLMInputs], LLMInputs]
"""
Preprocess the inputs to the model.

A callable that receives the model's :class:`InputContext` together with
the raw :class:`LLMInputs` and returns the processed :class:`LLMInputs`.
"""
  76. class InputRegistry:
  77. """
  78. A registry to dispatch data processing
  79. according to the target model.
  80. """
  81. def __init__(self) -> None:
  82. self._dummy_factories_by_model_type: Dict[Type[nn.Module],
  83. DummyDataFactory] = {}
  84. self._input_processors_by_model_type: Dict[Type[nn.Module],
  85. InputProcessor] = {}
  86. def _default_dummy_data_factory(
  87. self,
  88. ctx: InputContext,
  89. seq_len: int,
  90. mm_counts: Mapping[str, int],
  91. ) -> Tuple["SequenceData", Optional["MultiModalDataDict"]]:
  92. """
  93. The default dummy data factory represents the longest possible text
  94. that can be inputted to the model.
  95. Note:
  96. :data:`InputProcessor` is not applied to the dummy data.
  97. """
  98. # Avoid circular import
  99. from aphrodite.common.sequence import SequenceData
  100. dummy_seq_data = SequenceData([0] * seq_len)
  101. dummy_multi_modal_data = None
  102. return dummy_seq_data, dummy_multi_modal_data
  103. def register_dummy_data(self, factory: DummyDataFactory):
  104. """
  105. Register a dummy data factory to a model class.
  106. During memory profiling, the provided function is invoked to create
  107. dummy data to be inputted into the model. The resulting memory usage
  108. should be an upper bound of what the model would use at inference time.
  109. """
  110. def wrapper(model_cls: N) -> N:
  111. if model_cls in self._dummy_factories_by_model_type:
  112. logger.warning(
  113. f"Model class {model_cls} already has dummy data "
  114. f"registered to {self}. It is overwritten by the new one.")
  115. self._dummy_factories_by_model_type[model_cls] = factory
  116. return model_cls
  117. return wrapper
  118. def dummy_data_for_profiling(
  119. self,
  120. model_config: "ModelConfig",
  121. seq_len: int,
  122. mm_registry: "MultiModalRegistry",
  123. ) -> Tuple["SequenceData", Optional["MultiModalDataDict"]]:
  124. """
  125. Create dummy data for profiling the memory usage of a model.
  126. The model is identified by ``model_config``.
  127. See also:
  128. :ref:`enabling_multimodal_inputs`
  129. Note:
  130. This should be called after
  131. :meth:`~MultiModalRegistry.init_mm_limits_per_prompt`.
  132. """
  133. # Avoid circular import
  134. from aphrodite.modeling.model_loader import get_model_architecture
  135. model_cls, _ = get_model_architecture(model_config)
  136. dummy_factory = self._dummy_factories_by_model_type \
  137. .get(model_cls, self._default_dummy_data_factory)
  138. mm_counts = mm_registry.get_mm_limits_per_prompt(model_config)
  139. seq_data, mm_data = dummy_factory(
  140. InputContext(model_config),
  141. seq_len,
  142. _MultiModalCounts(mm_counts),
  143. )
  144. # Having more tokens is over-conservative but otherwise fine
  145. num_tokens = seq_data.prompt_token_ids
  146. assert len(num_tokens) >= seq_len, (
  147. f"Expected at least {seq_len} dummy tokens for profiling, "
  148. f"but found {len(num_tokens)} tokens instead.")
  149. if mm_data is not None:
  150. for k, v in mm_data.items():
  151. num_items = len(v) if isinstance(v, list) else 1
  152. num_expected = mm_counts[k]
  153. assert num_items >= num_expected, (
  154. f"Expected at least {num_expected} dummy '{k}' instances "
  155. f"for profiling, but found {num_items} instances instead.")
  156. return seq_data, mm_data
  157. def _default_input_processor(self, ctx: InputContext,
  158. inputs: LLMInputs) -> LLMInputs:
  159. """The default input processor is a no-op."""
  160. return inputs
  161. def register_input_processor(self, processor: InputProcessor):
  162. """
  163. Register an input processor to a model class.
  164. The provided function is invoked on each input to the model. This
  165. happens before
  166. :meth:`~aphrodite.multimodal.MultiModalRegistry.map_input`.
  167. See also:
  168. :ref:`input_processing_pipeline`
  169. """
  170. def wrapper(model_cls: N) -> N:
  171. if model_cls in self._input_processors_by_model_type:
  172. logger.warning(
  173. f"Model class {model_cls} already has input processor "
  174. f"registered to {self}. It is overwritten by the new one.")
  175. self._input_processors_by_model_type[model_cls] = processor
  176. return model_cls
  177. return wrapper
  178. def process_input(self, model_config: "ModelConfig",
  179. inputs: LLMInputs) -> LLMInputs:
  180. """
  181. Apply an input processor to an instance of model inputs.
  182. The model is identified by ``model_config``.
  183. See also:
  184. :ref:`input_processing_pipeline`
  185. """
  186. # Avoid circular import
  187. from aphrodite.modeling.model_loader import get_model_architecture
  188. model_cls, _ = get_model_architecture(model_config)
  189. processor = self._input_processors_by_model_type \
  190. .get(model_cls, self._default_input_processor)
  191. return processor(InputContext(model_config), inputs)
  192. def create_input_processor(self, model_config: "ModelConfig"):
  193. """
  194. Create an input processor (see :meth:`process_input`) for a
  195. specific model.
  196. """
  197. return functools.partial(self.process_input, model_config)