utils.py

import functools
from typing import Dict


@functools.lru_cache
def _get_op_configs(op_type: str, batch: int, hidden_size: int):
    # TODO: add optimal configurations
    return None


def _check_divisibility(hidden_size: int):
    # The bgmv_expand kernel requires hidden_size to be divisible by one of
    # the numbers below; pick the largest divisor that works.
    divisibility = [2, 4, 8, 16, 32, 64]
    divisibility.sort(reverse=True)
    for div in divisibility:
        if hidden_size % div == 0:
            return div
    # hidden_size is an odd number, so none of the even divisors apply
    return 1
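
# Illustrative examples (not part of the original module):
#   _check_divisibility(4096) -> 64  (4096 is divisible by 64)
#   _check_divisibility(24)   -> 8   (divisible by 8 but not by 16)
#   _check_divisibility(7)    -> 1   (odd, so the fallback applies)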


def _get_default_config(op_type: str, batch: int, hidden_size: int):
    if op_type == "expand":
        return {
            "BLOCK_N": 256,
            "SPLIT_N": _check_divisibility(hidden_size),
            "num_warps": 8
        }
    else:
        return {"BLOCK_K": 256, "SPLIT_K": 64, "num_warps": 8}
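
# Illustrative example (not part of the original module): for op_type
# "expand" with hidden_size=4096 the default evaluates to
#   {"BLOCK_N": 256, "SPLIT_N": 64, "num_warps": 8}
# since _check_divisibility(4096) == 64; every other op_type gets the
# fixed {"BLOCK_K": 256, "SPLIT_K": 64, "num_warps": 8}.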


def get_lora_op_configs(op_type: str, batch: int,
                        hidden_size: int) -> Dict[str, int]:
    """Inspired by `fused_moe_kernel`.

    The return value will be a dictionary mapping an irregular grid of
    batch sizes and hidden_size to configurations of the bgmv-related
    kernel.

    NOTE: It currently only supports the default configuration. We plan to
    generate optimal configurations for different hardware in the future
    using scripts similar to `benchmark_moe.py`.
    """
    config = _get_op_configs(op_type, batch, hidden_size)
    if not config:
        config = _get_default_config(op_type, batch, hidden_size)
    return config
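

# A minimal usage sketch (assumption: nothing below is part of the original
# module; the op_type "shrink" is a hypothetical non-"expand" value, since
# any op_type other than "expand" falls through to the BLOCK_K default).
if __name__ == "__main__":
    print(get_lora_op_configs("expand", batch=8, hidden_size=4096))
    # -> {'BLOCK_N': 256, 'SPLIT_N': 64, 'num_warps': 8}
    print(get_lora_op_configs("shrink", batch=8, hidden_size=4096))
    # -> {'BLOCK_K': 256, 'SPLIT_K': 64, 'num_warps': 8}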