# scan_cache.py
# coding=utf-8
# Copyright 2022-present, the HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains command to scan the HF cache directory.

Usage:
    huggingface-cli scan-cache
    huggingface-cli scan-cache -v
    huggingface-cli scan-cache -vvv
    huggingface-cli scan-cache --dir ~/.cache/huggingface/hub
"""
import time
from typing import Optional

from huggingface_hub.utils import CacheNotFound, HFCacheInfo, scan_cache_dir
from huggingface_hub.commands._cli_utils import ANSI, tabulate
from pathlib import Path
  27. def get_rows_json(hf_cache_info: HFCacheInfo) -> list[list[str | int | float]]:
  28. return [
  29. [
  30. repo.repo_id,
  31. repo.repo_type,
  32. revision.commit_hash,
  33. "{:>12}".format(revision.size_on_disk_str),
  34. repo.size_on_disk,
  35. revision.size_on_disk,
  36. revision.nb_files,
  37. repo.last_accessed_str,
  38. repo.last_accessed,
  39. revision.last_modified_str,
  40. revision.last_modified,
  41. ", ".join(sorted(revision.refs)),
  42. str(revision.snapshot_path),
  43. ]
  44. for repo in sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)
  45. for revision in sorted(
  46. repo.revisions, key=lambda revision: revision.commit_hash
  47. )
  48. ]
  49. def get_headers_json() -> list[str]:
  50. return [
  51. "repo_id",
  52. "repo_type",
  53. "commit_hash",
  54. "size_on_disk_str",
  55. "repo_size_on_disk",
  56. "revision_size_on_disk",
  57. "nb_files",
  58. "last_accessed_str",
  59. "last_accessed",
  60. "last_modified_str",
  61. "last_modified",
  62. "refs",
  63. "snapshot_path",
  64. ]
  65. def get_rows_verbose(hf_cache_info: HFCacheInfo) -> list[list[str | int]]:
  66. return [
  67. [
  68. repo.repo_id,
  69. repo.repo_type,
  70. revision.commit_hash,
  71. "{:>12}".format(revision.size_on_disk_str),
  72. revision.nb_files,
  73. revision.last_modified_str,
  74. ", ".join(sorted(revision.refs)),
  75. str(revision.snapshot_path),
  76. ]
  77. for repo in sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)
  78. for revision in sorted(
  79. repo.revisions, key=lambda revision: revision.commit_hash
  80. )
  81. ]
  82. def get_rows_quiet(hf_cache_info: HFCacheInfo) -> list[list[str | int]]:
  83. return [
  84. [
  85. repo.repo_id,
  86. repo.repo_type,
  87. "{:>12}".format(repo.size_on_disk_str),
  88. repo.nb_files,
  89. repo.last_accessed_str,
  90. repo.last_modified_str,
  91. ", ".join(sorted(repo.refs)),
  92. str(repo.repo_path),
  93. ]
  94. for repo in sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)
  95. ]
  96. def get_rows(verbosity: int, hf_cache_info: HFCacheInfo) -> list[list[str | int]]:
  97. if verbosity == 0:
  98. return get_rows_quiet(hf_cache_info)
  99. else:
  100. return get_rows_verbose(hf_cache_info)
  101. def get_headers_verbose() -> list[str]:
  102. return [
  103. "REPO ID",
  104. "REPO TYPE",
  105. "REVISION",
  106. "SIZE ON DISK",
  107. "NB FILES",
  108. "LAST_MODIFIED",
  109. "REFS",
  110. "LOCAL PATH",
  111. ]
  112. def get_headers_quiet() -> list[str]:
  113. return [
  114. "REPO ID",
  115. "REPO TYPE",
  116. "SIZE ON DISK",
  117. "NB FILES",
  118. "LAST_ACCESSED",
  119. "LAST_MODIFIED",
  120. "REFS",
  121. "LOCAL PATH",
  122. ]
  123. def render_as_markdown(rows: list[list[str | int]], headers: list[str]) -> str:
  124. markdown = ""
  125. # render headers
  126. markdown += " | ".join(headers) + "\n"
  127. markdown += " | ".join(["---"] * len(headers)) + "\n"
  128. # render rows
  129. for row in rows:
  130. markdown += " | ".join([str(x) for x in row]) + "\n"
  131. return markdown
  132. def get_table(verbosity: int, hf_cache_info: HFCacheInfo) -> str:
  133. if verbosity == 0:
  134. return tabulate(
  135. rows=[
  136. [
  137. repo.repo_id,
  138. repo.repo_type,
  139. "{:>12}".format(repo.size_on_disk_str),
  140. repo.nb_files,
  141. repo.last_accessed_str,
  142. repo.last_modified_str,
  143. ", ".join(sorted(repo.refs)),
  144. str(repo.repo_path),
  145. ]
  146. for repo in sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)
  147. ],
  148. headers=[
  149. "REPO ID",
  150. "REPO TYPE",
  151. "SIZE ON DISK",
  152. "NB FILES",
  153. "LAST_ACCESSED",
  154. "LAST_MODIFIED",
  155. "REFS",
  156. "LOCAL PATH",
  157. ],
  158. )
  159. else:
  160. return tabulate(
  161. rows=[
  162. [
  163. repo.repo_id,
  164. repo.repo_type,
  165. revision.commit_hash,
  166. "{:>12}".format(revision.size_on_disk_str),
  167. revision.nb_files,
  168. revision.last_modified_str,
  169. ", ".join(sorted(revision.refs)),
  170. str(revision.snapshot_path),
  171. ]
  172. for repo in sorted(hf_cache_info.repos, key=lambda repo: repo.repo_path)
  173. for revision in sorted(
  174. repo.revisions, key=lambda revision: revision.commit_hash
  175. )
  176. ],
  177. headers=[
  178. "REPO ID",
  179. "REPO TYPE",
  180. "REVISION",
  181. "SIZE ON DISK",
  182. "NB FILES",
  183. "LAST_MODIFIED",
  184. "REFS",
  185. "LOCAL PATH",
  186. ],
  187. )
  188. def scan_cache_and_print(
  189. verbosity: int = 0, cache_dir: Optional[str | Path] = None
  190. ) -> None:
  191. try:
  192. t0 = time.time()
  193. hf_cache_info = scan_cache_dir(cache_dir)
  194. t1 = time.time()
  195. except CacheNotFound as exc:
  196. cache_dir = exc.cache_dir
  197. print(f"Cache directory not found: {cache_dir}")
  198. return
  199. print(get_table(verbosity, hf_cache_info))
  200. print(
  201. f"\nDone in {round(t1-t0,1)}s. Scanned {len(hf_cache_info.repos)} repo(s)"
  202. f" for a total of {ANSI.red(hf_cache_info.size_on_disk_str)}."
  203. )
  204. if len(hf_cache_info.warnings) > 0:
  205. message = f"Got {len(hf_cache_info.warnings)} warning(s) while scanning."
  206. if verbosity >= 3:
  207. print(ANSI.gray(message))
  208. for warning in hf_cache_info.warnings:
  209. print(ANSI.gray(warning))
  210. else:
  211. print(ANSI.gray(message + " Use -vvv to print details."))