base.py 2.7 KB

from __future__ import annotations

from abc import ABC, abstractmethod
from pathlib import Path
from shutil import rmtree
from typing import Any

from onnxruntime.capi.onnxruntime_pybind11_state import InvalidProtobuf  # type: ignore

from ..config import get_cache_dir
from ..schemas import ModelType


class InferenceModel(ABC):
    _model_type: ModelType

    def __init__(
        self, model_name: str, cache_dir: Path | str | None = None, eager: bool = True, **model_kwargs: Any
    ) -> None:
        self.model_name = model_name
        self._loaded = False
        self._cache_dir = Path(cache_dir) if cache_dir is not None else get_cache_dir(model_name, self.model_type)

        # Load (or only download, if eager=False) the model up front; if the cached
        # files are missing or corrupt, clear the cache and retry once.
        loader = self.load if eager else self.download
        try:
            loader(**model_kwargs)
        except (OSError, InvalidProtobuf):
            self.clear_cache()
            loader(**model_kwargs)

    def download(self, **model_kwargs: Any) -> None:
        if not self.cached:
            self._download(**model_kwargs)

    def load(self, **model_kwargs: Any) -> None:
        self.download(**model_kwargs)
        self._load(**model_kwargs)
        self._loaded = True

    def predict(self, inputs: Any) -> Any:
        # Lazily load the model on first use if it was constructed with eager=False.
        if not self._loaded:
            self.load()
        return self._predict(inputs)

    @abstractmethod
    def _predict(self, inputs: Any) -> Any:
        ...

    @abstractmethod
    def _download(self, **model_kwargs: Any) -> None:
        ...

    @abstractmethod
    def _load(self, **model_kwargs: Any) -> None:
        ...

    @property
    def model_type(self) -> ModelType:
        return self._model_type

    @property
    def cache_dir(self) -> Path:
        return self._cache_dir

    @cache_dir.setter
    def cache_dir(self, cache_dir: Path) -> None:
        self._cache_dir = cache_dir

    @property
    def cached(self) -> bool:
        return self.cache_dir.exists() and any(self.cache_dir.iterdir())

    @classmethod
    def from_model_type(cls, model_type: ModelType, model_name: str, **model_kwargs: Any) -> InferenceModel:
        # Dispatch to the direct subclass whose _model_type matches the requested type.
        subclasses = {subclass._model_type: subclass for subclass in cls.__subclasses__()}
        if model_type not in subclasses:
            raise ValueError(f"Unsupported model type: {model_type}")

        return subclasses[model_type](model_name, **model_kwargs)

    def clear_cache(self) -> None:
        if not self.cache_dir.exists():
            return
        # Refuse to delete recursively if rmtree cannot defend against symlink attacks on this platform.
        if not rmtree.avoids_symlink_attacks:
            raise RuntimeError("Attempted to clear cache, but rmtree is not safe on this platform.")

        if self.cache_dir.is_dir():
            rmtree(self.cache_dir)
        else:
            self.cache_dir.unlink()
        self.cache_dir.mkdir(parents=True, exist_ok=True)
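
For context, a minimal sketch of how a concrete subclass might fill in the three abstract hooks and be constructed through the factory. The ModelType member, the model file name, and the onnxruntime session usage below are illustrative assumptions, not code from this repository.

import onnxruntime as ort


class ExampleOnnxModel(InferenceModel):
    # Assumed enum member; the real ModelType values live in ..schemas.
    _model_type = ModelType.IMAGE_CLASSIFICATION

    def _download(self, **model_kwargs: Any) -> None:
        # Fetch the model files into self.cache_dir (e.g. from a model hub); omitted here.
        ...

    def _load(self, **model_kwargs: Any) -> None:
        # Hypothetical file name inside the cache directory.
        self.session = ort.InferenceSession(str(self.cache_dir / "model.onnx"))

    def _predict(self, inputs: Any) -> Any:
        return self.session.run(None, inputs)


# Construction goes through the factory, which dispatches on _model_type;
# predict() then loads the model lazily if eager=False was passed:
#   model = InferenceModel.from_model_type(ModelType.IMAGE_CLASSIFICATION, "example-model", eager=False)
#   outputs = model.predict({"input": preprocessed_array})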