@@ -1,4 +1,3 @@
-import asyncio
 from typing import Any
 
 from aiocache.backends.memory import SimpleMemoryCache
@@ -48,13 +47,10 @@ class ModelCache:
         """
 
         key = self.cache.build_key(model_name, model_type.value)
-        model = await self.cache.get(key)
-        if model is None:
-            async with OptimisticLock(self.cache, key) as lock:
-                model = await asyncio.get_running_loop().run_in_executor(
-                    None,
-                    lambda: InferenceModel.from_model_type(model_type, model_name, **model_kwargs),
-                )
+        async with OptimisticLock(self.cache, key) as lock:
+            model = await self.cache.get(key)
+            if model is None:
+                model = InferenceModel.from_model_type(model_type, model_name, **model_kwargs)
                 await lock.cas(model, ttl=self.ttl)
         return model
 
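The change moves the cache read inside the `OptimisticLock`, so the check and the `cas()` write happen under the same lock token, and it constructs the model inline rather than through `run_in_executor`. Below is a minimal, self-contained sketch of that check-then-set pattern against aiocache's in-memory backend. It is not part of this diff: `get_or_create`, the key string, the TTL value, and the `OptimisticLockError` fallback are illustrative additions.

```python
import asyncio

from aiocache.backends.memory import SimpleMemoryCache
from aiocache.lock import OptimisticLock, OptimisticLockError


async def get_or_create(cache: SimpleMemoryCache, key: str) -> str:
    """Return the cached value for ``key``, creating and storing it if absent."""
    async with OptimisticLock(cache, key) as lock:
        value = await cache.get(key)
        if value is None:
            # Stand-in for the expensive InferenceModel.from_model_type(...) call.
            value = f"loaded:{key}"
            try:
                # cas() only writes if nothing else stored the key since our get().
                await lock.cas(value, ttl=300)
            except OptimisticLockError:
                # Another task won the race; fall back to the value it stored.
                value = await cache.get(key)
    return value


async def main() -> None:
    cache = SimpleMemoryCache()
    print(await get_or_create(cache, "clip-vit-b-32"))


if __name__ == "__main__":
    asyncio.run(main())
```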