optimize class dependencies.
@@ -10,17 +10,29 @@ from pydantic.tools import parse_obj_as
 from mdrsclient.exceptions import UnexpectedException
 from mdrsclient.models import Laboratories, Token, User
-from mdrsclient.settings import CONFIG_DIR_PATH
+from mdrsclient.settings import CONFIG_DIRNAME


 @dataclass
 class CacheData:
-    user: User | None
-    token: Token | None
-    laboratories: Laboratories
-    digest: str | None
+    user: User | None = None
+    token: Token | None = None
+    laboratories: Laboratories = Laboratories()
+    digest: str = ""

-    def calc_digest(self) -> str:
+    def clear(self) -> None:
+        self.user = None
+        self.token = None
+        self.laboratories.clear()
+        self.digest = ""
+
+    def update_digest(self) -> None:
+        self.digest = self.__calc_digest()
+
+    def verify_digest(self) -> bool:
+        return self.digest == self.__calc_digest()
+
+    def __calc_digest(self) -> str:
         return hashlib.sha256(
             json.dumps(
                 [
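The new CacheData API replaces the bare calc_digest() with a clear/update_digest/verify_digest trio around a private __calc_digest(): the payload is serialized to JSON, hashed with SHA-256, and the digest is stored next to the data so a later load can detect corruption or tampering. The hunk truncates the field list passed to json.dumps, so the following standalone sketch uses hypothetical fields (name, items) only to illustrate the update/verify round trip:

    import hashlib
    import json
    from dataclasses import dataclass, field


    @dataclass
    class ExampleCache:
        # Hypothetical payload fields standing in for user/token/laboratories.
        name: str = ""
        items: list[str] = field(default_factory=list)
        digest: str = ""

        def update_digest(self) -> None:
            # Recompute and store the digest before persisting the object.
            self.digest = self.__calc_digest()

        def verify_digest(self) -> bool:
            # A mismatch means the persisted payload no longer matches its digest.
            return self.digest == self.__calc_digest()

        def __calc_digest(self) -> str:
            # Hash a JSON dump of the payload, excluding the digest itself.
            return hashlib.sha256(json.dumps([self.name, self.items]).encode()).hexdigest()


    cache = ExampleCache(name="alice", items=["a", "b"])
    cache.update_digest()
    assert cache.verify_digest()
    cache.items.append("c")           # tamper with the payload...
    assert not cache.verify_digest()  # ...and the next verify catches it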
@@ -33,101 +45,96 @@ class CacheData:


 class CacheFile:
-    serial: int
-    cache_dir: str
-    cache_file: str
-    data: CacheData
+    __serial: int
+    __cache_dir: str
+    __cache_file: str
+    __data: CacheData

     def __init__(self, remote: str) -> None:
-        self.serial = -1
-        self.cache_dir = os.path.join(CONFIG_DIR_PATH, "cache")
-        self.cache_file = os.path.join(self.cache_dir, remote + ".json")
-        self.data = CacheData(user=None, token=None, laboratories=Laboratories([]), digest=None)
-
-    def dump(self) -> CacheData | None:
-        self.__load()
-        return self.data
+        self.__serial = -1
+        self.__cache_dir = os.path.join(CONFIG_DIRNAME, "cache")
+        self.__cache_file = os.path.join(self.__cache_dir, remote + ".json")
+        self.__data = CacheData()

     @property
     def token(self) -> Token | None:
         self.__load()
-        return self.data.token
+        return self.__data.token

     @token.setter
     def token(self, token: Token) -> None:
         self.__load()
-        self.data.token = token
+        self.__data.token = token
         self.__save()

     @token.deleter
     def token(self) -> None:
-        if self.data.token is not None:
+        if self.__data.token is not None:
             self.__clear()

     @property
     def user(self) -> User | None:
-        return self.data.user
+        return self.__data.user

     @user.setter
     def user(self, user: User) -> None:
         self.__load()
-        self.data.user = user
+        self.__data.user = user
         self.__save()

     @user.deleter
     def user(self) -> None:
-        if self.data.user is not None:
+        if self.__data.user is not None:
             self.__clear()

     @property
     def laboratories(self) -> Laboratories:
-        return self.data.laboratories
+        return self.__data.laboratories

     @laboratories.setter
     def laboratories(self, laboratories: Laboratories) -> None:
         self.__load()
-        self.data.laboratories = laboratories
+        self.__data.laboratories = laboratories
         self.__save()

     def __clear(self) -> None:
-        self.data.user = None
-        self.data.token = None
-        self.data.laboratories.clear()
+        self.__data.clear()
         self.__save()

     def __load(self) -> None:
-        if os.path.isfile(self.cache_file):
-            stat = os.stat(self.cache_file)
+        if os.path.isfile(self.__cache_file):
+            stat = os.stat(self.__cache_file)
             serial = hash((stat.st_uid, stat.st_gid, stat.st_mode, stat.st_size, stat.st_mtime))
-            if self.serial != serial:
+            if self.__serial != serial:
                 try:
-                    with open(self.cache_file) as f:
+                    with open(self.__cache_file) as f:
                         data = parse_obj_as(CacheData, json.load(f))
-                        if data.digest != data.calc_digest():
+                        if not data.verify_digest():
                             raise UnexpectedException("Cache data has been broken.")
-                        self.data = data
-                except (ValidationError, UnexpectedException):
+                        self.__data = data
+                except (ValidationError, UnexpectedException) as e:
                     self.__clear()
-                    self.__save()
+                    print(e)
                 else:
-                    self.serial = serial
+                    self.__serial = serial
         else:
             self.__clear()
-            self.serial = -1
+            self.__serial = -1

     def __save(self) -> None:
         self.__ensure_cache_dir()
-        with open(self.cache_file, "w") as f:
+        with open(self.__cache_file, "w") as f:
             fcntl.flock(f, fcntl.LOCK_EX)
-            self.data.digest = self.data.calc_digest()
-            f.write(json.dumps(dataclasses.asdict(self.data)))
-            stat = os.stat(self.cache_file)
-            self.serial = hash((stat.st_uid, stat.st_gid, stat.st_mode, stat.st_size, stat.st_mtime))
+            self.__data.update_digest()
+            f.write(json.dumps(dataclasses.asdict(self.__data)))
+            stat = os.stat(self.__cache_file)
+            self.__serial = hash((stat.st_uid, stat.st_gid, stat.st_mode, stat.st_size, stat.st_mtime))
         # ensure file is secure.
-        os.chmod(self.cache_file, 0o600)
+        os.chmod(self.__cache_file, 0o600)

     def __ensure_cache_dir(self) -> None:
-        if not os.path.exists(self.cache_dir):
-            os.makedirs(self.cache_dir)
+        if not os.path.exists(self.__cache_dir):
+            os.makedirs(self.__cache_dir)
         # ensure directory is secure.
-        os.chmod(self.cache_dir, 0o700)
+        os.chmod(self.__cache_dir, 0o700)
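In __load, the class avoids re-reading and re-validating the cache file on every property access: it hashes a tuple of stat() metadata (uid, gid, mode, size, mtime) into a serial and only reparses the file when that serial changes. A minimal sketch of the same change-detection idea, using a plain json.load in place of the pydantic parsing and digest check from the diff:

    import json
    import os


    class StatWatcher:
        """Reload a JSON file only when its stat() metadata changes."""

        def __init__(self, path: str) -> None:
            self.path = path
            self.serial = -1  # sentinel: nothing loaded yet
            self.data: dict = {}

        def load(self) -> dict:
            if os.path.isfile(self.path):
                st = os.stat(self.path)
                # Same tuple as the diff: ownership, permissions, size and mtime.
                serial = hash((st.st_uid, st.st_gid, st.st_mode, st.st_size, st.st_mtime))
                if serial != self.serial:
                    with open(self.path) as f:
                        self.data = json.load(f)
                    self.serial = serial
            else:
                self.data = {}
                self.serial = -1
            return self.data

The trade-off is cheap reads over strict freshness: a rewrite that leaves all five stat fields identical would go unnoticed, which is presumably acceptable for a local cache file, and the original keeps the serial in sync by recomputing it after every save as well.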
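__save also shows the secure-write pattern worth noting for anything that stores tokens on disk: the cache directory is created owner-only (0o700), the file is written under an exclusive fcntl lock so concurrent clients do not interleave writes, and the file itself is restricted to 0o600 afterwards. A condensed, POSIX-only sketch with a hypothetical payload dict standing in for the dataclass:

    import fcntl
    import json
    import os


    def save_private_json(directory: str, filename: str, payload: dict) -> None:
        # Owner-only directory for anything that may hold credentials.
        os.makedirs(directory, exist_ok=True)
        os.chmod(directory, 0o700)

        path = os.path.join(directory, filename)
        with open(path, "w") as f:
            # Exclusive lock; released when the file object is closed.
            fcntl.flock(f, fcntl.LOCK_EX)
            f.write(json.dumps(payload))
        # Owner-only file permissions, matching the diff's 0o600.
        os.chmod(path, 0o600)

As in the diff, permissions are tightened after the write; an alternative is to create the file with os.open(path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600), which avoids the brief window where the file exists with default umask permissions.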