# fime/data.py
import os
import json
import base64
import atexit
from datetime import datetime, date, timedelta
from threading import Thread, Event
from collections.abc import MutableMapping
from PySide2 import QtCore
# Per-user application data directory, resolved via Qt's platform paths.
data_dir_path = os.path.join(QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.AppDataLocation),
"fimefracking")
# File holding the base64-encoded task list (see Tasks).
tasks_path = os.path.join(data_dir_path, "tasks.json")
# Per-key data files; {} is filled with the dict key (e.g. "2020-02" in Log).
data_path = os.path.join(data_dir_path, "data_{}.json")
# Seconds the background saver thread waits between flushes (Data._executor).
#save_delay = 3 * 60
# NOTE(review): 3 s looks like a debug value; the commented-out line above
# suggests 3 minutes was intended — confirm before release.
save_delay = 3
class Tasks:
    """Persists the list of task names in tasks.json in the app data dir.

    Task names are stored base64-encoded so arbitrary characters survive
    the JSON round-trip unambiguously.
    """

    def __init__(self):
        # makedirs(+exist_ok) instead of mkdir: creates missing parents
        # (os.mkdir raises if the AppData root does not exist yet) and
        # avoids the exists-check/create race.
        os.makedirs(data_dir_path, exist_ok=True)
        if os.path.exists(tasks_path):
            with open(tasks_path, "r") as f:
                encoded_tasks = json.loads(f.read())
            self._tasks = [base64.b64decode(t.encode("utf-8")).decode("utf-8")
                           for t in encoded_tasks]
        else:
            self._tasks = []

    @property
    def tasks(self):
        """Current list of (decoded) task names."""
        return self._tasks

    @tasks.setter
    def tasks(self, tasks):
        # Assigning the task list persists it immediately.
        self._tasks = tasks
        self._save()

    def _save(self):
        """Write the task list to tasks.json, base64-encoding each name."""
        print("...saving tasks...")
        encoded_tasks = [base64.b64encode(t.encode("utf-8")).decode("utf-8")
                         for t in self._tasks]
        with open(tasks_path, "w+") as f:
            f.write(json.dumps(encoded_tasks))
class Data(MutableMapping):
    """Dict-like store that lazily loads values from data_<key>.json files
    and batches writes to disk.

    Mutated keys are marked "hot" and flushed by a background daemon thread
    every `save_delay` seconds; an atexit hook stops the thread and forces
    a final flush.
    """

    def __init__(self):
        # makedirs(+exist_ok): create missing parents, no exists-check race.
        os.makedirs(data_dir_path, exist_ok=True)
        self._cache = {}        # key -> value (loaded or assigned)
        self._hot_keys = []     # keys mutated since the last flush
        self._trunning = False  # saver thread running?
        self._tevent = Event()  # set at exit to wake the saver early
        self._thread = None

        def cleanup():
            # Stop the saver loop, wake it immediately, and wait for the
            # final flush before the interpreter exits.
            self._trunning = False
            self._tevent.set()
            if self._thread:
                self._thread.join()
        atexit.register(cleanup)

    def __getitem__(self, key):
        """Return the value for `key`, loading it from disk on first access.

        Raises KeyError for keys with neither a cached value nor a file.
        """
        dpath = data_path.format(key)
        if key not in self._cache and os.path.exists(dpath):
            with open(dpath, "r") as f:
                self._cache[key] = json.loads(f.read())
        return self._cache[key]

    def __setitem__(self, key, value):
        self._cache[key] = value
        self._hot_keys.append(key)
        self._schedule_save()

    def _schedule_save(self):
        """Start the background saver thread on the first write."""
        if self._trunning:
            return
        self._trunning = True
        self._thread = Thread(target=self._executor, daemon=True)
        self._thread.start()

    def _executor(self):
        # Flush periodically until cleanup() clears the flag; the event
        # cuts the wait short at shutdown so the last flush runs promptly.
        while self._trunning:
            self._tevent.wait(save_delay)
            self._save()

    def _save(self):
        """Write every hot key's value to its data file and clear the list."""
        for key in self._hot_keys:
            print(f"... saving dict {key} ...")
            to_write = self._cache[key]  # single dict read — atomic under the GIL
            with open(data_path.format(key), "w+") as f:
                f.write(json.dumps(to_write))
        # NOTE(review): resetting while another thread may append is a small
        # race; entries appended mid-flush could be dropped. A lock would
        # make this airtight.
        self._hot_keys = []
        # Removed dead `self._saving = False`: the attribute was never
        # initialized or read anywhere.

    def __delitem__(self, key):
        # Deliberately unsupported. Raising (not returning the
        # NotImplemented sentinel, which is only for binary dunders)
        # makes misuse fail loudly.
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def __len__(self):
        # TODO use glob over data_*.json?
        raise NotImplementedError

    def __repr__(self):
        return f"{type(self).__name__}({self._cache})"
class Log:
    """Append-only work log built on Data.

    Entries live under a month key ("YYYY-MM") and a day subkey ("DD") as
    strings of the form "HH:MM:SS <base64(task)>".
    """

    def __init__(self):
        self._data = Data()

    def log(self, task, ptime=None):
        """Record that work on `task` started at `ptime` (default: now)."""
        if ptime is None:
            ptime = datetime.now()
        month = self._data.setdefault(ptime.strftime("%Y-%m"), {})
        month.setdefault(ptime.strftime("%d"), [])\
            .append(f"{ptime.strftime('%H:%M:%S')} {base64.b64encode(task.encode('utf-8')).decode('utf-8')}")
        self._data[ptime.strftime("%Y-%m")] = month  # necessary to trigger Data.__setitem__

    def last_log(self, pdate=None):
        """Return the most recently logged task name for `pdate` (default:
        today), or None if nothing was logged that day.

        Bug fix: the old default `pdate=date.today()` was evaluated once at
        import time, so the "default" day went stale after midnight. A None
        sentinel re-evaluates per call.
        """
        if pdate is None:
            pdate = date.today()
        month_key = pdate.strftime("%Y-%m")
        day_key = pdate.strftime("%d")
        if month_key not in self._data or day_key not in self._data[month_key]:
            return None
        return base64.b64decode(
            self._data[month_key][day_key][-1].split()[1].encode("utf-8")).decode("utf-8")

    def report(self, pdate=None):
        """Return rows of (task, "HH:MM" start, "HH:MM" duration) for `pdate`
        (default: today), then a blank separator row and a "Sum" row.

        Each entry ends where the next one starts; the last entry ends at
        datetime.now(). Raises KeyError if nothing was logged on `pdate`.
        (Same import-time-default fix as last_log.)
        """
        if pdate is None:
            pdate = date.today()
        entries = []
        for e in self._data[pdate.strftime("%Y-%m")][pdate.strftime("%d")]:
            tstr, b64str = e.split()
            task = base64.b64decode(b64str.encode("utf-8")).decode("utf-8")
            start_time = datetime.combine(pdate, datetime.strptime(tstr, "%H:%M:%S").time())
            entries.append((task, start_time))
        ret = []
        dsum = timedelta()
        for i, (task, start_time) in enumerate(entries):
            end_time = entries[i + 1][1] if i < len(entries) - 1 else datetime.now()
            duration = end_time - start_time
            dsum += duration
            dhours, rem = divmod(duration.seconds, 3600)
            dmins, _ = divmod(rem, 60)
            ret.append((task, start_time.strftime("%H:%M"), f"{dhours:02d}:{dmins:02d}"))
        ret.append(("", "", ""))
        dhours, rem = divmod(dsum.seconds, 3600)
        dmins, _ = divmod(rem, 60)
        ret.append(("Sum", "", f"{dhours:02d}:{dmins:02d}"))
        return ret