import os
import json
import base64
import atexit
from datetime import datetime, date, time, timedelta
from threading import Thread, Event
from collections.abc import MutableMapping

from PySide2 import QtCore

data_dir_path = os.path.join(QtCore.QStandardPaths.writableLocation(QtCore.QStandardPaths.AppDataLocation),
                             "fimefracking")
tasks_path = os.path.join(data_dir_path, "tasks.json")
data_path = os.path.join(data_dir_path, "data_{}.json")

# save_delay = 3 * 60
save_delay = 3
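
# On-disk layout (as read from the code below, not from separate documentation):
#   <data_dir_path>/tasks.json              JSON list of base64-encoded task names (class Tasks)
#   <data_dir_path>/data_<YYYY-MM>.json     one JSON dict per month (class Data), mapping a day
#                                           "DD" to a list of "HH:MM <base64(task)>" entries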


class Tasks:
    def __init__(self):
        if not os.path.exists(data_dir_path):
            os.mkdir(data_dir_path)
        if os.path.exists(tasks_path):
            with open(tasks_path, "r") as f:
                encoded_tasks = json.loads(f.read())
            self._tasks = list(map(lambda x: base64.b64decode(x.encode("utf-8")).decode("utf-8"), encoded_tasks))
        else:
            self._tasks = []

    @property
    def tasks(self):
        return self._tasks

    @tasks.setter
    def tasks(self, tasks):
        self._tasks = tasks
        self._save()

    def _save(self):
        print("... saving tasks ...")
        encoded_tasks = list(map(lambda x: base64.b64encode(x.encode("utf-8")).decode("utf-8"), self._tasks))
        with open(tasks_path, "w+") as f:
            f.write(json.dumps(encoded_tasks))
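
# Usage sketch for Tasks (illustrative, not part of the original module): the property
# setter is the only hook that persists changes, so callers reassign the whole list
# rather than mutating it in place.
#
#   tasks = Tasks()
#   tasks.tasks = tasks.tasks + ["Example task"]   # reassignment triggers _save()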


class Data(MutableMapping):
    def __init__(self):
        if not os.path.exists(data_dir_path):
            os.mkdir(data_dir_path)
        self._cache = {}
        self._hot_keys = []
        self._trunning = False
        self._tevent = Event()
        self._thread = None

        def cleanup():
            self._trunning = False
            self._tevent.set()
            if self._thread:
                self._thread.join()

        atexit.register(cleanup)
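
    # The atexit hook above stops the flush loop and joins the thread; the loop runs
    # _save() once more after being woken, so keys still marked hot are written out
    # before the interpreter exits.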

    def __getitem__(self, key):
        dpath = data_path.format(key)
        if key not in self._cache and os.path.exists(dpath):
            with open(dpath, "r") as f:
                self._cache[key] = json.loads(f.read())
        return self._cache[key]

    def __setitem__(self, key, value):
        self._cache[key] = value
        self._hot_keys.append(key)
        self._schedule_save()

    def _schedule_save(self):
        if self._trunning:
            return
        self._trunning = True
        self._thread = Thread(target=self._executor, daemon=True)
        self._thread.start()

    def _executor(self):
        while self._trunning:
            self._tevent.wait(save_delay)
            self._save()

    def _save(self):
        for key in self._hot_keys:
            print(f"... saving dict {key} ...")
            to_write = self._cache[key]  # apparently thread-safe
            with open(data_path.format(key), "w+") as f:
                f.write(json.dumps(to_write))
        self._hot_keys = []
        self._saving = False  # note: this flag is never read anywhere else in the module

    def __delitem__(self, key):
        raise NotImplementedError

    def __iter__(self):
        raise NotImplementedError

    def __len__(self):
        # TODO use glob?
        raise NotImplementedError

    def __repr__(self):
        return f"{type(self).__name__}({self._cache})"
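
# Note on Data: it is a write-behind cache. __setitem__ only records the key as "hot";
# a daemon thread flushes hot keys to data_<key>.json every `save_delay` seconds, so
# nothing is written synchronously. A sketch of the access pattern used elsewhere in
# this module (values are hypothetical):
#
#   data = Data()
#   month = data.setdefault("2020-02", {})    # setdefault is inherited from MutableMapping
#   month.setdefault("24", []).append("08:00 RXhhbXBsZQ==")
#   data["2020-02"] = month                   # reassigning marks the key dirty for the flush thread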


class Log:
    def __init__(self):
        self._data = Data()

        def cleanup():
            self.log("End")

        atexit.register(cleanup)

    def log(self, task, ptime=None):
        if ptime is None:
            ptime = datetime.now()
        # round to nearest minute
        round_min = timedelta(minutes=round(ptime.second / 60))
        ptime = ptime - timedelta(seconds=ptime.second) + round_min
        # month dance necessary to trigger Data.__setitem__
        month = self._data.setdefault(ptime.strftime("%Y-%m"), {})
        month.setdefault(ptime.strftime("%d"), [])\
            .append(f"{ptime.strftime('%H:%M')} {base64.b64encode(task.encode('utf-8')).decode('utf-8')}")
        self._data[ptime.strftime("%Y-%m")] = month

    def last_log(self, pdate=None):
        if pdate is None:
            pdate = date.today()
        if pdate.strftime("%Y-%m") not in self._data \
                or pdate.strftime("%d") not in self._data[pdate.strftime("%Y-%m")] \
                or len(self._data[pdate.strftime("%Y-%m")][pdate.strftime("%d")]) == 0:
            return None
        last = base64.b64decode(
            self._data[pdate.strftime("%Y-%m")][pdate.strftime("%d")][-1].split()[1].encode("utf-8")).decode("utf-8")
        if last == "End":
            month = self._data[pdate.strftime("%Y-%m")]
            del month[pdate.strftime("%d")][-1]
            self._data[pdate.strftime("%Y-%m")] = month
            last = base64.b64decode(
                self._data[pdate.strftime("%Y-%m")][pdate.strftime("%d")][-1].split()[1].encode("utf-8")).decode("utf-8")
        return last

    def report(self, pdate=None):
        if pdate is None:
            pdate = date.today()
        return Report(self._data, pdate)
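
# Example of one month dict as produced by Log.log() (hypothetical values): the key is the
# day of month, each entry is "HH:MM " plus the base64-encoded task name, and "End" (RW5k)
# is the sentinel written by the atexit hook above.
#
#   {"24": ["08:00 RXhhbXBsZQ==", "17:30 RW5k"]}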


class Report:
    def __init__(self, data, pdate):
        self._data = data
        self._date = pdate

    def report(self):
        tmp = []
        for e in self._data[self._date.strftime("%Y-%m")][self._date.strftime("%d")]:
            tstr, b64str = e.split()
            task = base64.b64decode(b64str.encode("utf-8")).decode("utf-8")
            start_time = datetime.combine(self._date, datetime.strptime(tstr, "%H:%M").time())
            tmp.append((task, start_time))
        if self._date == date.today():
            tmp.append(("End", datetime.now()))

        ret = []
        dsum = timedelta()
        for i, t in enumerate(tmp):
            task, start_time = t
            if i < len(tmp) - 1:
                end_time = tmp[i + 1][1]
                duration = end_time - start_time
                dsum += duration
                dhours, rem = divmod(duration.seconds, 3600)
                dmins, _ = divmod(rem, 60)
                ret.append([task, start_time.strftime("%H:%M"), f"{dhours:02d}:{dmins:02d}"])
            else:
                ret.append([task, start_time.strftime("%H:%M"), ""])

        ret.append(["", "", ""])
        dhours, rem = divmod(dsum.seconds, 3600)
        dmins, _ = divmod(rem, 60)
        ret.append(["Sum", "", f"{dhours:02d}:{dmins:02d}"])
        return ret
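
    # report() returns rows shaped [task, start "HH:MM", duration "HH:MM"], followed by a blank
    # spacer row and a ["Sum", "", "HH:MM"] total row; save() below expects the same shape and
    # strips those trailing rows (plus the synthetic "End" row that report() adds for today).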

    def save(self, report):
        if self._date == date.today():
            report = report[:-3]  # cut off sum display and end time
        else:
            report = report[:-2]  # cut off sum display
        save_list = []
        for tstr, ttime, _ in report:
            b64str = base64.b64encode(tstr.encode("utf-8")).decode("utf-8")
            save_string = f"{ttime} {b64str}"
            save_list.append(save_string)
        # month dance necessary to trigger Data.__setitem__
        month = self._data[self._date.strftime("%Y-%m")]
        if month[self._date.strftime("%d")] == save_list:  # no changes
            return
        month[self._date.strftime("%d")] = save_list
        self._data[self._date.strftime("%Y-%m")] = month

    def prev_next_avail(self):
        prev = (self._date - timedelta(days=1)).strftime("%d") in self._data[self._date.strftime("%Y-%m")]
        _next = (self._date + timedelta(days=1)).strftime("%d") in self._data[self._date.strftime("%Y-%m")]
        return prev, _next

    def previous(self):
        self._date = self._date - timedelta(days=1)

    def next(self):
        self._date = self._date + timedelta(days=1)

    def date(self):
        return self._date.strftime("%Y-%m-%d")
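

# Minimal usage sketch (illustrative, not part of the original module). It assumes PySide2 is
# installed and writes into the real data directory under AppDataLocation, so treat it as a
# quick smoke test rather than application code.
if __name__ == "__main__":
    log = Log()
    log.log("Example task")            # stores "HH:MM <base64('Example task')>" for today
    print("last logged task:", log.last_log())
    for row in log.report().report():  # rows: [task, start "HH:MM", duration "HH:MM"]
        print(row)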