Compare commits

3 Commits: 22d1150801...master

| Author | SHA1 | Date |
|---|---|---|
| | dcafd88b86 | |
| | 3ef24d2bfe | |
| | 468701b220 | |
journal.py (90 changed lines)
@@ -1,6 +1,6 @@
 from copy import deepcopy
 from datetime import datetime, timedelta
-from functools import reduce, partial
+from functools import cache, reduce, partial
 from pathlib import Path
 from shutil import copyfile, rmtree
 from subprocess import run
@@ -23,7 +23,7 @@ def remove_chars(text, chars):
     return ''.join([c for c in text if c not in chars])

 def get_words(text):
-    return remove_chars(text, '.,-:;').lower().split()
+    return remove_chars(text, '.,-:;/').lower().split()

 def nth_or_default(n, l, default):
     return l[n] if n < len(l) else default
@@ -95,6 +95,15 @@ def edit_text(text, suffix=''):
 def prompt(text):
     return input(text + ' [y/n] ') == 'y'

+def find_entries(journal, pred):
+    matches = []
+    for day in journal['days']:
+        for idx, entry in enumerate(journal['days'][day]['entries']):
+            for block in entry['blocks']:
+                if pred(day, entry, block):
+                    matches.append((day, idx, entry['timestamp']))
+    return matches
+
 ### DATE UTILS

 def parse_date(date):
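For context on the new helper, here is a minimal runnable sketch of how find_entries walks the journal structure. The shape of the journal dict ('days' mapping to entries with 'timestamp' and 'blocks') is inferred from the surrounding diff, and the sample data is made up for illustration.

```python
# Hypothetical sample data, shaped like the journal dict this diff implies.
journal = {
    'days': {
        '2021-07-05': {
            'entries': [
                {'timestamp': '08:30', 'blocks': ['morning walk', {'type': 'tag', 'value': 'health'}]},
                {'timestamp': '21:00', 'blocks': [{'type': 'task', 'value': 'backup photos'}]},
            ],
        },
    },
}

# Copied from the diff above: returns (day, index, timestamp) for every
# entry that has at least one block matching the predicate.
def find_entries(journal, pred):
    matches = []
    for day in journal['days']:
        for idx, entry in enumerate(journal['days'][day]['entries']):
            for block in entry['blocks']:
                if pred(day, entry, block):
                    matches.append((day, idx, entry['timestamp']))
    return matches

# Find entries containing a plain-text block that mentions 'walk'.
print(find_entries(journal, lambda day, entry, block: isinstance(block, str) and 'walk' in block))
# -> [('2021-07-05', 0, '08:30')]
```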
@@ -222,6 +231,7 @@ def init_hacky_hackery(journal):
     global _global_do_not_use
     _global_do_not_use = journal

+@cache
 def get_foods_file():
     return parse_foods_file(_global_do_not_use['files']['foods'])

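The @cache decorator (functools.cache, available since Python 3.9) memoizes the zero-argument call, so the foods file is parsed at most once per process after the global has been set. A small self-contained sketch of the same pattern, where parse_foods_file and the sample journal are stand-ins rather than the real implementations in journal.py:

```python
from functools import cache

_global_do_not_use = None  # set once at startup, as init_hacky_hackery does

def parse_foods_file(text):
    # Stand-in parser for illustration: one food name per line.
    print('parsing...')
    return text.splitlines()

def init_hacky_hackery(journal):
    global _global_do_not_use
    _global_do_not_use = journal

@cache
def get_foods_file():
    # First call parses and stores the result; later calls return the cached value.
    return parse_foods_file(_global_do_not_use['files']['foods'])

init_hacky_hackery({'files': {'foods': 'apple\noats'}})
get_foods_file()   # prints 'parsing...'
get_foods_file()   # cached: no second parse
```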
@@ -717,7 +727,7 @@ def parse_day(text):
     return {
         'title': title,
         'header': parse_header(header),
-        'entries': [parse_entry(timestamp, content) for timestamp, content in entries],
+        'entries': [parse_entry(timestamp, content) for timestamp, content in entries]
     }

 def generate_day(day):
@@ -752,7 +762,7 @@ def import_journal(fpath):
         },
         'files': {
             fname: (fpath / fname).read_text()
-            for fname in ['habits', 'godword', 'tasks', 'foods']
+            for fname in ['habits', 'godword', 'tasks', 'foods', 'backup']
         }
     }

@@ -954,9 +964,10 @@ def handle_backup(args):
     if prompt('Delete backup archive?'):
         archive_path.unlink()

-def handle_search(args):
-    query = args[0]
+def edit_entries(entries):
+    pass

+def parse_search_query(query):
     parts = query.split(',')

     strings = []
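Judging from the context lines here and in the next hunk, the new parse_search_query splits a comma-separated query into plain-text terms and '#'-prefixed tags. A rough reconstruction follows; the lines between 'strings = []' and 'tags.append(...)' are outside the diff context and are assumed here.

```python
def parse_search_query(query):
    parts = query.split(',')

    strings = []
    tags = []           # assumed; not visible in the diff context
    for part in parts:  # assumed; not visible in the diff context
        if part.startswith('#'):
            tags.append(part.removeprefix('#'))
        else:
            strings.append(part)

    return strings, tags

print(parse_search_query('coffee,#health,run'))
# -> (['coffee', 'run'], ['health'])
```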
@@ -967,43 +978,60 @@ def handle_search(args):
             tags.append(part.removeprefix('#'))
         else:
             strings.append(part)

-    journal = load_journal()
-
-    matches = {}
+    return strings, tags

-    for day in journal['days']:
-        for i, entry in enumerate(journal['days'][day]['entries']):
-            for block in entry['blocks']:
-                if isinstance(block, str):
-                    words = get_words(block)
-                    if any(s in words for s in strings):
-                        matches[entry['timestamp']] = (day, i)
-                        break
-                elif block['type'] == 'tag':
-                    if any(t in block['value'] for t in tags):
-                        matches[entry['timestamp']] = (day, i)
-                        break
+def edit_entries_by_predicate(journal, predicate, reversed=False):
+    matches = find_entries(journal, predicate)

-    result = ''
-    result += f'Num matches: {len(matches)}\n'
-    result += '---\n'
+    header = f'Number of matches: {len(matches)}'

-    for day, idx in matches.values():
+    text = header
+
+    for day, idx, ts in matches:
         entry = journal['days'][day]['entries'][idx]
-        result += generate_entry(entry)
+        text += generate_entry(entry)

-    text = edit_text(result)
+    text = edit_text(text)

     _, *tmp = ENTRY_RE.split(text)
     entries = [parse_entry(ts, c) for ts, c in list(zip(tmp[::2], tmp[1::2]))]

-    for entry in entries:
-        day, idx = matches[entry['timestamp']]
+    matches_map = {ts: (day, idx) for day, idx, ts in matches}
+
+    for entry in entries:
+        day, idx = matches_map[entry['timestamp']]
         journal['days'][day]['entries'][idx] = entry

-    save_journal(journal)
+    return journal

+def handle_search(args):
+    strings, tags = parse_search_query(args[0])
+
+    def predicate(day, entry, block):
+        if isinstance(block, str):
+            words = get_words(block)
+            if any(s in words for s in strings):
+                return True
+        elif block['type'] == 'tag':
+            if any(t in block['value'] for t in tags):
+                return True
+
+    save_journal(edit_entries_by_predicate(load_journal(), predicate))
+
+def handle_tasks(args):
+    def predicate(day, entry, block):
+        if not isinstance(block, str) and block['type'] == 'task':
+            is_done = any(b['type'] == 'done' for b in entry['blocks'] if not isinstance(b, str))
+            return not is_done
+
+    save_journal(edit_entries_by_predicate(load_journal(), predicate))
+
+def handle_profile(args):
+    import cProfile
+
+    cProfile.run("export_journal(load_journal(), Path(mkdtemp()))", sort='cumtime')
+

 ### MAIN

@@ -1025,6 +1053,8 @@ def main():
         'summary': handle_summary,
         'backup': handle_backup,
         'search': handle_search,
+        'tasks': handle_tasks,
+        'profile': handle_profile,
     }

     handler = command_handlers.get(command, handle_invalid)

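The net effect of the refactor is that a command now mostly consists of a predicate over (day, entry, block) plus a registration line in main(); edit_entries_by_predicate collects the matches with find_entries, opens them in the editor, and writes the parsed entries back. A rough sketch of how a further handler could plug into the same pattern; handle_drafts and the 'draft' block type are invented for illustration, and the snippet is meant to sit inside journal.py next to the handlers above rather than run standalone.

```python
# Hypothetical handler following the same pattern as handle_search/handle_tasks above.
def handle_drafts(args):
    def predicate(day, entry, block):
        # 'draft' is an invented block type; block types actually visible in
        # this diff are plain strings, 'tag', 'task' and 'done'.
        return not isinstance(block, str) and block['type'] == 'draft'

    save_journal(edit_entries_by_predicate(load_journal(), predicate))

# ...registered in main() alongside the others:
#     'drafts': handle_drafts,
```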
migrations/2021-07-06_fix.py (16 changed lines, new file)
@@ -0,0 +1,16 @@
+from copy import deepcopy
+from pathlib import Path
+from shutil import copy
+import json
+
+journal_path = Path.home() / '.journal.json'
+
+copy(str(journal_path), str(journal_path.with_suffix('.bkp')))
+
+journal = json.loads(journal_path.read_text())
+new_journal = deepcopy(journal)
+
+for day in journal['days']:
+    new_journal['days'][day]['entries'] = journal['days'][day]['entries'][0]
+
+journal_path.write_text(json.dumps(new_journal))
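The migration first copies ~/.journal.json to a .bkp file, then replaces each day's 'entries' value with its first element, which suggests the entries lists had ended up wrapped in an extra list and are being un-nested. A toy illustration of that reshaping, with made-up data:

```python
from copy import deepcopy

# Made-up journal showing the kind of double-nesting the migration appears to fix.
journal = {'days': {'2021-07-05': {'entries': [[{'timestamp': '08:30', 'blocks': ['walk']}]]}}}

new_journal = deepcopy(journal)
for day in journal['days']:
    # Keep only the inner list, dropping the extra wrapping list.
    new_journal['days'][day]['entries'] = journal['days'][day]['entries'][0]

print(new_journal['days']['2021-07-05']['entries'])
# -> [{'timestamp': '08:30', 'blocks': ['walk']}]
```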