# forked from karpathy/ulogme -- export_events.py
import time
import datetime
import json
import os
import os.path
import sys
import glob
def loadEvents(fname):
    """
    Read a log file whose lines have the form "<unix_stamp> <arbitrary string>".

    Returns a list of {'t': int_stamp, 's': payload_string} dicts, one per
    line. On any failure (missing file, malformed line) a notice is printed
    and an empty list is returned -- loading is deliberately best-effort.
    """
    events = []
    try:
        # 'with' guarantees the file handle is closed (original leaked it)
        with open(fname, 'r') as f:
            lines = f.read().splitlines()
        for line in lines:
            ix = line.find(' ')  # first space: that's where the stamp ends
            stamp = int(line[:ix])
            # note: avoid shadowing the builtin 'str' (original did)
            events.append({'t': stamp, 's': line[ix+1:]})
    except Exception as e:
        print('%s probably does not exist, setting empty events list.' % (fname, ))
        print('error was:')
        print(e)
        events = []
    return events
def mtime(f):
    """Return the last-modified time of *f* as an int, or 0 if it is not a file."""
    if not os.path.isfile(f):
        return 0
    return int(os.path.getmtime(f))
def updateEvents():
    """
    Walk the logs/*.txt log files and regenerate the per-day
    render/events_<stamp>.json files plus render/export_list.json that the
    frontend reads.

    A day's json is rewritten only when its output file is missing or any of
    that day's source logs is newer than the output (cheap incremental regen).
    Returns nothing; all results are written to the render/ directory.
    """
    L = []
    L.extend(glob.glob("logs/keyfreq_*.txt"))
    L.extend(glob.glob("logs/window_*.txt"))
    L.extend(glob.glob("logs/notes_*.txt"))

    # all log files have form {type}_{stamp}.txt; extract the day stamps
    ts = [int(x[x.find('_')+1:x.find('.txt')]) for x in L]
    if not ts:
        # no logs at all: nothing to export (original crashed on min([]))
        print('no log files found in logs/, nothing to do.')
        return
    ts.sort()
    mint = min(ts)
    maxt = max(ts)

    # make sure the output directory exists (portable; original shelled
    # out to 'mkdir -p', which fails on systems without that command)
    ROOT = ''
    RENDER_ROOT = os.path.join(ROOT, 'render')
    if not os.path.isdir(RENDER_ROOT):
        os.makedirs(RENDER_ROOT)

    # march from the first day to the last, one 24h window at a time
    t = mint
    out_list = []
    while t <= maxt:
        t0 = t
        t1 = t0 + 60*60*24  # 24 hrs later
        fout = 'events_%d.json' % (t0, )
        out_list.append({'t0': t0, 't1': t1, 'fname': fout})
        fwrite = os.path.join(RENDER_ROOT, fout)
        e1f = 'logs/window_%d.txt' % (t0, )
        e2f = 'logs/keyfreq_%d.txt' % (t0, )
        e3f = 'logs/notes_%d.txt' % (t0, )
        e4f = 'logs/blog_%d.txt' % (t0, )

        # regenerate only when output is missing or a source log is newer
        dowrite = True
        if os.path.isfile(fwrite):
            tmod = mtime(fwrite)
            dowrite = any(mtime(ef) > tmod for ef in (e1f, e2f, e3f, e4f))
            if dowrite:
                print('a log file has changed, so will update %s' % (fwrite, ))

        if dowrite:
            e1 = loadEvents(e1f)
            e2 = loadEvents(e2f)
            e3 = loadEvents(e3f)
            for k in e2:
                k['s'] = int(k['s'])  # keyfreq payloads are integer counts
            e4 = ''
            if os.path.isfile(e4f):
                with open(e4f, 'r') as f:
                    e4 = f.read()
            eout = {'window_events': e1, 'keyfreq_events': e2,
                    'notes_events': e3, 'blog': e4}
            # 'with' closes the handle promptly (original leaked it)
            with open(fwrite, 'w') as f:
                f.write(json.dumps(eout))
            print('wrote ' + fwrite)
        t = t1

    fwrite = os.path.join(RENDER_ROOT, 'export_list.json')
    with open(fwrite, 'w') as f:
        f.write(json.dumps(out_list))
    print('wrote ' + fwrite)
# invoked as script: regenerate all render/*.json exports from logs/
if __name__ == '__main__':
    updateEvents()