-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathsyncer.py
444 lines (397 loc) · 17 KB
/
syncer.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
import logging
from aiohttp import BasicAuth
from dateutil import tz
from dateutil.parser import isoparse
from datetime import datetime, date, time, timedelta
import json
import asyncio
def iterateDate(curdate, days):
    """Shift a "%Y-%m-%d" date string by the given number of days.

    Returns the shifted date formatted back as "%Y-%m-%d".
    """
    parsed = datetime.strptime(curdate, "%Y-%m-%d")
    shifted = parsed + timedelta(days=days)
    return shifted.strftime("%Y-%m-%d")
def series_compress(data, duration=60, zero_only=False):
    """Run-length-compress a list of {"t", "d"} datapoints into {"t", "d", "dt"} ranges.

    Consecutive datapoints with the same value `d` (and gaps no larger than
    `duration` seconds) are merged into a single datapoint whose `dt` spans
    the whole run. Each input point at time t is treated as covering the
    interval [t - duration, t].

    Args:
        data: datapoints sorted by "t"; each is {"t": timestamp, "d": value}.
        duration: assumed coverage (seconds) of each raw datapoint.
        zero_only: if True, only runs of value 0 are merged; nonzero values
            are always emitted as individual duration-length points.

    Returns:
        A list of {"t", "d", "dt"} datapoints; [] for empty input.
    """
    # Elevation data has a bunch of 0s... we can simply compress all the 0s
    if not data:
        # Guard: the caller may pass an empty (already-filtered) dataset;
        # without this, data[0] below raises IndexError.
        return []
    dataset = []
    curdp = {"t": data[0]["t"] - duration, "d": data[0]["d"], "dt": duration}
    for dp in data[1:]:
        if (
            (curdp["t"] + curdp["dt"] < dp["t"] - duration)  # gap too large to merge
            or curdp["d"] != dp["d"]  # value changed
            or (curdp["d"] != 0 and zero_only)  # only zeros are compressed in zero_only mode
        ):
            # Flush the current run and start a new one at this datapoint.
            dataset.append(curdp)
            curdp = {"t": dp["t"] - duration, "d": dp["d"], "dt": duration}
            # If the ranges intersect, fix it in the simplest way possible.
            if dataset[-1]["t"] + dataset[-1]["dt"] > curdp["t"]:
                dataset[-1]["dt"] = curdp["t"] - dataset[-1]["t"]
        else:
            # Same run - extend the current datapoint's duration to cover dp.
            curdp["dt"] = dp["t"] - curdp["t"]
    dataset.append(curdp)
    return dataset
def sanity_fix(data):
    """Sort datapoints by timestamp and remove duplicate/overlapping entries.

    Used when a tracker's clock shifted (travel/DST), producing out-of-order
    or overlapping datapoints. Note: sorts `data` in place and may mutate
    the "dt" of individual datapoints.

    Args:
        data: list of datapoints, each {"t": timestamp, "d": value} and
            optionally {"dt": duration}.

    Returns:
        A sorted list where equal timestamps keep only the latest datapoint
        and overlapping durations are truncated; [] for empty input.
    """
    if not data:
        # Guard: avoid IndexError on data[0] for an empty dataset.
        return []
    data.sort(key=lambda d: d["t"])  # Sort by timestamps
    dataset = [data[0]]
    for dp in data[1:]:
        if dp["t"] == dataset[-1]["t"]:
            # Next replace each datapoint with the most recent one of that timestamp
            dataset[-1] = dp
        elif "dt" in dataset[-1] and dataset[-1]["t"] + dataset[-1]["dt"] > dp["t"]:
            # The timestamps interfere with each other - we modify the previous one to have
            # a smaller duration, and then insert the datapoint
            dataset[-1]["dt"] = dp["t"] - dataset[-1]["t"]
            dataset.append(dp)
        else:
            dataset.append(dp)
    return dataset
class Syncer:
    """Synchronizes a user's fitbit data into the app's timeseries.

    One Syncer instance exists per appid (cached in the class-level `active`
    map). `Syncer.sync()` is the entry point; it guarantees at most one
    background sync task runs per app at a time.
    """

    # appid -> Syncer instance, shared across all sync() calls.
    active = {}
    # Guards `active` and each instance's `task` handle.
    alock = asyncio.Lock()
    # Days to back-track behind the last synced date on each sync, since a
    # tracker may upload ~a week of back-data when it finally syncs to fitbit.
    buffer_days = 10

    @staticmethod
    async def sync(session, app, appid):
        """Start a background sync for `appid` unless one is already running.

        Creates and caches the Syncer on first use, then spawns `start()`
        as an asyncio task. Returns immediately.
        """
        await Syncer.alock.acquire()
        if not appid in Syncer.active:
            Syncer.active[appid] = Syncer(session, app, appid)
        cursyncer = Syncer.active[appid]
        if cursyncer.task is not None:
            if not cursyncer.task.done():
                logging.getLogger(f"fitbit:{appid}").debug(
                    "Sync is ongoing - not starting new sync"
                )
                Syncer.alock.release()
                return  # There is currently a sync happening
        # We have a free task!
        cursyncer.task = asyncio.create_task(cursyncer.start())
        Syncer.alock.release()

    def __init__(self, session, app, appid):
        # session: aiohttp client session used for all fitbit API calls.
        # app: project-provided app object (kv store, objects, settings,
        #      notifications) — its exact interface is defined elsewhere.
        self.app = app
        self.session = session
        self.log = logging.getLogger(f"fitbit:{appid}")
        # asyncio.Task of the currently running sync, or None when idle.
        self.task = None

    async def init(self):
        """Load stored OAuth tokens and build the Authorization header."""
        # To start off, we get all the necessary initial data
        self.kv = await self.app.kv()
        self.auth = {
            "Authorization": f"{self.kv['token_type']} {self.kv['access_token']}"
        }

    def sanity_check(self, data):
        """Return `data` unchanged if sorted and non-overlapping, else fix it.

        Overlapping or out-of-order timestamps occur when the tracker's clock
        shifts (travel or DST); such data is repaired via sanity_fix().
        """
        # Makes sure that the data is both sorted and does not have overlapping durations,
        # which happens when the tracker switches time (like when travelling or dst)
        for i in range(1, len(data)):
            if "dt" in data[i - 1] and data[i - 1]["dt"] > 0:
                if data[i - 1]["t"] + data[i - 1]["dt"] > data[i]["t"]:
                    # NOTE(review): Logger.warn is a deprecated alias of warning.
                    self.log.warn(f"SANITY CHECK FAILED {i} {data[i - 1]} {data[i]}")
                    return sanity_fix(data)
            elif data[i - 1]["t"] >= data[i]["t"]:
                self.log.warn(f"SANITY CHECK FAILED {i} {data[i - 1]} {data[i]}")
                return sanity_fix(data)
        return data

    async def get(self, uri):
        """GET `uri` from the fitbit API, handling rate limits and token expiry.

        Retries recursively after sleeping out a 429 rate limit, and after
        refreshing an expired access token. Other error responses fall
        through to response.json() and are returned to the caller as-is.
        """
        self.log.debug(f"Querying: {uri}")
        response = await self.session.get(uri, headers=self.auth)
        # print(response.headers)
        if response.status >= 400:
            if response.status == 429:
                # Add on an extra couple seconds to make sure their end registers the reset
                waitfor = int(response.headers["Retry-After"]) + 10
                self.log.debug(
                    f"Waiting for {waitfor} seconds for fitbit API rate-limit to expire"
                )
                await asyncio.sleep(waitfor)
                return await self.get(uri)
            errdata = json.loads(await response.text())
            self.log.debug(f"Error response: {json.dumps(errdata)}")
            errtype = errdata["errors"][0]["errorType"]
            if errtype == "expired_token":
                await self.refresh_token()
                return await self.get(uri)
        return await response.json()

    async def refresh_token(self):
        """Exchange the stored refresh token for a fresh access token.

        Persists the new token data to the app kv store and re-runs init()
        so self.auth carries the new access token.
        """
        self.log.debug("Refreshing token")
        settings = await self.app.settings
        response = await self.session.post(
            settings["refresh_uri"],
            data={
                "grant_type": "refresh_token",
                "refresh_token": self.kv["refresh_token"],
            },
            auth=BasicAuth(settings["client_id"], settings["client_secret"]),
        )
        resjson = await response.json()
        await self.app.kv.update(**resjson)
        await self.init()  # Need to re-init
        self.log.debug("Access token updated")

    async def prepare(
        self,
        key,
        tags,
        title,
        description,
        schema,
        icon="",
        owner_scope="read",
        resolution="1min",
        transform=lambda x: x,
        ignore_zero=False,
    ):
        """Find or create the timeseries for `key` and compute its sync state.

        Returns a dict ("sync descriptor") holding the series object, the
        date to start querying from (sync_query), and the per-series options
        (resolution, transform, ignore_zero) used by the sync_* methods.

        Requires self.joinDate and self.timezone to be set (done in start()).
        """
        o = await self.app.objects(key=key)
        if len(o) == 0:
            # Series does not exist yet - create it with the given metadata.
            o = [
                await self.app.objects.create(
                    title,
                    description=description,
                    key=key,
                    tags=tags,
                    meta={"schema": schema},
                    icon=icon,
                    owner_scope=owner_scope,
                )
            ]
        series = o[0]
        # First try the sync_query variable
        sync_query = await series.kv["sync_query"]
        if sync_query is None:
            # Never synced: start from the user's fitbit join date.
            sync_query = date.fromisoformat(self.joinDate)
        else:
            # If it is already set up, subtract the buffer days, to get a guess as to where to start sync
            sync_query = date.fromisoformat(sync_query) - timedelta(
                days=self.buffer_days
            )
        # Next, try comparing to the most recent several datapoints in the series, to check how far back we actually need to query
        lastdp = await series[-10:]
        for dp in reversed(lastdp):
            ts_date = datetime.fromtimestamp(dp["t"], tz=self.timezone).date()
            # A (nonzero, if ignore_zero) datapoint at ts_date means everything
            # up to that date was already synced - no need to backtrack further.
            if ts_date > sync_query and (dp["d"] != 0 or not ignore_zero):
                sync_query = ts_date
                break
        return {
            "series": series,
            "sync_query": sync_query,
            "key": key,
            "resolution": resolution,
            "transform": transform,
            "ignore_zero": ignore_zero,
        }

    async def sync_intraday_activities(self, a):
        """Sync one day of an intraday activity series described by `a`.

        `a` is a sync descriptor from prepare(). The intraday API only allows
        one day per request, so each call advances a["sync_query"] by one day.
        """
        # this uses the intraday API endpoint: https://dev.fitbit.com/build/reference/web-api/intraday/
        if datetime.now(tz=self.timezone).date() < a["sync_query"]:
            # Skip if already finished sync
            return
        series = a["series"]
        data = await self.get(
            f"https://api.fitbit.com/1/user/-/activities/{a['key']}/date/{a['sync_query'].isoformat()}/1d/{a['resolution']}.json"
        )
        datakey = f"activities-{a['key']}-intraday"
        if datakey not in data:
            # No intraday payload - skip this series past today in memory only
            # (not persisted), so the next sync retries from the stored date.
            self.log.warn(f"No data for {a['key']}")
            a["sync_query"] = datetime.now(tz=self.timezone).date() + timedelta(days=1)
            return
        dpa = data[datakey]["dataset"]
        # Convert each "HH:MM:SS" sample of the queried day to an epoch
        # timestamp in the user's timezone, then sanity-check and apply the
        # per-series transform (e.g. series_compress).
        formatted = a["transform"](
            self.sanity_check(
                [
                    {
                        "t": datetime.combine(
                            a["sync_query"],
                            time.fromisoformat(dp["time"]),
                            tzinfo=self.timezone,
                        ).timestamp(),
                        "d": dp["value"],
                    }
                    for dp in dpa
                ]
            )
        )
        # Add the data if we're not ignoring zeros
        if len(formatted) > 0:
            if len(formatted) > 1 or formatted[0]["d"] != 0 or not a["ignore_zero"]:
                await series.insert_array(formatted)
        # Persist progress even for empty days, so they are not re-queried.
        await series.kv.update(sync_query=a["sync_query"].isoformat())
        a["sync_query"] = a["sync_query"] + timedelta(days=1)

    async def sync_body_data(self, body_data_config):
        """Sync weight/BMI/body-fat series from the body/log/weight endpoint.

        All configured series come from a single API request per 30-day
        window; the weight series' sync_query drives the query window.
        """
        # we fetch multiple body data series with only a single API request.
        # As a result, for the sync_query, we just use the sync query of the weight
        sync_query = body_data_config["weight"]["sync_query"]
        curdate = datetime.now(tz=self.timezone).date()
        if curdate < sync_query:
            # Skip if already finished sync
            return
        query_end = sync_query + timedelta(days=30)  # Query by 30 days
        if curdate < query_end:
            query_end = curdate  # ... but don't go past today
        api_response = await self.get(
            f"https://api.fitbit.com/1/user/-/body/log/weight/date/{sync_query.isoformat()}/{query_end.isoformat()}.json"
        )
        for data_key, a in body_data_config.items():
            series = a["series"]
            formatted_values = []
            for s in api_response["weight"]:
                datetime_string = f"{s['date']}T{s['time']}"
                if data_key not in s:
                    # e.g. "fat" is absent unless the scale reported body fat.
                    self.log.info(f"{datetime_string}: Skipping {data_key} because it's not in the response")
                    continue
                timestamp = isoparse(datetime_string).replace(tzinfo=self.timezone).timestamp()
                formatted_values.append({
                    "t": timestamp,
                    "d": s[data_key],
                })
            self.log.debug(f"{data_key}: {formatted_values}")
            await series.insert_array(self.sanity_check(formatted_values))
            # Advance and persist each series' sync position past this window.
            new_sync_query = query_end + timedelta(days=1)
            a["sync_query"] = new_sync_query
            await a["series"].kv.update(sync_query=new_sync_query.isoformat())

    async def sync_sleep(self, a):
        """Sync sleep-stage data in 10-day windows (not an intraday endpoint)."""
        curdate = datetime.now(tz=self.timezone).date()
        if curdate < a["sync_query"]:
            # Skip if already finished sync
            return
        series = a["series"]
        sync_query = a["sync_query"]
        query_end = sync_query + timedelta(days=10)  # Query by 10 days
        if curdate < query_end:
            query_end = curdate  # ... but don't go past today
        data = await self.get(
            f"https://api.fitbit.com/1.2/user/-/sleep/date/{sync_query.isoformat()}/{query_end.isoformat()}.json"
        )
        for s in data["sleep"]:
            # Each sleep log carries its stage levels as timestamped
            # (dateTime, level, seconds) entries -> {"t", "d", "dt"} points.
            formatted = [
                {
                    "t": isoparse(dp["dateTime"])
                    .replace(tzinfo=self.timezone)
                    .timestamp(),
                    "d": dp["level"],
                    "dt": dp["seconds"],
                }
                for dp in s["levels"]["data"]
            ]
            await series.insert_array(self.sanity_check(formatted))
        await series.kv.update(sync_query=query_end.isoformat())
        a["sync_query"] = query_end + timedelta(days=1)

    async def start(self):
        """Run the full sync loop until all series are caught up to today.

        On success or failure, clears self.task (under the class lock) so a
        future sync() call can start a new task, and updates the app's
        notifications accordingly.
        """
        # It is assumed that self.isrunning was already set to True
        # NOTE(review): the comment above appears stale - this class tracks
        # the running state via self.task, set by sync() before start() runs.
        try:
            self.log.debug("Starting sync")
            await self.init()
            profile = await self.get(
                f"https://api.fitbit.com/1/user/{self.kv['user_id']}/profile.json"
            )
            usr = profile["user"]
            self.log.info(f"Syncing data for {usr['fullName']}")
            self.joinDate = usr["memberSince"]
            self.timezone = tz.gettz(usr["timezone"])
            #
            # There are a couple issues with syncing fitbit data using the API:
            # - There is a rate limit of 150 requests an hour
            # - intraday time series seem to be limited to 1 day per request
            # - If a day does not return data, it is not clear if there is no data there, or if the
            #   user's device did not sync yet.
            #
            # Our syncing approach is to keep a "sync_query" variable in each series holding the date of the
            # most recently queried data. This allows us to keep place in series that have no data.
            # Since fitbit devices can only store detailed data for ~ 1 week before needing to sync, on each sync,
            # the sync_query variable is actually back-tracked a number of days to catch any datapoints that might have come
            # into fitbit recently for the past few days due to a device sync.
            #
            # This would normally be very inefficient, requiring a re-query of an entire week on each sync for each series.
            # Therefore, the most recent data in the timeseries is also used as a reference time - if there exists a datapoint at time t,
            # it is assumed that *all* data has been synced until time t, so we can just start with time t, instead of backtracking a whole week.
            # Start by finding all the timeseries, and initializing their metadata if necessary
            intraday_activities = [
                await self.prepare(
                    "heart",
                    "fitbit heartrate",
                    "Heart Rate",
                    "",
                    {"type": "number"},
                    icon="fas fa-heartbeat",
                    resolution="1sec",
                ),
                await self.prepare(
                    "steps",
                    "fitbit steps",
                    "Steps",
                    "",
                    {"type": "number"},
                    icon="fas fa-shoe-prints",
                    transform=lambda x: series_compress(x, zero_only=True),
                    ignore_zero=True,
                ),
                await self.prepare(
                    "elevation",
                    "fitbit elevation",
                    "Elevation",
                    "",
                    {"type": "number"},
                    icon="fas fa-mountain",
                    transform=lambda x: series_compress(x),
                    ignore_zero=True,
                ),
            ]
            # These are not intraday, so they need to be handled manually
            sleep = await self.prepare(
                "sleep",
                "fitbit sleep",
                "Sleep",
                "",
                {"type": "string"},
                icon="fas fa-bed",
            )
            body_data = {
                "weight": await self.prepare(
                    "weight",
                    "fitbit weight",
                    "Weight",
                    "",
                    {"type": "number"},
                    icon="fas fa-weight",
                ),
                "bmi": await self.prepare(
                    "bmi",
                    "fitbit bmi",
                    "BMI",
                    "",
                    {"type": "number"},
                    icon="fas fa-ruler-combined",
                ),
                "fat": await self.prepare(
                    "body_fat",
                    "fitbit bodyfat",
                    "Body Fat",
                    "",
                    {"type": "number"},
                    icon="fas fa-percent",
                )
            }
            curdate = datetime.now(tz=self.timezone).date()
            # Loop until every series' sync_query has advanced past today.
            while (
                any(map(lambda x: curdate >= x["sync_query"], intraday_activities))
                or any(map(lambda x: curdate >= x["sync_query"], body_data.values()))
                or curdate >= sleep["sync_query"]
            ):
                for s in intraday_activities:
                    await self.sync_intraday_activities(s)
                # Handle non-intraday requests
                await self.sync_sleep(sleep)
                await self.sync_body_data(body_data)
                # The current date might have changed during sync
                curdate = datetime.now(tz=self.timezone).date()
            await self.app.notifications.delete("sync")
            await self.app.notifications.delete("err")
            await Syncer.alock.acquire()
            self.task = None
            Syncer.alock.release()
            self.log.debug("Sync finished")
        except Exception as e:
            # Release the task slot first so a later sync() can retry.
            await Syncer.alock.acquire()
            self.task = None
            Syncer.alock.release()
            self.log.exception("Sync failed")
            await self.app.notifications.delete("sync")
            await self.app.notifications.notify(
                "err",
                "Error syncing fitbit data",
                **{
                    "description": f"Sync failed with the following exception:\n```\n{str(e)}\n```\nWill try again later.",
                    "global": True,
                    "type": "error",
                },
            )