From b4ccb908eb7cd9e68194da326a84a87a36846e03 Mon Sep 17 00:00:00 2001 From: Matthew Duggan Date: Wed, 29 Dec 2021 16:11:07 +0900 Subject: [PATCH 1/2] Split parts from Dropbox that are useful for other storage systems --- tapiriik/services/Dropbox/dropbox.py | 262 ++++------------------ tapiriik/services/storage_service_base.py | 257 +++++++++++++++++++++ 2 files changed, 296 insertions(+), 223 deletions(-) create mode 100644 tapiriik/services/storage_service_base.py diff --git a/tapiriik/services/Dropbox/dropbox.py b/tapiriik/services/Dropbox/dropbox.py index 2c53e68e9..17ff9efc1 100644 --- a/tapiriik/services/Dropbox/dropbox.py +++ b/tapiriik/services/Dropbox/dropbox.py @@ -1,66 +1,29 @@ -from datetime import datetime, timedelta -from django.core.urlresolvers import reverse -from tapiriik.database import cachedb -from tapiriik.services.api import APIException, ServiceExceptionScope, UserException, UserExceptionType, APIExcludeActivity, ServiceException -from tapiriik.services.exception_tools import strip_context -from tapiriik.services.gpx import GPXIO -from tapiriik.services.interchange import ActivityType, UploadedActivity -from tapiriik.services.service_base import ServiceAuthenticationType, ServiceBase -from tapiriik.services.tcx import TCXIO from tapiriik.settings import WEB_ROOT, DROPBOX_APP_KEY, DROPBOX_APP_SECRET, DROPBOX_FULL_APP_KEY, DROPBOX_FULL_APP_SECRET -import bson +from tapiriik.services.service_base import ServiceAuthenticationType +from tapiriik.services.storage_service_base import StorageServiceBase +from tapiriik.services.api import APIException, UserException, UserExceptionType +from tapiriik.database import cachedb, redis +from django.core.urlresolvers import reverse +from datetime import timedelta import dropbox import json import logging -import lxml import pickle -import re import requests logger = logging.getLogger(__name__) -class DropboxService(ServiceBase): + +class DropboxService(StorageServiceBase): ID = "dropbox" DisplayName = "Dropbox" DisplayAbbreviation = "DB" AuthenticationType = ServiceAuthenticationType.OAuth AuthenticationNoFrame = True # damn dropbox, spoiling my slick UI Configurable = True - ReceivesStationaryActivities = False - - ActivityTaggingTable = { # earlier items have precedence over - ActivityType.Running: "run(?!tastic)", - ActivityType.MountainBiking: "m(oun)?t(ai)?n\s*bik(e|ing)", - ActivityType.Cycling: "(cycl(e|ing)|bik(e|ing))", - ActivityType.Walking: "walk", - ActivityType.Hiking: "hik(e|ing)", - ActivityType.DownhillSkiing: "(downhill|down(hill)?\s*ski(ing)?)", - ActivityType.CrossCountrySkiing: "(xc|cross.*country)\s*ski(ing)?", - ActivityType.Snowboarding: "snowboard(ing)?", - ActivityType.Skating: "skat(e|ing)?", - ActivityType.Swimming: "swim", - ActivityType.Wheelchair: "wheelchair", - ActivityType.Rowing: "row", - ActivityType.Elliptical: "elliptical", - ActivityType.RollerSkiing: "rollerskiing", - ActivityType.StrengthTraining: "strength( ?training)?", - ActivityType.Gym: "(gym|workout)", - ActivityType.Climbing: "climb(ing)?", - ActivityType.StandUpPaddling: "(sup|stand( |-)/up ?paddl(e|ing))", - ActivityType.Other: "(other|unknown)" - } - ConfigurationDefaults = {"SyncRoot": "/", "UploadUntagged": False, "Format":"tcx", "Filename":"%Y-%m-%d_%H-%M-%S_#NAME_#TYPE"} - SupportsHR = SupportsCadence = True + ConfigurationDefaults = {"SyncRoot": "/", "UploadUntagged": False, "Format": "tcx", "Filename":"%Y-%m-%d_%H-%M-%S_#NAME_#TYPE"} - SupportedActivities = ActivityTaggingTable.keys() - - def _app_credentials(self, full): - 
if full: - return (DROPBOX_FULL_APP_KEY, DROPBOX_FULL_APP_SECRET) - else: - return (DROPBOX_APP_KEY, DROPBOX_APP_SECRET) - - def _getClient(self, serviceRec): + def GetClient(self, serviceRec): from tapiriik.services import Service if "Secret" in serviceRec.Authorization: # Upgrade OAuth v1 token to v2. @@ -84,7 +47,7 @@ def _getClient(self, serviceRec): return dropbox.Dropbox(token) def WebInit(self): - self.UserAuthorizationURL = reverse("oauth_redirect", kwargs={"service": "dropbox"}) + self.UserAuthorizationURL = reverse("oauth_redirect", kwargs={"service": self.ID}) def RequiresConfiguration(self, svcRec): return svcRec.Authorization["Full"] and ("SyncRoot" not in svcRec.Config or not len(svcRec.Config["SyncRoot"])) @@ -107,6 +70,7 @@ def RetrieveAuthorizationToken(self, req, level): uid = int(result.user_id) return (uid, {"Token": result.access_token, "Full": full}) + def RevokeAuthorization(self, serviceRecord): pass # :( @@ -126,65 +90,18 @@ def _raiseDbException(self, e): raise APIException("Dropbox quota error", block=True, user_exception=UserException(UserExceptionType.AccountFull, intervention_required=True)) raise APIException("API failure - %s" % e) - def _tagActivity(self, text): - for act, pattern in self.ActivityTaggingTable.items(): - if re.search(pattern, text, re.IGNORECASE): - return act - return None - - def _getActivity(self, serviceRecord, dbcl, path, base_activity=None): - try: - metadata, file = dbcl.files_download(path) - except dropbox.exceptions.DropboxException as e: - self._raiseDbException(e) - - try: - if path.lower().endswith(".tcx"): - act = TCXIO.Parse(file.content, base_activity) - else: - act = GPXIO.Parse(file.content, base_activity) - except ValueError as e: - raise APIExcludeActivity("Invalid GPX/TCX " + str(e), activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) - except lxml.etree.XMLSyntaxError as e: - raise APIExcludeActivity("LXML parse error " + str(e), activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) - return act, metadata.rev - - def DownloadActivityList(self, svcRec, exhaustive=False): - dbcl = self._getClient(svcRec) - if not svcRec.Authorization["Full"]: - syncRoot = "/" - else: - syncRoot = svcRec.Config["SyncRoot"] + def EnumerateFiles(self, svcRec, dbcl, root, cache): # Dropbox API v2 doesn't like / as root. - if syncRoot == "/": - syncRoot = "" + if root == "/": + root = "" # New Dropbox API prefers path_lower, it would seem. - syncRoot = syncRoot.lower() - - # There used to be a massive affair going on here to cache the folder structure locally. - # Dropbox API 2.0 doesn't support the hashes I need for that. - # Oh well. Throw that data out now. Well, don't load it at all. 
-        cache = cachedb.dropbox_cache.find_one({"ExternalID": svcRec.ExternalID}, {"ExternalID": True, "Activities": True})
-        if cache is None:
-            cache = {"ExternalID": svcRec.ExternalID, "Activities": {}}
+        root = root.lower()

         try:
-            list_result = dbcl.files_list_folder(syncRoot, recursive=True)
+            list_result = dbcl.files_list_folder(root, recursive=True)
         except dropbox.exceptions.DropboxException as e:
             self._raiseDbException(e)

-        def cache_writeback():
-            if "_id" in cache:
-                cachedb.dropbox_cache.save(cache)
-            else:
-                insert_result = cachedb.dropbox_cache.insert(cache)
-                cache["_id"] = insert_result.inserted_id
-
-
-        activities = []
-        exclusions = []
-        discovered_activity_cache_keys = set()
-
         while True:
             for entry in list_result.entries:
                 if not hasattr(entry, "rev"):
@@ -197,143 +114,42 @@ def cache_writeback():
                     continue

                 if svcRec.Authorization["Full"]:
-                    relPath = path.replace(syncRoot, "", 1)
+                    relPath = path.replace(root, "", 1)
                 else:
                     relPath = path.replace("/Apps/tapiriik/", "", 1) # dropbox api is meh api

-                hashedRelPath = self._hash_path(relPath)
-                discovered_activity_cache_keys.add(hashedRelPath)
-                if hashedRelPath in cache["Activities"]:
-                    existing = cache["Activities"][hashedRelPath]
-                else:
-                    existing = None
-
-                if existing and existing["Rev"] == entry.rev:
-                    # don't need entire activity loaded here, just UID
-                    act = UploadedActivity()
-                    act.UID = existing["UID"]
-                    try:
-                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z")
-                    except:
-                        act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y") # Exactly one user has managed to break %z :S
-                    if "EndTime" in existing: # some cached activities may not have this, it is not essential
-                        act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z")
-                else:
-                    logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache"))
-                    # get the full activity
-                    try:
-                        act, rev = self._getActivity(svcRec, dbcl, path)
-                    except APIExcludeActivity as e:
-                        logger.info("Encountered APIExcludeActivity %s" % str(e))
-                        exclusions.append(strip_context(e))
-                        continue
-
-                    try:
-                        act.EnsureTZ()
-                    except:
-                        pass # We tried.
-
-                    act.Laps = []  # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM.
-                    cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
-                    # Incrementally update the cache db.
-                    # Otherwise, if we crash later on in listing
-                    # (due to OOM or similar), we'll never make progress on this account.
-                    cache_writeback()
-                tagRes = self._tagActivity(relPath)
-                act.ServiceData = {"Path": path, "Tagged": tagRes is not None}
-
-                act.Type = tagRes if tagRes is not None else ActivityType.Other
-
-                logger.debug("Activity s/t %s" % act.StartTime)
-
-                activities.append(act)
-
+                yield (path, relPath, path, entry.rev)
             # Perform pagination.
             if list_result.has_more:
                 list_result = dbcl.files_list_folder_continue(list_result.cursor)
             else:
                 break

-        # Drop deleted activities' records from cache.
- all_activity_cache_keys = set(cache["Activities"].keys()) - for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys: - del cache["Activities"][deleted_key] - - cache_writeback() - return activities, exclusions - - def DownloadActivity(self, serviceRecord, activity): - # activity might not be populated at this point, still possible to bail out - if not activity.ServiceData["Tagged"]: - if not (hasattr(serviceRecord, "Config") and "UploadUntagged" in serviceRecord.Config and serviceRecord.Config["UploadUntagged"]): - raise APIExcludeActivity("Activity untagged", permanent=False, activity_id=activity.ServiceData["Path"], user_exception=UserException(UserExceptionType.Untagged)) - - path = activity.ServiceData["Path"] - dbcl = self._getClient(serviceRecord) - activity, rev = self._getActivity(serviceRecord, dbcl, path, base_activity=activity) - - # Dropbox doesn't support stationary activities yet. - if activity.CountTotalWaypoints() <= 1: - raise APIExcludeActivity("Too few waypoints", activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) - - return activity - - def _hash_path(self, path): - import hashlib - # Can't use the raw file path as a dict key in Mongo, since who knows what'll be in it (periods especially) - # Used the activity UID for the longest time, but that causes inefficiency when >1 file represents the same activity - # So, this: - csp = hashlib.new("md5") - csp.update(path.encode('utf-8')) - return csp.hexdigest() - - def _clean_activity_name(self, name): - # https://www.dropbox.com/help/145/en - # Nothing outside BMP is allowed, either, apparently. - return re.sub("[@><:\"|?*]|[^\U00000000-\U0000d7ff\U0000e000-\U0000ffff]", "", re.sub("[/\\\]", "-", name)) - - def _format_file_name(self, format, activity): - name_pattern = re.compile("#NAME", re.IGNORECASE) - type_pattern = re.compile("#TYPE", re.IGNORECASE) - name = activity.StartTime.strftime(format) - name = name_pattern.sub(self._clean_activity_name(activity.Name) if activity.Name and len(activity.Name) > 0 and activity.Name.lower() != activity.Type.lower() else "", name) - name = type_pattern.sub(activity.Type, name) - name = re.sub(r"([\W_])\1+", r"\1", name) # To handle cases where the activity is unnamed - name = re.sub(r"^([\W_])|([\W_])$", "", name) # To deal with trailing-seperator weirdness (repeated seperator handled by prev regexp) - return name - - def UploadActivity(self, serviceRecord, activity): - format = serviceRecord.GetConfiguration()["Format"] - if format == "tcx": - if "tcx" in activity.PrerenderedFormats: - logger.debug("Using prerendered TCX") - data = activity.PrerenderedFormats["tcx"] - else: - data = TCXIO.Dump(activity) - else: - if "gpx" in activity.PrerenderedFormats: - logger.debug("Using prerendered GPX") - data = activity.PrerenderedFormats["gpx"] - else: - data = GPXIO.Dump(activity) - - dbcl = self._getClient(serviceRecord) - fname = self._format_file_name(serviceRecord.GetConfiguration()["Filename"], activity)[:250] + "." 
+ format # DB has a max path component length of 255 chars, and we have to save for the file ext (4) and the leading slash (1)

+    def GetFileContents(self, serviceRecord, dbcl, path, storageid, cache):
+        try:
+            metadata, file = dbcl.files_download(path)
+        except dropbox.exceptions.DropboxException as e:
+            self._raiseDbException(e)

-        if not serviceRecord.Authorization["Full"]:
-            fpath = "/" + fname
-        else:
-            fpath = serviceRecord.Config["SyncRoot"] + "/" + fname
+        return file.content, metadata.rev

+    def PutFileContents(self, serviceRecord, dbcl, path, contents, cache):
         try:
-            metadata = dbcl.files_upload(data.encode("UTF-8"), fpath, mode=dropbox.files.WriteMode.overwrite)
+            metadata = dbcl.files_upload(contents, path, mode=dropbox.files.WriteMode.overwrite)
         except dropbox.exceptions.DropboxException as e:
             self._raiseDbException(e)

-        # Fake this in so we don't immediately redownload the activity next time 'round
-        cache = cachedb.dropbox_cache.find_one({"ExternalID": serviceRecord.ExternalID})
-        cache["Activities"][self._hash_path("/" + fname)] = {"Rev": metadata.rev, "UID": activity.UID, "StartTime": activity.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": activity.EndTime.strftime("%H:%M:%S %d %m %Y %z")}
-        cachedb.dropbox_cache.update({"ExternalID": serviceRecord.ExternalID}, cache) # not upsert, hope the record exists at this time...
-        return fpath

-    def DeleteCachedData(self, serviceRecord):
-        cachedb.dropbox_cache.remove({"ExternalID": serviceRecord.ExternalID})
+        return metadata.rev
+
+    def MoveFile(self, serviceRecord, dbcl, path, destPath, cache):
+        dbcl.files_move(path, destPath)
+
+    def ServiceCacheDB(self):
+        return cachedb.dropbox_cache
+
+    def SyncRoot(self, svcRec):
+        if not svcRec.Authorization["Full"]:
+            syncRoot = "/"
+        else:
+            syncRoot = svcRec.Config["SyncRoot"]
+        return syncRoot
diff --git a/tapiriik/services/storage_service_base.py b/tapiriik/services/storage_service_base.py
new file mode 100644
index 000000000..44a267bea
--- /dev/null
+++ b/tapiriik/services/storage_service_base.py
@@ -0,0 +1,257 @@
+from tapiriik.services.service_base import ServiceBase
+from tapiriik.services.api import UserException, UserExceptionType, APIExcludeActivity
+from tapiriik.services.interchange import ActivityType, UploadedActivity
+from tapiriik.services.exception_tools import strip_context
+from tapiriik.services.gpx import GPXIO
+from tapiriik.services.tcx import TCXIO
+import re
+import lxml
+from datetime import datetime
+import logging
+logger = logging.getLogger(__name__)
+
+class StorageServiceBase(ServiceBase):
+    """
+    A base class for all storage-like services (Dropbox, Google Drive, etc)
+    """
+
+    # Maximum path length that this service will accept. Default is from Dropbox.
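+    # Subclasses can override this if their backend imposes a different limit.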
+    MaxPathLen = 255
+
+    ReceivesStationaryActivities = False
+
+    ActivityTaggingTable = {  # earlier items have precedence over later ones
+        ActivityType.Running: "run(?!tastic)",
+        ActivityType.MountainBiking: "m(oun)?t(ai)?n\s*bik(e|ing)",
+        ActivityType.Cycling: "(cycl(e|ing)|bik(e|ing))",
+        ActivityType.Walking: "walk",
+        ActivityType.Hiking: "hik(e|ing)",
+        ActivityType.DownhillSkiing: "(downhill|down(hill)?\s*ski(ing)?)",
+        ActivityType.CrossCountrySkiing: "(xc|cross.*country)\s*ski(ing)?",
+        ActivityType.Snowboarding: "snowboard(ing)?",
+        ActivityType.Skating: "skat(e|ing)?",
+        ActivityType.Swimming: "swim",
+        ActivityType.Wheelchair: "wheelchair",
+        ActivityType.Rowing: "row",
+        ActivityType.Elliptical: "elliptical",
+        ActivityType.RollerSkiing: "rollerskiing",
+        ActivityType.StrengthTraining: "strength( ?training)?",
+        ActivityType.Gym: "(gym|workout)",
+        ActivityType.Climbing: "climb(ing)?",
+        ActivityType.StandUpPaddling: "(sup|stand( |-)/up ?paddl(e|ing))",
+        ActivityType.Other: "(other|unknown)"
+    }
+
+    SupportsHR = SupportsCadence = True
+
+    SupportedActivities = ActivityTaggingTable.keys()
+
+    def GetClient(self, svcRec):
+        """ Return a client object for the service. Will be passed back in to the various calls below """
+        raise NotImplementedError()
+
+    def GetFileContents(self, svcRec, client, path, storageid, cache):
+        """ Return a tuple of (contents, version_number) for a given path. If this file was just enumerated,
+        storageid will be given (see EnumerateFiles below), otherwise it will be None. """
+        raise NotImplementedError()
+
+    def PutFileContents(self, svcRec, client, path, contents, cache):
+        """ Write the contents to the file and return a version number for the newly written file. """
+        raise NotImplementedError()
+
+    def MoveFile(self, svcRec, client, path, destPath, cache):
+        """ Move/rename the file "path" to "destPath". """
+        raise NotImplementedError()
+
+    def ServiceCacheDB(self):
+        """ Get the cache DB object for this service, eg, cachedb.dropbox_cache """
+        raise NotImplementedError()
+
+    def SyncRoot(self, svcRec):
+        """ Get the root directory on the service that we will be syncing to, eg, "/tapiriik/" """
+        raise NotImplementedError()
+
+    def EnumerateFiles(self, svcRec, client, root, cache):
+        """ List the files available on the remote (applying some filtering,
+        and using the cache as appropriate). Should yield tuples of:
+          (fullPath, relPath, storageid, revision)
+        where storageid is some unique id that can be passed back to GetFileContents above.
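+        Implementations should skip anything that isn't a .gpx/.tcx activity file - for
+        example, a Drive-like backend might yield ("/tapiriik/2021-01-01_Run.tcx",
+        "/2021-01-01_Run.tcx", "some-opaque-file-id", "42").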
+ """ + raise NotImplementedError() + + def _tagActivity(self, text): + for act, pattern in self.ActivityTaggingTable.items(): + if re.search(pattern, text, re.IGNORECASE): + return act + return None + + def _getActivity(self, serviceRecord, client, path, storageid, cache, base_activity=None): + activityData, revision = self.GetFileContents(serviceRecord, client, path, storageid, cache) + + try: + if path.lower().endswith(".tcx"): + act = TCXIO.Parse(activityData, base_activity) + else: + act = GPXIO.Parse(activityData, base_activity) + except ValueError as e: + raise APIExcludeActivity("Invalid GPX/TCX " + str(e), activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) + except lxml.etree.XMLSyntaxError as e: + raise APIExcludeActivity("LXML parse error " + str(e), activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) + return act, revision + + def _getCache(self, svcRec): + cache = self.ServiceCacheDB().find_one({"ExternalID": svcRec.ExternalID}, {"ExternalID": True, "Activities": True}) + if cache is None: + cache = {"ExternalID": svcRec.ExternalID, "Activities": {}} + return cache + + def _storeCache(self, svcRec, cache): + if "_id" in cache: + self.ServiceCacheDB().save(cache) + else: + insert_result = self.ServiceCacheDB().insert(cache) + cache["_id"] = insert_result.inserted_id + + def DownloadActivityList(self, svcRec, exhaustive=False): + client = self.GetClient(svcRec) + + cache = self._getCache(svcRec) + syncRoot = self.SyncRoot(svcRec) + + activities = [] + exclusions = [] + discovered_activity_cache_keys = set() + + for (path, relPath, storageid, revision) in self.EnumerateFiles(svcRec, client, syncRoot, cache): + hashedRelPath = self._hash_path(relPath) + discovered_activity_cache_keys.add(hashedRelPath) + if hashedRelPath in cache["Activities"]: + existing = cache["Activities"][hashedRelPath] + else: + existing = None + + if existing and existing["Rev"] == revision: + # don't need entire activity loaded here, just UID + act = UploadedActivity() + act.UID = existing["UID"] + try: + act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y %z") + except: + act.StartTime = datetime.strptime(existing["StartTime"], "%H:%M:%S %d %m %Y") # Exactly one user has managed to break %z :S + if "EndTime" in existing: # some cached activities may not have this, it is not essential + act.EndTime = datetime.strptime(existing["EndTime"], "%H:%M:%S %d %m %Y %z") + else: + logger.debug("Retrieving %s (%s)" % (path, "outdated meta cache" if existing else "not in meta cache")) + # get the full activity + try: + act, rev = self._getActivity(svcRec, client, path, storageid, cache) + except APIExcludeActivity as e: + logger.info("Encountered APIExcludeActivity %s" % str(e)) + exclusions.append(strip_context(e)) + continue + + try: + act.EnsureTZ() + except: + pass # We tried. + + act.Laps = [] # Yeah, I'll process the activity twice, but at this point CPU time is more plentiful than RAM. + cache["Activities"][hashedRelPath] = {"Rev": rev, "UID": act.UID, "StartTime": act.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": act.EndTime.strftime("%H:%M:%S %d %m %Y %z")} + # Incrementally update the cache db. + # Otherwise, if we crash later on in listing + # (due to OOM or similar), we'll never make progress on this account. 
+ self._storeCache(svcRec, cache) + tagRes = self._tagActivity(relPath) + act.ServiceData = {"Path": path, "Tagged": tagRes is not None} + + act.Type = tagRes if tagRes is not None else ActivityType.Other + + logger.debug("Activity s/t %s" % act.StartTime) + + activities.append(act) + + # Drop deleted activities' records from cache. + all_activity_cache_keys = set(cache["Activities"].keys()) + for deleted_key in all_activity_cache_keys - discovered_activity_cache_keys: + del cache["Activities"][deleted_key] + + self._storeCache(svcRec, cache) + + return activities, exclusions + + def DownloadActivity(self, serviceRecord, activity): + # activity might not be populated at this point, still possible to bail out + if not activity.ServiceData["Tagged"]: + if not (hasattr(serviceRecord, "Config") and "UploadUntagged" in serviceRecord.Config and serviceRecord.Config["UploadUntagged"]): + raise APIExcludeActivity("Activity untagged", permanent=False, activity_id=activity.ServiceData["Path"], user_exception=UserException(UserExceptionType.Untagged)) + + path = activity.ServiceData["Path"] + client = self.GetClient(serviceRecord) + cache = self._getCache(serviceRecord) + activity, rev = self._getActivity(serviceRecord, client, path, None, cache) + self._storeCache(serviceRecord, cache) + + # Storage-based services don't support stationary activities yet. + if activity.CountTotalWaypoints() <= 1: + raise APIExcludeActivity("Too few waypoints", activity_id=path, user_exception=UserException(UserExceptionType.Corrupt)) + + return activity + + def _hash_path(self, path): + import hashlib + # Can't use the raw file path as a dict key in Mongo, since who knows what'll be in it (periods especially) + # Used the activity UID for the longest time, but that causes inefficiency when >1 file represents the same activity + # So, this: + csp = hashlib.new("md5") + csp.update(path.encode("utf-8")) + return csp.hexdigest() + + def _clean_activity_name(self, name): + # https://www.dropbox.com/help/145/en + # Nothing outside BMP is allowed, either, apparently. + return re.sub("[@><:\"|?*]|[^\U00000000-\U0000d7ff\U0000e000-\U0000ffff]", "", re.sub("[/\\\]", "-", name)) + + def _format_file_name(self, format, activity): + name_pattern = re.compile("#NAME", re.IGNORECASE) + type_pattern = re.compile("#TYPE", re.IGNORECASE) + name = activity.StartTime.strftime(format) + name = name_pattern.sub(self._clean_activity_name(activity.Name) if activity.Name and len(activity.Name) > 0 and activity.Name.lower() != activity.Type.lower() else "", name) + name = type_pattern.sub(activity.Type, name) + name = re.sub(r"([\W_])\1+", r"\1", name) # To handle cases where the activity is unnamed + name = re.sub(r"^([\W_])|([\W_])$", "", name) # To deal with trailing-seperator weirdness (repeated seperator handled by prev regexp) + return name + + def UploadActivity(self, serviceRecord, activity): + format = serviceRecord.GetConfiguration()["Format"] + if format == "tcx": + if "tcx" in activity.PrerenderedFormats: + logger.debug("Using prerendered TCX") + data = activity.PrerenderedFormats["tcx"] + else: + data = TCXIO.Dump(activity) + else: + if "gpx" in activity.PrerenderedFormats: + logger.debug("Using prerendered GPX") + data = activity.PrerenderedFormats["gpx"] + else: + data = GPXIO.Dump(activity) + + fname = self._format_file_name(serviceRecord.GetConfiguration()["Filename"], activity)[:self.MaxPathLen-5] + "." 
+ format # max path length, and we have to save for the file ext (4) and the leading slash (1) + + client = self.GetClient(serviceRecord) + + syncRoot = self.SyncRoot(serviceRecord) + if not syncRoot.endswith("/"): + syncRoot += "/" + fpath = syncRoot + fname + + cache = self._getCache(serviceRecord) + revision = self.PutFileContents(serviceRecord, client, fpath, data.encode("UTF-8"), cache) + + # Fake this in so we don't immediately redownload the activity next time 'round + cache["Activities"][self._hash_path("/" + fname)] = {"Rev": revision, "UID": activity.UID, "StartTime": activity.StartTime.strftime("%H:%M:%S %d %m %Y %z"), "EndTime": activity.EndTime.strftime("%H:%M:%S %d %m %Y %z")} + self._storeCache(serviceRecord, cache) + return fpath + + def DeleteCachedData(self, serviceRecord): + self.ServiceCacheDB().remove({"ExternalID": serviceRecord.ExternalID}) From d5099925628fd43ae0d041b77c89391012c966ce Mon Sep 17 00:00:00 2001 From: Matthew Duggan Date: Wed, 29 Dec 2021 17:14:02 +0900 Subject: [PATCH 2/2] Add support for Google Drive storage --- requirements.txt | 2 + tapiriik/local_settings.py.example | 3 + tapiriik/services/GoogleDrive/__init__.py | 1 + tapiriik/services/GoogleDrive/googledrive.py | 340 ++++++++++++++++++ tapiriik/services/__init__.py | 2 + tapiriik/services/service.py | 2 + .../web/static/img/services/googledrive.png | Bin 0 -> 4748 bytes .../web/static/img/services/googledrive_l.png | Bin 0 -> 9329 bytes tapiriik/web/views/privacy.py | 1 + 9 files changed, 351 insertions(+) create mode 100644 tapiriik/services/GoogleDrive/__init__.py create mode 100644 tapiriik/services/GoogleDrive/googledrive.py create mode 100644 tapiriik/web/static/img/services/googledrive.png create mode 100644 tapiriik/web/static/img/services/googledrive_l.png diff --git a/requirements.txt b/requirements.txt index 3423ded57..7803ea04d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,3 +13,5 @@ redis==2.10.6 django-ipware smashrun-client>=0.6.0 beautifulsoup4 +google-api-python-client +oauth2client diff --git a/tapiriik/local_settings.py.example b/tapiriik/local_settings.py.example index 28f4c337b..e220028c4 100644 --- a/tapiriik/local_settings.py.example +++ b/tapiriik/local_settings.py.example @@ -55,6 +55,9 @@ SMASHRUN_CLIENT_SECRET = "####" SPORTTRACKS_CLIENT_ID = "####" SPORTTRACKS_CLIENT_SECRET = "####" +GOOGLEDRIVE_CLIENT_ID = "####" +GOOGLEDRIVE_CLIENT_SECRET = "####" + STRAVA_CLIENT_SECRET = "####" STRAVA_CLIENT_ID = "####" STRAVA_RATE_LIMITS = [] diff --git a/tapiriik/services/GoogleDrive/__init__.py b/tapiriik/services/GoogleDrive/__init__.py new file mode 100644 index 000000000..414816609 --- /dev/null +++ b/tapiriik/services/GoogleDrive/__init__.py @@ -0,0 +1 @@ +from .googledrive import * diff --git a/tapiriik/services/GoogleDrive/googledrive.py b/tapiriik/services/GoogleDrive/googledrive.py new file mode 100644 index 000000000..209f57054 --- /dev/null +++ b/tapiriik/services/GoogleDrive/googledrive.py @@ -0,0 +1,340 @@ +from tapiriik.settings import WEB_ROOT, GOOGLEDRIVE_CLIENT_ID, GOOGLEDRIVE_CLIENT_SECRET +from tapiriik.services.service_base import ServiceAuthenticationType +from tapiriik.services.storage_service_base import StorageServiceBase +from tapiriik.services.service_record import ServiceRecord +from tapiriik.services.api import APIException, UserException, UserExceptionType, APIExcludeActivity, ServiceException +from tapiriik.database import cachedb, redis +from googleapiclient.discovery import build +from googleapiclient.http import MediaInMemoryUpload 
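+# MediaInMemoryUpload lets us upload the rendered GPX/TCX straight from memory, without temp files.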
+from googleapiclient import errors
+from oauth2client.client import OAuth2WebServerFlow, OAuth2Credentials
+from django.core.urlresolvers import reverse
+import logging
+import httplib2
+import requests
+import json
+
+logger = logging.getLogger(__name__)
+
+GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke'
+
+# Full scope needed so that we can read files that user adds by hand
+_OAUTH_SCOPE = "https://www.googleapis.com/auth/drive"
+
+# Mimetypes to use when uploading, keyed by extension
+_MIMETYPES = {
+    "gpx": "application/gpx+xml",
+    "tcx": "application/vnd.garmin.tcx+xml"
+}
+
+# Mimetype given to folders on google drive.
+_FOLDER_MIMETYPE = "application/vnd.google-apps.folder"
+
+def _basename(path):
+    return path.split("/")[-1]
+
+class GoogleDriveService(StorageServiceBase):
+    ID = "googledrive"
+    DisplayName = "Google Drive"
+    DisplayAbbreviation = "GD"
+    AuthenticationType = ServiceAuthenticationType.OAuth
+    Configurable = True
+    ReceivesStationaryActivities = False
+    AuthenticationNoFrame = True
+    ConfigurationDefaults = {"SyncRoot": "/", "UploadUntagged": False, "Format": "tcx", "Filename": "%Y-%m-%d_#NAME_#TYPE"}
+
+    def _oauthFlow(self):
+        return_url = WEB_ROOT + reverse("oauth_return", kwargs={"service": self.ID})
+        flow = OAuth2WebServerFlow(GOOGLEDRIVE_CLIENT_ID, GOOGLEDRIVE_CLIENT_SECRET, _OAUTH_SCOPE,
+                                   redirect_uri=return_url, access_type='offline')
+        return flow
+
+    def GetClient(self, serviceRec):
+        credentials = OAuth2Credentials.from_json(serviceRec.Authorization["Credentials"])
+        http = httplib2.Http()
+        if credentials.access_token_expired:
+            logger.debug("Refreshing Google Drive credentials")
+            credentials.refresh(http)
+            serviceRec.Authorization["Credentials"] = credentials.to_json()
+            # Note: refreshed token doesn't get persisted, but will stick
+            # around in the serviceRec for the duration of a sync.
+            # TODO: Should use a SessionCache - tokens last 60 mins by default
+        http = credentials.authorize(http)
+        drive_service = build("drive", "v2", http=http)
+        return drive_service
+
+    def WebInit(self):
+        self.UserAuthorizationURL = WEB_ROOT + reverse("oauth_redirect", kwargs={"service": self.ID})
+
+    def GenerateUserAuthorizationURL(self, session, level=None):
+        flow = self._oauthFlow()
+        return flow.step1_get_authorize_url()
+
+    def _getUserId(self, svcRec):
+        client = self.GetClient(svcRec)
+        try:
+            about = client.about().get().execute()
+            # TODO: Is this a good user ID to use? Could also use email..
+            return about["rootFolderId"]
+        except errors.HttpError as error:
+            raise APIException("Google drive error fetching user ID - %s" % error)
+
+    def RetrieveAuthorizationToken(self, req, level):
+        flow = self._oauthFlow()
+        code = req.GET["code"]
+        credentials = flow.step2_exchange(code)
+        cred_json = credentials.to_json()
+
+        uid = self._getUserId(ServiceRecord({"Authorization": {"Credentials": cred_json}}))
+        return (uid, {"Credentials": cred_json})
+
+    def RevokeAuthorization(self, serviceRec):
+        credentials = OAuth2Credentials.from_json(serviceRec.Authorization["Credentials"])
+        # should this just be calling credentials.revoke()?
+        resp = requests.post(GOOGLE_REVOKE_URI, data={"token": credentials.access_token})
+        if resp.status_code == 400:
+            try:
+                result = json.loads(resp.text)
+                if result.get("error") == "invalid_token":
+                    logger.debug("Google drive said token %s invalid when we tried to revoke it, oh well.." % credentials.access_token)
+                    # Token wasn't valid anyway, we're good
+                    return
+            except ValueError:
+                pass
+            raise APIException("Error revoking Google Drive auth token, status " + str(resp.status_code) + " resp " + resp.text)
+        elif resp.status_code != 200:
+            raise APIException("Unable to revoke Google Drive auth token, status " + str(resp.status_code) + " resp " + resp.text)
+
+    def _idCache(self, cache):
+        if "FileIDs" not in cache:
+            cache["FileIDs"] = []
+        return cache["FileIDs"]
+
+    def _getFileId(self, client, path, cache):
+        """ Get the file id for the given path. Returns (None, None) if the path does not exist.
+        Also returns the cache hits used in determining the id, in case they turn out to be wrong.
+        """
+        id_cache = self._idCache(cache)
+
+        if path == "":
+            path = "/"
+
+        assert(path.startswith("/"))
+        if path.endswith("/"):
+            path = path[:-1]
+        currentid = "root"
+        parts = path.split("/")
+        offset = 1
+        cachehits = set()
+
+        while offset < len(parts):
+            existingRecord = [x for x in id_cache if (x["Parent"] == currentid and x["Name"] == parts[offset])]
+            if len(existingRecord):
+                existingRecord = existingRecord[0]
+                currentid = existingRecord["ID"]
+                cachehits.add(currentid)
+            else:
+                try:
+                    params = {"q": "title = '%s'" % parts[offset].replace("'", "\\'"), "fields": "items/id"}
+                    children = client.children().list(folderId=currentid, **params).execute()
+                except errors.HttpError as error:
+                    raise APIException("Error listing Google Drive contents - %s" % str(error))
+
+                if not len(children.get("items", [])):
+                    if cachehits:
+                        # The cache may have led us astray - clear hits and try again
+                        self._removeCachedIds(cachehits, cache)
+                        return self._getFileId(client, path, cache)
+                    else:
+                        return None, None
+                childid = children["items"][0]["id"]
+                id_cache.append({"ID": childid, "Parent": currentid, "Name": parts[offset]})
+                currentid = childid
+            offset += 1
+        return currentid, cachehits
+
+    def _removeCachedIds(self, fileids, cache):
+        id_cache = self._idCache(cache)
+        id_cache[:] = (x for x in id_cache if x["ID"] not in fileids)
+
+    def _getFile(self, client, path, storageid, cache):
+        if storageid:
+            file_id = storageid
+            cachehits = None
+        else:
+            file_id, cachehits = self._getFileId(client, path, cache)
+            logger.debug("Resolved path %s to file id %s" % (path, file_id))
+        if not file_id:
+            return None  # File not found.
+
+        try:
+            file = client.files().get(fileId=file_id).execute()
+        except errors.HttpError as error:
+            if error.resp.status == 404 and cachehits:
+                logger.debug("Google drive cache %s invalid - 404" % file_id)
+                # remove cache entries and try again
+                self._removeCachedIds(cachehits, cache)
+                return self._getFile(client, path, storageid, cache)
+            raise APIException("Error %d fetching Google Drive file URL - %s" % (error.resp.status, str(error)))
+
+        if file.get("title") != _basename(path):
+            if not cachehits:
+                # shouldn't happen?
+                raise APIException("Error fetching Google Drive file - name didn't match")
+
+            # Cached file ID now has different name - invalidate and try again
+            logger.debug("Google drive cache %s invalid - name no longer matches" % file_id)
+            self._removeCachedIds(cachehits, cache)
+            return self._getFile(client, path, storageid, cache)
+
+        return file
+
+    def GetFileContents(self, svcRec, client, path, storageid, cache):
+        """ Return a tuple of (contents, version_number) for a given path.
+        """
+        import hashlib
+
+        file = self._getFile(client, path, storageid, cache)
+        if file is None or file.get("downloadUrl") is None:
+            # File not found or has no contents
+            return None, 0
+
+        resp, content = client._http.request(file.get("downloadUrl"))
+        if resp.status != 200:
+            raise APIException("Google drive download error - status %d" % resp.status)
+
+        md5sum = file.get("md5Checksum")
+        if md5sum:
+            csp = hashlib.new("md5")
+            csp.update(content)
+            contentmd5 = csp.hexdigest()
+            if contentmd5.lower() != md5sum.lower():
+                raise APIException("Google drive download error - md5 mismatch %s vs %s" % (md5sum, contentmd5))
+        return content, file["version"]
+
+    def PutFileContents(self, svcRec, client, path, contents, cache):
+        """ Write the contents to the file and return a version number for the newly written file. """
+        fname = _basename(path)
+        parent = path[:-(len(fname)+1)]
+        logger.debug("Google Drive putting file contents for %s %s" % (parent, fname))
+        parent_id, cachehits = self._getFileId(client, parent, cache)
+
+        if parent_id is None:
+            # First make a directory. Only make one level up.
+            dirname = _basename(parent)
+            top_parent = parent[:-(len(dirname)+1)]
+            logger.debug("Google Drive creating parent - '%s' '%s'" % (top_parent, dirname))
+            top_parent_id, topcachehits = self._getFileId(client, top_parent, cache)
+            if top_parent_id is None:
+                raise APIException("Parent of directory for %s does not exist, giving up" % (path,))
+
+            body = {"title": dirname, "mimeType": _FOLDER_MIMETYPE, "parents": [{"id": top_parent_id}]}
+
+            try:
+                parent_obj = client.files().insert(body=body).execute()
+            except errors.HttpError as error:
+                if error.resp.status == 404 and topcachehits:
+                    logger.debug("Google drive cache %s invalid - 404" % top_parent_id)
+                    self._removeCachedIds(topcachehits.union(cachehits or set()), cache)  # remove cache entries and try again
+                    return self.PutFileContents(svcRec, client, path, contents, cache)
+                raise APIException("Google drive error creating folder - %s" % error)
+
+            parent_id = parent_obj["id"]
+
+        extn = fname.split(".")[-1].lower()
+        if extn not in _MIMETYPES:
+            # Shouldn't happen?
+            raise APIException("Google drive upload only supports file types %s" % (_MIMETYPES.keys(),))
+
+        media_body = MediaInMemoryUpload(contents, mimetype=_MIMETYPES[extn], resumable=True)
+        # TODO: Maybe description should ideally be Activity.Notes?
+        body = {"title": fname, "description": "Uploaded by Tapiriik", "mimeType": _MIMETYPES[extn], "parents": [{"id": parent_id}]}
+
+        try:
+            file = client.files().insert(body=body, media_body=media_body).execute()
+            return file["version"]
+        except errors.HttpError as error:
+            if error.resp.status == 404 and cachehits:
+                logger.debug("Google drive cache %s invalid - 404" % parent_id)
+                self._removeCachedIds(cachehits, cache)  # remove cache entries and try again
+                return self.PutFileContents(svcRec, client, path, contents, cache)
+            raise APIException("Google drive upload error - %s" % error)
+
+    def MoveFile(self, svcRec, client, path, destPath, cache):
+        """ Move/rename the file "path" to "destPath". """
+        fname1 = _basename(path)
+        fname2 = _basename(destPath)
+        if path[:-len(fname1)] != destPath[:-len(fname2)]:
+            # Currently only support renaming files in the same dir, otherwise
+            # we have to twiddle parents which is hard..
+            raise NotImplementedError()
+
+        try:
+            file = self._getFile(client, path, None, cache)
+            if file is None:
+                raise APIException("Error renaming file: %s not found" % path)
+            file["title"] = fname2
+            client.files().update(fileId=file["id"], body=file, newRevision=False).execute()
+        except errors.HttpError as error:
+            raise APIException("Error renaming file: %s" % error)
+
+    def ServiceCacheDB(self):
+        return cachedb.googledrive_cache
+
+    def SyncRoot(self, svcRec):
+        # TODO: Make this configurable
+        return "/tapiriik"
+
+    def EnumerateFiles(self, svcRec, client, root, cache):
+        root_id, cachehits = self._getFileId(client, root, cache)
+        if root_id is None:
+            # Root does not exist.. that's ok, just no files to list.
+            return
+
+        idcache = self._idCache(cache)
+        for (path, fileid, version) in self._folderRecurse(svcRec, client, root_id, root, idcache):
+            # relPath is computed against the sync root so files in nested folders tag correctly.
+            yield (path, path.replace(root, "", 1), fileid, version)
+
+    def _folderRecurse(self, svcRec, client, parent_id, parent_path, id_cache):
+        assert(not parent_path.endswith("/"))
+        page_token = None
+        while True:
+            try:
+                param = {"maxResults": 1000, "q": "trashed = false and '%s' in parents" % parent_id, "fields": "items(id,version,parents(id,isRoot,kind),title,md5Checksum,mimeType),kind,nextLink,nextPageToken"}
+                if page_token:
+                    param["pageToken"] = page_token
+                children = client.files().list(**param).execute()
+
+                for child in children.get("items", []):
+                    ctitle = child["title"]
+                    cid = child["id"]
+                    cpath = parent_path + "/" + ctitle
+                    is_folder = child.get("mimeType") == _FOLDER_MIMETYPE
+                    is_supported_file = any([ctitle.lower().endswith("." + x) for x in _MIMETYPES.keys()])
+
+                    if not is_folder and not is_supported_file:
+                        continue
+
+                    cache_entry = {"ID": cid, "Parent": parent_id, "Name": ctitle}
+                    if cache_entry not in id_cache:
+                        if any([x["ID"] == cid for x in id_cache]):
+                            # Cached different name or parent info for this ID, maybe moved
+                            logger.debug("ID %s seems to have changed name, updating cache" % cid)
+                            id_cache[:] = (x for x in id_cache if x["ID"] != cid)
+                        if any([x["Parent"] == parent_id and x["Name"] == ctitle for x in id_cache]):
+                            logger.debug("%s/%s seems to have changed id, updating cache" % (parent_id, ctitle))
+                            # Cached different info for this parent/name
+                            id_cache[:] = (x for x in id_cache if not (x["Parent"] == parent_id and x["Name"] == ctitle))
+                        id_cache.append(cache_entry)
+
+                    if is_folder:
+                        yield from self._folderRecurse(svcRec, client, cid, cpath, id_cache)
+                    elif is_supported_file:
+                        yield (cpath, cid, child["version"])
+
+                page_token = children.get("nextPageToken")
+                if not page_token:
+                    break
+            except errors.HttpError as error:
+                raise APIException("Error listing files in Google Drive - %s" % error)
diff --git a/tapiriik/services/__init__.py b/tapiriik/services/__init__.py
index c4bd65421..204822f61 100644
--- a/tapiriik/services/__init__.py
+++ b/tapiriik/services/__init__.py
@@ -8,6 +8,8 @@
 Endomondo = EndomondoService()
 from tapiriik.services.Dropbox import DropboxService
 Dropbox = DropboxService()
+from tapiriik.services.GoogleDrive import GoogleDriveService
+GoogleDrive = GoogleDriveService()
 from tapiriik.services.GarminConnect import GarminConnectService
 GarminConnect = GarminConnectService()
 from tapiriik.services.SportTracks import SportTracksService
diff --git a/tapiriik/services/service.py b/tapiriik/services/service.py
index 93af9e8d2..3da36ceb1 100644
--- a/tapiriik/services/service.py
+++ b/tapiriik/services/service.py
@@ -34,6 +34,7 @@ def List():
         Endomondo,
         SportTracks,
         Dropbox,
+        GoogleDrive,
         TrainingPeaks,
         RideWithGPS,
         TrainAsONE,
@@ -62,6 +63,7 @@ def PreferredDownloadPriorityList():
         SportTracks,  # Pretty much equivalent to GC, no temperature (not that GC temperature works all thar well now, but I digress)
         TrainingPeaks,  # No seperate run cadence, but has temperature
         Dropbox,  # Equivalent to any of the above
+        GoogleDrive,
         RideWithGPS,  # Uses TCX for everything, so same as Dropbox
         TrainAsONE,
         VeloHero,  # PWX export, no temperature
diff --git a/tapiriik/web/static/img/services/googledrive.png b/tapiriik/web/static/img/services/googledrive.png
new file mode 100644
index 0000000000000000000000000000000000000000..13cdda50a214d18a7df6eeec75c6c7de60febd9d
Binary files /dev/null and b/tapiriik/web/static/img/services/googledrive.png differ
diff --git a/tapiriik/web/static/img/services/googledrive_l.png b/tapiriik/web/static/img/services/googledrive_l.png
new file mode 100644
index 0000000000000000000000000000000000000000..5cb8e809735e5eeb4970bc2a75b5cc6f1b2423f2
Binary files /dev/null and b/tapiriik/web/static/img/services/googledrive_l.png differ