From eb0cd85c2f104e15a22929386d62219cc2067a93 Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Sat, 16 Jan 2016 11:05:31 -0800
Subject: [PATCH 01/41] Ignore default output directory, vim temp files.

---
 .gitignore | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 5dc43a8..30b04d5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
-
+[0-9\-]*_garmin_connect_export
+.*.swp
 .DS_Store
 extras/

From 187d3d18a8e63e9fe6bac157c9da5022e111676b Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Sat, 31 Dec 2016 11:36:47 -0800
Subject: [PATCH 02/41] Handy Makefile

---
 Makefile | 9 +++++++++
 1 file changed, 9 insertions(+)
 create mode 100644 Makefile

diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..e46b830
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,9 @@
+COUNT := 4
+.PHONY: help
+help:
+	@echo Usage:
+	@echo make go COUNT=\
+
+.PHONY: go
+go:
+	./gcexport.py --username aaronferrucci --count $(COUNT) --format original --unzip

From 4037bb03db96eee4ab12e0c99c8189656c109280 Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Sat, 31 Dec 2016 11:37:10 -0800
Subject: [PATCH 03/41] Print url before downloading.

---
 gcexport.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/gcexport.py b/gcexport.py
index 2c6fec7..c64a7fa 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -211,6 +211,8 @@ def http_req(url, post=None, headers={}):
 		# should pick up where it left off.
 		print '\tDownloading file...',

+		print "\tusing url: '" + download_url + "'"
+
 		try:
 			data = http_req(download_url)
 		except urllib2.HTTPError as e:

From 4b687b8cf705fc5f251471faf68d039c4d1394eb Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Thu, 23 Nov 2017 09:15:55 -0800
Subject: [PATCH 04/41] Fix typo

---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index e46b830..c9563b0 100644
--- a/Makefile
+++ b/Makefile
@@ -2,7 +2,7 @@ COUNT := 4
 .PHONY: help
 help:
 	@echo Usage:
-	@echo make go COUNT=\
+	@echo make go COUNT=\

 .PHONY: go
 go:

From 11abe1c5479ee7edb8ef3a7e005a5c2d90f77575 Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Mon, 4 Dec 2017 19:49:41 -0800
Subject: [PATCH 05/41] Add vimdiff target for easy merging.

---
 Makefile | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/Makefile b/Makefile
index c9563b0..9d90aa7 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,4 @@
+SHELL := /bin/bash
 COUNT := 4
 .PHONY: help
 help:
@@ -7,3 +8,15 @@ help:
 .PHONY: go
 go:
 	./gcexport.py --username aaronferrucci --count $(COUNT) --format original --unzip
+
+NUM_ACTIVITIES = $(shell find . -name activities.csv | wc -l)
+.PHONY: count_activities_csv
+count_activities_csv:
+	@if [ $(NUM_ACTIVITIES) -ne 1 ] ; then \
+		echo "Too many activities.csv files found ($(NUM_ACTIVITIES))"; \
+		false; \
+	fi
+
+.PHONY: vimdiff
+vimdiff: count_activities_csv
+	vimdiff $(shell find . -name activities.csv) ../garmin_running/activities.csv

From f017d904794a7217f5616011177e5d914311650e Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Sun, 31 Dec 2017 17:20:04 -0800
Subject: [PATCH 06/41] Don't end csv records with comma

---
 gcexport.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/gcexport.py b/gcexport.py
index c64a7fa..4c14276 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -282,6 +282,10 @@ def http_req(url, post=None, headers={}):
 		csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",'
 		csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['withUnit'].replace('"', '""') + '",'
 		csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['value'].replace('"', '""') + '"'
+
+		# remove any trailing commas - R read.csv doesn't like them.
+		csv_record = csv_record.rstrip(',')
+
 		csv_record += '\n'

 		csv_file.write(csv_record.encode('utf8'))

From ae6f985c9989f6dd5bce391642c4067035ec712f Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Tue, 2 Jan 2018 21:24:35 -0800
Subject: [PATCH 07/41] Swap order for vimdiff, so the to-be-modified file has
 the selection.

---
 Makefile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Makefile b/Makefile
index 9d90aa7..33aafb3 100644
--- a/Makefile
+++ b/Makefile
@@ -19,4 +19,4 @@ count_activities_csv:

 .PHONY: vimdiff
 vimdiff: count_activities_csv
-	vimdiff $(shell find . -name activities.csv) ../garmin_running/activities.csv
+	vimdiff ../garmin_running/activities.csv $(shell find . -name activities.csv)

From 0588a645020f139d16eed68cb144887c2fcacd7f Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Mon, 26 Mar 2018 20:35:59 -0700
Subject: [PATCH 08/41] Now writing per-activity json files.

---
 gcexport.py | 45 +++++++++++++++++++++++++++++++++++++--------
 1 file changed, 37 insertions(+), 8 deletions(-)

diff --git a/gcexport.py b/gcexport.py
index 4c14276..3afe999 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -90,10 +90,11 @@ def http_req(url, post=None, headers={}):
 # URLs for various services.
 url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false'
 url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?'
-url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.0/json/activities?'
+url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?'
 url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/'
 url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/'
 url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/'
+url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/'

 # Initially, we need to get a valid session cookie, so we pull the login page.
 http_req(url_gc_login)
@@ -152,27 +153,55 @@ def http_req(url, post=None, headers={}):
 	search_params = {'start': total_downloaded, 'limit': num_to_download}

 	# Query Garmin Connect
-	result = http_req(url_gc_search + urlencode(search_params))
+	query_url = url_gc_search + urlencode(search_params)
+	print "### query_url:"
+	print query_url
+	print "###"
+	result = http_req(query_url)
 	json_results = json.loads(result)  # TODO: Catch possible exceptions here.
-
-	search = json_results['results']['search']
+	# search = json_results['results']['search']

 	if download_all:
 		# Modify total_to_download based on how many activities the server reports.
-		total_to_download = int(search['totalFound'])
+		# total_to_download = int(search['totalFound'])
+		total_to_download = int(json_results['results']['totalFound'])
 		# Do it only once.
 		download_all = False

 	# Pull out just the list of activities.
 	activities = json_results['results']['activities']

+	print "### json_results:"
+	print json_results
+	print "###"
+
 	# Process each activity.
 	for a in activities:
 		# Display which entry we're working on.
-		print 'Garmin Connect activity: [' + a['activity']['activityId'] + ']',
-		print a['activity']['activityName']['value']
-		print '\t' + a['activity']['beginTimestamp']['display'] + ',',
+
+		# backwards compatibility hack: activityId used to be a string,
+		# now is an int.
+		a['activity']['activityId'] = str(a['activity']['activityId'])
+		activityId = a['activity']['activityId']
+
+		print 'Garmin Connect activity: [' + activityId + ']',
+		print a['activity']['activityName']
+		modern_activity_url = url_gc_modern_activity + activityId
+		print "url: " + modern_activity_url
+
+		activity_filename = args.directory + '/' + activityId + '.json'
+		print "filename: " + activity_filename
+		result = http_req(modern_activity_url)
+		json_results = json.loads(result)
+
+		save_file = open(activity_filename, 'w')
+		save_file.write(str(json_results))
+		save_file.close()
+
+		continue
+
+		# print '\t' + a['activity']['beginTimestamp']['display'] + ',',
 		if 'sumElapsedDuration' in a['activity']:
 			print a['activity']['sumElapsedDuration']['display'] + ',',
 		else:
 			print '??:??:??,',
 		if 'sumDistance' in a['activity']:
 			print a['activity']['sumDistance']['withUnit']
 		else:
 			print '0.00 Miles'

From e312cdcb356920de8a93df127b155b8892b88719 Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Tue, 27 Mar 2018 21:17:05 -0700
Subject: [PATCH 09/41] Some progress on extracting data from the new format.

---
 gcexport.py | 83 ++++++++---------------------------------------------
 1 file changed, 12 insertions(+), 71 deletions(-)

diff --git a/gcexport.py b/gcexport.py
index 3afe999..708440c 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -193,87 +193,28 @@ def http_req(url, post=None, headers={}):
 		activity_filename = args.directory + '/' + activityId + '.json'
 		print "filename: " + activity_filename
 		result = http_req(modern_activity_url)
-		json_results = json.loads(result)
+		results = json.loads(result)

 		save_file = open(activity_filename, 'w')
-		save_file.write(str(json_results))
+		save_file.write(str(results))
 		save_file.close()

-		continue
+		# Write stats to CSV.
+		empty_record = '"",'
-
-		# print '\t' + a['activity']['beginTimestamp']['display'] + ',',
-		if 'sumElapsedDuration' in a['activity']:
-			print a['activity']['sumElapsedDuration']['display'] + ',',
-		else:
-			print '??:??:??,',
-		if 'sumDistance' in a['activity']:
-			print a['activity']['sumDistance']['withUnit']
-		else:
-			print '0.00 Miles'
+		csv_record = ''
-		if args.format == 'gpx':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.gpx'
-			download_url = url_gc_gpx_activity + a['activity']['activityId'] + '?full=true'
-			file_mode = 'w'
-		elif args.format == 'tcx':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.tcx'
-			download_url = url_gc_tcx_activity + a['activity']['activityId'] + '?full=true'
-			file_mode = 'w'
-		elif args.format == 'original':
-			data_filename = args.directory + '/activity_' + a['activity']['activityId'] + '.zip'
-			fit_filename = args.directory + '/' + a['activity']['activityId'] + '.fit'
-			download_url = url_gc_original_activity + a['activity']['activityId']
-			file_mode = 'wb'
-		else:
-			raise Exception('Unrecognized format.')
-
-		if isfile(data_filename):
-			print '\tData file already exists; skipping...'
-			continue
-		if args.format == 'original' and isfile(fit_filename):  # Regardless of unzip setting, don't redownload if the ZIP or FIT file exists.
-			print '\tFIT data file already exists; skipping...'
-			continue
-
-		# Download the data file from Garmin Connect.
-		# If the download fails (e.g., due to timeout), this script will die, but nothing
-		# will have been written to disk about this activity, so just running it again
-		# should pick up where it left off.
-		print '\tDownloading file...',
-
-		print "\tusing url: '" + download_url + "'"
-
-		try:
-			data = http_req(download_url)
-		except urllib2.HTTPError as e:
-			# Handle expected (though unfortunate) error codes; die on unexpected ones.
-			if e.code == 500 and args.format == 'tcx':
-				# Garmin will give an internal server error (HTTP 500) when downloading TCX files if the original was a manual GPX upload.
-				# Writing an empty file prevents this file from being redownloaded, similar to the way GPX files are saved even when there are no tracks.
-				# One could be generated here, but that's a bit much. Use the GPX format if you want actual data in every file,
-				# as I believe Garmin provides a GPX file for every activity.
-				print 'Writing empty file since Garmin did not generate a TCX file for this activity...',
-				data = ''
-			elif e.code == 404 and args.format == 'original':
-				# For manual activities (i.e., entered in online without a file upload), there is no original file.
-				# Write an empty file to prevent redownloading it.
-				print 'Writing empty file since there was no original activity data...',
-				data = ''
-			else:
-				raise Exception('Failed. Got an unexpected HTTP error (' + str(e.code) + ').')
+		csv_record += empty_record if 'activityId' not in results else '"' + str(results['activityId']).replace('"', '""') + '",'
-		save_file = open(data_filename, file_mode)
-		save_file.write(data)
-		save_file.close()
+		csv_record += empty_record if 'activityName' not in results else '"' + results['activityName'].replace('"', '""') + '",'
-		# Write stats to CSV.
-		empty_record = '"",'
+		csv_record += empty_record if 'activityDescription' not in results else '"' + results['activityDescription'].replace('"', '""') + '",'
-		csv_record = ''
+		csv_record += empty_record if 'startTimeLocal' not in results['summaryDTO'] else '"' + results['summaryDTO']['startTimeLocal'].replace('"', '""') + '",'
+
+		print "data: " + csv_record
+		continue

-		csv_record += empty_record if 'activityId' not in a['activity'] else '"' + a['activity']['activityId'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityName' not in a['activity'] else '"' + a['activity']['activityName']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'activityDescription' not in a['activity'] else '"' + a['activity']['activityDescription']['value'].replace('"', '""') + '",'
-		csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['display'].replace('"', '""') + '",'
 		csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",'
 		csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['display'].replace('"', '""') + '",'
 		csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['millis'].replace('"', '""') + '",'

From b7f196995b0731b418d142d1d9336a1b18bf4814 Mon Sep 17 00:00:00 2001
From: Aaron Ferrucci
Date: Wed, 28 Mar 2018 21:25:30 -0700
Subject: [PATCH 10/41] Some progress on refactoring dict lookup.

---
 gcexport.py | 62 +++++++++++++++++++++++++++++++++++++----------------
 1 file changed, 43 insertions(+), 19 deletions(-)

diff --git a/gcexport.py b/gcexport.py
index 708440c..b54b320 100755
--- a/gcexport.py
+++ b/gcexport.py
@@ -59,6 +59,18 @@
 cookie_jar = cookielib.CookieJar()
 opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar))

+def csvFormat(value):
+	csv_record = '"' + str(value).replace('"', '""') + '",'
+	return csv_record
+
+def dictFind(data, keys):
+	try:
+		for key in keys:
+			data = data[key]
+	except KeyError:
+		return ""
+	return data
+
 # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers.
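The two helpers added in this patch are self-contained, so they can be tried outside the script. A minimal sketch, assuming a made-up activity dict (the field names below are illustrative, not Garmin's actual schema):

	# Standalone sketch of the helpers added above, with assumed sample data.
	def csvFormat(value):
		# Quote a value for CSV, doubling embedded double quotes.
		return '"' + str(value).replace('"', '""') + '",'

	def dictFind(data, keys):
		# Walk a nested dict; return "" if any key along the path is missing.
		try:
			for key in keys:
				data = data[key]
		except KeyError:
			return ""
		return data

	sample = {'summaryDTO': {'distance': 8046.7, 'calories': 500}}
	print csvFormat(dictFind(sample, ['summaryDTO', 'distance', ]))  # "8046.7",
	print csvFormat(dictFind(sample, ['summaryDTO', 'maxSpeed', ]))  # "",  (missing key)

A missing key therefore produces the same '"",' placeholder the script uses as its empty_record, which is what keeps the CSV column layout stable.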
def http_req(url, post=None, headers={}): request = urllib2.Request(url) @@ -203,33 +215,45 @@ def http_req(url, post=None, headers={}): empty_record = '"",' csv_record = '' + # csv_record += '"' + activityId.replace('"', '""') + '",' + csv_record += csvFormat(activityId) + csv_record += csvFormat(dictFind(results, ['activityName', ])) + csv_record += csvFormat(dictFind(results, ['activityDescription', ])) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) + + # beginTimestamp (ms) + csv_record += empty_record + + # endTimestamp (display) + csv_record += empty_record + + # endTimestamp (ms) + csv_record += empty_record + + device = dictFind(a, ['activity', 'device', 'display', ]) + deviceVer = dictFind(a, ['activity', 'device', 'version', ]) + csv_record += csvFormat(device + deviceVer) - csv_record += empty_record if 'activityId' not in results else '"' + str(results['activityId']).replace('"', '""') + '",' + csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'parent', 'display' ])) + csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'display' ])) - csv_record += empty_record if 'activityName' not in results else '"' + results['activityName'].replace('"', '""') + '",' + csv_record += csvFormat(dictFind(a, ['activity', 'eventType', 'display' ])) - csv_record += empty_record if 'activityDescription' not in results else '"' + results['activityDescription'].replace('"', '""') + '",' + csv_record += csvFormat(dictFind(a, ['activity', 'activityTimeZone', 'display' ])) - csv_record += empty_record if 'startTimeLocal' not in results['summaryDTO'] else '"' + results['summaryDTO']['startTimeLocal'].replace('"', '""') + '",' + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) + # csv_record += empty_record if 'maxElevation' not in results['summaryDTO'] else '"' + str(results['summaryDTO']['maxElevation']).replace('"', '""') + '",' + # Max elevation (raw) + csv_record += empty_record + + for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: + csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) print "data: " + csv_record continue - csv_record += empty_record if 'beginTimestamp' not in a['activity'] else '"' + a['activity']['beginTimestamp']['millis'].replace('"', '""') + '",' - csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'endTimestamp' not in a['activity'] else '"' + a['activity']['endTimestamp']['millis'].replace('"', '""') + '",' - csv_record += empty_record if 'device' not in a['activity'] else '"' + a['activity']['device']['display'].replace('"', '""') + ' ' + a['activity']['device']['version'].replace('"', '""') + '",' - csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['parent']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'activityType' not in a['activity'] else '"' + a['activity']['activityType']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'eventType' not in a['activity'] else '"' + a['activity']['eventType']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'activityTimeZone' not in a['activity'] else '"' + a['activity']['activityTimeZone']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + 
'",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'beginLatitude' not in a['activity'] else '"' + a['activity']['beginLatitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'beginLongitude' not in a['activity'] else '"' + a['activity']['beginLongitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'endLatitude' not in a['activity'] else '"' + a['activity']['endLatitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'endLongitude' not in a['activity'] else '"' + a['activity']['endLongitude']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" + csv_record += empty_record if 'WeightedMeanMovingPace' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['weightedMeanMovingPace']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" + csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",' csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",' csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",' From 26964ccd6c2c1a6144551755fb5d18fc5a23ab78 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Thu, 29 Mar 2018 20:35:05 -0700 Subject: [PATCH 11/41] Completed field fill-in, not necessarily correctly. 
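The fill-in pattern this patch works toward: columns that still exist in the modern summaryDTO are pulled with dictFind, and columns the new endpoint no longer supplies are padded with an empty quoted field so the CSV header stays aligned. A rough sketch, assuming the csvFormat/dictFind helpers from PATCH 10 and a decoded results dict (keys and sample values are illustrative):

	# Sketch of the column fill-in pattern (illustrative keys only).
	results = {'summaryDTO': {'maxSpeed': 3.2, 'calories': 500}}  # decoded activity JSON (sample)
	empty_record = '"",'

	csv_record = ''
	csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ]))      # present -> "3.2",
	csv_record += empty_record                                                    # column with no modern equivalent
	csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minHeartRate', ]))  # absent -> "",
	csv_record = csv_record.rstrip(',') + '\n'  # PATCH 06: no trailing comma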
--- gcexport.py | 130 +++++++++++++++++++++++++++++++--------------------- 1 file changed, 77 insertions(+), 53 deletions(-) diff --git a/gcexport.py b/gcexport.py index b54b320..103f50e 100755 --- a/gcexport.py +++ b/gcexport.py @@ -218,7 +218,7 @@ def http_req(url, post=None, headers={}): # csv_record += '"' + activityId.replace('"', '""') + '",' csv_record += csvFormat(activityId) csv_record += csvFormat(dictFind(results, ['activityName', ])) - csv_record += csvFormat(dictFind(results, ['activityDescription', ])) + csv_record += csvFormat(dictFind(results, ['description', ])) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) # beginTimestamp (ms) @@ -242,72 +242,96 @@ def http_req(url, post=None, headers={}): csv_record += csvFormat(dictFind(a, ['activity', 'activityTimeZone', 'display' ])) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) - # csv_record += empty_record if 'maxElevation' not in results['summaryDTO'] else '"' + str(results['summaryDTO']['maxElevation']).replace('"', '""') + '",' # Max elevation (raw) csv_record += empty_record + # {start, end} X {latitude, longitude} for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) - print "data: " + csv_record - continue - - csv_record += empty_record if 'WeightedMeanMovingPace' not in a['activity']['activitySummary'] else '"' + a['activity']['activitySummary']['weightedMeanMovingPace']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - - csv_record += empty_record if 'weightedMeanMovingSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanMovingSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'maxHeartRate' not in a['activity'] else '"' + a['activity']['maxHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanHeartRate' not in a['activity'] else '"' + a['activity']['weightedMeanHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['display'].replace('"', '""') + '",' # The units vary between Minutes per Mile and mph, but withUnit always displays "Minutes per Mile" - csv_record += empty_record if 'maxSpeed' not in a['activity'] else '"' + a['activity']['maxSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumEnergy' not in a['activity'] else '"' + a['activity']['sumEnergy']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumElapsedDuration' not in a['activity'] else '"' + a['activity']['sumElapsedDuration']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'sumMovingDuration' not in a['activity'] else '"' + a['activity']['sumMovingDuration']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['withUnit'].replace('"', '""') + 
'",' - csv_record += empty_record if 'weightedMeanSpeed' not in a['activity'] else '"' + a['activity']['weightedMeanSpeed']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'sumDistance' not in a['activity'] else '"' + a['activity']['sumDistance']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'minHeartRate' not in a['activity'] else '"' + a['activity']['minHeartRate']['display'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'maxElevation' not in a['activity'] else '"' + a['activity']['maxElevation']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'gainElevation' not in a['activity'] else '"' + a['activity']['gainElevation']['value'].replace('"', '""') + '",' - csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['withUnit'].replace('"', '""') + '",' - csv_record += empty_record if 'lossElevation' not in a['activity'] else '"' + a['activity']['lossElevation']['value'].replace('"', '""') + '"' + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) + + # weightedMeanMovingSpeed.display + csv_record += empty_record + + # maxHeartRate + csv_record += empty_record + # weightedMeanHeartRate + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) + # maxSpeed.value + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) + # sumEnergy.value + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) + # sumElapsedDuration.value + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) + # sumMovingDuration.value + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) + # averageSpeed.value + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) + # distance.value + csv_record += empty_record + + # minHeartRate + csv_record += empty_record + + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) + # maxElevation.value + csv_record += empty_record + + # gainElevation + csv_record += empty_record + # gainElevation.value + csv_record += empty_record + # lossElevation + csv_record += empty_record + # lossElevation.value + csv_record += empty_record # remove any trailing commas - R read.csv doesn't like them. csv_record = csv_record.rstrip(',') csv_record += '\n' + print "data: " + csv_record + csv_file.write(csv_record.encode('utf8')) - if args.format == 'gpx': - # Validate GPX data. If we have an activity without GPS data (e.g., running on a treadmill), - # Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data. - # N.B. You can omit the XML parse (and the associated log messages) to speed things up. - gpx = parseString(data) - gpx_data_exists = len(gpx.getElementsByTagName('trkpt')) > 0 - - if gpx_data_exists: - print 'Done. GPX data saved.' 
- else: - print 'Done. No track points found.' - elif args.format == 'original': - if args.unzip and data_filename[-3:].lower() == 'zip': # Even manual upload of a GPX file is zipped, but we'll validate the extension. - print "Unzipping and removing original files...", - zip_file = open(data_filename, 'rb') - z = zipfile.ZipFile(zip_file) - for name in z.namelist(): - z.extract(name, args.directory) - zip_file.close() - remove(data_filename) - print 'Done.' - else: - # TODO: Consider validating other formats. - print 'Done.' + #if args.format == 'gpx': + # # Validate GPX data. If we have an activity without GPS data (e.g., running on a treadmill), + # # Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data. + # # N.B. You can omit the XML parse (and the associated log messages) to speed things up. + # gpx = parseString(data) + # gpx_data_exists = len(gpx.getElementsByTagName('trkpt')) > 0 + + # if gpx_data_exists: + # print 'Done. GPX data saved.' + # else: + # print 'Done. No track points found.' + #elif args.format == 'original': + # if args.unzip and data_filename[-3:].lower() == 'zip': # Even manual upload of a GPX file is zipped, but we'll validate the extension. + # print "Unzipping and removing original files...", + # zip_file = open(data_filename, 'rb') + # z = zipfile.ZipFile(zip_file) + # for name in z.namelist(): + # z.extract(name, args.directory) + # zip_file.close() + # remove(data_filename) + # print 'Done.' + #else: + # # TODO: Consider validating other formats. + # print 'Done.' total_downloaded += num_to_download # End while loop for multiple chunks. From 69e3ab98b1c376b27ac3afff0346938644160168 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sat, 31 Mar 2018 16:24:34 -0700 Subject: [PATCH 12/41] Accomodate new garmin data format. --- gcexport.py | 90 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 53 insertions(+), 37 deletions(-) diff --git a/gcexport.py b/gcexport.py index 103f50e..e54de60 100755 --- a/gcexport.py +++ b/gcexport.py @@ -185,7 +185,7 @@ def http_req(url, post=None, headers={}): activities = json_results['results']['activities'] print "### json_results:" - print json_results + print json.dumps(json_results, indent=4, sort_keys=True) print "###" # Process each activity. @@ -208,95 +208,111 @@ def http_req(url, post=None, headers={}): results = json.loads(result) save_file = open(activity_filename, 'w') - save_file.write(str(results)) + save_file.write(json.dumps(results, indent=4, sort_keys=True)) save_file.close() # Write stats to CSV. 
empty_record = '"",' - csv_record = '' - # csv_record += '"' + activityId.replace('"', '""') + '",' + # Activity ID csv_record += csvFormat(activityId) + # Activity Name csv_record += csvFormat(dictFind(results, ['activityName', ])) + # Description csv_record += csvFormat(dictFind(results, ['description', ])) + # Begin Timestamp csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) - # beginTimestamp (ms) + # Begin Timestamp (Raw Milliseconds) csv_record += empty_record - # endTimestamp (display) + # End Timestamp csv_record += empty_record - # endTimestamp (ms) + # End Timestamp (Raw Milliseconds) csv_record += empty_record device = dictFind(a, ['activity', 'device', 'display', ]) deviceVer = dictFind(a, ['activity', 'device', 'version', ]) - csv_record += csvFormat(device + deviceVer) - + # Device + csv_record += csvFormat(device + ' ' + deviceVer) + # Activity Parent csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'parent', 'display' ])) + # Activity Type csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'display' ])) + # Event Type csv_record += csvFormat(dictFind(a, ['activity', 'eventType', 'display' ])) + # Activity Time Zone + csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) - csv_record += csvFormat(dictFind(a, ['activity', 'activityTimeZone', 'display' ])) - - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) - # Max elevation (raw) + # Max. Elevation csv_record += empty_record + # Max. Elevation (Raw) + # (was in feet previously, now appears to be meters) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) # {start, end} X {latitude, longitude} + # Begin Latitude (Decimal Degrees Raw) + # Begin Longitude (Decimal Degrees Raw) + # End Latitude (Decimal Degrees Raw) + # End Longitude (Decimal Degrees Raw) for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) - - # weightedMeanMovingSpeed.display + # Average Moving Speed csv_record += empty_record - # maxHeartRate + # Average Moving Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) + + # Max. Heart Rate (bpm) csv_record += empty_record - # weightedMeanHeartRate + # Average Heart Rate (bpm) csv_record += empty_record - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) - # maxSpeed.value + # Max. Speed csv_record += empty_record + # Max. 
Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) - # sumEnergy.value + # Calories csv_record += empty_record + # Calories (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) + # Duration (h:m:s) + csv_record += empty_record + # Duration (Raw Seconds) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) - # sumElapsedDuration.value + # Moving Duration (h:m:s) csv_record += empty_record - + # Moving Duration (Raw Seconds), csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) - # sumMovingDuration.value + # Average Speed csv_record += empty_record - + # Average Speed (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) - # averageSpeed.value + # Distance csv_record += empty_record - - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) # distance.value - csv_record += empty_record + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) - # minHeartRate + # Max. Heart Rate (bpm) csv_record += empty_record - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) - # maxElevation.value + # Min. Elevation csv_record += empty_record + # Min. Elevation (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ])) - # gainElevation + # Elevation Gain csv_record += empty_record - # gainElevation.value + # Elevation Gain (Raw) csv_record += empty_record - # lossElevation + # Elevation Loss csv_record += empty_record - # lossElevation.value + # Elevation Loss (Raw) csv_record += empty_record # remove any trailing commas - R read.csv doesn't like them. From f7693031d09115fe1ad24a8da890839672ce97b2 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 2 Apr 2018 07:33:28 -0700 Subject: [PATCH 13/41] Remove unused format, unzip args. Add debug, quiet args. --- Makefile | 2 +- gcexport.py | 76 ++++++++++++++++++++--------------------------------- 2 files changed, 29 insertions(+), 49 deletions(-) diff --git a/Makefile b/Makefile index 33aafb3..3fb0736 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ help: .PHONY: go go: - ./gcexport.py --username aaronferrucci --count $(COUNT) --format original --unzip + ./gcexport.py --username aaronferrucci --count $(COUNT) NUM_ACTIVITIES = $(shell find . -name activities.csv | wc -l) .PHONY: count_activities_csv diff --git a/gcexport.py b/gcexport.py index e54de60..512864d 100755 --- a/gcexport.py +++ b/gcexport.py @@ -25,14 +25,14 @@ import argparse import zipfile -script_version = '1.0.0' +script_version = '1.2.0' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' parser = argparse.ArgumentParser() -# TODO: Implement verbose and/or quiet options. 
-# parser.add_argument('-v', '--verbose', help="increase output verbosity", action="store_true") +parser.add_argument('--quiet', help="stifle all output", action="store_true") +parser.add_argument('--debug', help="lots of console output", action="store_true") parser.add_argument('--version', help="print version and exit", action="store_true") parser.add_argument('--username', help="your Garmin Connect username (otherwise, you will be prompted)", nargs='?') parser.add_argument('--password', help="your Garmin Connect password (otherwise, you will be prompted)", nargs='?') @@ -40,16 +40,9 @@ parser.add_argument('-c', '--count', nargs='?', default="1", help="number of recent activities to download, or 'all' (default: 1)") -parser.add_argument('-f', '--format', nargs='?', choices=['gpx', 'tcx', 'original'], default="gpx", - help="export format; can be 'gpx', 'tcx', or 'original' (default: 'gpx')") - parser.add_argument('-d', '--directory', nargs='?', default=activities_directory, help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") -parser.add_argument('-u', '--unzip', - help="if downloading ZIP files (format: 'original'), unzip the file and removes the ZIP file", - action="store_true") - args = parser.parse_args() if args.version: @@ -87,7 +80,9 @@ def http_req(url, post=None, headers={}): return response.read() -print 'Welcome to Garmin Connect Exporter!' + +if not args.quiet: + print 'Welcome to Garmin Connect Exporter!' # Create directory for data files. if isdir(args.directory): @@ -166,9 +161,10 @@ def http_req(url, post=None, headers={}): search_params = {'start': total_downloaded, 'limit': num_to_download} # Query Garmin Connect query_url = url_gc_search + urlencode(search_params) - print "### query_url:" - print query_url - print "###" + if args.debug: + print "### query_url:" + print query_url + print "###" result = http_req(query_url) json_results = json.loads(result) # TODO: Catch possible exceptions here. @@ -184,9 +180,10 @@ def http_req(url, post=None, headers={}): # Pull out just the list of activities. activities = json_results['results']['activities'] - print "### json_results:" - print json.dumps(json_results, indent=4, sort_keys=True) - print "###" + if args.debug: + print "### json_results:" + print json.dumps(json_results, indent=4, sort_keys=True) + print "###" # Process each activity. for a in activities: @@ -196,14 +193,18 @@ def http_req(url, post=None, headers={}): # now is an int. a['activity']['activityId'] = str(a['activity']['activityId']) activityId = a['activity']['activityId'] - - print 'Garmin Connect activity: [' + activityId + ']', - print a['activity']['activityName'] + + if not args.quiet: + print 'activity: [' + activityId + ']', + print a['activity']['activityName'] modern_activity_url = url_gc_modern_activity + activityId - print "url: " + modern_activity_url + + if args.debug: + print "url: " + modern_activity_url activity_filename = args.directory + '/' + activityId + '.json' - print "filename: " + activity_filename + if args.debug: + print "filename: " + activity_filename result = http_req(modern_activity_url) results = json.loads(result) @@ -320,37 +321,16 @@ def http_req(url, post=None, headers={}): csv_record += '\n' - print "data: " + csv_record + if args.debug: + print "data: " + csv_record csv_file.write(csv_record.encode('utf8')) - #if args.format == 'gpx': - # # Validate GPX data. 
If we have an activity without GPS data (e.g., running on a treadmill), - # # Garmin Connect still kicks out a GPX, but there is only activity information, no GPS data. - # # N.B. You can omit the XML parse (and the associated log messages) to speed things up. - # gpx = parseString(data) - # gpx_data_exists = len(gpx.getElementsByTagName('trkpt')) > 0 - - # if gpx_data_exists: - # print 'Done. GPX data saved.' - # else: - # print 'Done. No track points found.' - #elif args.format == 'original': - # if args.unzip and data_filename[-3:].lower() == 'zip': # Even manual upload of a GPX file is zipped, but we'll validate the extension. - # print "Unzipping and removing original files...", - # zip_file = open(data_filename, 'rb') - # z = zipfile.ZipFile(zip_file) - # for name in z.namelist(): - # z.extract(name, args.directory) - # zip_file.close() - # remove(data_filename) - # print 'Done.' - #else: - # # TODO: Consider validating other formats. - # print 'Done.' total_downloaded += num_to_download # End while loop for multiple chunks. csv_file.close() -print 'Done!' +if not args.quiet: + print 'Done!' + From 3a33cb1b28e7ccc0dda3def63b7b5dc2675f6fe9 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 16 Apr 2018 07:25:01 -0700 Subject: [PATCH 14/41] Update the 'post-auth' url --- gcexport.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/gcexport.py b/gcexport.py index 512864d..d899b13 100755 --- a/gcexport.py +++ b/gcexport.py @@ -66,6 +66,9 @@ def dictFind(data, keys): # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. def http_req(url, post=None, headers={}): + if args.debug: + print "### http_req(" + url + ")" + request = urllib2.Request(url) request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. for header_key, header_value in headers.iteritems(): @@ -96,7 +99,7 @@ def http_req(url, post=None, headers={}): # URLs for various services. url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' -url_gc_post_auth = 'https://connect.garmin.com/post-auth/login?' +url_gc_post_auth = 'https://connect.garmin.com/modern/?' url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/' url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/' @@ -124,7 +127,8 @@ def http_req(url, post=None, headers={}): # Chop of 'TGT-' off the beginning, prepend 'ST-0'. 
login_ticket = 'ST-0' + login_ticket[4:] -http_req(url_gc_post_auth + 'ticket=' + login_ticket) +login_url = url_gc_post_auth + 'ticket=' + login_ticket +http_req(login_url) # We should be logged in now. if not isdir(args.directory): From 5d81ae28a0ef4cab31551dd008d102e3a81a93d7 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 21 May 2018 20:08:31 -0700 Subject: [PATCH 15/41] Hey, gvimdiff. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 3fb0736..22d537f 100644 --- a/Makefile +++ b/Makefile @@ -19,4 +19,4 @@ count_activities_csv: .PHONY: vimdiff vimdiff: count_activities_csv - vimdiff ../garmin_running/activities.csv $(shell find . -name activities.csv) + gvimdiff ../garmin_running/activities.csv $(shell find . -name activities.csv) From 93e7e7f3e9fbdb34f2fec315f965395355973c58 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 26 Aug 2018 21:46:01 -0700 Subject: [PATCH 16/41] Add DEBUG flag --- Makefile | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 22d537f..e3b6455 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,7 @@ SHELL := /bin/bash COUNT := 4 +# DEBUG = --debug +DEBUG = .PHONY: help help: @echo Usage: @@ -7,7 +9,7 @@ help: .PHONY: go go: - ./gcexport.py --username aaronferrucci --count $(COUNT) + ./gcexport.py $(DEBUG) --username aaronferrucci --count $(COUNT) NUM_ACTIVITIES = $(shell find . -name activities.csv | wc -l) .PHONY: count_activities_csv From 1300fd15ea1000cea073f3689e0156c0d9eceb2f Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 26 Aug 2018 21:46:47 -0700 Subject: [PATCH 17/41] Accomodate the latest changes at garmin. Look up activity type, event type, now that they're not in the activity proper. Todo: cleanup --- gcexport.py | 152 ++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 129 insertions(+), 23 deletions(-) diff --git a/gcexport.py b/gcexport.py index d899b13..cfb1101 100755 --- a/gcexport.py +++ b/gcexport.py @@ -25,7 +25,7 @@ import argparse import zipfile -script_version = '1.2.0' +script_version = '1.3.0' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' @@ -100,11 +100,13 @@ def http_req(url, post=None, headers={}): # URLs for various services. url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' url_gc_post_auth = 'https://connect.garmin.com/modern/?' -url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' +# url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' +url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' 
url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/' url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/' url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/' url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/' +devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" # Initially, we need to get a valid session cookie, so we pull the login page. http_req(url_gc_login) @@ -131,6 +133,113 @@ def http_req(url, post=None, headers={}): http_req(login_url) # We should be logged in now. + +# get device info, put in a dict +device_info = {} +devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" +devices = json.loads(http_req(devices_url)) +keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] +for dev in devices: + dev_id = dev['deviceId'] + this_device = {} + for key in keys: + this_device[key] = dictFind(dev, [key, ]) + device_info[dev_id] = this_device + +# backward compatibility hack: prepend ' ', append ".0.0" to firmware version. +for dev_id in device_info: + device_info[dev_id]['currentFirmwareVersion'] = ' ' + device_info[dev_id]['currentFirmwareVersion'] + ".0.0" + +for dev_id in device_info: + print dev_id + for dev_parameter in device_info[dev_id]: + print " " + dev_parameter + ": " + device_info[dev_id][dev_parameter] + +# get activity properties, put in a dict +# This maps cryptic activity typeKeys to display names +# all:: All Activities +# golf:: Golf +# indoor_cycling:: Indoor Cycling +# ... +# street_running:: Street Running +# +# keys appear in activity records, activityType/typeKey + +activity_properties = {} +activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' +activity_properties_req = http_req(activity_properties_url) +print "### activity_properties" +aps = activity_properties_req.splitlines() +for ap in aps: + (key, value) = ap.split('=') + key = key.replace("activity_type_", "") + activity_properties[key] = value +print "###" + +# get activity type info, put in a dict +activity_type_info = {} +activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes" +activity_types = json.loads(http_req(activity_type_url)) +print "### activity_type_info" +keys = ['typeKey', ] +for a_type in activity_types: + type_id = a_type['typeId'] + this_type = {} + for key in keys: + this_type[key] = dictFind(a_type, [key, ]) + # Set type from typeKey + try: + this_type['type'] = activity_properties[this_type['typeKey']] + except: + this_type['type'] = this_type['typeKey'] + activity_type_info[type_id] = this_type + +for a_type in activity_type_info: + print a_type + for activity_parameter in activity_type_info[a_type]: + print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter]) + +print "###" + +event_properties = {} +event_properties_url = 'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0' +event_properties_req = http_req(event_properties_url) +print "### event_properties" +evs = event_properties_req.splitlines() +for ev in evs: + (key, value) = ev.split('=') + event_properties[key] = value +for ev in event_properties: + print "%s: %s" % (ev, event_properties[ev]) +print "###" + +# get event type info, put in a dict + 
+event_type_info = {} +event_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes' +event_types = json.loads(http_req(event_type_url)) +print "### event_type_info" +keys = ['typeKey', ] +for e_type in event_types: + type_id = e_type['typeId'] + this_type = {} + for key in keys: + this_type[key] = dictFind(e_type, [key, ]) + # Set type from typeKey + try: + this_type['type'] = event_properties[this_type['typeKey']] + except KeyError: + this_type['type'] = this_type['typeKey'] + + event_type_info[type_id] = this_type + +for e_type in event_type_info: + print e_type + for event_parameter in event_type_info[e_type]: + print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter]) + +print "###" + if not isdir(args.directory): mkdir(args.directory) @@ -165,15 +274,8 @@ def http_req(url, post=None, headers={}): search_params = {'start': total_downloaded, 'limit': num_to_download} # Query Garmin Connect query_url = url_gc_search + urlencode(search_params) - if args.debug: - print "### query_url:" - print query_url - print "###" result = http_req(query_url) json_results = json.loads(result) # TODO: Catch possible exceptions here. - - # search = json_results['results']['search'] - if download_all: # Modify total_to_download based on how many activities the server reports. # total_to_download = int(search['totalFound']) @@ -181,26 +283,24 @@ def http_req(url, post=None, headers={}): # Do it only once. download_all = False - # Pull out just the list of activities. - activities = json_results['results']['activities'] if args.debug: print "### json_results:" print json.dumps(json_results, indent=4, sort_keys=True) print "###" + # Pull out just the list of activities. + # Only the activityId is used. + # json_results used to be a deep hierarchy, but ... no longer + activities = json_results + # Process each activity. for a in activities: - # Display which entry we're working on. - - # backwards compatibility hack: activityId used to be a string, - # now is an int. 
- a['activity']['activityId'] = str(a['activity']['activityId']) - activityId = a['activity']['activityId'] + activityId = str(a['activityId']) if not args.quiet: print 'activity: [' + activityId + ']', - print a['activity']['activityName'] + print a['activityName'] modern_activity_url = url_gc_modern_activity + activityId if args.debug: @@ -237,17 +337,23 @@ def http_req(url, post=None, headers={}): # End Timestamp (Raw Milliseconds) csv_record += empty_record - device = dictFind(a, ['activity', 'device', 'display', ]) - deviceVer = dictFind(a, ['activity', 'device', 'version', ]) + deviceId = dictFind(a, ['deviceId', ]) + device = dictFind(device_info[deviceId], ['displayName',]) + deviceVer = dictFind(device_info[deviceId], ['currentFirmwareVersion',]) # Device csv_record += csvFormat(device + ' ' + deviceVer) # Activity Parent - csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'parent', 'display' ])) + parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) + print "parentTypeId: %d" % parentTypeId + csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ])) # Activity Type - csv_record += csvFormat(dictFind(a, ['activity', 'activityType', 'display' ])) + typeId = dictFind(a, ['activityType', 'typeId',]) + print "typeId: %d" % typeId + csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ])) # Event Type - csv_record += csvFormat(dictFind(a, ['activity', 'eventType', 'display' ])) + typeId = dictFind(a, ['eventType', 'typeId',]) + csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ])) # Activity Time Zone csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) From 8ce1c4934ccf1a21fda73b32755ac1cbcad87bc9 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 27 Aug 2018 08:14:47 -0700 Subject: [PATCH 18/41] Push device info stuff into a class. --- gcexport.py | 71 +++++++++++++++++++++++++++++++++-------------------- 1 file changed, 45 insertions(+), 26 deletions(-) diff --git a/gcexport.py b/gcexport.py index cfb1101..599cd80 100755 --- a/gcexport.py +++ b/gcexport.py @@ -25,7 +25,47 @@ import argparse import zipfile -script_version = '1.3.0' +class DeviceInfo(): + devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" + keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] + def __init__(self): + self.device_info = {} + devices = json.loads(http_req(self.devices_url)) + for dev in devices: + dev_id = dev['deviceId'] + this_device = {} + for key in self.keys: + this_device[key] = dictFind(dev, [key, ]) + self.device_info[dev_id] = this_device + + # backward compatibility hack: prepend ' ', append ".0.0" + # to firmware version. 
+ for dev_id in self.device_info: + fw = self.device_info[dev_id]['currentFirmwareVersion'] + fw = ' ' + fw + ".0.0" + self.device_info[dev_id]['currentFirmwareVersion'] = fw + + def printit(self): + for dev_id in self.device_info: + print dev_id + for dev_parameter in self.device_info[dev_id]: + print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] + + def displayName(self, deviceId): + try: + device = self.device_info[deviceId]['displayName'] + except KeyError: + device = "" + + try: + version = self.device_info[deviceId]['currentFirmwareVersion'] + except KeyError: + version = "" + + displayName = device + ' ' + version + return displayName + +script_version = '1.3.1' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' @@ -106,7 +146,6 @@ def http_req(url, post=None, headers={}): url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/' url_gc_original_activity = 'http://connect.garmin.com/proxy/download-service/files/activity/' url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/' -devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" # Initially, we need to get a valid session cookie, so we pull the login page. http_req(url_gc_login) @@ -134,26 +173,7 @@ def http_req(url, post=None, headers={}): # We should be logged in now. -# get device info, put in a dict -device_info = {} -devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" -devices = json.loads(http_req(devices_url)) -keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] -for dev in devices: - dev_id = dev['deviceId'] - this_device = {} - for key in keys: - this_device[key] = dictFind(dev, [key, ]) - device_info[dev_id] = this_device - -# backward compatibility hack: prepend ' ', append ".0.0" to firmware version. -for dev_id in device_info: - device_info[dev_id]['currentFirmwareVersion'] = ' ' + device_info[dev_id]['currentFirmwareVersion'] + ".0.0" - -for dev_id in device_info: - print dev_id - for dev_parameter in device_info[dev_id]: - print " " + dev_parameter + ": " + device_info[dev_id][dev_parameter] +deviceInfo = DeviceInfo() # get activity properties, put in a dict # This maps cryptic activity typeKeys to display names @@ -337,11 +357,10 @@ def http_req(url, post=None, headers={}): # End Timestamp (Raw Milliseconds) csv_record += empty_record - deviceId = dictFind(a, ['deviceId', ]) - device = dictFind(device_info[deviceId], ['displayName',]) - deviceVer = dictFind(device_info[deviceId], ['currentFirmwareVersion',]) # Device - csv_record += csvFormat(device + ' ' + deviceVer) + deviceId = dictFind(a, ['deviceId', ]) + csv_record += csvFormat(deviceInfo.displayName(deviceId)) + # Activity Parent parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) print "parentTypeId: %d" % parentTypeId From 86482b2f195c7f31c415f61701c7619eda874a26 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 27 Aug 2018 20:47:30 -0700 Subject: [PATCH 19/41] Remove some unused imports; fix deplorable indents. 
--- gcexport.py | 582 ++++++++++++++++++++++++++-------------------------- 1 file changed, 288 insertions(+), 294 deletions(-) diff --git a/gcexport.py b/gcexport.py index 599cd80..95cda84 100755 --- a/gcexport.py +++ b/gcexport.py @@ -2,14 +2,13 @@ """ File: gcexport.py -Author: Kyle Krafka (https://github.com/kjkjava/) -Date: April 28, 2015 - -Description: Use this script to export your fitness data from Garmin Connect. - See README.md for more information. +Original author: Kyle Krafka (https://github.com/kjkjava/) +Description: Use this script to export your fitness data from Garmin's servers. + See README.md for more information. """ from urllib import urlencode +import urllib2, cookielib, json from datetime import datetime from getpass import getpass from sys import argv @@ -17,55 +16,50 @@ from os.path import isfile from os import mkdir from os import remove -from xml.dom.minidom import parseString - -import urllib2, cookielib, json -from fileinput import filename import argparse -import zipfile class DeviceInfo(): - devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" - keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] - def __init__(self): - self.device_info = {} - devices = json.loads(http_req(self.devices_url)) - for dev in devices: - dev_id = dev['deviceId'] - this_device = {} - for key in self.keys: - this_device[key] = dictFind(dev, [key, ]) - self.device_info[dev_id] = this_device - - # backward compatibility hack: prepend ' ', append ".0.0" - # to firmware version. - for dev_id in self.device_info: - fw = self.device_info[dev_id]['currentFirmwareVersion'] - fw = ' ' + fw + ".0.0" - self.device_info[dev_id]['currentFirmwareVersion'] = fw - - def printit(self): - for dev_id in self.device_info: - print dev_id - for dev_parameter in self.device_info[dev_id]: - print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] - - def displayName(self, deviceId): - try: - device = self.device_info[deviceId]['displayName'] - except KeyError: - device = "" - - try: - version = self.device_info[deviceId]['currentFirmwareVersion'] - except KeyError: - version = "" - - displayName = device + ' ' + version - return displayName - -script_version = '1.3.1' + devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" + keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] + def __init__(self): + self.device_info = {} + devices = json.loads(http_req(self.devices_url)) + for dev in devices: + dev_id = dev['deviceId'] + this_device = {} + for key in self.keys: + this_device[key] = dictFind(dev, [key, ]) + self.device_info[dev_id] = this_device + + # backward compatibility hack: prepend ' ', append ".0.0" + # to firmware version. 
+ for dev_id in self.device_info: + fw = self.device_info[dev_id]['currentFirmwareVersion'] + fw = ' ' + fw + ".0.0" + self.device_info[dev_id]['currentFirmwareVersion'] = fw + + def printit(self): + for dev_id in self.device_info: + print dev_id + for dev_parameter in self.device_info[dev_id]: + print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] + + def displayName(self, deviceId): + try: + device = self.device_info[deviceId]['displayName'] + except KeyError: + device = "" + + try: + version = self.device_info[deviceId]['currentFirmwareVersion'] + except KeyError: + version = "" + + displayName = device + ' ' + version + return displayName + +script_version = '1.3.2' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' @@ -78,58 +72,58 @@ def displayName(self, deviceId): parser.add_argument('--password', help="your Garmin Connect password (otherwise, you will be prompted)", nargs='?') parser.add_argument('-c', '--count', nargs='?', default="1", - help="number of recent activities to download, or 'all' (default: 1)") + help="number of recent activities to download, or 'all' (default: 1)") parser.add_argument('-d', '--directory', nargs='?', default=activities_directory, - help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") + help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") args = parser.parse_args() if args.version: - print argv[0] + ", version " + script_version - exit(0) + print argv[0] + ", version " + script_version + exit(0) cookie_jar = cookielib.CookieJar() opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie_jar)) def csvFormat(value): - csv_record = '"' + str(value).replace('"', '""') + '",' - return csv_record + csv_record = '"' + str(value).replace('"', '""') + '",' + return csv_record def dictFind(data, keys): - try: - for key in keys: - data = data[key] - except KeyError: - return "" - return data + try: + for key in keys: + data = data[key] + except KeyError: + return "" + return data # url is a string, post is a dictionary of POST parameters, headers is a dictionary of headers. def http_req(url, post=None, headers={}): - if args.debug: - print "### http_req(" + url + ")" + if args.debug: + print "### http_req(" + url + ")" - request = urllib2.Request(url) - request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. - for header_key, header_value in headers.iteritems(): - request.add_header(header_key, header_value) - if post: - post = urlencode(post) # Convert dictionary to POST parameter string. - response = opener.open(request, data=post) # This line may throw a urllib2.HTTPError. + request = urllib2.Request(url) + request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. + for header_key, header_value in headers.iteritems(): + request.add_header(header_key, header_value) + if post: + post = urlencode(post) # Convert dictionary to POST parameter string. + response = opener.open(request, data=post) # This line may throw a urllib2.HTTPError. - # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below. 
- if response.getcode() != 200: - raise Exception('Bad return code (' + response.getcode() + ') for: ' + url) + # N.B. urllib2 will follow any 302 redirects. Also, the "open" call above may throw a urllib2.HTTPError which is checked for below. + if response.getcode() != 200: + raise Exception('Bad return code (' + response.getcode() + ') for: ' + url) - return response.read() + return response.read() if not args.quiet: - print 'Welcome to Garmin Connect Exporter!' + print 'Welcome to Garmin Connect Exporter!' # Create directory for data files. if isdir(args.directory): - print 'Warning: Output directory already exists. Will skip already-downloaded files and append to the CSV file.' + print 'Warning: Output directory already exists. Will skip already-downloaded files and append to the CSV file.' username = args.username if args.username else raw_input('Username: ') password = args.password if args.password else getpass() @@ -158,12 +152,12 @@ def http_req(url, post=None, headers={}): # TODO: Can we do this without iterating? login_ticket = None for cookie in cookie_jar: - if cookie.name == 'CASTGC': - login_ticket = cookie.value - break + if cookie.name == 'CASTGC': + login_ticket = cookie.value + break if not login_ticket: - raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?') + raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?') # Chop of 'TGT-' off the beginning, prepend 'ST-0'. login_ticket = 'ST-0' + login_ticket[4:] @@ -191,11 +185,11 @@ def http_req(url, post=None, headers={}): print "### activity_properties" aps = activity_properties_req.splitlines() for ap in aps: - (key, value) = ap.split('=') - key = key.replace("activity_type_", "") - activity_properties[key] = value + (key, value) = ap.split('=') + key = key.replace("activity_type_", "") + activity_properties[key] = value print "###" - + # get activity type info, put in a dict activity_type_info = {} activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes" @@ -203,21 +197,21 @@ def http_req(url, post=None, headers={}): print "### activity_type_info" keys = ['typeKey', ] for a_type in activity_types: - type_id = a_type['typeId'] - this_type = {} - for key in keys: - this_type[key] = dictFind(a_type, [key, ]) - # Set type from typeKey - try: - this_type['type'] = activity_properties[this_type['typeKey']] - except: - this_type['type'] = this_type['typeKey'] - activity_type_info[type_id] = this_type + type_id = a_type['typeId'] + this_type = {} + for key in keys: + this_type[key] = dictFind(a_type, [key, ]) + # Set type from typeKey + try: + this_type['type'] = activity_properties[this_type['typeKey']] + except: + this_type['type'] = this_type['typeKey'] + activity_type_info[type_id] = this_type for a_type in activity_type_info: - print a_type - for activity_parameter in activity_type_info[a_type]: - print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter]) + print a_type + for activity_parameter in activity_type_info[a_type]: + print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter]) print "###" @@ -227,10 +221,10 @@ def http_req(url, post=None, headers={}): print "### event_properties" evs = event_properties_req.splitlines() for ev in evs: - (key, value) = ev.split('=') - event_properties[key] = value + (key, value) = ev.split('=') + event_properties[key] = value for ev in event_properties: - print 
"%s: %s" % (ev, event_properties[ev]) + print "%s: %s" % (ev, event_properties[ev]) print "###" # get event type info, put in a dict @@ -241,27 +235,27 @@ def http_req(url, post=None, headers={}): print "### event_type_info" keys = ['typeKey', ] for e_type in event_types: - type_id = e_type['typeId'] - this_type = {} - for key in keys: - this_type[key] = dictFind(e_type, [key, ]) - # Set type from typeKey - try: - this_type['type'] = event_properties[this_type['typeKey']] - except KeyError: - this_type['type'] = this_type['typeKey'] - - event_type_info[type_id] = this_type + type_id = e_type['typeId'] + this_type = {} + for key in keys: + this_type[key] = dictFind(e_type, [key, ]) + # Set type from typeKey + try: + this_type['type'] = event_properties[this_type['typeKey']] + except KeyError: + this_type['type'] = this_type['typeKey'] + + event_type_info[type_id] = this_type for e_type in event_type_info: - print e_type - for event_parameter in event_type_info[e_type]: - print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter]) + print e_type + for event_parameter in event_type_info[e_type]: + print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter]) print "###" if not isdir(args.directory): - mkdir(args.directory) + mkdir(args.directory) csv_filename = args.directory + '/activities.csv' csv_existed = isfile(csv_filename) @@ -270,196 +264,196 @@ def http_req(url, post=None, headers={}): # Write header to CSV file if not csv_existed: - csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n') + csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n') download_all = False if args.count == 'all': - # If the user wants to download all activities, first download one, - # then the result of that request will tell us how many are available - # so we will modify the variables then. 
- total_to_download = 1 - download_all = True + # If the user wants to download all activities, first download one, + # then the result of that request will tell us how many are available + # so we will modify the variables then. + total_to_download = 1 + download_all = True else: - total_to_download = int(args.count) + total_to_download = int(args.count) total_downloaded = 0 # This while loop will download data from the server in multiple chunks, if necessary. while total_downloaded < total_to_download: - # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. - if total_to_download - total_downloaded > 100: - num_to_download = 100 - else: - num_to_download = total_to_download - total_downloaded - - search_params = {'start': total_downloaded, 'limit': num_to_download} - # Query Garmin Connect - query_url = url_gc_search + urlencode(search_params) - result = http_req(query_url) - json_results = json.loads(result) # TODO: Catch possible exceptions here. - if download_all: - # Modify total_to_download based on how many activities the server reports. - # total_to_download = int(search['totalFound']) - total_to_download = int(json_results['results']['totalFound']) - # Do it only once. - download_all = False - - - if args.debug: - print "### json_results:" - print json.dumps(json_results, indent=4, sort_keys=True) - print "###" - - # Pull out just the list of activities. - # Only the activityId is used. - # json_results used to be a deep hierarchy, but ... no longer - activities = json_results - - # Process each activity. - for a in activities: - activityId = str(a['activityId']) + # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. + if total_to_download - total_downloaded > 100: + num_to_download = 100 + else: + num_to_download = total_to_download - total_downloaded + + search_params = {'start': total_downloaded, 'limit': num_to_download} + # Query Garmin Connect + query_url = url_gc_search + urlencode(search_params) + result = http_req(query_url) + json_results = json.loads(result) # TODO: Catch possible exceptions here. + if download_all: + # Modify total_to_download based on how many activities the server reports. + # total_to_download = int(search['totalFound']) + total_to_download = int(json_results['results']['totalFound']) + # Do it only once. + download_all = False + + + if args.debug: + print "### json_results:" + print json.dumps(json_results, indent=4, sort_keys=True) + print "###" + + # Pull out just the list of activities. + # Only the activityId is used. + # json_results used to be a deep hierarchy, but ... no longer + activities = json_results + + # Process each activity. + for a in activities: + activityId = str(a['activityId']) - if not args.quiet: - print 'activity: [' + activityId + ']', - print a['activityName'] - modern_activity_url = url_gc_modern_activity + activityId - - if args.debug: - print "url: " + modern_activity_url - - activity_filename = args.directory + '/' + activityId + '.json' - if args.debug: - print "filename: " + activity_filename - result = http_req(modern_activity_url) - results = json.loads(result) - - save_file = open(activity_filename, 'w') - save_file.write(json.dumps(results, indent=4, sort_keys=True)) - save_file.close() - - # Write stats to CSV. 
- empty_record = '"",' - csv_record = '' - # Activity ID - csv_record += csvFormat(activityId) - # Activity Name - csv_record += csvFormat(dictFind(results, ['activityName', ])) - # Description - csv_record += csvFormat(dictFind(results, ['description', ])) - # Begin Timestamp - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) - - # Begin Timestamp (Raw Milliseconds) - csv_record += empty_record - - # End Timestamp - csv_record += empty_record - - # End Timestamp (Raw Milliseconds) - csv_record += empty_record - - # Device - deviceId = dictFind(a, ['deviceId', ]) - csv_record += csvFormat(deviceInfo.displayName(deviceId)) - - # Activity Parent - parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) - print "parentTypeId: %d" % parentTypeId - csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ])) - # Activity Type - typeId = dictFind(a, ['activityType', 'typeId',]) - print "typeId: %d" % typeId - csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ])) - - # Event Type - typeId = dictFind(a, ['eventType', 'typeId',]) - csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ])) - # Activity Time Zone - csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) - - # Max. Elevation - csv_record += empty_record - # Max. Elevation (Raw) - # (was in feet previously, now appears to be meters) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) - - # {start, end} X {latitude, longitude} - # Begin Latitude (Decimal Degrees Raw) - # Begin Longitude (Decimal Degrees Raw) - # End Latitude (Decimal Degrees Raw) - # End Longitude (Decimal Degrees Raw) - for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: - csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) - - # Average Moving Speed - csv_record += empty_record - - # Average Moving Speed (Raw) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) - - # Max. Heart Rate (bpm) - csv_record += empty_record - # Average Heart Rate (bpm) - csv_record += empty_record - - # Max. Speed - csv_record += empty_record - # Max. Speed (Raw) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) - - # Calories - csv_record += empty_record - # Calories (Raw) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) - - # Duration (h:m:s) - csv_record += empty_record - # Duration (Raw Seconds) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) - # Moving Duration (h:m:s) - csv_record += empty_record - # Moving Duration (Raw Seconds), - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) - # Average Speed - csv_record += empty_record - # Average Speed (Raw) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) - # Distance - csv_record += empty_record - # distance.value - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) - - # Max. Heart Rate (bpm) - csv_record += empty_record - - # Min. Elevation - csv_record += empty_record - # Min. Elevation (Raw) - csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ])) - - # Elevation Gain - csv_record += empty_record - # Elevation Gain (Raw) - csv_record += empty_record - # Elevation Loss - csv_record += empty_record - # Elevation Loss (Raw) - csv_record += empty_record - - # remove any trailing commas - R read.csv doesn't like them. 
- csv_record = csv_record.rstrip(',') - - csv_record += '\n' - - if args.debug: - print "data: " + csv_record - - csv_file.write(csv_record.encode('utf8')) - - total_downloaded += num_to_download + if not args.quiet: + print 'activity: [' + activityId + ']', + print a['activityName'] + modern_activity_url = url_gc_modern_activity + activityId + + if args.debug: + print "url: " + modern_activity_url + + activity_filename = args.directory + '/' + activityId + '.json' + if args.debug: + print "filename: " + activity_filename + result = http_req(modern_activity_url) + results = json.loads(result) + + save_file = open(activity_filename, 'w') + save_file.write(json.dumps(results, indent=4, sort_keys=True)) + save_file.close() + + # Write stats to CSV. + empty_record = '"",' + csv_record = '' + # Activity ID + csv_record += csvFormat(activityId) + # Activity Name + csv_record += csvFormat(dictFind(results, ['activityName', ])) + # Description + csv_record += csvFormat(dictFind(results, ['description', ])) + # Begin Timestamp + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) + + # Begin Timestamp (Raw Milliseconds) + csv_record += empty_record + + # End Timestamp + csv_record += empty_record + + # End Timestamp (Raw Milliseconds) + csv_record += empty_record + + # Device + deviceId = dictFind(a, ['deviceId', ]) + csv_record += csvFormat(deviceInfo.displayName(deviceId)) + + # Activity Parent + parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) + print "parentTypeId: %d" % parentTypeId + csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ])) + # Activity Type + typeId = dictFind(a, ['activityType', 'typeId',]) + print "typeId: %d" % typeId + csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ])) + + # Event Type + typeId = dictFind(a, ['eventType', 'typeId',]) + csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ])) + # Activity Time Zone + csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) + + # Max. Elevation + csv_record += empty_record + # Max. Elevation (Raw) + # (was in feet previously, now appears to be meters) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) + + # {start, end} X {latitude, longitude} + # Begin Latitude (Decimal Degrees Raw) + # Begin Longitude (Decimal Degrees Raw) + # End Latitude (Decimal Degrees Raw) + # End Longitude (Decimal Degrees Raw) + for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: + csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) + + # Average Moving Speed + csv_record += empty_record + + # Average Moving Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) + + # Max. Heart Rate (bpm) + csv_record += empty_record + # Average Heart Rate (bpm) + csv_record += empty_record + + # Max. Speed + csv_record += empty_record + # Max. 
Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) + + # Calories + csv_record += empty_record + # Calories (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) + + # Duration (h:m:s) + csv_record += empty_record + # Duration (Raw Seconds) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) + # Moving Duration (h:m:s) + csv_record += empty_record + # Moving Duration (Raw Seconds), + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) + # Average Speed + csv_record += empty_record + # Average Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) + # Distance + csv_record += empty_record + # distance.value + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) + + # Max. Heart Rate (bpm) + csv_record += empty_record + + # Min. Elevation + csv_record += empty_record + # Min. Elevation (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ])) + + # Elevation Gain + csv_record += empty_record + # Elevation Gain (Raw) + csv_record += empty_record + # Elevation Loss + csv_record += empty_record + # Elevation Loss (Raw) + csv_record += empty_record + + # remove any trailing commas - R read.csv doesn't like them. + csv_record = csv_record.rstrip(',') + + csv_record += '\n' + + if args.debug: + print "data: " + csv_record + + csv_file.write(csv_record.encode('utf8')) + + total_downloaded += num_to_download # End while loop for multiple chunks. csv_file.close() if not args.quiet: - print 'Done!' + print 'Done!' From 4f143727b88e08026bf5a8ebeffe32f9ad455305 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 27 Aug 2018 21:13:15 -0700 Subject: [PATCH 20/41] A bit of comment cleanup; remove unneeded debugging output. --- gcexport.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/gcexport.py b/gcexport.py index 95cda84..2812a11 100755 --- a/gcexport.py +++ b/gcexport.py @@ -144,8 +144,15 @@ def http_req(url, post=None, headers={}): # Initially, we need to get a valid session cookie, so we pull the login page. http_req(url_gc_login) -# Now we'll actually login. -post_data = {'username': username, 'password': password, 'embed': 'true', 'lt': 'e1s1', '_eventId': 'submit', 'displayNameRequired': 'false'} # Fields that are passed in a typical Garmin login. +# Now we'll actually login, using fields that are passed in a typical +# Garmin login. +post_data = { + 'username': username, + 'password': password, + 'embed': 'true', + 'lt': 'e1s1', + '_eventId': 'submit', + 'displayNameRequired': 'false'} http_req(url_gc_login, post_data) # Get the key. @@ -159,7 +166,7 @@ def http_req(url, post=None, headers={}): if not login_ticket: raise Exception('Did not get a ticket cookie. Cannot log in. Did you enter the correct username and password?') -# Chop of 'TGT-' off the beginning, prepend 'ST-0'. +# Chop 'TGT-' off the beginning, prepend 'ST-0'. 
login_ticket = 'ST-0' + login_ticket[4:] login_url = url_gc_post_auth + 'ticket=' + login_ticket @@ -357,11 +364,9 @@ def http_req(url, post=None, headers={}): # Activity Parent parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) - print "parentTypeId: %d" % parentTypeId csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ])) # Activity Type typeId = dictFind(a, ['activityType', 'typeId',]) - print "typeId: %d" % typeId csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ])) # Event Type From 6b35a4d128ad983f65cb5d643a727bc5a0d4b398 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 28 Aug 2018 06:30:24 -0700 Subject: [PATCH 21/41] Linewrap tidying; remove broken 'download_all' feature --- gcexport.py | 79 +++++++++++++++++++++++++++++++++-------------------- 1 file changed, 50 insertions(+), 29 deletions(-) diff --git a/gcexport.py b/gcexport.py index 2812a11..a3e530f 100755 --- a/gcexport.py +++ b/gcexport.py @@ -21,7 +21,13 @@ class DeviceInfo(): devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" - keys = ['currentFirmwareVersion', 'displayName', 'partNumber', 'serialNumber', ] + keys = [ + 'currentFirmwareVersion', + 'displayName', + 'partNumber', + 'serialNumber', + ] + def __init__(self): self.device_info = {} devices = json.loads(http_req(self.devices_url)) @@ -65,17 +71,47 @@ def displayName(self, deviceId): parser = argparse.ArgumentParser() -parser.add_argument('--quiet', help="stifle all output", action="store_true") -parser.add_argument('--debug', help="lots of console output", action="store_true") -parser.add_argument('--version', help="print version and exit", action="store_true") -parser.add_argument('--username', help="your Garmin Connect username (otherwise, you will be prompted)", nargs='?') -parser.add_argument('--password', help="your Garmin Connect password (otherwise, you will be prompted)", nargs='?') - -parser.add_argument('-c', '--count', nargs='?', default="1", - help="number of recent activities to download, or 'all' (default: 1)") - -parser.add_argument('-d', '--directory', nargs='?', default=activities_directory, - help="the directory to export to (default: './YYYY-MM-DD_garmin_connect_export')") +parser.add_argument( + '--quiet', + help="stifle all output", + action="store_true" +) +parser.add_argument( + '--debug', + help="lots of console output", + action="store_true" +) +parser.add_argument( + '--version', + help="print version and exit", + action="store_true" +) +parser.add_argument( + '--username', + help="your Garmin Connect username (otherwise, you will be prompted)", + nargs='?' +) +parser.add_argument( + '--password', + help="your Garmin Connect password (otherwise, you will be prompted)", + nargs='?' +) + +parser.add_argument( + '-c', + '--count', + nargs='?', + default="1", + help="number of recent activities to download (default: 1)" +) + +parser.add_argument( + '-d', + '--directory', + nargs='?', + default=activities_directory, + help="save directory (default: './YYYY-MM-DD_garmin_connect_export')" +) args = parser.parse_args() @@ -132,7 +168,7 @@ def http_req(url, post=None, headers={}): limit_maximum = 100 # URLs for various services. 
-url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' +url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' url_gc_post_auth = 'https://connect.garmin.com/modern/?' # url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' @@ -273,15 +309,7 @@ def http_req(url, post=None, headers={}): if not csv_existed: csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n') -download_all = False -if args.count == 'all': - # If the user wants to download all activities, first download one, - # then the result of that request will tell us how many are available - # so we will modify the variables then. - total_to_download = 1 - download_all = True -else: - total_to_download = int(args.count) +total_to_download = int(args.count) total_downloaded = 0 # This while loop will download data from the server in multiple chunks, if necessary. @@ -297,13 +325,6 @@ def http_req(url, post=None, headers={}): query_url = url_gc_search + urlencode(search_params) result = http_req(query_url) json_results = json.loads(result) # TODO: Catch possible exceptions here. - if download_all: - # Modify total_to_download based on how many activities the server reports. 
- # total_to_download = int(search['totalFound']) - total_to_download = int(json_results['results']['totalFound']) - # Do it only once. - download_all = False - if args.debug: print "### json_results:" From cf51147bce9c2dfecc5a9e1c00551c74be4d06f5 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 28 Aug 2018 07:34:22 -0700 Subject: [PATCH 22/41] Properties capture into a class --- gcexport.py | 85 +++++++++++++++++++++++++---------------------------- 1 file changed, 40 insertions(+), 45 deletions(-) diff --git a/gcexport.py b/gcexport.py index a3e530f..d3ff4c2 100755 --- a/gcexport.py +++ b/gcexport.py @@ -65,6 +65,27 @@ def displayName(self, deviceId): displayName = device + ' ' + version return displayName +class Properties(): + def __init__(self, url, key_trim_prefix = None): + self.key_trim_prefix = key_trim_prefix + + self.properties = {} + http_data = http_req(url) + http_lines = http_data.splitlines() + for line in http_lines: + (key, value) = line.split('=') + if (key_trim_prefix != None): + key = key.replace("activity_type_", "") + self.properties[key] = value + + # Get a value, default to key as value + def get(self, key): + try: + value = self.properties[key] + except KeyError: + value = key + return value + script_version = '1.3.2' current_date = datetime.now().strftime('%Y-%m-%d') activities_directory = './' + current_date + '_garmin_connect_export' @@ -212,7 +233,7 @@ def http_req(url, post=None, headers={}): deviceInfo = DeviceInfo() -# get activity properties, put in a dict +# get activity properties # This maps cryptic activity typeKeys to display names # all:: All Activities # golf:: Golf @@ -221,23 +242,13 @@ def http_req(url, post=None, headers={}): # street_running:: Street Running # # keys appear in activity records, activityType/typeKey - -activity_properties = {} activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' -activity_properties_req = http_req(activity_properties_url) -print "### activity_properties" -aps = activity_properties_req.splitlines() -for ap in aps: - (key, value) = ap.split('=') - key = key.replace("activity_type_", "") - activity_properties[key] = value -print "###" +activity_properties = Properties(activity_properties_url, "activity_type_") # get activity type info, put in a dict activity_type_info = {} activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes" activity_types = json.loads(http_req(activity_type_url)) -print "### activity_type_info" keys = ['typeKey', ] for a_type in activity_types: type_id = a_type['typeId'] @@ -245,37 +256,24 @@ def http_req(url, post=None, headers={}): for key in keys: this_type[key] = dictFind(a_type, [key, ]) # Set type from typeKey - try: - this_type['type'] = activity_properties[this_type['typeKey']] - except: - this_type['type'] = this_type['typeKey'] + this_type['type'] = activity_properties.get(this_type['typeKey']) activity_type_info[type_id] = this_type -for a_type in activity_type_info: - print a_type - for activity_parameter in activity_type_info[a_type]: - print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter]) - -print "###" +if args.debug: + print "### activity_type_info" + for a_type in activity_type_info: + print a_type + for activity_parameter in activity_type_info[a_type]: + print " " + activity_parameter + ": " + str(activity_type_info[a_type][activity_parameter]) + print "###" -event_properties = {} event_properties_url = 
'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0' -event_properties_req = http_req(event_properties_url) -print "### event_properties" -evs = event_properties_req.splitlines() -for ev in evs: - (key, value) = ev.split('=') - event_properties[key] = value -for ev in event_properties: - print "%s: %s" % (ev, event_properties[ev]) -print "###" +event_properties = Properties(event_properties_url) # get event type info, put in a dict - event_type_info = {} event_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes' event_types = json.loads(http_req(event_type_url)) -print "### event_type_info" keys = ['typeKey', ] for e_type in event_types: type_id = e_type['typeId'] @@ -283,19 +281,16 @@ def http_req(url, post=None, headers={}): for key in keys: this_type[key] = dictFind(e_type, [key, ]) # Set type from typeKey - try: - this_type['type'] = event_properties[this_type['typeKey']] - except KeyError: - this_type['type'] = this_type['typeKey'] - + this_type['type'] = event_properties.get(this_type['typeKey']) event_type_info[type_id] = this_type -for e_type in event_type_info: - print e_type - for event_parameter in event_type_info[e_type]: - print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter]) - -print "###" +if args.debug: + print "### event_type_info" + for e_type in event_type_info: + print e_type + for event_parameter in event_type_info[e_type]: + print " " + event_parameter + ": " + str(event_type_info[e_type][event_parameter]) + print "###" if not isdir(args.directory): mkdir(args.directory) From 850922cecaf598a875b9ca72c69fb5b401d8dfa0 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 28 Aug 2018 07:35:39 -0700 Subject: [PATCH 23/41] Remove trailing whitespace --- gcexport.py | 36 ++++++++++++++++++------------------ 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/gcexport.py b/gcexport.py index d3ff4c2..17c31db 100755 --- a/gcexport.py +++ b/gcexport.py @@ -334,7 +334,7 @@ def http_req(url, post=None, headers={}): # Process each activity. for a in activities: activityId = str(a['activityId']) - + if not args.quiet: print 'activity: [' + activityId + ']', print a['activityName'] @@ -369,7 +369,7 @@ def http_req(url, post=None, headers={}): csv_record += empty_record # End Timestamp - csv_record += empty_record + csv_record += empty_record # End Timestamp (Raw Milliseconds) csv_record += empty_record @@ -392,7 +392,7 @@ def http_req(url, post=None, headers={}): csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) # Max. Elevation - csv_record += empty_record + csv_record += empty_record # Max. Elevation (Raw) # (was in feet previously, now appears to be meters) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) @@ -406,59 +406,59 @@ def http_req(url, post=None, headers={}): csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) # Average Moving Speed - csv_record += empty_record + csv_record += empty_record # Average Moving Speed (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) # Max. Heart Rate (bpm) - csv_record += empty_record + csv_record += empty_record # Average Heart Rate (bpm) - csv_record += empty_record + csv_record += empty_record # Max. Speed - csv_record += empty_record + csv_record += empty_record # Max. 
Speed (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) # Calories - csv_record += empty_record + csv_record += empty_record # Calories (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) # Duration (h:m:s) - csv_record += empty_record + csv_record += empty_record # Duration (Raw Seconds) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) # Moving Duration (h:m:s) - csv_record += empty_record + csv_record += empty_record # Moving Duration (Raw Seconds), csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) # Average Speed - csv_record += empty_record + csv_record += empty_record # Average Speed (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) # Distance - csv_record += empty_record + csv_record += empty_record # distance.value csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) # Max. Heart Rate (bpm) - csv_record += empty_record + csv_record += empty_record # Min. Elevation - csv_record += empty_record + csv_record += empty_record # Min. Elevation (Raw) csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ])) # Elevation Gain - csv_record += empty_record + csv_record += empty_record # Elevation Gain (Raw) - csv_record += empty_record + csv_record += empty_record # Elevation Loss - csv_record += empty_record + csv_record += empty_record # Elevation Loss (Raw) - csv_record += empty_record + csv_record += empty_record # remove any trailing commas - R read.csv doesn't like them. csv_record = csv_record.rstrip(',') From 066775578e3daef7c3ed24d831d7d61a3abc2ad1 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 2 Sep 2018 21:24:12 -0700 Subject: [PATCH 24/41] Comment for Properties --- gcexport.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/gcexport.py b/gcexport.py index 17c31db..8465352 100755 --- a/gcexport.py +++ b/gcexport.py @@ -66,6 +66,14 @@ def displayName(self, deviceId): return displayName class Properties(): + """Properties: utility class that stores data from a URL in a dict. Values + in the dict are accessed by get(), which provides a default value. + + Data from the URL are expected be in string form, with multiple lines + in key=value format. + + Keys may be decorated with a to-be-removed prefix + """ def __init__(self, url, key_trim_prefix = None): self.key_trim_prefix = key_trim_prefix From 7034390b8107d833a0a37e98d54350705598260a Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Fri, 19 Oct 2018 22:42:52 -0700 Subject: [PATCH 25/41] Fix the latest garmin breakage: user-agent string needed to be updated. --- gcexport.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/gcexport.py b/gcexport.py index 8465352..4f3376e 100755 --- a/gcexport.py +++ b/gcexport.py @@ -169,7 +169,7 @@ def http_req(url, post=None, headers={}): print "### http_req(" + url + ")" request = urllib2.Request(url) - request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/1337 Safari/537.36') # Tell Garmin we're some supported browser. + request.add_header('User-Agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36') # Tell Garmin we're some supported browser. for header_key, header_value in headers.iteritems(): request.add_header(header_key, header_value) if post: @@ -199,7 +199,6 @@ def http_req(url, post=None, headers={}): # URLs for various services. 
url_gc_login = 'https://sso.garmin.com/sso/login?service=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&webhost=olaxpw-connect04&source=https%3A%2F%2Fconnect.garmin.com%2Fen-US%2Fsignin&redirectAfterAccountLoginUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&redirectAfterAccountCreationUrl=https%3A%2F%2Fconnect.garmin.com%2Fpost-auth%2Flogin&gauthHost=https%3A%2F%2Fsso.garmin.com%2Fsso&locale=en_US&id=gauth-widget&cssUrl=https%3A%2F%2Fstatic.garmincdn.com%2Fcom.garmin.connect%2Fui%2Fcss%2Fgauth-custom-v1.1-min.css&clientId=GarminConnect&rememberMeShown=true&rememberMeChecked=false&createAccountShown=true&openCreateAccount=false&usernameShown=false&displayNameShown=false&consumeServiceTicket=false&initialFocus=true&embedWidget=false&generateExtraServiceTicket=false' url_gc_post_auth = 'https://connect.garmin.com/modern/?' -# url_gc_search = 'http://connect.garmin.com/proxy/activity-search-service-1.2/json/activities?' url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' url_gc_gpx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/gpx/activity/' url_gc_tcx_activity = 'http://connect.garmin.com/proxy/activity-service-1.1/tcx/activity/' @@ -224,8 +223,12 @@ def http_req(url, post=None, headers={}): # TODO: Can we do this without iterating? login_ticket = None for cookie in cookie_jar: + if args.debug: + print "### cookie.name: " + cookie.name if cookie.name == 'CASTGC': login_ticket = cookie.value + if args.debug: + print "### selected login_ticket: " + login_ticket break if not login_ticket: @@ -233,6 +236,8 @@ def http_req(url, post=None, headers={}): # Chop 'TGT-' off the beginning, prepend 'ST-0'. login_ticket = 'ST-0' + login_ticket[4:] +if args.debug: + print "### modified login_ticket: " + login_ticket login_url = url_gc_post_auth + 'ticket=' + login_ticket http_req(login_url) From f732b2bc5fc3c3126acbbd18ed11d9c260aef57a Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 5 May 2019 19:26:07 -0700 Subject: [PATCH 26/41] Ignore .pyc --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 30b04d5..10c4361 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ .*.swp .DS_Store extras/ +*.pyc From e9abd9b7e6dcf1973d2a7e923e2f198e18edbbc8 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 5 May 2019 19:30:15 -0700 Subject: [PATCH 27/41] Following the login method of tapiriik; breaking code out into separate files. 
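
The new gcdownload.py driver below hard-codes the account credentials while the
port is under way. A sketch of the safer pattern (mirroring what gcexport.py
already does with raw_input/getpass) would be roughly:

    from getpass import getpass

    from gclogin import GarminLogin
    from deviceinfo import DeviceInfo

    username = raw_input('Username: ')
    password = getpass()

    # _get_session() performs the tapiriik-style SSO dance and returns an
    # authenticated requests.Session that the helper classes consume.
    session = GarminLogin()._get_session(email=username, password=password)
    deviceinfo = DeviceInfo(session)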
--- deviceinfo.py | 50 ++++++++++++++++++++ gcdownload.py | 20 ++++++++ gclogin.py | 128 ++++++++++++++++++++++++++++++++++++++++++++++++++ properties.py | 36 ++++++++++++++ 4 files changed, 234 insertions(+) create mode 100644 deviceinfo.py create mode 100644 gcdownload.py create mode 100644 gclogin.py create mode 100644 properties.py diff --git a/deviceinfo.py b/deviceinfo.py new file mode 100644 index 0000000..16382ac --- /dev/null +++ b/deviceinfo.py @@ -0,0 +1,50 @@ +import json +class DeviceInfo(): + devices_url = "https://connect.garmin.com/modern/proxy/device-service/deviceregistration/devices" + keys = [ + 'currentFirmwareVersion', + 'displayName', + 'partNumber', + 'serialNumber', + ] + + def __init__(self, session): + self.session = session + http_data = session.get(DeviceInfo.devices_url, allow_redirects=False) + devices = json.loads(http_data.text) + self.device_info = {} + for dev in devices: + dev_id = dev['deviceId'] + this_device = {} + for key in DeviceInfo.keys: + this_device[key] = dev.get(key, None) + self.device_info[dev_id] = this_device + + # backward compatibility hack: prepend ' ', append ".0.0" + # to firmware version. + for dev_id in self.device_info: + fw = self.device_info[dev_id]['currentFirmwareVersion'] + fw = ' ' + fw + ".0.0" + self.device_info[dev_id]['currentFirmwareVersion'] = fw + + def do_print(self): + for dev_id in self.device_info: + print dev_id + for dev_parameter in self.device_info[dev_id]: + print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] + + def displayName(self, deviceId): + try: + device = self.device_info[deviceId]['displayName'] + except KeyError: + device = "" + + try: + version = self.device_info[deviceId]['currentFirmwareVersion'] + except KeyError: + version = "" + + displayName = device + ' ' + version + return displayName + + diff --git a/gcdownload.py b/gcdownload.py new file mode 100644 index 0000000..a37bb74 --- /dev/null +++ b/gcdownload.py @@ -0,0 +1,20 @@ +from gclogin import GarminLogin +from properties import Properties +from deviceinfo import DeviceInfo + +gcl = GarminLogin() +session = gcl._get_session(email='aaronferrucci', password='Adh0r3w38x4k1u8Z') + +deviceinfo = DeviceInfo(session) +print("\nDevices:") +deviceinfo.do_print() + +print("\nProperties:") +props = Properties(session, "activity_type_") +props.do_print() + +# http_data = session.get(propUrl, allow_redirects=False) +# for line in http_data.iter_lines(): +# (key, value) = line.split('=') +# print("%s=%s" % (key, value)) +# diff --git a/gclogin.py b/gclogin.py new file mode 100644 index 0000000..cf27273 --- /dev/null +++ b/gclogin.py @@ -0,0 +1,128 @@ +import requests +import tempfile + +HTTP_SOURCE_ADDR = '0.0.0.0' +class GarminLogin(): + _garmin_signin_headers = { + "origin": "https://sso.garmin.com" + } + + # To do: pull in sessioncache from tapiriik, or omit if that's possible + # _sessionCache = SessionCache("garminconnect", lifetime=timedelta(minutes=120), freshen_on_get=True) + + def _rate_limit(self): + import fcntl, struct, time + min_period = 1 # I appear to been banned from Garmin Connect while determining this. 
+ fcntl.flock(self._rate_lock,fcntl.LOCK_EX) + try: + self._rate_lock.seek(0) + last_req_start = self._rate_lock.read() + if not last_req_start: + last_req_start = 0 + else: + last_req_start = float(last_req_start) + + wait_time = max(0, min_period - (time.time() - last_req_start)) + print("_rate_limit: wait: '%s'; last_req_start: '%s'" % (wait_time, last_req_start)) + time.sleep(wait_time) + + self._rate_lock.seek(0) + self._rate_lock.write(str(time.time())) + self._rate_lock.flush() + finally: + fcntl.flock(self._rate_lock,fcntl.LOCK_UN) + + def __init__(self): + rate_lock_path = tempfile.gettempdir() + "/gc_rate.%s.lock" % HTTP_SOURCE_ADDR + print("rate_lock_path: '%s'" % rate_lock_path) + # Ensure the rate lock file exists (...the easy way) + open(rate_lock_path, "a").close() + self._rate_lock = open(rate_lock_path, "r+") + + def _get_session(self, email, password): + + session = requests.Session() + + # JSIG CAS, cool I guess. + # Not quite OAuth though, so I'll continue to collect raw credentials. + # Commented stuff left in case this ever breaks because of missing parameters... + data = { + "username": email, + "password": password, + "_eventId": "submit", + "embed": "true", + # "displayNameRequired": "false" + } + params = { + "service": "https://connect.garmin.com/modern", + # "redirectAfterAccountLoginUrl": "http://connect.garmin.com/modern", + # "redirectAfterAccountCreationUrl": "http://connect.garmin.com/modern", + # "webhost": "olaxpw-connect00.garmin.com", + "clientId": "GarminConnect", + "gauthHost": "https://sso.garmin.com/sso", + # "rememberMeShown": "true", + # "rememberMeChecked": "false", + "consumeServiceTicket": "false", + # "id": "gauth-widget", + # "embedWidget": "false", + # "cssUrl": "https://static.garmincdn.com/com.garmin.connect/ui/src-css/gauth-custom.css", + # "source": "http://connect.garmin.com/en-US/signin", + # "createAccountShown": "true", + # "openCreateAccount": "false", + # "usernameShown": "true", + # "displayNameShown": "false", + # "initialFocus": "true", + # "locale": "en" + } + + # I may never understand what motivates people to mangle a perfectly good protocol like HTTP in the ways they do... + preResp = session.get("https://sso.garmin.com/sso/signin", params=params) + if preResp.status_code != 200: + raise APIException("SSO prestart error %s %s" % (preResp.status_code, preResp.text)) + + ssoResp = session.post("https://sso.garmin.com/sso/signin", headers=self._garmin_signin_headers, params=params, data=data, allow_redirects=False) + if ssoResp.status_code != 200 or "temporarily unavailable" in ssoResp.text: + raise APIException("SSO error %s %s" % (ssoResp.status_code, ssoResp.text)) + + if ">sendEvent('FAIL')" in ssoResp.text: + raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True)) + if ">sendEvent('ACCOUNT_LOCKED')" in ssoResp.text: + raise APIException("Account Locked", block=True, user_exception=UserException(UserExceptionType.Locked, intervention_required=True)) + + if "renewPassword" in ssoResp.text: + raise APIException("Reset password", block=True, user_exception=UserException(UserExceptionType.RenewPassword, intervention_required=True)) + + # ...AND WE'RE NOT DONE YET! 
+ self._rate_limit() + + gcRedeemResp = session.get("https://connect.garmin.com/modern", allow_redirects=False) + if gcRedeemResp.status_code != 302: + raise APIException("GC redeem-start error %s %s" % (gcRedeemResp.status_code, gcRedeemResp.text)) + url_prefix = "https://connect.garmin.com" + # There are 6 redirects that need to be followed to get the correct cookie + # ... :( + max_redirect_count = 7 + current_redirect_count = 1 + while True: + self._rate_limit() + url = gcRedeemResp.headers["location"] + # Fix up relative redirects. + if url.startswith("/"): + url = url_prefix + url + url_prefix = "/".join(url.split("/")[:3]) + print("url: '%s'" % url) + gcRedeemResp = session.get(url, allow_redirects=False) + + if current_redirect_count >= max_redirect_count and gcRedeemResp.status_code != 200: + raise APIException("GC redeem %d/%d error %s %s" % (current_redirect_count, max_redirect_count, gcRedeemResp.status_code, gcRedeemResp.text)) + if gcRedeemResp.status_code == 200 or gcRedeemResp.status_code == 404: + break + current_redirect_count += 1 + if current_redirect_count > max_redirect_count: + break + + # self._sessionCache.Set(record.ExternalID if record else email, session) + # session.headers.update(self._obligatory_headers) + + return session + diff --git a/properties.py b/properties.py new file mode 100644 index 0000000..40cba70 --- /dev/null +++ b/properties.py @@ -0,0 +1,36 @@ +import requests + +class Properties(): + """Properties: utility class that stores data from a URL in a dict. Values + in the dict are accessed by get(), which provides a default value. + + Data from the URL are expected be in string form, with multiple lines + in key=value format. + + Keys may be decorated with a to-be-removed prefix + """ + propUrl = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' + def __init__(self, session, key_trim_prefix = None): + self.key_trim_prefix = key_trim_prefix + + self.properties = {} + + http_data = session.get(Properties.propUrl, allow_redirects=False) + for line in http_data.iter_lines(): + (key, value) = line.split('=') + if (key_trim_prefix != None): + key = key.replace("activity_type_", "") + self.properties[key] = value + + # Get a value, default to key as value + def get(self, key): + try: + value = self.properties[key] + except KeyError: + value = key + return value + + def do_print(self): + for key in self.properties: + print(" %s=%s" % (key, self.properties[key])) + From a08a45ece819d1ccf053e2af0dd6df9a289fe6e5 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 6 May 2019 19:51:56 -0700 Subject: [PATCH 28/41] Checkpoint: moving forward with the port. 
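
The new TypeInfo class pairs a type-listing URL with a Properties instance so
that a numeric typeId can be resolved to a display name. A rough sketch of the
lookup the CSV writer will eventually need (assuming `session` is the
authenticated requests.Session and `a` is one activity from the search results):

    from properties import Properties
    from typeinfo import TypeInfo

    activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0'
    activity_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes'

    activity_properties = Properties(session, activity_properties_url, "activity_type_")
    activity_type_info = TypeInfo(session, activity_type_url, activity_properties)

    # typeId -> typeKey -> display name, e.g. 'street_running' -> 'Street Running'
    type_id = a['activityType']['typeId']
    print activity_type_info.type_info[type_id]['type']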
--- gcdownload.py | 33 +++++++++++++++++++++++---------- properties.py | 13 +++++-------- typeinfo.py | 29 +++++++++++++++++++++++++++++ 3 files changed, 57 insertions(+), 18 deletions(-) create mode 100644 typeinfo.py diff --git a/gcdownload.py b/gcdownload.py index a37bb74..d63fb17 100644 --- a/gcdownload.py +++ b/gcdownload.py @@ -1,20 +1,33 @@ from gclogin import GarminLogin from properties import Properties from deviceinfo import DeviceInfo +from typeinfo import TypeInfo gcl = GarminLogin() session = gcl._get_session(email='aaronferrucci', password='Adh0r3w38x4k1u8Z') deviceinfo = DeviceInfo(session) -print("\nDevices:") -deviceinfo.do_print() +# print("\nDevices:") +# deviceinfo.do_print() -print("\nProperties:") -props = Properties(session, "activity_type_") -props.do_print() +# activity properties +activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' +activity_properties = Properties(session, activity_properties_url, "activity_type_") +print("\nActivity Properties:") +activity_properties.do_print() + +activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes" +activity_type_info = TypeInfo(session, activity_type_url, activity_properties) +print("\nActivity Type Info:") +activity_type_info.do_print() + +event_properties_url = 'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0' +event_properties = Properties(session, event_properties_url) +print("\nEvent Properties:") +event_properties.do_print() + +event_type_url = 'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes' +event_type_info = TypeInfo(session, event_type_url, event_properties) +print("\nEvent Type Info:") +event_type_info.do_print() -# http_data = session.get(propUrl, allow_redirects=False) -# for line in http_data.iter_lines(): -# (key, value) = line.split('=') -# print("%s=%s" % (key, value)) -# diff --git a/properties.py b/properties.py index 40cba70..4f36f8b 100644 --- a/properties.py +++ b/properties.py @@ -4,22 +4,19 @@ class Properties(): """Properties: utility class that stores data from a URL in a dict. Values in the dict are accessed by get(), which provides a default value. - Data from the URL are expected be in string form, with multiple lines + Data from the URL are expected to be in string form, with multiple lines in key=value format. - Keys may be decorated with a to-be-removed prefix + If key_trim_prefix is provided, its value is deleted from key names. 
""" - propUrl = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' - def __init__(self, session, key_trim_prefix = None): - self.key_trim_prefix = key_trim_prefix - + def __init__(self, session, url, key_trim_prefix = None): self.properties = {} - http_data = session.get(Properties.propUrl, allow_redirects=False) + http_data = session.get(url, allow_redirects=False) for line in http_data.iter_lines(): (key, value) = line.split('=') if (key_trim_prefix != None): - key = key.replace("activity_type_", "") + key = key.replace(key_trim_prefix, "") self.properties[key] = value # Get a value, default to key as value diff --git a/typeinfo.py b/typeinfo.py new file mode 100644 index 0000000..46f3339 --- /dev/null +++ b/typeinfo.py @@ -0,0 +1,29 @@ +import requests +import json + +class TypeInfo(): + """ + TypeInfo: utility dict wrapper class + Looks up types in a url and an associated Properties instance + """ + def __init__(self, session, url, props): + self.type_info = {} + http_data = session.get(url, allow_redirects=False) + types = json.loads(http_data.text) + key = 'typeKey' + for _type in types: + type_id = _type['typeId'] + this_type = {} + this_type[key] = _type.get(key, "") + # Set type from typeKey + this_type['type'] = props.get(this_type[key]) + self.type_info[type_id] = this_type + + def do_print(self): + print "### type_info" + for _type in self.type_info: + print _type + for param in self.type_info[_type]: + print " " + param + ": " + str(self.type_info[_type][param]) + print "###" + From 74a663311c9fe8ef44407c68128c48b0203f39d5 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Mon, 6 May 2019 20:18:30 -0700 Subject: [PATCH 29/41] Progress: json dump of some activities looks sane. --- gcdownload.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/gcdownload.py b/gcdownload.py index d63fb17..afb3177 100644 --- a/gcdownload.py +++ b/gcdownload.py @@ -1,3 +1,5 @@ +import json + from gclogin import GarminLogin from properties import Properties from deviceinfo import DeviceInfo @@ -31,3 +33,25 @@ print("\nEvent Type Info:") event_type_info.do_print() +# start of experimental get-activities code +total_to_download = 5 # int(args.count) +total_downloaded = 0 +url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' +while total_downloaded < total_to_download: + # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. + if total_to_download - total_downloaded > 100: + num_to_download = 100 + else: + num_to_download = total_to_download - total_downloaded + + search_params = {'start': total_downloaded, 'limit': num_to_download} + http_data = session.get(url_gc_search, params=search_params) + json_results = json.loads(http_data.text) + # result = http_req(query_url) + # json_results = json.loads(result) # TODO: Catch possible exceptions here. 
+ + print "### json_results:" + print json.dumps(json_results, indent=4, sort_keys=True) + print "###" + total_downloaded += num_to_download +# end of experiment From 16a9e4a51a92ab904cb03ac896078b4cfe191e0c Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 7 May 2019 20:42:03 -0700 Subject: [PATCH 30/41] cmd line args, less debug output --- cmdlineargs.py | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++ gcdownload.py | 45 ++++++++++++++++++++++++++++-------------- gclogin.py | 6 +++--- 3 files changed, 86 insertions(+), 18 deletions(-) create mode 100644 cmdlineargs.py mode change 100644 => 100755 gcdownload.py diff --git a/cmdlineargs.py b/cmdlineargs.py new file mode 100644 index 0000000..aa88997 --- /dev/null +++ b/cmdlineargs.py @@ -0,0 +1,53 @@ +import argparse +from datetime import datetime +def get_args(): + current_date = datetime.now().strftime('%Y-%m-%d') + activities_directory = './' + current_date + '_garmin_connect_export' + parser = argparse.ArgumentParser() + + parser.add_argument( + '--quiet', + help="stifle all output", + action="store_true" + ) + parser.add_argument( + '--debug', + help="lots of console output", + action="store_true" + ) + parser.add_argument( + '--version', + help="print version and exit", + action="store_true" + ) + parser.add_argument( + '--username', + help="your Garmin Connect username (otherwise, you will be prompted)", + nargs='?' + ) + parser.add_argument( + '--password', + help="your Garmin Connect password (otherwise, you will be prompted)", + nargs='?' + ) + + parser.add_argument( + '-c', + '--count', + nargs='?', + default="1", + help="number of recent activities to download (default: 1)" + ) + + parser.add_argument( + '-d', + '--directory', + nargs='?', + default=activities_directory, + help="save directory (default: './YYYY-MM-DD_garmin_connect_export')" + ) + + args = parser.parse_args() + return args + + diff --git a/gcdownload.py b/gcdownload.py old mode 100644 new mode 100755 index afb3177..4f6ad70 --- a/gcdownload.py +++ b/gcdownload.py @@ -1,9 +1,18 @@ +#!/usr/bin/python import json from gclogin import GarminLogin from properties import Properties from deviceinfo import DeviceInfo from typeinfo import TypeInfo +from cmdlineargs import get_args +from sys import argv + +script_version = '1.4.0' +args = get_args() +if args.version: + print argv[0] + ", version " + script_version + exit(0) gcl = GarminLogin() session = gcl._get_session(email='aaronferrucci', password='Adh0r3w38x4k1u8Z') @@ -15,26 +24,26 @@ # activity properties activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' activity_properties = Properties(session, activity_properties_url, "activity_type_") -print("\nActivity Properties:") -activity_properties.do_print() +# print("\nActivity Properties:") +# activity_properties.do_print() activity_type_url = "https://connect.garmin.com/modern/proxy/activity-service/activity/activityTypes" activity_type_info = TypeInfo(session, activity_type_url, activity_properties) -print("\nActivity Type Info:") -activity_type_info.do_print() +# print("\nActivity Type Info:") +# activity_type_info.do_print() event_properties_url = 'https://connect.garmin.com/modern/main/js/properties/event_types/event_types.properties?bust=4.10.1.0' event_properties = Properties(session, event_properties_url) -print("\nEvent Properties:") -event_properties.do_print() +# print("\nEvent Properties:") +# event_properties.do_print() event_type_url = 
'https://connect.garmin.com/modern/proxy/activity-service/activity/eventTypes' event_type_info = TypeInfo(session, event_type_url, event_properties) -print("\nEvent Type Info:") -event_type_info.do_print() +# print("\nEvent Type Info:") +# event_type_info.do_print() # start of experimental get-activities code -total_to_download = 5 # int(args.count) +total_to_download = int(args.count) total_downloaded = 0 url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' while total_downloaded < total_to_download: @@ -46,12 +55,18 @@ search_params = {'start': total_downloaded, 'limit': num_to_download} http_data = session.get(url_gc_search, params=search_params) - json_results = json.loads(http_data.text) - # result = http_req(query_url) - # json_results = json.loads(result) # TODO: Catch possible exceptions here. + activities = json.loads(http_data.text) + + # print "### activities:" + # print json.dumps(activities, indent=4, sort_keys=True) + # print "###" + + for a in activities: + activityId = str(a['activityId']) + + if not args.quiet: + print 'activity: [' + activityId + ']', + print a['activityName'] - print "### json_results:" - print json.dumps(json_results, indent=4, sort_keys=True) - print "###" total_downloaded += num_to_download # end of experiment diff --git a/gclogin.py b/gclogin.py index cf27273..ad07cb8 100644 --- a/gclogin.py +++ b/gclogin.py @@ -23,7 +23,7 @@ def _rate_limit(self): last_req_start = float(last_req_start) wait_time = max(0, min_period - (time.time() - last_req_start)) - print("_rate_limit: wait: '%s'; last_req_start: '%s'" % (wait_time, last_req_start)) + # print("_rate_limit: wait: '%s'; last_req_start: '%s'" % (wait_time, last_req_start)) time.sleep(wait_time) self._rate_lock.seek(0) @@ -34,7 +34,7 @@ def _rate_limit(self): def __init__(self): rate_lock_path = tempfile.gettempdir() + "/gc_rate.%s.lock" % HTTP_SOURCE_ADDR - print("rate_lock_path: '%s'" % rate_lock_path) + # print("rate_lock_path: '%s'" % rate_lock_path) # Ensure the rate lock file exists (...the easy way) open(rate_lock_path, "a").close() self._rate_lock = open(rate_lock_path, "r+") @@ -110,7 +110,7 @@ def _get_session(self, email, password): if url.startswith("/"): url = url_prefix + url url_prefix = "/".join(url.split("/")[:3]) - print("url: '%s'" % url) + # print("url: '%s'" % url) gcRedeemResp = session.get(url, allow_redirects=False) if current_redirect_count >= max_redirect_count and gcRedeemResp.status_code != 200: From 1941499829d2fede38c209d9f2316da784644ee3 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 7 May 2019 20:42:15 -0700 Subject: [PATCH 31/41] Call new script from make --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index e3b6455..1e1f23b 100644 --- a/Makefile +++ b/Makefile @@ -9,7 +9,7 @@ help: .PHONY: go go: - ./gcexport.py $(DEBUG) --username aaronferrucci --count $(COUNT) + ./gcdownload.py $(DEBUG) --username aaronferrucci --count $(COUNT) NUM_ACTIVITIES = $(shell find . 
-name activities.csv | wc -l) .PHONY: count_activities_csv From 7705f067bb88d900b3dfd88eeebb47f79302b4de Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Tue, 7 May 2019 21:51:28 -0700 Subject: [PATCH 32/41] Approaching mvp milestone --- gcdownload.py | 58 +++++++++++++++++++++-- typeinfo.py | 2 + utils.py | 129 ++++++++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 185 insertions(+), 4 deletions(-) create mode 100644 utils.py diff --git a/gcdownload.py b/gcdownload.py index 4f6ad70..deb0de6 100755 --- a/gcdownload.py +++ b/gcdownload.py @@ -6,7 +6,12 @@ from deviceinfo import DeviceInfo from typeinfo import TypeInfo from cmdlineargs import get_args +from utils import csvFormat, dictFind, activity_to_csv from sys import argv +from os.path import isdir +from os.path import isfile +from os import mkdir +from getpass import getpass script_version = '1.4.0' args = get_args() @@ -14,12 +19,16 @@ print argv[0] + ", version " + script_version exit(0) +# utilities - put these somewhere else? + gcl = GarminLogin() -session = gcl._get_session(email='aaronferrucci', password='Adh0r3w38x4k1u8Z') +username = args.username if args.username else raw_input('Username: ') +password = args.password if args.password else getpass() +session = gcl._get_session(email=username, password=password) -deviceinfo = DeviceInfo(session) +devInfo = DeviceInfo(session) # print("\nDevices:") -# deviceinfo.do_print() +# devInfo.do_print() # activity properties activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' @@ -42,10 +51,24 @@ # print("\nEvent Type Info:") # event_type_info.do_print() +if not isdir(args.directory): + mkdir(args.directory) + + +csv_filename = args.directory + '/activities.csv' +csv_existed = isfile(csv_filename) + +csv_file = open(csv_filename, 'a') + +# Write header to CSV file +if not csv_existed: + csv_file.write('Activity ID,Activity Name,Description,Begin Timestamp,Begin Timestamp (Raw Milliseconds),End Timestamp,End Timestamp (Raw Milliseconds),Device,Activity Parent,Activity Type,Event Type,Activity Time Zone,Max. Elevation,Max. Elevation (Raw),Begin Latitude (Decimal Degrees Raw),Begin Longitude (Decimal Degrees Raw),End Latitude (Decimal Degrees Raw),End Longitude (Decimal Degrees Raw),Average Moving Speed,Average Moving Speed (Raw),Max. Heart Rate (bpm),Average Heart Rate (bpm),Max. Speed,Max. Speed (Raw),Calories,Calories (Raw),Duration (h:m:s),Duration (Raw Seconds),Moving Duration (h:m:s),Moving Duration (Raw Seconds),Average Speed,Average Speed (Raw),Distance,Distance (Raw),Max. Heart Rate (bpm),Min. Elevation,Min. Elevation (Raw),Elevation Gain,Elevation Gain (Raw),Elevation Loss,Elevation Loss (Raw)\n') + # start of experimental get-activities code total_to_download = int(args.count) total_downloaded = 0 url_gc_search = 'http://connect.garmin.com/modern/proxy/activitylist-service/activities/search/activities?' +url_gc_modern_activity = 'https://connect.garmin.com/modern/proxy/activity-service/activity/' while total_downloaded < total_to_download: # Maximum of 100... 400 return status if over 100. So download 100 or whatever remains if less than 100. 
if total_to_download - total_downloaded > 100: @@ -67,6 +90,33 @@ if not args.quiet: print 'activity: [' + activityId + ']', print a['activityName'] + modern_activity_url = url_gc_modern_activity + activityId + if args.debug: + print "url: " + modern_activity_url + + result = session.get(modern_activity_url) + results = json.loads(result.text) + + activity_filename = args.directory + '/' + activityId + '.json' + if args.debug: + print "filename: " + activity_filename + + save_file = open(activity_filename, 'w') + save_file.write(json.dumps(results, indent=4, sort_keys=True)) + save_file.close() + + # Write stats to CSV. + csv_record = activity_to_csv(results, a, devInfo, activity_type_info, event_type_info) + if args.debug: + print "data: " + csv_record + + csv_file.write(csv_record.encode('utf8')) total_downloaded += num_to_download -# end of experiment +# End while loop for multiple chunks. + +csv_file.close() + +if not args.quiet: + print 'Done!' + diff --git a/typeinfo.py b/typeinfo.py index 46f3339..b854587 100644 --- a/typeinfo.py +++ b/typeinfo.py @@ -27,3 +27,5 @@ def do_print(self): print " " + param + ": " + str(self.type_info[_type][param]) print "###" + def __getitem__(self, key): + return self.type_info[key] diff --git a/utils.py b/utils.py new file mode 100644 index 0000000..b30e3ee --- /dev/null +++ b/utils.py @@ -0,0 +1,129 @@ +def csvFormat(value): + csv_record = '"' + str(value).replace('"', '""') + '",' + return csv_record + +# recursive dict get +def dictFind(data, keys): + try: + for key in keys: + data = data[key] + except KeyError: + return "" + return data + +def activity_to_csv(results, a, devInfo, activity_type_info, event_type_info): + empty_record = '"",' + csv_record = '' + # Activity ID + activityId = str(a['activityId']) + csv_record += csvFormat(activityId) + # Activity Name + csv_record += csvFormat(dictFind(results, ['activityName', ])) + # Description + csv_record += csvFormat(dictFind(results, ['description', ])) + # Begin Timestamp + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'startTimeLocal', ])) + + # Begin Timestamp (Raw Milliseconds) + csv_record += empty_record + + # End Timestamp + csv_record += empty_record + + # End Timestamp (Raw Milliseconds) + csv_record += empty_record + + # Device + deviceId = dictFind(a, ['deviceId', ]) + csv_record += csvFormat(devInfo.displayName(deviceId)) + + # Activity Parent + parentTypeId = dictFind(a, ['activityType', 'parentTypeId',]) + csv_record += csvFormat(dictFind(activity_type_info, [parentTypeId, 'type', ])) + # Activity Type + typeId = dictFind(a, ['activityType', 'typeId',]) + csv_record += csvFormat(dictFind(activity_type_info, [typeId, 'type', ])) + + # Event Type + typeId = dictFind(a, ['eventType', 'typeId',]) + csv_record += csvFormat(dictFind(event_type_info, [typeId, 'type', ])) + # Activity Time Zone + csv_record += csvFormat(dictFind(results, ['timeZoneUnitDTO', 'timeZone' ])) + + # Max. Elevation + csv_record += empty_record + # Max. 
Elevation (Raw) + # (was in feet previously, now appears to be meters) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxElevation', ])) + + # {start, end} X {latitude, longitude} + # Begin Latitude (Decimal Degrees Raw) + # Begin Longitude (Decimal Degrees Raw) + # End Latitude (Decimal Degrees Raw) + # End Longitude (Decimal Degrees Raw) + for key in ['startLatitude', 'startLongitude', 'endLatitude', 'endLongitude']: + csv_record += csvFormat(dictFind(results, ['summaryDTO', key, ])) + + # Average Moving Speed + csv_record += empty_record + + # Average Moving Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageMovingSpeed', ])) + + # Max. Heart Rate (bpm) + csv_record += empty_record + # Average Heart Rate (bpm) + csv_record += empty_record + + # Max. Speed + csv_record += empty_record + # Max. Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'maxSpeed', ])) + + # Calories + csv_record += empty_record + # Calories (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'calories', ])) + + # Duration (h:m:s) + csv_record += empty_record + # Duration (Raw Seconds) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'elapsedDuration', ])) + # Moving Duration (h:m:s) + csv_record += empty_record + # Moving Duration (Raw Seconds), + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'movingDuration', ])) + # Average Speed + csv_record += empty_record + # Average Speed (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'averageSpeed', ])) + # Distance + csv_record += empty_record + # distance.value + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'distance', ])) + + # Max. Heart Rate (bpm) + csv_record += empty_record + + # Min. Elevation + csv_record += empty_record + # Min. Elevation (Raw) + csv_record += csvFormat(dictFind(results, ['summaryDTO', 'minElevation', ])) + + # Elevation Gain + csv_record += empty_record + # Elevation Gain (Raw) + csv_record += empty_record + # Elevation Loss + csv_record += empty_record + # Elevation Loss (Raw) + csv_record += empty_record + + # remove any trailing commas - R read.csv doesn't like them. + csv_record = csv_record.rstrip(',') + + csv_record += '\n' + + return csv_record + + From 1a6b645260bdf7f88df7f6ed214023777cf51cd6 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sat, 25 Jan 2020 19:47:03 -0800 Subject: [PATCH 33/41] Check for overlap before adding new data --- Makefile | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 1e1f23b..4920340 100644 --- a/Makefile +++ b/Makefile @@ -7,9 +7,15 @@ help: @echo Usage: @echo make go COUNT=\ +.PHONY: verify_overlap +# the new activity file should overlap the old one (otherwise vimdiff +# sometimes seems confused). +verify_overlap: + @grep -q $(shell tail -1 $(shell find . -name activities.csv) | cut --delimiter=, --fields=1) ../garmin_running/activities.csv + .PHONY: go go: - ./gcdownload.py $(DEBUG) --username aaronferrucci --count $(COUNT) + ./gcdownload.py --username aaronferrucci --count $(COUNT) $(DEBUG) NUM_ACTIVITIES = $(shell find . -name activities.csv | wc -l) .PHONY: count_activities_csv @@ -20,5 +26,5 @@ count_activities_csv: fi .PHONY: vimdiff -vimdiff: count_activities_csv +vimdiff: verify_overlap gvimdiff ../garmin_running/activities.csv $(shell find . 
-name activities.csv) From 38dde7dd1043d8a41eed9e11b738dca86cb8f79e Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 14 Mar 2021 16:36:05 -0700 Subject: [PATCH 34/41] a bit of http error checking --- deviceinfo.py | 5 +++++ gcdownload.py | 3 +++ typeinfo.py | 5 +++++ 3 files changed, 13 insertions(+) diff --git a/deviceinfo.py b/deviceinfo.py index 16382ac..caf9a12 100644 --- a/deviceinfo.py +++ b/deviceinfo.py @@ -11,6 +11,11 @@ class DeviceInfo(): def __init__(self, session): self.session = session http_data = session.get(DeviceInfo.devices_url, allow_redirects=False) + if http_data.status_code != 200: + print("DeviceInfo error code: %d" % (http_data.status_code)) + self.devices = None + return + devices = json.loads(http_data.text) self.device_info = {} for dev in devices: diff --git a/gcdownload.py b/gcdownload.py index deb0de6..ee7a2ad 100755 --- a/gcdownload.py +++ b/gcdownload.py @@ -78,6 +78,9 @@ search_params = {'start': total_downloaded, 'limit': num_to_download} http_data = session.get(url_gc_search, params=search_params) + if http_data.status_code != 200: + print("Activity load error code: %d" % (http_data.status_code)) + activities = json.loads(http_data.text) # print "### activities:" diff --git a/typeinfo.py b/typeinfo.py index b854587..ac6ace4 100644 --- a/typeinfo.py +++ b/typeinfo.py @@ -9,6 +9,11 @@ class TypeInfo(): def __init__(self, session, url, props): self.type_info = {} http_data = session.get(url, allow_redirects=False) + if http_data.status_code != 200: + print("TypeInfo error code: %d" % (http_data.status_code)) + self.type_info = None + return + types = json.loads(http_data.text) key = 'typeKey' for _type in types: From 0a52d61c67645d4bdd8e7b0296e4dae9649cd28e Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 14 Mar 2021 16:36:34 -0700 Subject: [PATCH 35/41] Copy a magic header item from https://github.com/moderation/garmin-connect-export; fixes 402 errors --- gclogin.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/gclogin.py b/gclogin.py index ad07cb8..9900e4a 100644 --- a/gclogin.py +++ b/gclogin.py @@ -7,6 +7,12 @@ class GarminLogin(): "origin": "https://sso.garmin.com" } + # Copied from https://github.com/moderation/garmin-connect-export + # Starting around 2/2021, all requests return 402. nk=NT fixes it. 
+ _obligatory_headers = { + 'nk': 'NT' + } + # To do: pull in sessioncache from tapiriik, or omit if that's possible # _sessionCache = SessionCache("garminconnect", lifetime=timedelta(minutes=120), freshen_on_get=True) @@ -122,7 +128,7 @@ def _get_session(self, email, password): break # self._sessionCache.Set(record.ExternalID if record else email, session) - # session.headers.update(self._obligatory_headers) + session.headers.update(self._obligatory_headers) return session From eb63b2e1156dbbe18d004e4250c0d39fb9722305 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 9 May 2021 19:13:38 -0700 Subject: [PATCH 36/41] parens around print arguments --- deviceinfo.py | 4 ++-- typeinfo.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/deviceinfo.py b/deviceinfo.py index caf9a12..f8105ab 100644 --- a/deviceinfo.py +++ b/deviceinfo.py @@ -34,9 +34,9 @@ def __init__(self, session): def do_print(self): for dev_id in self.device_info: - print dev_id + print(dev_id) for dev_parameter in self.device_info[dev_id]: - print " " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter] + print(" " + dev_parameter + ": " + self.device_info[dev_id][dev_parameter]) def displayName(self, deviceId): try: diff --git a/typeinfo.py b/typeinfo.py index ac6ace4..1f70692 100644 --- a/typeinfo.py +++ b/typeinfo.py @@ -25,12 +25,12 @@ def __init__(self, session, url, props): self.type_info[type_id] = this_type def do_print(self): - print "### type_info" + print("### type_info") for _type in self.type_info: - print _type + print(_type) for param in self.type_info[_type]: - print " " + param + ": " + str(self.type_info[_type][param]) - print "###" + print(" " + param + ": " + str(self.type_info[_type][param])) + print("###") def __getitem__(self, key): return self.type_info[key] From bd968d3b7ed5adf473366df143db79d3aed07289 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 9 May 2021 19:14:27 -0700 Subject: [PATCH 37/41] parens around print args --- gcdownload.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/gcdownload.py b/gcdownload.py index ee7a2ad..7f8025b 100755 --- a/gcdownload.py +++ b/gcdownload.py @@ -16,7 +16,7 @@ script_version = '1.4.0' args = get_args() if args.version: - print argv[0] + ", version " + script_version + print(argv[0] + ", version " + script_version) exit(0) # utilities - put these somewhere else? @@ -91,18 +91,18 @@ activityId = str(a['activityId']) if not args.quiet: - print 'activity: [' + activityId + ']', - print a['activityName'] + print('activity: [' + activityId + ']') + print(a['activityName']) modern_activity_url = url_gc_modern_activity + activityId if args.debug: - print "url: " + modern_activity_url + print("url: " + modern_activity_url) result = session.get(modern_activity_url) results = json.loads(result.text) activity_filename = args.directory + '/' + activityId + '.json' if args.debug: - print "filename: " + activity_filename + print("filename: " + activity_filename) save_file = open(activity_filename, 'w') save_file.write(json.dumps(results, indent=4, sort_keys=True)) @@ -111,7 +111,7 @@ # Write stats to CSV. csv_record = activity_to_csv(results, a, devInfo, activity_type_info, event_type_info) if args.debug: - print "data: " + csv_record + print("data: " + csv_record) csv_file.write(csv_record.encode('utf8')) @@ -121,5 +121,5 @@ csv_file.close() if not args.quiet: - print 'Done!' 
+ print('Done!') From 5e7397dab00223992a3eaadf8b964283d5551c9c Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 9 May 2021 19:29:19 -0700 Subject: [PATCH 38/41] Fix login error by supplying User-Agent. Convert unimplemented APIException to plain old Exception. --- gclogin.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/gclogin.py b/gclogin.py index 9900e4a..1600e42 100644 --- a/gclogin.py +++ b/gclogin.py @@ -9,8 +9,10 @@ class GarminLogin(): # Copied from https://github.com/moderation/garmin-connect-export # Starting around 2/2021, all requests return 402. nk=NT fixes it. + # May 2021 it became necessary to supply a User-Agent. _obligatory_headers = { - 'nk': 'NT' + 'nk': 'NT', + 'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/89.0.4389.69 Safari/537.36', } # To do: pull in sessioncache from tapiriik, or omit if that's possible @@ -48,6 +50,7 @@ def __init__(self): def _get_session(self, email, password): session = requests.Session() + session.headers.update(self._obligatory_headers) # JSIG CAS, cool I guess. # Not quite OAuth though, so I'll continue to collect raw credentials. @@ -84,26 +87,26 @@ def _get_session(self, email, password): # I may never understand what motivates people to mangle a perfectly good protocol like HTTP in the ways they do... preResp = session.get("https://sso.garmin.com/sso/signin", params=params) if preResp.status_code != 200: - raise APIException("SSO prestart error %s %s" % (preResp.status_code, preResp.text)) + raise Exception("SSO prestart error %s %s" % (preResp.status_code, preResp.text)) ssoResp = session.post("https://sso.garmin.com/sso/signin", headers=self._garmin_signin_headers, params=params, data=data, allow_redirects=False) if ssoResp.status_code != 200 or "temporarily unavailable" in ssoResp.text: - raise APIException("SSO error %s %s" % (ssoResp.status_code, ssoResp.text)) + raise Exception("SSO error %s %s" % (ssoResp.status_code, ssoResp.text)) if ">sendEvent('FAIL')" in ssoResp.text: - raise APIException("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True)) + raise Exception("Invalid login", block=True, user_exception=UserException(UserExceptionType.Authorization, intervention_required=True)) if ">sendEvent('ACCOUNT_LOCKED')" in ssoResp.text: - raise APIException("Account Locked", block=True, user_exception=UserException(UserExceptionType.Locked, intervention_required=True)) + raise Exception("Account Locked", block=True, user_exception=UserException(UserExceptionType.Locked, intervention_required=True)) if "renewPassword" in ssoResp.text: - raise APIException("Reset password", block=True, user_exception=UserException(UserExceptionType.RenewPassword, intervention_required=True)) + raise Exception("Reset password", block=True, user_exception=UserException(UserExceptionType.RenewPassword, intervention_required=True)) # ...AND WE'RE NOT DONE YET! self._rate_limit() gcRedeemResp = session.get("https://connect.garmin.com/modern", allow_redirects=False) if gcRedeemResp.status_code != 302: - raise APIException("GC redeem-start error %s %s" % (gcRedeemResp.status_code, gcRedeemResp.text)) + raise Exception("GC redeem-start error %s %s" % (gcRedeemResp.status_code, gcRedeemResp.text)) url_prefix = "https://connect.garmin.com" # There are 6 redirects that need to be followed to get the correct cookie # ... 
:( @@ -120,7 +123,7 @@ def _get_session(self, email, password): gcRedeemResp = session.get(url, allow_redirects=False) if current_redirect_count >= max_redirect_count and gcRedeemResp.status_code != 200: - raise APIException("GC redeem %d/%d error %s %s" % (current_redirect_count, max_redirect_count, gcRedeemResp.status_code, gcRedeemResp.text)) + raise Exception("GC redeem %d/%d error %s %s" % (current_redirect_count, max_redirect_count, gcRedeemResp.status_code, gcRedeemResp.text)) if gcRedeemResp.status_code == 200 or gcRedeemResp.status_code == 404: break current_redirect_count += 1 @@ -128,7 +131,6 @@ def _get_session(self, email, password): break # self._sessionCache.Set(record.ExternalID if record else email, session) - session.headers.update(self._obligatory_headers) return session From f97df86b66cb48aa52084227f3732fd1f7af744b Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sun, 9 May 2021 21:21:08 -0700 Subject: [PATCH 39/41] data shows up as 'bytes' object, instead of string. to analyze, but these changes accommodate it --- gcdownload.py | 5 +++-- properties.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/gcdownload.py b/gcdownload.py index 7f8025b..74c354a 100755 --- a/gcdownload.py +++ b/gcdownload.py @@ -32,7 +32,7 @@ # activity properties activity_properties_url = 'https://connect.garmin.com/modern/main/js/properties/activity_types/activity_types.properties?bust=4.10.1.0' -activity_properties = Properties(session, activity_properties_url, "activity_type_") +activity_properties = Properties(session, activity_properties_url, b'activity_type_') # print("\nActivity Properties:") # activity_properties.do_print() @@ -113,7 +113,8 @@ if args.debug: print("data: " + csv_record) - csv_file.write(csv_record.encode('utf8')) + # csv_file.write(csv_record.encode('utf8')) + csv_file.write(csv_record) total_downloaded += num_to_download # End while loop for multiple chunks. diff --git a/properties.py b/properties.py index 4f36f8b..67a024a 100644 --- a/properties.py +++ b/properties.py @@ -14,9 +14,9 @@ def __init__(self, session, url, key_trim_prefix = None): http_data = session.get(url, allow_redirects=False) for line in http_data.iter_lines(): - (key, value) = line.split('=') + (key, value) = line.split(b'=') if (key_trim_prefix != None): - key = key.replace(key_trim_prefix, "") + key = key.replace(key_trim_prefix, b'') self.properties[key] = value # Get a value, default to key as value From 8f5b963c46b949fb5bd70cb0d9e8ade31de6dde1 Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Sat, 15 May 2021 20:58:04 -0700 Subject: [PATCH 40/41] Properties are stored both as bytes (as the data is delivered, now) and strings. Key type corresponds to value type --- properties.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/properties.py b/properties.py index 67a024a..9e86699 100644 --- a/properties.py +++ b/properties.py @@ -5,7 +5,8 @@ class Properties(): in the dict are accessed by get(), which provides a default value. Data from the URL are expected to be in string form, with multiple lines - in key=value format. + in key=value format. (At some point strings became bytes. I store both + string and bytes values, for now.) If key_trim_prefix is provided, its value is deleted from key names. """ @@ -18,6 +19,8 @@ def __init__(self, session, url, key_trim_prefix = None): if (key_trim_prefix != None): key = key.replace(key_trim_prefix, b'') self.properties[key] = value + # key, value are bytes. 
record a parallel string value + self.properties[key.decode('utf-8')] = value.decode('utf-8') # Get a value, default to key as value def get(self, key): From 4c15e32c83630ebecb5707d964ca59b5c1c4b77e Mon Sep 17 00:00:00 2001 From: Aaron Ferrucci Date: Wed, 2 Jun 2021 16:13:05 -0700 Subject: [PATCH 41/41] specify python3 --- gcdownload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gcdownload.py b/gcdownload.py index 74c354a..35efdb3 100755 --- a/gcdownload.py +++ b/gcdownload.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 import json from gclogin import GarminLogin
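
A note on the bytes/str handling in the last few patches: under Python 3,
requests' iter_lines() yields bytes by default, which is why Properties now
splits on b'=' and (as of patch 40) stores parallel bytes and str entries.
An alternative, sketched below on the assumption that the properties endpoint
reports a usable text encoding, is to decode once at the edge; parse_properties
is a hypothetical helper, not something in this repo:

    def parse_properties(session, url, key_trim_prefix=None):
        # Hypothetical stand-in for Properties.__init__: decode each line to
        # str immediately instead of keeping bytes and str keys side by side.
        props = {}
        resp = session.get(url, allow_redirects=False)
        for line in resp.iter_lines(decode_unicode=True):
            if not line or '=' not in line:
                continue
            key, value = line.split('=', 1)  # maxsplit=1 in case a value contains '='
            if key_trim_prefix is not None:
                key = key.replace(key_trim_prefix, '')
            props[key] = value
        return props

With everything stored as str, get() could return one consistent type and the
duplicate key.decode('utf-8') entries from patch 40 would not be needed.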