From 87ef2c8778151461ae5929b58ccea91c694cc81b Mon Sep 17 00:00:00 2001 From: yihong0618 Date: Wed, 6 Jul 2022 21:17:56 +0800 Subject: [PATCH] feat: support tcx --- .github/workflows/run_data_sync.yml | 7 +- README-CN.md | 44 ++++++- README.md | 44 ++++++- TCX_OUT/.gitkeep | 0 requirements.txt | 2 + scripts/codoon_sync.py | 3 - scripts/config.py | 1 + scripts/endomondo_sync.py | 9 +- scripts/garmin_sync.py | 53 ++++++--- scripts/garmin_to_garmin.py | 108 +++++++++++++++++ scripts/gen_svg.py | 27 +---- scripts/generator/__init__.py | 4 +- scripts/gpx_sync.py | 2 - scripts/gpxtrackposter/track.py | 122 ++++++++++---------- scripts/gpxtrackposter/track_loader.py | 154 ++++++------------------- scripts/gpxtrackposter/utils.py | 8 +- scripts/joyrun_sync.py | 3 - scripts/keep_sync.py | 3 - scripts/nike_sync.py | 21 ++-- scripts/nike_to_strava_sync.py | 8 +- scripts/strava_sync.py | 3 - scripts/strava_to_garmin_sync.py | 9 +- scripts/tcx_sync.py | 12 ++ scripts/tcx_to_strava_sync.py | 89 ++++++++++++++ scripts/utils.py | 9 +- 25 files changed, 470 insertions(+), 275 deletions(-) create mode 100644 TCX_OUT/.gitkeep create mode 100644 scripts/garmin_to_garmin.py create mode 100755 scripts/tcx_sync.py create mode 100755 scripts/tcx_to_strava_sync.py diff --git a/.github/workflows/run_data_sync.yml b/.github/workflows/run_data_sync.yml index d5e52d53c73..6fb2a296661 100644 --- a/.github/workflows/run_data_sync.yml +++ b/.github/workflows/run_data_sync.yml @@ -37,7 +37,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v1 with: - python-version: 3.6 + python-version: 3.7 # from pdm - name: Set Variables @@ -77,6 +77,7 @@ jobs: run: | python scripts/strava_sync.py ${{ secrets.STRAVA_CLIENT_ID }} ${{ secrets.STRAVA_CLIENT_SECRET }} ${{ secrets.STRAVA_CLIENT_REFRESH_TOKEN }} + # for garmin if you want generate `tcx` you can add --tcx command in the args. 
- name: Run sync Garmin script if: env.RUN_TYPE == 'garmin' run: | @@ -116,6 +117,6 @@ jobs: run: | git config --local user.email "${{ env.GITHUB_EMAIL }}" git config --local user.name "${{ env.GITHUB_NAME }}" - git commit -a -m 'update new runs' || echo "nothing to commit" + git add . + git commit -m 'update new runs' || echo "nothing to commit" git push || echo "nothing to push" - diff --git a/README-CN.md b/README-CN.md index 577e860d5cf..3a37034bf29 100644 --- a/README-CN.md +++ b/README-CN.md @@ -88,6 +88,8 @@ R.I.P. 希望大家都能健康顺利的跑过终点,逝者安息。 - **[悦跑圈](#joyrun 悦跑圈)** (因悦跑圈限制单个设备原因,无法自动化) - **[咕咚](#codoon 咕咚)** (因咕咚限制单个设备原因,无法自动化) - **[GPX](#GPX)** +- **[TCX](#TCX)** +- **[Tcx_to_Strava(upload all tcx data to strava)](#TCX_to_Strava)** - **[Nike+Strava(Using NRC Run, Strava backup data)](#nikestrava)** - **[Strava_to_Garmin(Using Strava Run, Garmin backup data)](#)** @@ -97,7 +99,7 @@ R.I.P. 希望大家都能健康顺利的跑过终点,逝者安息。 git clone https://github.com/yihong0618/running_page.git --depth=1 ``` -## 安装及测试 (node >= 12 and <= 14 python >= 3.6) +## 安装及测试 (node >= 12 and <= 14 python >= 3.7) ``` pip3 install -r requirements.txt @@ -189,6 +191,20 @@ python3(python) scripts/gpx_sync.py +### TCX + +
+Make your TCX data +
+ +把其它软件生成的 tcx files 拷贝到 TCX_OUT 之后运行 + +```python +python3(python) scripts/tcx_sync.py +``` + +
+ ### Keep
@@ -311,6 +327,10 @@ python3(python) scripts/codoon_sync.py 54bxxxxxxx fefxxxxx-xxxx-xxxx --from-auth
获取您的 Garmin 数据 +
+如果你只想同步跑步数据增加命令 --only-run +如果你想同步 `tcx` 格式,增加命令 --tcx + ```python python3(python) scripts/garmin_sync.py ${your email} ${your password} @@ -449,6 +469,28 @@ python3(python) scripts/strava_sync.py ${client_id} ${client_secret} ${refresch_
+### TCX_to_Strava + +
+Upload all tcx files to strava + +
+ +1. 完成 strava 的步骤 +2. 在项目根目录下执行: + +```python +python3(python) scripts/tcx_to_strava_sync.py ${client_id} ${client_secret} ${strava_refresch_token} +``` + +示例: + +```python +python3(python) scripts/tcx_to_strava_sync.py xxx xxx xxx +``` + +
+ ### Nike+Strava
diff --git a/README.md b/README.md index 57ce8331ddb..4e8bd0beba9 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,9 @@ English | [简体中文](https://github.com/yihong0618/running_page/blob/master/ - **[Nike Run Club](#nike-run-club)** - **[Strava](#strava)** - **[GPX](#GPX)** +- **[TCX](#TCX)** - **[Nike_to_Strava(Using NRC Run, Strava backup data)](#Nike_to_Strava)** +- **[Tcx_to_Strava(upload all tcx data to strava)](#TCX_to_Strava)** - **[Strava_to_Garmin(Using Strava Run, Garmin backup data)](#)** ## Download @@ -88,7 +90,7 @@ Clone or fork the repo. git clone https://github.com/yihong0618/running_page.git --depth=1 ``` -## Installation and testing (node >= 12 and <= 14 python >= 3.6) +## Installation and testing (node >= 12 and <= 14 python >= 3.7) ``` pip3 install -r requirements.txt @@ -180,12 +182,27 @@ python3(python) scripts/gpx_sync.py
+### TCX + +
+Make your TCX data +
+ +Copy all your tcx files to TCX_OUT or new tcx files + +```python +python3(python) scripts/tcx_sync.py +``` + +
+ ### Garmin
Get your Garmin data
If you only want to sync `type running` add args --only-run +If you only want `tcx` files add args --tcx ```python python3(python) scripts/garmin_sync.py ${your email} ${your password} @@ -210,6 +227,8 @@ python3(python) scripts/garmin_sync.py example@gmail.com example --only-run
Get your Garmin-CN data
+If you only want to sync `type running` add args --only-run +If you only want `tcx` files add args --tcx ```python python3(python) scripts/garmin_sync.py ${your email} ${your password} --is-cn @@ -340,6 +359,29 @@ References:
+ +### TCX_to_Strava + +
+upload all tcx files to strava + +
+ +1. follow the strava steps +2. Execute in the root directory: + +```python +python3(python) scripts/tcx_to_strava_sync.py ${client_id} ${client_secret} ${strava_refresch_token} +``` + +example: + +```python +python3(python) scripts/tcx_to_strava_sync.py xxx xxx xxx +``` + +
+ ### Nike_to_Strava
diff --git a/TCX_OUT/.gitkeep b/TCX_OUT/.gitkeep new file mode 100644 index 00000000000..e69de29bb2d diff --git a/requirements.txt b/requirements.txt index d99aa5620bd..7d223e69dc5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,3 +13,5 @@ timezonefinder pyyaml aiofiles cloudscraper==1.2.58 +python-tcxparser +rich diff --git a/scripts/codoon_sync.py b/scripts/codoon_sync.py index f86015a5245..ab913b1b5e0 100755 --- a/scripts/codoon_sync.py +++ b/scripts/codoon_sync.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import base64 import hashlib diff --git a/scripts/config.py b/scripts/config.py index d8d562d91f5..a64a1ff2a9c 100644 --- a/scripts/config.py +++ b/scripts/config.py @@ -6,6 +6,7 @@ GET_DIR = "activities" OUTPUT_DIR = "activities" GPX_FOLDER = os.path.join(os.getcwd(), "GPX_OUT") +TCX_FOLDER = os.path.join(os.getcwd(), "TCX_OUT") SQL_FILE = os.path.join(os.getcwd(), "scripts", "data.db") JSON_FILE = os.path.join(os.getcwd(), "src", "static", "activities.json") diff --git a/scripts/endomondo_sync.py b/scripts/endomondo_sync.py index f442cac406c..1b330f5fa1f 100644 --- a/scripts/endomondo_sync.py +++ b/scripts/endomondo_sync.py @@ -2,17 +2,16 @@ need to download the files from endomondo and store it in Workous dir in running_page """ -import os import json +import os from collections import namedtuple from datetime import datetime, timedelta -from utils import adjust_time -from config import BASE_TIMEZONE, ENDOMONDO_FILE_DIR, SQL_FILE, JSON_FILE -from generator import Generator - import polyline +from config import BASE_TIMEZONE, ENDOMONDO_FILE_DIR, JSON_FILE, SQL_FILE +from generator import Generator +from utils import adjust_time # TODO Same as keep_sync maybe refactor start_point = namedtuple("start_point", "lat lon") diff --git a/scripts/garmin_sync.py b/scripts/garmin_sync.py index 858d0dde36f..795693e3f81 100755 --- a/scripts/garmin_sync.py +++ b/scripts/garmin_sync.py @@ -1,5 +1,3 @@ -#!/usr/bin/env 
python3 -# -*- coding: utf-8 -*- """ Python 3 API wrapper for Garmin Connect to get your statistics. Copy most code from https://github.com/cyberjunky/python-garminconnect @@ -7,10 +5,10 @@ import argparse import asyncio +import json import logging import os import re -import json import sys import time import traceback @@ -18,13 +16,17 @@ import aiofiles import cloudscraper import httpx -from config import GPX_FOLDER, JSON_FILE, SQL_FILE, config +from config import GPX_FOLDER, JSON_FILE, SQL_FILE, TCX_FOLDER, config from utils import make_activities_file # logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger(__name__) +FOLDER_DICT = { + "gpx": GPX_FOLDER, + "tcx": TCX_FOLDER, +} TIME_OUT = httpx.Timeout(240.0, connect=360.0) GARMIN_COM_URL_DICT = { @@ -178,8 +180,8 @@ async def get_activities(self, start, limit): url = url + "&activityType=running" return await self.fetch_data(url) - async def download_activity(self, activity_id): - url = f"{self.modern_url}/proxy/download-service/export/gpx/activity/{activity_id}" + async def download_activity(self, activity_id, file_type="gpx"): + url = f"{self.modern_url}/proxy/download-service/export/{file_type}/activity/{activity_id}" logger.info(f"Download activity from {url}") response = await self.req.get(url, headers=self.headers) response.raise_for_status() @@ -190,7 +192,6 @@ async def upload_activities(self, files): self.login() for file, garmin_type in files: files = {"data": ("file.gpx", file)} - try: res = await self.req.post( self.upload_url, files=files, headers={"nk": "NT"} @@ -252,16 +253,16 @@ def __init__(self, status): self.status = status -async def download_garmin_gpx(client, activity_id): +async def download_garmin_data(client, activity_id, file_type="gpx"): + folder = FOLDER_DICT.get(file_type, "gpx") try: - gpx_data = await client.download_activity(activity_id) - file_path = os.path.join(GPX_FOLDER, f"{activity_id}.gpx") + file_data = await client.download_activity(activity_id, 
file_type=file_type) + file_path = os.path.join(folder, f"{activity_id}.{file_type}") async with aiofiles.open(file_path, "wb") as fb: - await fb.write(gpx_data) + await fb.write(file_data) except: print(f"Failed to download activity {activity_id}: ") traceback.print_exc() - pass async def get_activity_id_list(client, start=0): @@ -300,6 +301,14 @@ async def sem_task(task): action="store_true", help="if is only for running", ) + parser.add_argument( + "--tcx", + dest="download_file_type", + action="store_const", + const="tcx", + default="gpx", + help="to download personal documents or ebook", + ) options = parser.parse_args() email = options.email or config("sync", "garmin", "email") password = options.password or config("sync", "garmin", "password") @@ -310,10 +319,10 @@ async def sem_task(task): if email == None or password == None: print("Missing argument nor valid configuration file") sys.exit(1) - + folder = FOLDER_DICT.get(options.download_file_type, "gpx") # make gpx dir - if not os.path.exists(GPX_FOLDER): - os.mkdir(GPX_FOLDER) + if not os.path.exists(folder): + os.mkdir(folder) async def download_new_activities(): client = Garmin(email, password, auth_domain, is_only_running) @@ -322,18 +331,26 @@ async def download_new_activities(): # because I don't find a para for after time, so I use garmin-id as filename # to find new run to generage downloaded_ids = [ - i.split(".")[0] for i in os.listdir(GPX_FOLDER) if not i.startswith(".") + i.split(".")[0] for i in os.listdir(folder) if not i.startswith(".") ] activity_ids = await get_activity_id_list(client) to_generate_garmin_ids = list(set(activity_ids) - set(downloaded_ids)) print(f"{len(to_generate_garmin_ids)} new activities to be downloaded") start_time = time.time() + file_type = options.download_file_type await gather_with_concurrency( - 10, [download_garmin_gpx(client, id) for id in to_generate_garmin_ids] + 10, + [ + download_garmin_data(client, id, file_type=file_type) + for id in 
to_generate_garmin_ids + ], ) print(f"Download finished. Elapsed {time.time()-start_time} seconds") - make_activities_file(SQL_FILE, GPX_FOLDER, JSON_FILE) + + make_activities_file( + SQL_FILE, folder, JSON_FILE, file_suffix=options.download_file_type + ) await client.req.aclose() loop = asyncio.get_event_loop() diff --git a/scripts/garmin_to_garmin.py b/scripts/garmin_to_garmin.py new file mode 100644 index 00000000000..5f0981abcd7 --- /dev/null +++ b/scripts/garmin_to_garmin.py @@ -0,0 +1,108 @@ +import argparse +import asyncio +from datetime import datetime, timedelta +from io import BytesIO +from xml.etree import ElementTree + +import gpxpy +import gpxpy.gpx +from config import STRAVA_GARMIN_TYPE_DICT +from garmin_sync import Garmin +from rich import print +from strava_sync import run_strava_sync + +from utils import make_strava_client + +"""" +TODO..................not finish yet + +""" + + +def generate_strava_run_points(start_time, strava_streams): + """ + strava return same len data list + """ + if not (strava_streams.get("time") and strava_streams.get("latlng")): + return None + points_dict_list = [] + time_list = strava_streams["time"].data + time_list = [start_time + timedelta(seconds=int(i)) for i in time_list] + latlng_list = strava_streams["latlng"].data + + for i, j in zip(time_list, latlng_list): + points_dict_list.append( + { + "latitude": j[0], + "longitude": j[1], + "time": i, + } + ) + # add heart rate + if strava_streams.get("heartrate"): + heartrate_list = strava_streams.get("heartrate").data + for index, h in enumerate(heartrate_list): + points_dict_list[index]["heart_rate"] = h + # add altitude + if strava_streams.get("altitude"): + heartrate_list = strava_streams.get("altitude").data + for index, h in enumerate(heartrate_list): + points_dict_list[index]["elevation"] = h + return points_dict_list + + +def make_gpx_from_points(title, points_dict_list): + gpx = gpxpy.gpx.GPX() + gpx.nsmap["gpxtpx"] = 
"http://www.garmin.com/xmlschemas/TrackPointExtension/v1" + gpx_track = gpxpy.gpx.GPXTrack() + gpx_track.name = title + gpx_track.type = "Run" + gpx.tracks.append(gpx_track) + + # Create first segment in our GPX track: + gpx_segment = gpxpy.gpx.GPXTrackSegment() + gpx_track.segments.append(gpx_segment) + for p in points_dict_list: + if p.get("heart_rate") is None: + point = gpxpy.gpx.GPXTrackPoint(**p) + else: + heart_rate_num = p.pop("heart_rate") + point = gpxpy.gpx.GPXTrackPoint(**p) + gpx_extension_hr = ElementTree.fromstring( + f""" + {heart_rate_num} + + """ + ) + point.extensions.append(gpx_extension_hr) + gpx_segment.points.append(point) + return gpx.to_xml() + + +async def upload_to_activities(garmin_client): + last_activity = await garmin_client.get_activities(0, 1) + print(last_activity) + files_list = [] + await garmin_client.upload_activities(files_list) + return files_list + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("garmin_email", nargs="?", help="email of garmin") + parser.add_argument("garmin_password", nargs="?", help="password of garmin") + parser.add_argument( + "--is-cn", + dest="is_cn", + action="store_true", + help="if garmin accout is cn", + ) + options = parser.parse_args() + garmin_auth_domain = "CN" if options.is_cn else "" + + garmin_client = Garmin( + options.garmin_email, options.garmin_password, garmin_auth_domain + ) + loop = asyncio.get_event_loop() + future = asyncio.ensure_future(upload_to_activities(garmin_client)) + loop.run_until_complete(future) diff --git a/scripts/gen_svg.py b/scripts/gen_svg.py index 14adda647a3..49e966f2ea1 100755 --- a/scripts/gen_svg.py +++ b/scripts/gen_svg.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import logging import os @@ -9,11 +6,11 @@ import appdirs from config import SQL_FILE from gpxtrackposter import ( - circular_drawer, - github_drawer, - grid_drawer, - poster, - track_loader, + circular_drawer, + 
github_drawer, + grid_drawer, + poster, + track_loader, ) from gpxtrackposter.exceptions import ParameterError, PosterError @@ -141,12 +138,6 @@ def main(): default="metric", help='Distance units; "metric", "imperial" (default: "metric").', ) - args_parser.add_argument( - "--clear-cache", - dest="clear_cache", - action="store_true", - help="Clear the track cache.", - ) args_parser.add_argument( "--verbose", dest="verbose", action="store_true", help="Verbose logging." ) @@ -206,17 +197,11 @@ def main(): loader = track_loader.TrackLoader() if args.use_localtime: loader.use_local_time = True - loader.cache_dir = os.path.join( - appdirs.user_cache_dir(__app_name__, __app_author__), "tracks" - ) if not loader.year_range.parse(args.year): raise ParameterError(f"Bad year range: {args.year}.") loader.special_file_names = args.special loader.min_length = args.min_distance * 1000 - if args.clear_cache: - print("Clearing cache...") - loader.clear_cache() if args.from_db: # for svg from db here if you want gpx please do not use --from-db @@ -225,8 +210,6 @@ def main(): else: tracks = loader.load_tracks(args.gpx_dir) if not tracks: - if not args.clear_cache: - print("No tracks found.") return is_circular = args.type == "circular" diff --git a/scripts/generator/__init__.py b/scripts/generator/__init__.py index eec650b8ead..ece0ab46244 100644 --- a/scripts/generator/__init__.py +++ b/scripts/generator/__init__.py @@ -61,9 +61,9 @@ def sync(self, force: bool = False): sys.stdout.flush() self.session.commit() - def sync_from_gpx(self, gpx_dir): + def sync_from_data_dir(self, data_dir, file_suffix="gpx"): loader = track_loader.TrackLoader() - tracks = loader.load_tracks(gpx_dir) + tracks = loader.load_tracks(data_dir, file_suffix=file_suffix) print(f"load {len(tracks)} tracks") if not tracks: print("No tracks found.") diff --git a/scripts/gpx_sync.py b/scripts/gpx_sync.py index 9d8287ba7bd..caf23fef4d7 100755 --- a/scripts/gpx_sync.py +++ b/scripts/gpx_sync.py @@ -1,5 +1,3 @@ 
-#!/usr/bin/env python3 -# -*- coding: utf-8 -*- """ If you do not want bind any account Only the gpx files in GPX_OUT sync diff --git a/scripts/gpxtrackposter/track.py b/scripts/gpxtrackposter/track.py index 52686247fd2..2cf5401b0eb 100644 --- a/scripts/gpxtrackposter/track.py +++ b/scripts/gpxtrackposter/track.py @@ -1,17 +1,18 @@ """Create and maintain info about a given activity track (corresponding to one GPX file).""" # Copyright 2016-2019 Florian Pigorsch & Contributors. All rights reserved. -# +# 2019-now yihong0618 Florian Pigorsch & Contributors. All rights reserved. # Use of this source code is governed by a MIT-style # license that can be found in the LICENSE file. import datetime -import json import os from collections import namedtuple import gpxpy as mod_gpxpy import polyline import s2sphere as s2 +from rich import print +from tcxparser import TCXParser from .exceptions import TrackLoadError from .utils import parse_datetime_to_local @@ -37,6 +38,9 @@ def __init__(self): self.start_latlng = [] def load_gpx(self, file_name): + """ + TODO refactor with load_tcx to one function + """ try: self.file_names = [os.path.basename(file_name)] # Handle empty gpx files @@ -52,6 +56,20 @@ def load_gpx(self, file_name): print(str(e)) pass + def load_tcx(self, file_name): + try: + self.file_names = [os.path.basename(file_name)] + # Handle empty tcx files + # (for example, treadmill runs pulled via garmin-connect-export) + if os.path.getsize(file_name) == 0: + raise TrackLoadError("Empty TCX file") + self._load_tcx_data(TCXParser(file_name)) + except Exception as e: + print( + f"Something went wrong when loading TCX. 
for file {self.file_names[0]}, we just ignore this file and continue" + ) + print(str(e)) + def load_from_db(self, activity): # use strava as file name self.file_names = [str(activity.run_id)] @@ -73,13 +91,48 @@ def bbox(self): bbox = bbox.union(s2.LatLngRect.from_point(latlng.normalized())) return bbox + @staticmethod + def __make_run_id(time_stamp): + return int(datetime.datetime.timestamp(time_stamp) * 1000) + + def _load_tcx_data(self, tcx): + self.length = float(tcx.distance) + time_values = tcx.time_objects() + if not time_values: + raise TrackLoadError("Track is empty.") + + self.start_time, self.end_time = time_values[0], time_values[-1] + moving_time = int(self.end_time.timestamp() - self.start_time.timestamp()) + self.run_id = self.__make_run_id(self.start_time) + self.average_heartrate = tcx.hr_avg + polyline_container = [] + position_values = tcx.position_values() + line = [s2.LatLng.from_degrees(p[0], p[1]) for p in position_values] + self.polylines.append(line) + polyline_container.extend([[p[0], p[1]] for p in position_values]) + self.polyline_container = polyline_container + self.start_time_local, self.end_time_local = parse_datetime_to_local( + self.start_time, self.end_time, polyline_container[0] + ) + # get start point + try: + self.start_latlng = start_point(*polyline_container[0]) + except: + pass + self.polyline_str = polyline.encode(polyline_container) + self.moving_dict = { + "distance": self.length, + "moving_time": datetime.timedelta(seconds=moving_time), + "elapsed_time": datetime.timedelta( + seconds=moving_time + ), # FIXME for now make it same as moving time + "average_speed": self.length / moving_time if moving_time else 0, + } + def _load_gpx_data(self, gpx): self.start_time, self.end_time = gpx.get_time_bounds() # use timestamp as id - self.run_id = int(datetime.datetime.timestamp(self.start_time) * 1000) - self.start_time_local, self.end_time_local = parse_datetime_to_local( - self.start_time, self.end_time, gpx - ) + self.run_id 
= self.__make_run_id(self.start_time) if self.start_time is None: raise TrackLoadError("Track has no start time.") if self.end_time is None: @@ -113,6 +166,9 @@ def _load_gpx_data(self, gpx): self.start_latlng = start_point(*polyline_container[0]) except: pass + self.start_time_local, self.end_time_local = parse_datetime_to_local( + self.start_time, self.end_time, polyline_container[0] + ) self.polyline_str = polyline.encode(polyline_container) self.average_heartrate = ( sum(heart_rate_list) / len(heart_rate_list) if heart_rate_list else None @@ -142,60 +198,6 @@ def append(self, other): ) pass - def load_cache(self, cache_file_name): - try: - with open(cache_file_name) as data_file: - data = json.load(data_file) - self.start_time = datetime.datetime.strptime( - data["start"], "%Y-%m-%d %H:%M:%S" - ) - self.end_time = datetime.datetime.strptime( - data["end"], "%Y-%m-%d %H:%M:%S" - ) - self.start_time_local = datetime.datetime.strptime( - data["start_local"], "%Y-%m-%d %H:%M:%S" - ) - self.end_time_local = datetime.datetime.strptime( - data["end_local"], "%Y-%m-%d %H:%M:%S" - ) - self.length = float(data["length"]) - self.polylines = [] - for data_line in data["segments"]: - self.polylines.append( - [ - s2.LatLng.from_degrees(float(d["lat"]), float(d["lng"])) - for d in data_line - ] - ) - except Exception as e: - raise TrackLoadError("Failed to load track data from cache.") from e - - def store_cache(self, cache_file_name): - """Cache the current track""" - dir_name = os.path.dirname(cache_file_name) - if not os.path.isdir(dir_name): - os.makedirs(dir_name) - with open(cache_file_name, "w") as json_file: - lines_data = [] - for line in self.polylines: - lines_data.append( - [ - {"lat": latlng.lat().degrees, "lng": latlng.lng().degrees} - for latlng in line - ] - ) - json.dump( - { - "start": self.start_time.strftime("%Y-%m-%d %H:%M:%S"), - "end": self.end_time.strftime("%Y-%m-%d %H:%M:%S"), - "start_local": self.start_time_local.strftime("%Y-%m-%d %H:%M:%S"), - 
"end_local": self.end_time_local.strftime("%Y-%m-%d %H:%M:%S"), - "length": self.length, - "segments": lines_data, - }, - json_file, - ) - @staticmethod def _get_moving_data(gpx): moving_data = gpx.get_moving_data() diff --git a/scripts/gpxtrackposter/track_loader.py b/scripts/gpxtrackposter/track_loader.py index 2c7950c545a..888ba34ed72 100644 --- a/scripts/gpxtrackposter/track_loader.py +++ b/scripts/gpxtrackposter/track_loader.py @@ -1,19 +1,18 @@ -"""Handle parsing of GPX files and writing/loading of cached data""" +"""Handle parsing of GPX files""" # Copyright 2016-2019 Florian Pigorsch & Contributors. All rights reserved. +# 2019-now Yihong0618 # # Use of this source code is governed by a MIT-style # license that can be found in the LICENSE file. -import hashlib import logging import os import sys sys.path.append(os.path.join(os.path.dirname(__file__), "..")) import concurrent.futures -import shutil from generator.db import Activity, init_db @@ -31,75 +30,46 @@ def load_gpx_file(file_name): return t -def load_cached_track_file(cache_file_name, file_name): - """Load an individual track from cache files""" - try: - t = Track() - t.load_cache(cache_file_name) - t.file_names = [os.path.basename(file_name)] - log.info(f"Loaded track {file_name} from cache file {cache_file_name}") - return t - except Exception as e: - raise TrackLoadError("Failed to load track from cache.") from e +def load_tcx_file(file_name): + """Load an individual TCX file as a track by using Track.load_tcx()""" + t = Track() + t.load_tcx(file_name) + return t class TrackLoader: - """Handle the loading of tracks from cache and/or GPX files - + """ Attributes: min_length: All tracks shorter than this value are filtered out. special_file_names: Tracks marked as special in command line args year_range: All tracks outside of this range will be filtered out. 
- cache_dir: Directory used to store cached tracks Methods: - clear_cache: Remove cache directory - load_tracks: Load all data from cache and GPX files + load_tracks: Load all data from GPX files """ def __init__(self): self.min_length = 100 self.special_file_names = [] self.year_range = YearRange() - self.cache_dir = None - self._cache_file_names = {} + self.load_func_dict = { + "gpx": load_gpx_file, + "tcx": load_tcx_file, + } - def clear_cache(self): - """Remove cache directory, if it exists""" - if os.path.isdir(self.cache_dir): - log.info(f"Removing cache dir: {self.cache_dir}") - try: - shutil.rmtree(self.cache_dir) - except OSError as e: - log.error(f"Failed: {e}") - - def load_tracks(self, base_dir): - """Load tracks base_dir and return as a List of tracks""" - file_names = [x for x in self._list_gpx_files(base_dir)] - log.info(f"GPX files: {len(file_names)}") - print(len(file_names)) + def load_tracks(self, data_dir, file_suffix): + """Load tracks data_dir and return as a List of tracks""" + file_names = [x for x in self._list_data_files(data_dir, file_suffix)] + print(f"{file_suffix.upper()} files: {len(file_names)}") tracks = [] - # load track from cache - cached_tracks = {} - # self.clear_cache() - if self.cache_dir: - log.info(f"Trying to load {len(file_names)} track(s) from cache...") - cached_tracks = self._load_tracks_from_cache(file_names) - log.info(f"Loaded tracks from cache: {len(cached_tracks)}") - tracks = list(cached_tracks.values()) - - # load remaining gpx files - remaining_file_names = [f for f in file_names if f not in cached_tracks] - if remaining_file_names: - log.info( - f"Trying to load {len(remaining_file_names)} track(s) from GPX files; this may take a while..." 
- ) - loaded_tracks = self._load_tracks(remaining_file_names) - tracks.extend(loaded_tracks.values()) - log.info(f"Conventionally loaded tracks: {len(loaded_tracks)}") - self._store_tracks_to_cache(loaded_tracks) + loaded_tracks = self._load_data_tracks( + file_names, self.load_func_dict.get(file_suffix, load_gpx_file) + ) + + tracks.extend(loaded_tracks.values()) + log.info(f"Conventionally loaded tracks: {len(loaded_tracks)}") tracks = self._filter_tracks(tracks) @@ -123,9 +93,9 @@ def load_tracks_from_db(self, sql_file, is_grid=False): t = Track() t.load_from_db(activity) tracks.append(t) - print(len(tracks)) + print(f"All tracks: {len(tracks)}") tracks = self._filter_tracks(tracks) - print(len(tracks)) + print(f"After filter tracks: {len(tracks)}") # merge tracks that took place within one hour tracks = self._merge_tracks(tracks) return [t for t in tracks if t.length >= self.min_length] @@ -167,11 +137,14 @@ def _merge_tracks(tracks): return merged_tracks @staticmethod - def _load_tracks(file_names): + def _load_data_tracks(file_names, load_func=load_gpx_file): + """ + TODO refactor with _load_tcx_tracks + """ tracks = {} with concurrent.futures.ProcessPoolExecutor() as executor: future_to_file_name = { - executor.submit(load_gpx_file, file_name): file_name + executor.submit(load_func, file_name): file_name for file_name in file_names } for future in concurrent.futures.as_completed(future_to_file_name): @@ -182,69 +155,16 @@ def _load_tracks(file_names): log.error(f"Error while loading {file_name}: {e}") else: tracks[file_name] = t - - return tracks - - def _load_tracks_from_cache(self, file_names): - tracks = {} - with concurrent.futures.ProcessPoolExecutor() as executor: - future_to_file_name = { - executor.submit( - load_cached_track_file, - self._get_cache_file_name(file_name), - file_name, - ): file_name - for file_name in file_names - } - for future in concurrent.futures.as_completed(future_to_file_name): - file_name = future_to_file_name[future] - try: - 
t = future.result() - except Exception: - # silently ignore failed cache load attempts - pass - else: - tracks[file_name] = t return tracks - def _store_tracks_to_cache(self, tracks): - if (not tracks) or (not self.cache_dir): - return - - log.info(f"Storing {len(tracks)} track(s) to cache...") - for (file_name, t) in tracks.items(): - try: - t.store_cache(self._get_cache_file_name(file_name)) - except Exception as e: - log.error(f"Failed to store track {file_name} to cache: {e}") - else: - log.info(f"Stored track {file_name} to cache") - @staticmethod - def _list_gpx_files(base_dir): - base_dir = os.path.abspath(base_dir) - if not os.path.isdir(base_dir): - raise ParameterError(f"Not a directory: {base_dir}") - for name in os.listdir(base_dir): + def _list_data_files(data_dir, file_suffix): + data_dir = os.path.abspath(data_dir) + if not os.path.isdir(data_dir): + raise ParameterError(f"Not a directory: {data_dir}") + for name in os.listdir(data_dir): if name.startswith("."): continue - path_name = os.path.join(base_dir, name) - if name.endswith(".gpx") and os.path.isfile(path_name): + path_name = os.path.join(data_dir, name) + if name.endswith(f".{file_suffix}") and os.path.isfile(path_name): yield path_name - - def _get_cache_file_name(self, file_name): - assert self.cache_dir - - if file_name in self._cache_file_names: - return self._cache_file_names[file_name] - - try: - checksum = hashlib.sha256(open(file_name, "rb").read()).hexdigest() - except PermissionError as e: - raise TrackLoadError("Failed to compute checksum (bad permissions).") from e - except Exception as e: - raise TrackLoadError("Failed to compute checksum.") from e - - cache_file_name = os.path.join(self.cache_dir, f"{checksum}.json") - self._cache_file_names[file_name] = cache_file_name - return cache_file_name diff --git a/scripts/gpxtrackposter/utils.py b/scripts/gpxtrackposter/utils.py index 9f9d36d5a9b..755a9284094 100644 --- a/scripts/gpxtrackposter/utils.py +++ 
b/scripts/gpxtrackposter/utils.py @@ -111,19 +111,17 @@ def interpolate_color(color1: str, color2: str, ratio: float) -> str: return c3.hex_l -def format_float(f) -> str: +def format_float(f): return locale.format_string("%.1f", f) -def parse_datetime_to_local( - start_time: datetime, end_time: datetime, gpx: "mod_gpxpy.gpx.GPX" -) -> Tuple[datetime, datetime]: +def parse_datetime_to_local(start_time, end_time, point): # just parse the start time, because start/end maybe different offset = start_time.utcoffset() if offset: return start_time + offset, end_time + offset tf = TimezoneFinder() - lat, _, lng, _ = list(gpx.get_bounds()) + lat, lng = point timezone = tf.timezone_at(lng=lng, lat=lat) tc_offset = datetime.now(pytz.timezone(timezone)).utcoffset() return start_time + tc_offset, end_time + tc_offset diff --git a/scripts/joyrun_sync.py b/scripts/joyrun_sync.py index 58b8ee75fdf..959388c987b 100755 --- a/scripts/joyrun_sync.py +++ b/scripts/joyrun_sync.py @@ -1,7 +1,4 @@ # some code from https://github.com/fieryd/PKURunningHelper great thanks -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import json import os diff --git a/scripts/keep_sync.py b/scripts/keep_sync.py index bd183c07e84..5a64fd9819b 100755 --- a/scripts/keep_sync.py +++ b/scripts/keep_sync.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import base64 import json diff --git a/scripts/nike_sync.py b/scripts/nike_sync.py index 65c98cec163..891edece9c7 100644 --- a/scripts/nike_sync.py +++ b/scripts/nike_sync.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import json import logging @@ -13,15 +10,15 @@ import gpxpy.gpx import httpx from config import ( - BASE_TIMEZONE, - BASE_URL, - GPX_FOLDER, - JSON_FILE, - NIKE_CLIENT_ID, - OUTPUT_DIR, - SQL_FILE, - TOKEN_REFRESH_URL, - run_map, + BASE_TIMEZONE, + BASE_URL, + GPX_FOLDER, + JSON_FILE, + NIKE_CLIENT_ID, + OUTPUT_DIR, + SQL_FILE, + TOKEN_REFRESH_URL, + 
run_map, ) from generator import Generator diff --git a/scripts/nike_to_strava_sync.py b/scripts/nike_to_strava_sync.py index b77d8327f1b..20ce46ff610 100755 --- a/scripts/nike_to_strava_sync.py +++ b/scripts/nike_to_strava_sync.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import os import time @@ -8,9 +5,10 @@ from config import OUTPUT_DIR from nike_sync import make_new_gpxs, run -from utils import make_strava_client from strava_sync import run_strava_sync +from utils import make_strava_client + def get_last_time(client): """ @@ -38,7 +36,7 @@ def get_to_generate_files(last_time): return [ os.path.join(OUTPUT_DIR, i) for i in file_names - if not i.startswith(".") and int(i.split(".")[0]) > last_time + if i.endswith(".gpx") and int(i.split(".")[0]) > last_time ] diff --git a/scripts/strava_sync.py b/scripts/strava_sync.py index c258e740976..a9962ce4a5f 100755 --- a/scripts/strava_sync.py +++ b/scripts/strava_sync.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import argparse import json diff --git a/scripts/strava_to_garmin_sync.py b/scripts/strava_to_garmin_sync.py index be50c483e99..666a7b6433a 100644 --- a/scripts/strava_to_garmin_sync.py +++ b/scripts/strava_to_garmin_sync.py @@ -1,15 +1,16 @@ import argparse import asyncio +from datetime import datetime, timedelta from io import BytesIO -import gpxpy from xml.etree import ElementTree + +import gpxpy import gpxpy.gpx -from datetime import datetime, timedelta -from utils import make_strava_client +from config import STRAVA_GARMIN_TYPE_DICT from garmin_sync import Garmin from strava_sync import run_strava_sync -from config import STRAVA_GARMIN_TYPE_DICT +from utils import make_strava_client def generate_strava_run_points(start_time, strava_streams): diff --git a/scripts/tcx_sync.py b/scripts/tcx_sync.py new file mode 100755 index 00000000000..87e3f6812f8 --- /dev/null +++ b/scripts/tcx_sync.py @@ -0,0 +1,12 @@ +""" +If you do not want bind any 
account +Only sync the tcx files in TCX_OUT +""" + +from config import JSON_FILE, SQL_FILE, TCX_FOLDER + +from utils import make_activities_file + +if __name__ == "__main__": +    print("only sync tcx files in TCX_OUT") +    make_activities_file(SQL_FILE, TCX_FOLDER, JSON_FILE, file_suffix="tcx") diff --git a/scripts/tcx_to_strava_sync.py b/scripts/tcx_to_strava_sync.py new file mode 100755 index 00000000000..bffe2cccea2 --- /dev/null +++ b/scripts/tcx_to_strava_sync.py @@ -0,0 +1,89 @@ +import argparse +import os +import time +from datetime import datetime + +from config import TCX_FOLDER +from rich import print +from strava_sync import run_strava_sync +from tcxparser import TCXParser + +from utils import make_strava_client + + +def get_last_time(client): +    """ +    if there are no activities, or an exception occurs, return 0 +    """ +    try: +        activity = None +        activities = client.get_activities(limit=10) +        # for/else in python: the else branch runs only if the loop did not break. +        for a in activities: +            if a.type == "Run": +                activity = a +                break +        else: +            return 0 +        end_date = activity.start_date + activity.elapsed_time +        return int(datetime.timestamp(end_date)) +    except Exception as e: +        print(f"Something wrong to get last time err: {str(e)}") +        return 0 + + +def get_to_generate_files(last_time): +    """ +    return two values: one dict for upload +    and one sorted list for next time upload +    """ +    file_names = os.listdir(TCX_FOLDER) +    tcx_files = [ +        (TCXParser(os.path.join(TCX_FOLDER, i)), os.path.join(TCX_FOLDER, i)) +        for i in file_names +        if i.endswith(".tcx") +    ] +    tcx_files_dict = { +        int(i[0].time_objects()[0].timestamp()): i[1] +        for i in tcx_files +        if int(i[0].time_objects()[0].timestamp()) > last_time +    } + +    return sorted(list(tcx_files_dict.keys())), tcx_files_dict + + +def upload_tcx(client, file_name): +    with open(file_name, "rb") as f: +        r = client.upload_activity(activity_file=f, data_type="tcx") +    try: +        r.wait() +        print(file_name) +        print("===== waiting for upload ====") +        
print(r.status, f"strava id: {r.activity_id}") + except Exception as e: + print(str(e)) + + +if __name__ == "__main__": + if not os.path.exists(TCX_FOLDER): + os.mkdir(TCX_FOLDER) + parser = argparse.ArgumentParser() + parser.add_argument("client_id", help="strava client id") + parser.add_argument("client_secret", help="strava client secret") + parser.add_argument("strava_refresh_token", help="strava refresh token") + options = parser.parse_args() + # upload new tcx to strava + print("Need to load all tcx files maybe take some time") + client = make_strava_client( + options.client_id, options.client_secret, options.strava_refresh_token + ) + last_time = get_last_time(client) + to_upload_time_list, to_upload_dict = get_to_generate_files(last_time) + for i in to_upload_time_list: + tcx_file = to_upload_dict.get(i) + upload_tcx(client, tcx_file) + + time.sleep(10) + run_strava_sync( + options.client_id, options.client_secret, options.strava_refresh_token + ) diff --git a/scripts/utils.py b/scripts/utils.py index 23ba37775eb..d860cb88e0e 100644 --- a/scripts/utils.py +++ b/scripts/utils.py @@ -1,12 +1,9 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- - import json from datetime import datetime -from stravalib.client import Client import pytz from generator import Generator +from stravalib.client import Client def adjust_time(time, tz_name): @@ -19,9 +16,9 @@ def adjust_time_to_utc(time, tz_name): return time - tc_offset -def make_activities_file(sql_file, gpx_dir, json_file): +def make_activities_file(sql_file, data_dir, json_file, file_suffix="gpx"): generator = Generator(sql_file) - generator.sync_from_gpx(gpx_dir) + generator.sync_from_data_dir(data_dir, file_suffix=file_suffix) activities_list = generator.load() with open(json_file, "w") as f: json.dump(activities_list, f)