@misc{10481/106414,
  year         = {2025},
  month        = {9},
  url          = {https://hdl.handle.net/10481/106414},
  abstract     = {Human movement analysis, driven by computer vision and pose tracking technologies, is gaining acceptance in healthcare, rehabilitation, sports, and daily activity monitoring. While most approaches focus on qualitative analysis (e.g., pattern recognition), objective motion quantification can provide valuable insights for diagnosis, progress tracking, and performance assessment. This paper introduces PyBodyTrack, a Python library for motion quantification using mathematical methods in real-time and pre-recorded videos. It simplifies video management and integrates with pose estimators such as MediaPipe, YOLO, and OpenPose. PyBodyTrack enables seamless motion quantification through standardized metrics, facilitating its integration into various applications.},
  organization = {European Union’s Horizon 2020 - Marie Skłodowska-Curie (Grant Agreement No. 956394)},
  publisher    = {Elsevier},
  keywords     = {Motion quantification, Pose tracking, Computer vision},
  title        = {{PyBodyTrack}: A {Python} library for multi-algorithm motion quantification and tracking in videos},
  doi          = {10.1016/j.softx.2025.102272},
  author       = {Ruiz-Zafra, Ángel and Pigueiras-del-Real, Janet and Heredia Jiménez, José María and Hussain Shah, Syed Taimoor and Hussain Shah, Syed Adil and C. Gontard, Lionel}
}