<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type="text/xsl" href="static/style.xsl"?><OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd"><responseDate>2026-04-27T14:36:55Z</responseDate><request verb="GetRecord" identifier="oai:uvadoc.uva.es:10324/82164" metadataPrefix="etdms">https://uvadoc.uva.es/oai/request</request><GetRecord><record><header><identifier>oai:uvadoc.uva.es:10324/82164</identifier><datestamp>2026-03-25T08:00:29Z</datestamp><setSpec>com_10324_1191</setSpec><setSpec>com_10324_931</setSpec><setSpec>com_10324_894</setSpec><setSpec>col_10324_1379</setSpec></header><metadata><thesis xmlns="http://www.ndltd.org/standards/metadata/etdms/1.0/" xmlns:doc="http://www.lyncode.com/xoai" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.ndltd.org/standards/metadata/etdms/1.0/ http://www.ndltd.org/standards/metadata/etdms/1.0/etdms.xsd">
<title>Comparative evaluation of monocular deep learning pose estimation and IMU-based systems for remote kinematic assessment</title>
<creator>Medrano Paredes, Mario</creator>
<creator>Fernández González, Carmen</creator>
<creator>Saoudi, Hichem</creator>
<creator>Pozo Catá, Jorge</creator>
<creator>Díaz Pernas, Francisco Javier</creator>
<creator>Martínez Zarzuela, Mario</creator>
<description>Producción Científica</description>
<description>Remote assessment of human motion is increasingly pivotal in clinical, sports, and rehabilitation contexts, particularly given the rise of telemedicine. While traditional motion capture systems deliver high-precision data, their dependence on expensive equipment and controlled laboratory conditions limits their broader application. Advances in computer vision have enabled the development of monocular video-based 3D human pose estimation methods, which leverage ubiquitous camera technologies to offer cost-effective and accessible kinematic analysis. This study systematically benchmarks joint angles derived from both video-based models and IMUs, addressing the gap in comparative evaluations under realistic, out-of-the-lab conditions.</description>
<date>2026-01-26</date>
<date>2026-01-26</date>
<date>2025</date>
<date>9999-01-01</date>
<type>info:eu-repo/semantics/article</type>
<identifier>Gait &amp; Posture, 121. doi.org/10.1016/j.gaitpost.2025.07.234</identifier>
<identifier>0966-6362</identifier>
<identifier>https://uvadoc.uva.es/handle/10324/82164</identifier>
<identifier>10.1016/j.gaitpost.2025.07.234</identifier>
<identifier>Gait &amp; Posture</identifier>
<identifier>121</identifier>
<language>eng</language>
<relation>https://www.sciencedirect.com/science/article/pii/S0966636225004904</relation>
<rights>info:eu-repo/semantics/embargoedAccess</rights>
<publisher>Elsevier</publisher>
</thesis></metadata></record></GetRecord></OAI-PMH>