<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Serious Games</journal-id><journal-id journal-id-type="publisher-id">games</journal-id><journal-id journal-id-type="index">15</journal-id><journal-title>JMIR Serious Games</journal-title><abbrev-journal-title>JMIR Serious Games</abbrev-journal-title><issn pub-type="epub">2291-9279</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v13i1e75823</article-id><article-id pub-id-type="doi">10.2196/75823</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Development and Evaluation of a Monocular Camera&#x2013;Based Mobile Exergame for at-Home Intervention in Individuals at High Risk of Type 2 Diabetes: Randomized Controlled Trial</article-title></title-group><contrib-group><contrib contrib-type="author"><name name-style="western"><surname>Zhao</surname><given-names>Jianan</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Zhu</surname><given-names>Dian</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Wang</surname><given-names>Yanan</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name 
name-style="western"><surname>Xia</surname><given-names>Yaqin</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Zhu</surname><given-names>Zeshi</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Yu</surname><given-names>Jihong</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib></contrib-group><aff id="aff1"><institution>College of Fashion and Design, Donghua University</institution><addr-line>East Yanan Road 1882</addr-line><addr-line>Shanghai</addr-line><country>China</country></aff><aff id="aff2"><institution>School of Design, Shanghai Jiao Tong University</institution><addr-line>Shanghai</addr-line><country>China</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Coristine</surname><given-names>Andrew</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Liu</surname><given-names>Chenyang</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Qian</surname><given-names>Yefu</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Jihong Yu, PhD, College of Fashion and Design, Donghua University, East Yanan Road 1882, Shanghai, 200240, China, 86 18200484800; <email>18201855821@163.com</email></corresp></author-notes><pub-date pub-type="collection"><year>2025</year></pub-date><pub-date pub-type="epub"><day>12</day><month>12</month><year>2025</year></pub-date><volume>13</volume><elocation-id>e75823</elocation-id><history><date date-type="received"><day>08</day><month>05</month><year>2025</year></date><date date-type="rev-recd"><day>27</day><month>06</month><year>2025</year></date><date 
date-type="accepted"><day>27</day><month>06</month><year>2025</year></date></history><copyright-statement>&#x00A9; Jianan Zhao, Dian Zhu, Yanan Wang, Yaqin Xia, Zeshi Zhu, Jihong Yu. Originally published in JMIR Serious Games (<ext-link ext-link-type="uri" xlink:href="https://games.jmir.org">https://games.jmir.org</ext-link>), 12.12.2025. </copyright-statement><copyright-year>2025</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Serious Games, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://games.jmir.org">https://games.jmir.org</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://games.jmir.org/2025/1/e75823"/><abstract><sec><title>Background</title><p>Exergames have emerged as effective interventions for promoting physical activity and preventing type 2 diabetes (T2D). Kinect-based exergames have demonstrated improvements in exercise adherence and health outcomes, but their high cost and reliance on specialized hardware hinder widespread home-based adoption. 
Recent advances in computer vision now enable monocular camera&#x2013;based systems, offering a potentially cost-effective and scalable alternative for promoting physical activity at home.</p></sec><sec><title>Objective</title><p>This study aimed to evaluate the feasibility and user experience of monocular camera&#x2013;based exergames as a home-based intervention for individuals at risk for T2D.</p></sec><sec sec-type="methods"><title>Methods</title><p>Forty-five community-dwelling individuals at high risk for T2D (mean age 47.12, SD 6.92 years) were recruited and randomized into three groups (n=15 each): (1) control group (traditional offline exercise), (2) Kinect group (Kinect-based exergame), and (3) monocular group (monocular camera&#x2013;based exergame). Participants engaged in a 10-minute intervention once daily for 7 consecutive days. Data were collected at 3 time points: baseline (exercise performance: heart rate and perceived fatigue), postintervention (exercise performance and user experience, including game experience and intrinsic motivation), and follow-up (user engagement and qualitative feedback). One-way ANOVA was used for data analysis.</p></sec><sec sec-type="results"><title>Results</title><p>Exercise performance was comparable across all groups, with no significant differences in heart rate (<italic>P</italic>=.76) or fatigue levels (<italic>P</italic>=.25). However, participants in the monocular group reported significantly lower fatigue than those in the control group (<italic>P</italic>=.04). Intrinsic motivation was significantly higher in both the Kinect (mean 35.13, SD 3.20) and monocular (mean 34.00, SD 4.41) groups than in the control group (mean 26.06, SD 1.87; <italic>P</italic>&#x003C;.001), with no significant difference between the 2 exergame groups (<italic>P</italic>=.44). 
While most user experience measures showed no significant differences, the monocular group reported a higher perceived challenge (mean 3.45, SD 0.51) than the Kinect group (mean 2.96, SD 0.39; <italic>P</italic>=.09). Additionally, the monocular group exhibited higher engagement, as evidenced by more frequent use, fewer challenges, and a greater intention to continue using the system.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>Monocular camera&#x2013;based exergame is a feasible and effective solution for promoting physical activity in individuals at risk for T2D. It offers motivational and experiential benefits similar to Kinect-based systems but requires less costly and more accessible equipment. These findings suggest that monocular systems have strong potential as scalable tools for home-based chronic disease prevention.</p></sec><sec><title>Trial Registration</title><p>ClinicalTrials.gov NCT06950528; https://clinicaltrials.gov/study/NCT06950528</p></sec></abstract><kwd-group><kwd>monocular camera</kwd><kwd>exergames</kwd><kwd>type 2 diabetes</kwd><kwd>gamification</kwd><kwd>disease prevention</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>Type 2 diabetes (T2D) is a chronic, progressive condition that poses a major global health challenge, with the number of affected individuals projected to reach 500 million by 2030 [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref2">2</xref>]. The mean age of T2D diagnosis is 45 years [<xref ref-type="bibr" rid="ref3">3</xref>]. Individuals older than 35 years, those with obesity, having a family history of diabetes, not physically active, and having prediabetes are considered to have high risk of T2D [<xref ref-type="bibr" rid="ref4">4</xref>]. 
Among high-risk populations, lifestyle interventions, particularly those that encourage physical activity, are crucial for delaying or preventing T2D onset [<xref ref-type="bibr" rid="ref5">5</xref>]. Despite the proven benefits of exercise, barriers such as lack of motivation, limited access to fitness resources, and insufficient guidance persist [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref7">7</xref>]. To address these challenges, technology-driven solutions such as digital exergames have emerged as promising tools to promote physical activity in an engaging and accessible way [<xref ref-type="bibr" rid="ref8">8</xref>].</p><p>Exergames combine exercise with interactive gaming and offer real-time feedback, personalized experiences, and gamified rewards, all of which enhance physical activity [<xref ref-type="bibr" rid="ref9">9</xref>]. Motion-sensing technology has been central to the development of exergames, with Microsoft&#x2019;s Kinect being a landmark innovation [<xref ref-type="bibr" rid="ref10">10</xref>]. Released in 2010, Kinect revolutionized motion-sensing games by enabling real-time body tracking and virtual avatar control through natural movements [<xref ref-type="bibr" rid="ref11">11</xref>]. Kinect-based exergames have been extensively studied for their impact on physical performance, therapy adherence, and clinical outcomes [<xref ref-type="bibr" rid="ref12">12</xref>]. However, due to reliance on specialized hardware, high costs, ineffective marketing strategies, and limited home usability, the Kinect v2 was discontinued in 2023.</p><p>In recent years, advancements in deep learning have enabled a new generation of motion-sensing technologies, including monocular human pose tracking, which relies solely on monocular cameras [<xref ref-type="bibr" rid="ref13">13</xref>]. 
This approach allows for real-time mapping of users&#x2019; movements in virtual environments without requiring expensive depth cameras or wearable devices [<xref ref-type="bibr" rid="ref14">14</xref>].</p><p>The monocular camera&#x2013;based exergame emerges as a low-cost, noncontact technology and offers a practical solution for home-based exergame training [<xref ref-type="bibr" rid="ref15">15</xref>]. Its potential for enhancing accessibility and usability makes it an attractive area for exploration. However, while the potential of monocular camera&#x2013;based pose tracking has been identified, its use in designing and evaluating exergames tailored to patients with T2D remains rarely explored, and the quality assessment of the exergame is rarely discussed as well [<xref ref-type="bibr" rid="ref16">16</xref>,<xref ref-type="bibr" rid="ref17">17</xref>]. This creates a need for accessible, scalable solutions that not only use cutting-edge technologies but also align with the practical needs and preferences of the target audience.</p><p>To address these gaps, this study introduces an exergame specifically designed for individuals at high risk of diabetes, using monocular camera technology to ensure affordability and ease of use. The game is informed by evidence-based physical activities to promote metabolic health and incorporates features to enhance user engagement and long-term adherence. This study aims to evaluate the effectiveness and user experience of monocular camera&#x2013;based exergame to understand its potential to replace Kinect in providing home-based exercise intervention. The primary outcome of this study is to demonstrate that there is no significant difference in physical activity levels between the experimental and control groups, as evidenced by the lack of variation in heart rate changes before and after the intervention. 
The secondary outcome focuses on evaluating the impact of the monocular camera&#x2013;based group on user experience, providing insights into the acceptability and usability of the intervention from the participants&#x2019; perspective.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Study Design</title><p>This 3-arm randomized controlled trial used a 1:1:1 allocation ratio to evaluate the effectiveness of an exergame designed using monocular camera technology for individuals at high risk of developing T2D. A total of 45 participants meeting specific inclusion criteria were recruited and randomly assigned to 1 of 3 groups: a control group performing traditional video-guided exercises, a Kinect-based exergame group, and a monocular camera&#x2013;based exergame group. The sample size was calculated based on a power analysis with an expected medium effect size of 0.48, where previous research reported medium effect sizes (0.40&#x2010;0.50) in evaluating usability and adherence in Kinect-based exergames for individuals with prediabetes [<xref ref-type="bibr" rid="ref18">18</xref>], aiming for 80% power and a significance level of .05 to detect meaningful differences between the groups. The intervention was conducted in a controlled setting, ensuring consistency across groups while collecting physiological and experiential data.</p></sec><sec id="s2-2"><title>Recruitment</title><p>A total of 45 individuals at high risk for T2D were recruited through offline outreach facilitated by community managers affiliated with local residential service centers in Minhang District, Shanghai, China. They supported recruitment by identifying eligible participants through existing health records and outreach channels. 
The inclusion criteria were as follows: (1) 35 years of age and older; (2) a score of 25 or higher on the Chinese Diabetes Risk Score; (3) community-dwelling individuals, not residing in assisted living or long-term care facilities; (4) physically capable of engaging in light to moderate exercise, as determined by self-report and physician clearance; (5) normal cognitive function that enables the participant to complete the experiment independently or with minimal assistance; and (6) written informed consent provided by participants or their families. Exclusion criteria included individuals with (1) diagnosis of T1D or T2D, (2) current participation in another exercise intervention study, (3) severe cognitive impairment with Mini-Mental State Examination score of &#x003C;24, and (4) major mobility limitations such as severe osteoarthritis and recent orthopedic surgery.</p></sec><sec id="s2-3"><title>Experimental Setting and Equipment</title><sec id="s2-3-1"><title>Experiment Group (Monocular Camera&#x2013;Based Exergame Design)</title><sec id="s2-3-1-1"><title>Overview</title><p>This intervention is an exercise game tailored for individuals at high risk of diabetes. It incorporates 6 movements adapted from the &#x201C;Diabetes Health Exercises,&#x201D; which include toe stretches, high knees, side arm raises, punching exercises, arm stretches with hip extensions, and elbow-to-chest expansions. These movements were selected for their simplicity, defined training objectives, and distinct characteristics that facilitate recognition and replication. The exercises aim to enhance peripheral blood circulation and target key muscle groups such as the triceps, biceps, gluteal muscles, and lower limbs. 
Each session is structured to last 10 minutes, comprising 5 minutes of aerobic exercise and 5 minutes of resistance training, thereby providing a balanced regimen that aligns with the physiological needs of the target population.</p><p>The game uses a virtual avatar to replicate users&#x2019; movements, thereby fostering an immersive and engaging experience (<xref ref-type="fig" rid="figure1">Figure 1</xref>). A monocular camera captures users&#x2019; movements in real time, which are analyzed through pose estimation algorithms and subsequently mapped onto the virtual avatar. Users interact with the game by following on-screen visual demonstrations, presented as either static images or animations, to perform the prescribed exercises. Movement accuracy is evaluated by the system, with scores awarded based on performance. To enhance user motivation and adherence, the game incorporates a reward system, where points earned through accurate execution can be redeemed for in-game rewards, such as unlocking background music, avatar customization options, and new virtual environments.</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>Interface design of the exergame. The exergame interface includes real-time avatar mirroring, performance feedback indicators, instructional visuals, and a reward progress tracker.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="games_v13i1e75823_fig01.png"/></fig></sec><sec id="s2-3-1-2"><title>Technical Framework of the Monocular Camera&#x2013;Based Exergame</title><p>The intervention is designed with a robust technical foundation, using Figma for interface design and Unity 2019.4 for game development. The motion capture system leverages the open-source ThreeDPoseTracker framework, based on VNect technology, which processes low-resolution red, green, and blue video streams using Unity&#x2019;s Barracuda system and a pretrained ONNX model. 
This framework accurately detects 10 critical body landmarks&#x2014;wrists, elbows, ankles, knees, waist, and head&#x2014;estimating their 3D spatial positions in real time.</p><p>The real-time pose correction process compares the user&#x2019;s detected body posture with a predefined regulated posture within the exergame. The correction formula involves angular calculations between vectors formed by detected body landmarks, as shown below. Specifically, each detected angle (&#x03B8;<sub>i</sub>) is compared with the corresponding regulated angle (&#x03B8;&#x2032;<sub>i</sub>), and the overall difference (<italic>D</italic><sub>2</sub>) is calculated as <italic>D</italic><sub>2</sub>=&#x2211;(&#x03B8;<sub>i</sub>&#x2212;&#x03B8;&#x2032;<sub>i</sub>). To quantify the degree of correction required, a recognition threshold parameter (<italic>D</italic><sub>st</sub>) and a baseline recognition rate parameter (<italic>S</italic><sub>st</sub>) are used. The final evaluation metric (<italic>S</italic>) is determined by the piecewise function:</p><disp-formula id="E1"><mml:math id="eqn1"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mi>S</mml:mi><mml:mo>=</mml:mo><mml:mrow><mml:mo>{</mml:mo><mml:mtable rowspacing="4pt" columnspacing="1em"><mml:mtr><mml:mtd><mml:msub><mml:mi>D</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">t</mml:mi></mml:mrow></mml:msub><mml:mo>&#x2212;</mml:mo><mml:mi>D</mml:mi><mml:mtext>&#x00A0;</mml:mtext><mml:mo>&#x00D7;</mml:mo><mml:mfrac><mml:mrow><mml:mn>100</mml:mn><mml:mo>&#x2212;</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">t</mml:mi></mml:mrow></mml:msub></mml:mrow><mml:msub><mml:mi>D</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">t</mml:mi></mml:mrow></mml:msub></mml:mfrac><mml:mo>+</mml:mo><mml:msub><mml:mi>S</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi 
mathvariant="normal">t</mml:mi></mml:mrow></mml:msub><mml:mo>,</mml:mo></mml:mtd><mml:mtd><mml:mn>0</mml:mn><mml:mo>&#x2264;</mml:mo><mml:mi>D</mml:mi><mml:mo>&#x003C;</mml:mo><mml:msub><mml:mi>D</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">t</mml:mi></mml:mrow></mml:msub></mml:mtd></mml:mtr><mml:mtr><mml:mtd><mml:mn>0</mml:mn><mml:mo>,</mml:mo></mml:mtd><mml:mtd><mml:mi>D</mml:mi><mml:mo>&#x003E;</mml:mo><mml:msub><mml:mi>D</mml:mi><mml:mrow><mml:mi mathvariant="normal">s</mml:mi><mml:mi mathvariant="normal">t</mml:mi></mml:mrow></mml:msub></mml:mtd></mml:mtr></mml:mtable><mml:mo>}</mml:mo></mml:mrow></mml:mrow></mml:mstyle></mml:math></disp-formula><p>This ensures that minor deviations from the regulated posture are dynamically adjusted, while significant deviations are flagged as errors. Temporal filtering techniques are applied to improve the smoothness and precision of pose estimations, which are then mapped to a skeletal model in Unity for seamless motion replication.</p><p>By integrating real-time motion capture and precise pose correction, the system provides immediate feedback, ensuring accurate and adaptive user movements during gameplay. This enhances the usability and efficacy of the intervention, making it a scientifically grounded and technologically advanced solution for promoting physical activity, particularly for diabetes prevention.</p></sec></sec></sec><sec id="s2-4"><title>Kinect Group</title><p>Kinect group features the same software functionality and design as the monocular camera&#x2013;based application, but the hardware is built using a Kinect sensor. The gamified platform is primarily developed and presented using Unity 3D. 
The Kinect 3D motion-sensing camera incorporates real-time dynamic capture and image recognition capabilities, offering new possibilities for interactive approaches to motion therapy.</p></sec><sec id="s2-5"><title>Control Group</title><p>The control group followed a set of 6 instructor-led exercise videos, which included crossover steps, high knees, lateral raises, punching movements, downward leg punches from a standing position, and elbow-to-chest expansions. Each session in the control group was designed to reflect the same aerobic and resistance exercise level as the Kinect group and the experimental group, ensuring comparability across groups in terms of physical exertion and activity content.</p></sec><sec id="s2-6"><title>Procedure</title><p>Participant recruitment was conducted from March to April 2024 in the Hongqiao Community. Individuals meeting the inclusion criteria were provided with comprehensive information about the study and signed written informed consent. Baseline demographic data were collected prior to random assignment using block randomization by researcher into one of three groups: (1) control group (traditional exercise control), (2) Kinect group (Kinect-based exergame), or (3) monocular group (monocular camera&#x2013;based exergame).</p><p>Three days prior to the intervention, all participants received instructional videos demonstrating the 6 targeted movements to be performed during the exercise sessions. Participants were instructed to review and practice these movements to ensure familiarity with the protocol.</p><p>On the day prior to the intervention, trained researchers and caregivers delivered and assisted in setting up the required equipment&#x2014;either a Kinect V2 sensor or an Aoni C33 monocular camera (manufactured by Shenzhen Aoni Electronic Industry Co, Ltd, resolution: 1920&#x00D7;1080 pixels, frame rate: 30 fps, field of view: 90&#x00B0;)&#x2014;in participants&#x2019; homes. 
Setup was scheduled in the afternoon, approximately 1&#x2010;2 hours post meal, to ensure participants&#x2019; comfort and safety during physical activity.</p><p>On the first intervention day, baseline data, including age, gender, T2D risk, resting heart rate, and perceived fatigue, were measured prior to the session. Each intervention session lasted 10 minutes and consisted of 2 components: 5 minutes of aerobic exercise followed by 5 minutes of resistance training. Participants in the control group followed a prerecorded instructional video demonstrating the 6 aerobic movements, followed by guided resistance exercises. The monocular group used a custom-developed exergame based on monocular camera technology. Real-time visual instructions were displayed on a television screen, guiding participants through the same movement protocol. The Kinect group used an exergame designed with Kinect-based skeletal tracking, offering a similar visual and instructional format as the monocular system.</p><p>During the aerobic component, participants performed 6 distinct movements, each lasting 50 seconds. For the resistance component, participants selected their preferred resistance level (ranging from 0.25 to 1.5 kg), and resistance bands were applied to the arms and legs under caregiver supervision. Caregivers and researchers refrained from intervening during the session unless directly requested by participants to minimize external influence.</p><p>Immediately following the first intervention, participants&#x2019; postexercise heart rate, perceived fatigue (via Borg Rating of Perceived Exertion Scale), intrinsic motivation (via Intrinsic Motivation Inventory [IMI]), and user experience (including perceived enjoyment, challenge, and usability) were assessed using standardized instruments.</p><p>Participants were then instructed to continue the 10-minute intervention daily for 7 consecutive days independently without additional notification from the researchers. 
They were asked to document their participation by recording short videos as proof of session completion. At the end of the 1-week period, researchers returned to collect the devices and retrieve adherence data, including the number of completed sessions and any user feedback.</p></sec><sec id="s2-7"><title>Outcome Measure</title><sec id="s2-7-1"><title>Exercise Performance</title><p>To evaluate the amount of exercise during the experiment, heart rate and perceived fatigue were assessed among participants at the baseline and immediately after the intervention. The postintervention heart rate was collected during the first 10 seconds postexercise to estimate physiological responses. Considering the general physical condition of individuals at high risk of diabetes, the study used 50%&#x2010;80% of the maximum heart rate as the target exercise intensity. The maximum heart rate was calculated using the formula: 208 &#x2212; (Age &#x00D7; 0.7). Additionally, perceived fatigue, a widely accepted parameter in exercise assessments for diabetes-related fields, was used to supplement the evaluation of physical activity. The Borg Rating of Perceived Exertion Scale [<xref ref-type="bibr" rid="ref19">19</xref>], ranging from 6 to 20, was used to assess subjective fatigue and compare perceived exercise intensity across groups.</p></sec><sec id="s2-7-2"><title>Perceived User Experience</title><p>At the first session of intervention, participants in the Kinect and monocular groups completed the Game Experience Questionnaire (GEQ) to assess their user experience [<xref ref-type="bibr" rid="ref20">20</xref>]. The GEQ was used to evaluate and compare the impact of different technologies on immersion and the overall experience of exergames. Furthermore, the Interest/Enjoyment Subscale of the IMI was administered to participants in the control group, Kinect group, and monocular group [<xref ref-type="bibr" rid="ref21">21</xref>]. 
This subscale evaluated and compared intrinsic motivation and enjoyment associated with physical activity across the 3 groups. At the end of the 1-week experiment period, user engagement was further quantified by tracking the frequency of intervention use over the 1-week period, based on participants&#x2019; video-recorded usage logs. To complement the quantitative data, a brief qualitative assessment was conducted at the end of the intervention week. Participants responded to open-ended questions such as: &#x201C;Did you experience any issues while using the exergame?&#x201D; and &#x201C;Would you be interested in continuing to use the exergame in the future?&#x201D;</p></sec></sec><sec id="s2-8"><title>Data Analysis</title><p>Data collected from the physiological measurements (resting and exercise heart rates) and the questionnaires were analyzed to compare the differences between groups and evaluate the effectiveness of the interventions. ANOVA analysis was used to understand the differences of the data collected from 3 experiment groups [<xref ref-type="bibr" rid="ref22">22</xref>], and 2-tailed <italic>t</italic> test (for normally distributed result) or nonparametric test (for non&#x2013;normally distributed result) was used to understand the significance of differences between 2 groups. For pairwise comparisons between 2 groups, independent-samples 2-tailed <italic>t</italic> tests were applied for normally distributed data. For data not meeting normality assumptions, nonparametric tests were used: the Kruskal-Wallis test for comparisons among 3 groups and the Mann-Whitney <italic>U</italic> test for pairwise group comparisons. Statistical analyses were conducted using SPSS (version 26.0; IBM Corp), with a significance level set at <italic>P</italic>&#x003C;.05. 
Cohen <italic>f</italic> value is used to understand the effect size, and the critical points for small, medium, and large effect sizes are 0.10, 0.25, and 0.40, respectively.</p></sec><sec id="s2-9"><title>Ethical Considerations</title><p>This study was approved by the Shanghai Jiao Tong University&#x2019;s institutional review board (H20230160I), and all procedures were conducted in accordance with the ethical standards outlined in the Declaration of Helsinki. Due to the initial pilot-phase nature of the project, the trial was retrospectively registered at ClinicalTrials.gov (NCT06950528), prior to data analysis. All participants provided written informed consent prior to participation. The study ensured strict confidentiality of participant data, and no personally identifiable information was collected or disclosed. Participants received a US $7 shopping voucher after completing the study. No identifiable images or recordings were used in the manuscript or supplementary materials.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Overview</title><p>This study recruited 45 participants identified as being at high risk for T2D. The CONSORT (Consolidated Standards of Reporting Trials) flowchart is shown in <xref ref-type="fig" rid="figure2">Figure 2</xref>. Participants were aged between 40 and 54 years, with a mean age of 47.12 (SD 6.92) years. Participants were classified as high-risk based on the Chinese Diabetes Risk Score, with a mean score of 33.38 (SD 5.04). As shown in <xref ref-type="table" rid="table1">Table 1</xref>, an ANOVA analysis of baseline characteristics among the 3 participant groups revealed no statistically significant differences. 
No negative incidents were reported.</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>The CONSORT (Consolidated Standards of Reporting Trials) flowchart of the experiment.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="games_v13i1e75823_fig02.png"/></fig><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Baseline data of the recruited participants.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Characteristics</td><td align="left" valign="bottom">Control group (n=15)</td><td align="left" valign="bottom">Kinect group (n=15)</td><td align="left" valign="bottom">Monocular group (n=15)</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr></thead><tbody><tr><td align="left" valign="top">Gender (female), n (%)</td><td align="left" valign="top">8 (53.33%)</td><td align="left" valign="top">9 (60.0%)</td><td align="left" valign="top">8 (53.33%)</td><td align="left" valign="top">&#x2014;<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup></td></tr><tr><td align="left" valign="top">Age (year), mean (SD)</td><td align="left" valign="top">47.27 (6.59)</td><td align="left" valign="top">47.93 (6.85)</td><td align="left" valign="top">46.17 (7.33)</td><td align="left" valign="top">.80</td></tr><tr><td align="left" valign="top">T2D<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup> risk, mean (SD)</td><td align="left" valign="top">33.07 (5.02)</td><td align="left" valign="top">33.60 (4.75)</td><td align="left" valign="top">33.47 (5.36)</td><td align="left" valign="top">.84</td></tr><tr><td align="left" valign="top">Heart rate, mean (SD)</td><td align="left" valign="top">79.46 (10.23)</td><td align="left" valign="top">80.06 (10.01)</td><td align="left" valign="top">78.02 (8.66)</td><td align="left" valign="top">.48</td></tr><tr><td align="left" valign="top">Perceived fatigue, mean 
(SD)</td><td align="left" valign="top">9.27 (1.53)</td><td align="left" valign="top">9.41 (1.12)</td><td align="left" valign="top">9.28 (1.06)</td><td align="left" valign="top">.81</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>Not available.</p></fn><fn id="table1fn2"><p><sup>b</sup>T2D: type 2 diabetes.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s3-2"><title>Outcomes in Exercise Performance</title><p>The amount of exercise was assessed through heart rate and perceived fatigue. As shown in <xref ref-type="table" rid="table2">Table 2</xref>, the average heart rate for the control group, Kinect group, and monocular group was 86.34 (SD 7.48), 90.81 (SD 7.71), and 89.29 (SD 6.52), respectively. The between-group comparison revealed no significant difference in heart rate (<italic>P</italic>=.76; Cohen <italic>f</italic>=0.117), indicating a small effect size.</p><p>Exercise performance was evaluated using 2 indicators: heart rate as an objective physiological measure and perceived fatigue as a subjective self-report metric. As shown in <xref ref-type="table" rid="table2">Table 2</xref>, the average heart rates for the control, Kinect, and monocular groups were 86.34 (SD 7.48) bpm, 90.81 (SD 7.71) bpm, and 89.29 (SD 6.52) bpm, respectively. A one-way ANOVA revealed no statistically significant differences among the groups (<italic>F</italic><sub>2,42</sub>=0.28; <italic>P</italic>=.76), with a small effect size (Cohen <italic>f</italic>=0.117). These results suggest that the type of system used did not substantially impact cardiovascular exertion during the exercise session.</p><p>In terms of perceived fatigue, all participants reported increased fatigue after the intervention (<italic>P</italic>&#x003C;.05). As shown in <xref ref-type="fig" rid="figure3">Figure 3</xref>, the control group reported the highest average fatigue level (mean 11.60, SD 1.12). 
However, between-group comparisons did not reach statistical significance (<italic>P</italic>=.25; Cohen <italic>f</italic>=0.265), indicating a moderate effect size. Post hoc comparisons revealed that the monocular group experienced significantly lower fatigue than the control group (<italic>P</italic>=.04), while the Kinect group&#x2019;s difference was not significant (<italic>P</italic>=.15). These findings suggest a potential advantage of the monocular system in reducing perceived exertion during gameplay.</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Data collected before and after the intervention in 3 groups of participants.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Variable</td><td align="left" valign="bottom">Control group</td><td align="left" valign="bottom">Kinect group</td><td align="left" valign="bottom">Monocular group</td><td align="left" valign="bottom"><italic>F</italic> test (<italic>df</italic>)</td><td align="left" valign="bottom"><italic>P</italic> value</td><td align="left" valign="bottom">Cohen <italic>f</italic></td></tr></thead><tbody><tr><td align="left" valign="top">Heart rate</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">0.28 (2,42)</td><td align="left" valign="top">.76</td><td align="left" valign="top">0.117</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Data collected at baseline, mean (SD)</td><td align="left" valign="top">79.46 (10.23)</td><td align="left" valign="top">80.06 (10.01)</td><td align="left" valign="top">78.02 (8.66)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Data collected after experiment, mean 
(SD)</td><td align="left" valign="top">86.34 (7.48)</td><td align="left" valign="top">90.81 (7.71)</td><td align="left" valign="top">89.29 (6.52)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content><italic>P</italic> value</td><td align="left" valign="top">.038</td><td align="left" valign="top">.004</td><td align="left" valign="top">.003</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top">Perceived fatigue</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top">1.442 (2,42)</td><td align="left" valign="top">.25</td><td align="left" valign="top">0.265</td></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Data collected at baseline, mean (SD)</td><td align="left" valign="top">9.27 (1.53)</td><td align="left" valign="top">9.41 (1.12)</td><td align="left" valign="top">9.28 (1.06)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content>Data collected after experiment, mean (SD)</td><td align="left" valign="top">11.60 (1.12)</td><td align="left" valign="top">10.60 (1.45)</td><td align="left" valign="top">10.33 (1.49)</td><td align="left" valign="top"/><td align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top"><named-content content-type="indent">&#x00A0;&#x00A0;&#x00A0;&#x00A0;</named-content><italic>P</italic> value</td><td align="left" valign="top">&#x003C;.001</td><td align="left" valign="top">.007</td><td align="left" valign="top">.026</td><td align="left" valign="top"/><td 
align="left" valign="top"/><td align="left" valign="top"/></tr><tr><td align="left" valign="top">Inner motivation, mean (SD)</td><td align="left" valign="top">26.06 (1.87)</td><td align="left" valign="top">35.13 (3.20)</td><td align="left" valign="top">34.00 (4.41)</td><td align="left" valign="top">29.701 (2,42)</td><td align="left" valign="top">&#x003C;.001</td><td align="left" valign="top">1.204</td></tr></tbody></table></table-wrap><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>Comparison of perceived fatigue and intrinsic motivation across 3 participant groups.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="games_v13i1e75823_fig03.png"/></fig></sec><sec id="s3-3"><title>Outcomes in User Experience</title><p>User experience following the first intervention session was assessed through self-reported measures of intrinsic motivation and game experience. Intrinsic motivation scores, as measured by the Interest/Enjoyment subscale of the IMI, were as follows: control group (mean 26.06, SD 1.87), Kinect group (mean 35.13, SD 3.20), and monocular group (mean 34.00, SD 4.41). A one-way ANOVA revealed a significant between-group difference in intrinsic motivation (<italic>P</italic>&#x003C;.001), with a large effect size (Cohen <italic>f</italic>=1.204). Post hoc comparisons indicated that both the Kinect-based and monocular camera&#x2013;based exergames significantly enhanced intrinsic motivation compared with those of the control group. However, there was no statistically significant difference between the Kinect and monocular groups (<italic>P</italic>=.44), suggesting comparable motivational effects across the 2 exergaming modalities.</p><p>Game experience, as measured by the GEQ and shown in <xref ref-type="table" rid="table3">Table 3</xref>, revealed significant differences in perceived challenge. 
Participants in the monocular group reported a higher challenge score (mean 3.45, SD 0.39) than those in the Kinect group (mean 2.96, SD 0.51), with the difference reaching statistical significance (<italic>t</italic><sub>28</sub>=&#x2212;2.71; <italic>P</italic>=.01). One possible explanation is the Kinect system&#x2019;s superior motion-tracking capabilities, which may have facilitated a stronger sense of virtual body ownership and smoother gameplay, thus reducing the perceived difficulty.</p><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Results of the Game Experience Questionnaire for the Kinect and monocular groups.</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Dimension</td><td align="left" valign="bottom" colspan="2">User feedbacks, mean (SD)</td><td align="left" valign="bottom"><italic>t</italic> test (<italic>df</italic>)</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">Kinect group (n=15)</td><td align="left" valign="bottom">Monocular group (n=15)</td><td align="left" valign="bottom"/><td align="left" valign="bottom"/></tr></thead><tbody><tr><td align="left" valign="top">Sensory and imaginative immersion</td><td align="left" valign="top">3.40 (0.38)</td><td align="left" valign="top">3.51 (0.79)</td><td align="left" valign="top">&#x2212;2.034 (28)</td><td align="left" valign="top">.15</td></tr><tr><td align="left" valign="top">Flow</td><td align="left" valign="top">3.40 (0.54)</td><td align="left" valign="top">3.58 (0.63)</td><td align="left" valign="top">&#x2212;0.811 (28)</td><td align="left" valign="top">.43</td></tr><tr><td align="left" valign="top">Competence</td><td align="left" valign="top">2.90 (0.60)</td><td align="left" valign="top">3.16 (0.71)</td><td align="left" valign="top">&#x2212;1.049 (28)</td><td align="left" valign="top">.30</td></tr><tr><td align="left" 
valign="top">Tension or annoyance</td><td align="left" valign="top">0.10 (0.38)</td><td align="left" valign="top">0.08 (0.28)</td><td align="left" valign="top">0.124 (28)</td><td align="left" valign="top">.90</td></tr><tr><td align="left" valign="top">Challenge</td><td align="left" valign="top">2.96 (0.51)</td><td align="left" valign="top">3.45 (0.39)</td><td align="left" valign="top">&#x2212;2.71 (28)</td><td align="left" valign="top">.01</td></tr><tr><td align="left" valign="top">Positive affect</td><td align="left" valign="top">3.53 (0.44)</td><td align="left" valign="top">3.79 (0.39)</td><td align="left" valign="top">&#x2212;1.579 (28)</td><td align="left" valign="top">.13</td></tr><tr><td align="left" valign="top">Negative affect</td><td align="left" valign="top">0.33 (0.67)</td><td align="left" valign="top">0.41 (0.70)</td><td align="left" valign="top">&#x2212;0.688 (28)</td><td align="left" valign="top">.50</td></tr></tbody></table></table-wrap><p>For all other dimensions of game experience&#x2014;including sensory and imaginative immersion, flow, perceived competence, tension or annoyance, and emotional responses (positive and negative)&#x2014;no statistically significant differences were observed between the 2 exergame groups (<italic>P</italic>&#x003E;.05). While not statistically significant, participants in the Kinect group reported slightly higher tension levels, potentially due to unfamiliarity with the hardware. Conversely, the monocular group reported marginally higher negative affect, which may reflect minor limitations in motion-tracking responsiveness or accuracy.</p><p>User engagement over the 1-week intervention period was measured by the number of completed exercise sessions. On average, participants in the control group completed 2.33 sessions (SD 1.29), the Kinect group completed 2.93 sessions (SD 1.79), and the monocular group completed 3.60 sessions (SD 1.59). 
Although the monocular group exhibited the highest level of usage, between-group comparisons did not reach statistical significance (<italic>P</italic>=.49). Similarly, no significant difference was observed between the Kinect and monocular groups (<italic>P</italic>=.29).</p><p>A thematic analysis of the qualitative user feedback was conducted to contextualize participants&#x2019; experiences across groups. Feedback was first coded and then categorized into three major themes: usability, motivation, and future intention to use. (1) <italic>Usability</italic>: Most participants across all groups found the systems easy to operate. However, specific issues were reported. Four participants in the Kinect group mentioned challenges related to hardware setup (eg, spatial calibration and sensor alignment), while 1 participant in the monocular group cited difficulties with interface responsiveness. (2) <italic>Motivation</italic>: Lack of engagement was a recurring theme in the control group, with 6 participants indicating low motivation due to the static and repetitive nature of the game. (3) <italic>Future intention to use</italic>: Despite these challenges, participants in the exergame groups showed strong willingness to continue. In the Kinect group, 66.67% (10/15) expressed interest in future use, while 80% (12/15) of the monocular group reported a clear intention to continue engaging with the exergames.</p></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Results</title><p>In this study, we evaluated the impact of exercise interventions using 3 modalities&#x2014;traditional offline exergames, Kinect-based exergames, and monocular camera&#x2013;based exergames&#x2014;on exercise performance and user experience in a high-risk T2D population. 
The findings revealed several noteworthy insights into the potential of monocular camera&#x2013;based exergames as an affordable and convenient alternative to Kinect-based systems in developing exergames for the high-risk T2D population.</p><p>From an exercise performance perspective, all 3 modalities&#x2014;traditional, Kinect-based, and monocular camera&#x2013;based exergames&#x2014;elicited comparable elevations in participants&#x2019; heart rates, indicating similar levels of cardiovascular engagement. This finding aligns with prior research demonstrating that exergames can effectively promote physical activity across diverse technological platforms [<xref ref-type="bibr" rid="ref23">23</xref>]. While the heart rate differences did not reach statistical significance, the effect size suggests a small practical effect that may still be meaningful in public health applications, especially for preventive care in at-risk populations.</p><p>Notably, participants reported lower perceived fatigue when engaging with digital exergames, particularly the monocular camera&#x2013;based system, and although these differences did not reach statistical significance, the moderate effect size and significant pairwise difference between the monocular and control groups suggest a clinically relevant trend. This trend suggests that digital interventions may offer a more comfortable exercise experience while maintaining effectiveness, corroborating previous studies that have highlighted the potential of exergames to enhance exercise enjoyment and adherence [<xref ref-type="bibr" rid="ref24">24</xref>]. The observed lower perceived fatigue in the monocular camera&#x2013;based exergame group may be attributed to the system&#x2019;s ability to provide real-time feedback and adjust to users&#x2019; movements, thereby enhancing engagement and reducing the subjective experience of exertion. 
Such features are consistent with findings from studies emphasizing the importance of interactive and adaptive elements in exergame design to optimize user experience and outcomes [<xref ref-type="bibr" rid="ref25">25</xref>].</p></sec><sec id="s4-2"><title>User Experience and Engagement</title><p>In terms of user experience, digital exergames were found to foster higher levels of inner motivation than traditional offline exergames, highlighting their potential to engage users more effectively [<xref ref-type="bibr" rid="ref26">26</xref>,<xref ref-type="bibr" rid="ref27">27</xref>]. The difference in inner motivation showed a large effect size, indicating the robust motivational potential of digital exercise systems for behavior change. Although there was no significant difference in inner motivation between Kinect-based and monocular camera&#x2013;based exergames, these 2 systems exhibited distinct strengths in the user experience. Monocular camera&#x2013;based exergames excelled in providing sensory and imaginative immersion, flow, competence, and positive affect, offering an engaging and enjoyable exercise environment [<xref ref-type="bibr" rid="ref28">28</xref>]. In contrast, Kinect-based exergames performed better in the dimensions of tension, negative affect, and challenge, with the challenge dimension showing a statistically significant advantage, suggesting Kinect&#x2019;s ability to reduce negative emotional responses during gameplay [<xref ref-type="bibr" rid="ref29">29</xref>]. The higher challenge scores in the monocular group may reflect user frustration stemming from occasional inaccuracies in motion tracking or delayed feedback, which can reduce the sense of control and increase cognitive effort.</p><p>To enhance the long-term appeal and sustain user engagement, current research indicates that incorporating adaptive difficulty levels, personalization features, and reinforcement mechanisms is crucial [<xref ref-type="bibr" rid="ref30">30</xref>]. 
Adaptive difficulty can dynamically adjust the game&#x2019;s challenge to match the user&#x2019;s skill level, preventing boredom and frustration. Reinforcement mechanisms, including real-time feedback and rewards, can further encourage consistent participation [<xref ref-type="bibr" rid="ref31">31</xref>]. Future design may enhance these design elements to improve adherence and motivation in exergame interventions.</p><p>Nevertheless, participants using the monocular camera system reported fewer setup difficulties and a higher willingness to continue using the system postintervention. Compared with Kinect, the monocular system&#x2019;s low hardware burden, environmental adaptability, and plug-and-play configuration make it highly promising for home-based and scalable interventions. These findings highlight the practical potential of monocular camera&#x2013;based exergames as an accessible solution for promoting physical activity in resource-limited settings, where traditional fitness infrastructure or expensive hardware such as Kinect may not be feasible.</p></sec><sec id="s4-3"><title>Limitations</title><p>Despite encouraging results, this study has several limitations. First, the small sample size and short follow-up duration limit generalizability. Future studies should include larger, more demographically diverse populations. Second, the controlled indoor setup does not fully reflect real-world conditions; testing in-home or community settings is needed to assess ecological validity. Third, the study did not examine long-term adherence or health outcomes (eg, HbA<sub>1c</sub>, weight, and insulin sensitivity), which are critical for evaluating the sustained impact of digital interventions. 
Fourth, although Kinect-based exergames scored higher in the challenge dimension, participant feedback suggested that the monocular system&#x2019;s challenge scores may have resulted from tracking errors or interface latency, rather than intentional game difficulty&#x2014;indicating a need to distinguish between technical and design-related difficulty. Fifth, the study did not conduct subgroup analyses based on gender, which could influence user experience and outcomes due to differences in physical ability, digital familiarity, or motivational preferences. Sixth, the study was originally planned to be conducted at a different location; due to logistical challenges, it was relocated to another community site, which may affect comparability with the original protocol.</p><p>Future research should evaluate long-term health outcomes such as HbA<sub>1c</sub>, insulin sensitivity, and weight control associated with these interventions; conduct trials in real-world environments such as community centers or homes to assess ecological validity; investigate personalized game adaptation strategies that adjust challenge and feedback based on individual skill level and fatigue patterns; and develop cross-platform, low-barrier versions of monocular camera&#x2013;based systems for scalable deployment in resource-limited settings.</p></sec><sec id="s4-4"><title>Conclusions</title><p>The findings suggest that monocular camera&#x2013;based exergame offers a viable alternative to Kinect-based exergame for at-home use by T2D high-risk populations. It provides similar exercise outcomes and user experiences while offering the advantages of simplicity and greater adaptability. However, further improvements are needed to enhance user experience, particularly in balancing the challenge dimension.</p></sec></sec></body><back><ack><p>The authors extend their sincere gratitude to all participants who took part in this study. 
This research is supported by the Shanghai Pujiang Program (project number 22PJC002).</p></ack><notes><sec><title>Data Availability</title><p>The datasets are available from the corresponding author upon reasonable request.</p></sec></notes><fn-group><fn fn-type="conflict"><p>None declared.</p></fn><fn fn-type="other"><p><bold>Editorial Notice</bold></p><p>This randomized study was retrospectively registered on April 10, 2025, explained by authors as due to lack of prior awareness of the prospective registration requirement. Institutional review board approval was granted on January 12, 2024, the date of first patient enrollment was March 1, 2024, and the last patient data were collected on April 30, 2024. Authors uploaded the originally approved institutional review board protocol, which is largely similar to the manuscript submitted, save for the following discrepancies: the study was conducted in a different location than originally planned (Minhang District, Shanghai instead of Keqiao District, Shaoxing), and the full intervention schedule (1-week follow-up) was not detailed in the original protocol. Outcome measurement was the same. As a result, and because the risk of bias appears low, the editor granted an exception from ICMJE rules mandating prospective registration of randomized trials. 
However, readers are advised to carefully assess the validity of any potential explicit or implicit claims related to primary outcomes or effectiveness due to the retrospective registration.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">CONSORT</term><def><p>Consolidated Standards of Reporting Trials</p></def></def-item><def-item><term id="abb2">CONSORT-EHEALTH</term><def><p>Consolidated Standards of Reporting Trials of Electronic and Mobile Health Applications and Online Telehealth</p></def></def-item><def-item><term id="abb3">GEQ</term><def><p>Game Experience Questionnaire</p></def></def-item><def-item><term id="abb4">IMI</term><def><p>Intrinsic Motivation Inventory</p></def></def-item><def-item><term id="abb5">T2D</term><def><p>type 2 diabetes</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ahmad</surname><given-names>E</given-names> </name><name name-style="western"><surname>Lim</surname><given-names>S</given-names> </name><name name-style="western"><surname>Lamptey</surname><given-names>R</given-names> </name><name name-style="western"><surname>Webb</surname><given-names>DR</given-names> </name><name name-style="western"><surname>Davies</surname><given-names>MJ</given-names> </name></person-group><article-title>Type 2 diabetes</article-title><source>Lancet</source><year>2022</year><month>11</month><volume>400</volume><issue>10365</issue><fpage>1803</fpage><lpage>1820</lpage><pub-id pub-id-type="doi">10.1016/S0140-6736(22)01655-5</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Basu</surname><given-names>S</given-names> </name><name name-style="western"><surname>Yudkin</surname><given-names>JS</given-names> </name><name 
name-style="western"><surname>Kehlenbrink</surname><given-names>S</given-names> </name><etal/></person-group><article-title>Estimation of global insulin use for type 2 diabetes, 2018&#x2013;30: a microsimulation analysis</article-title><source>Lancet Diabetes Endocrinol</source><year>2019</year><month>01</month><volume>7</volume><issue>1</issue><fpage>25</fpage><lpage>33</lpage><pub-id pub-id-type="doi">10.1016/S2213-8587(18)30303-6</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Carrillo-Larco</surname><given-names>RM</given-names> </name><name name-style="western"><surname>Guzman-Vilca</surname><given-names>WC</given-names> </name><name name-style="western"><surname>Xu</surname><given-names>X</given-names> </name><name name-style="western"><surname>Bernabe-Ortiz</surname><given-names>A</given-names> </name></person-group><article-title>Mean age and body mass index at type 2 diabetes diagnosis: pooled analysis of 56 health surveys across income groups and world regions</article-title><source>Diabet Med</source><year>2024</year><month>02</month><volume>41</volume><issue>2</issue><fpage>e15174</fpage><pub-id pub-id-type="doi">10.1111/dme.15174</pub-id><pub-id pub-id-type="medline">37422703</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="web"><article-title>Risk factors for type 2 diabetes</article-title><source>National Institutes of Health</source><year>2022</year><access-date>2025-07-31</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.niddk.nih.gov/health-information/diabetes/overview/risk-factors-type-2-diabetes">https://www.niddk.nih.gov/health-information/diabetes/overview/risk-factors-type-2-diabetes</ext-link></comment></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Magkos</surname><given-names>F</given-names> </name><name name-style="western"><surname>Hjorth</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Astrup</surname><given-names>A</given-names> </name></person-group><article-title>Diet and exercise in the prevention and treatment of type 2 diabetes mellitus</article-title><source>Nat Rev Endocrinol</source><year>2020</year><month>10</month><volume>16</volume><issue>10</issue><fpage>545</fpage><lpage>555</lpage><pub-id pub-id-type="doi">10.1038/s41574-020-0381-5</pub-id><pub-id pub-id-type="medline">32690918</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dnes</surname><given-names>N</given-names> </name><name name-style="western"><surname>Coley</surname><given-names>B</given-names> </name><name name-style="western"><surname>Frisby</surname><given-names>K</given-names> </name><etal/></person-group><article-title>&#x201C;A little bit of a guidance and a little bit of group support&#x201D;: a qualitative study of preferences, barriers, and facilitators to participating in community-based exercise opportunities among adults living with chronic pain</article-title><source>Disabil Rehabil</source><year>2021</year><month>11</month><volume>43</volume><issue>23</issue><fpage>3347</fpage><lpage>3356</lpage><pub-id pub-id-type="doi">10.1080/09638288.2020.1742801</pub-id><pub-id pub-id-type="medline">32223460</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Farholm</surname><given-names>A</given-names> </name><name name-style="western"><surname>S&#x00F8;rensen</surname><given-names>M</given-names> </name></person-group><article-title>Motivation for physical activity and exercise in severe mental illness: a systematic 
review of intervention studies</article-title><source>Int J Ment Health Nurs</source><year>2016</year><month>06</month><volume>25</volume><issue>3</issue><fpage>194</fpage><lpage>205</lpage><pub-id pub-id-type="doi">10.1111/inm.12214</pub-id><pub-id pub-id-type="medline">26916699</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Choi</surname><given-names>SD</given-names> </name><name name-style="western"><surname>Guo</surname><given-names>L</given-names> </name><name name-style="western"><surname>Kang</surname><given-names>D</given-names> </name><name name-style="western"><surname>Xiong</surname><given-names>S</given-names> </name></person-group><article-title>Exergame technology and interactive interventions for elderly fall prevention: a systematic literature review</article-title><source>Appl Ergon</source><year>2017</year><month>11</month><volume>65</volume><issue>65</issue><fpage>570</fpage><lpage>581</lpage><pub-id pub-id-type="doi">10.1016/j.apergo.2016.10.013</pub-id><pub-id pub-id-type="medline">27825723</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sch&#x00E4;ttin</surname><given-names>A</given-names> </name><name name-style="western"><surname>Pickles</surname><given-names>J</given-names> </name><name name-style="western"><surname>Flagmeier</surname><given-names>D</given-names> </name><etal/></person-group><article-title>Development of a novel home-based exergame with on-body feedback: usability study</article-title><source>JMIR Serious Games</source><year>2022</year><month>12</month><day>6</day><volume>10</volume><issue>4</issue><fpage>e38703</fpage><pub-id pub-id-type="doi">10.2196/38703</pub-id><pub-id pub-id-type="medline">36472900</pub-id></nlm-citation></ref><ref 
id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Huang</surname><given-names>K</given-names> </name><name name-style="western"><surname>Zhao</surname><given-names>Y</given-names> </name><name name-style="western"><surname>He</surname><given-names>R</given-names> </name><etal/></person-group><article-title>Exergame-based exercise training for depressive symptoms in adults: a systematic review and meta-analysis</article-title><source>Psychol Sport Exerc</source><year>2022</year><month>11</month><volume>63</volume><fpage>102266</fpage><pub-id pub-id-type="doi">10.1016/j.psychsport.2022.102266</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hu</surname><given-names>MC</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>CW</given-names> </name><name name-style="western"><surname>Cheng</surname><given-names>WH</given-names> </name><name name-style="western"><surname>Chang</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Lai</surname><given-names>JH</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>JL</given-names> </name></person-group><article-title>Real-time human movement retrieval and assessment with Kinect sensor</article-title><source>IEEE Trans Cybern</source><year>2015</year><month>04</month><volume>45</volume><issue>4</issue><fpage>742</fpage><lpage>753</lpage><pub-id pub-id-type="doi">10.1109/TCYB.2014.2335540</pub-id><pub-id pub-id-type="medline">25069133</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Da Gama</surname><given-names>A</given-names> </name><name 
name-style="western"><surname>Fallavollita</surname><given-names>P</given-names> </name><name name-style="western"><surname>Teichrieb</surname><given-names>V</given-names> </name><name name-style="western"><surname>Navab</surname><given-names>N</given-names> </name></person-group><article-title>Motor rehabilitation using Kinect: a systematic review</article-title><source>Games Health J</source><year>2015</year><month>04</month><volume>4</volume><issue>2</issue><fpage>123</fpage><lpage>135</lpage><pub-id pub-id-type="doi">10.1089/g4h.2014.0047</pub-id><pub-id pub-id-type="medline">26181806</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fan</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Zhu</surname><given-names>Y</given-names> </name><name name-style="western"><surname>He</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Sun</surname><given-names>Q</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>H</given-names> </name><name name-style="western"><surname>He</surname><given-names>J</given-names> </name></person-group><article-title>Deep learning on monocular object pose detection and tracking: a comprehensive overview</article-title><source>ACM Comput Surv</source><year>2023</year><month>04</month><day>30</day><volume>55</volume><issue>4</issue><fpage>1</fpage><lpage>40</lpage><pub-id pub-id-type="doi">10.1145/3524496</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Delibasoglu</surname><given-names>I</given-names> </name><name name-style="western"><surname>Kosesoy</surname><given-names>I</given-names> </name><name name-style="western"><surname>Kotan</surname><given-names>M</given-names> </name><name 
name-style="western"><surname>Selamet</surname><given-names>F</given-names> </name></person-group><article-title>Motion detection in moving camera videos using background modeling and FlowNet</article-title><source>J Vis Commun Image Represent</source><year>2022</year><month>10</month><volume>88</volume><fpage>103616</fpage><pub-id pub-id-type="doi">10.1016/j.jvcir.2022.103616</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Chung</surname><given-names>YY</given-names> </name><name name-style="western"><surname>Annaswamy</surname><given-names>TM</given-names> </name><name name-style="western"><surname>Prabhakaran</surname><given-names>B</given-names> </name></person-group><article-title>Performance and user experience studies of HILLES: home-based immersive lower limb exergame system</article-title><conf-name>Proceedings of the 14th Conference on ACM Multimedia Systems</conf-name><conf-date>Jun 7-10, 2023</conf-date><pub-id pub-id-type="doi">10.1145/3587819.3590985</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Assad</surname><given-names>O</given-names> </name><name name-style="western"><surname>Hermann</surname><given-names>R</given-names> </name><name name-style="western"><surname>Lilla</surname><given-names>D</given-names> </name><name name-style="western"><surname>Mellies</surname><given-names>B</given-names> </name><name name-style="western"><surname>Meyer</surname><given-names>R</given-names> </name><name name-style="western"><surname>Shevach</surname><given-names>L</given-names> </name></person-group><article-title>Motion-based games for Parkinson&#x2019;s disease patients</article-title><source>Entertainment Computing&#x2014;ICEC 
2011</source><year>2011</year><access-date>2025-07-31</access-date><publisher-loc>Heidelberg</publisher-loc><publisher-name>Springer</publisher-name><comment><ext-link ext-link-type="uri" xlink:href="https://link.springer.com/chapter/10.1007/978-3-642-24500-8_6">https://link.springer.com/chapter/10.1007/978-3-642-24500-8_6</ext-link></comment></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dill</surname><given-names>S</given-names> </name><name name-style="western"><surname>M&#x00FC;ller</surname><given-names>PN</given-names> </name><name name-style="western"><surname>Caserman</surname><given-names>P</given-names> </name><name name-style="western"><surname>G&#x00F6;bel</surname><given-names>S</given-names> </name><name name-style="western"><surname>Hoog Antink</surname><given-names>C</given-names> </name><name name-style="western"><surname>Tregel</surname><given-names>T</given-names> </name></person-group><article-title>Sensing in exergames for efficacy and motion quality: scoping review of recent publications</article-title><source>JMIR Serious Games</source><year>2024</year><month>11</month><day>5</day><volume>12</volume><issue>1</issue><fpage>e52153</fpage><pub-id pub-id-type="doi">10.2196/52153</pub-id><pub-id pub-id-type="medline">39499916</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>J</given-names> </name><name name-style="western"><surname>Theng</surname><given-names>YL</given-names> </name><name name-style="western"><surname>Foo</surname><given-names>S</given-names> </name></person-group><article-title>Effect of exergames on depression and anxiety in older adults: a systematic review and meta-analysis</article-title><source>J Med Internet 
Res</source><year>2021</year><volume>23</volume><issue>6</issue><fpage>e16210</fpage><pub-id pub-id-type="doi">10.2196/16210</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>Fan</surname><given-names>X</given-names> </name><name name-style="western"><surname>Moe</surname><given-names>ST</given-names> </name></person-group><article-title>Criterion-related validity of the Borg ratings of perceived exertion scale in healthy individuals: a meta-analysis</article-title><source>J Sports Sci</source><year>2002</year><month>11</month><volume>20</volume><issue>11</issue><fpage>873</fpage><lpage>899</lpage><pub-id pub-id-type="doi">10.1080/026404102320761787</pub-id><pub-id pub-id-type="medline">12430990</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Johnson</surname><given-names>D</given-names> </name><name name-style="western"><surname>Gardner</surname><given-names>MJ</given-names> </name><name name-style="western"><surname>Perry</surname><given-names>R</given-names> </name></person-group><article-title>Validation of two game experience scales: the Player Experience of Need Satisfaction (PENS) and Game Experience Questionnaire (GEQ)</article-title><source>Int J Hum Comput Stud</source><year>2018</year><month>10</month><volume>118</volume><fpage>38</fpage><lpage>46</lpage><pub-id pub-id-type="doi">10.1016/j.ijhcs.2018.05.003</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Ostrow</surname><given-names>KS</given-names> </name><name 
name-style="western"><surname>Heffernan</surname><given-names>NT</given-names> </name></person-group><article-title>Testing the validity and reliability of intrinsic motivation inventory subscales within assistments</article-title><access-date>2025-09-02</access-date><conf-name>19th International Conference, Artificial Intelligence in Education 2018</conf-name><conf-date>Jun 27-30, 2018</conf-date><comment><ext-link ext-link-type="uri" xlink:href="https://link.springer.com/chapter/10.1007/978-3-319-93843-1_28">https://link.springer.com/chapter/10.1007/978-3-319-93843-1_28</ext-link></comment></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Armstrong</surname><given-names>RA</given-names> </name><name name-style="western"><surname>Eperjesi</surname><given-names>F</given-names> </name><name name-style="western"><surname>Gilmartin</surname><given-names>B</given-names> </name></person-group><article-title>The application of analysis of variance (ANOVA) to different experimental designs in optometry</article-title><source>Ophthalmic Physiol Opt</source><year>2002</year><month>05</month><volume>22</volume><issue>3</issue><fpage>248</fpage><lpage>256</lpage><pub-id pub-id-type="doi">10.1046/j.1475-1313.2002.00020.x</pub-id><pub-id pub-id-type="medline">12090640</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wang</surname><given-names>C</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>C</given-names> </name><name name-style="western"><surname>Shin</surname><given-names>H</given-names> </name></person-group><article-title>Digital therapeutics from bench to bedside</article-title><source>NPJ Digit 
Med</source><year>2023</year><month>03</month><day>10</day><volume>6</volume><issue>1</issue><fpage>38</fpage><pub-id pub-id-type="doi">10.1038/s41746-023-00777-z</pub-id><pub-id pub-id-type="medline">36899073</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Montoya</surname><given-names>MF</given-names> </name><name name-style="western"><surname>Mu&#x00F1;oz</surname><given-names>J</given-names> </name><name name-style="western"><surname>Henao</surname><given-names>OA</given-names> </name></person-group><article-title>Fatigue-aware videogame using biocybernetic adaptation: a pilot study for upper-limb rehabilitation with sEMG</article-title><source>Virtual Real</source><year>2021</year><fpage>1</fpage><lpage>14</lpage><pub-id pub-id-type="doi">10.1007/s10055-021-00561-y</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Li</surname><given-names>A</given-names> </name><name name-style="western"><surname>Qiang</surname><given-names>W</given-names> </name><name name-style="western"><surname>Li</surname><given-names>J</given-names> </name><name name-style="western"><surname>Geng</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Qiang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Zhao</surname><given-names>J</given-names> </name></person-group><article-title>Retracted: evaluating the clinical efficacy of an exergame-based training program for enhancing physical and cognitive functions in older adults with mild cognitive impairment and dementia residing in rural long-term care facilities: randomized controlled trial</article-title><source>J Med Internet 
Res</source><year>2025</year><month>02</month><day>19</day><volume>27</volume><fpage>e69109</fpage><pub-id pub-id-type="doi">10.2196/69109</pub-id><pub-id pub-id-type="medline">39969990</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>X</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>L</given-names> </name><name name-style="western"><surname>Feng</surname><given-names>H</given-names> </name><etal/></person-group><article-title>Comparison of exergames versus conventional exercises on the health benefits of older adults: systematic review with meta-analysis of randomized controlled trials</article-title><source>JMIR Serious Games</source><year>2023</year><month>06</month><day>22</day><volume>11</volume><issue>1</issue><fpage>e42374</fpage><pub-id pub-id-type="doi">10.2196/42374</pub-id><pub-id pub-id-type="medline">37347534</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Zhang</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Guo</surname><given-names>Z</given-names> </name><name name-style="western"><surname>Bao</surname><given-names>D</given-names> </name><name name-style="western"><surname>Zhou</surname><given-names>J</given-names> </name></person-group><article-title>Comparison between the effects of exergame intervention and traditional physical training on improving balance and fall prevention in healthy older adults: a systematic review and meta-analysis</article-title><source>J Neuroeng Rehabil</source><year>2021</year><month>11</month><day>24</day><volume>18</volume><issue>1</issue><fpage>164</fpage><pub-id 
pub-id-type="doi">10.1186/s12984-021-00917-0</pub-id><pub-id pub-id-type="medline">34819097</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Salti</surname><given-names>S</given-names> </name><name name-style="western"><surname>Schreer</surname><given-names>O</given-names> </name><name name-style="western"><surname>Di Stefano</surname><given-names>L</given-names> </name></person-group><article-title>Real-time 3D arm pose estimation from monocular video for enhanced HCI</article-title><conf-name>Proceedings of the 1st ACM Workshop on Vision Networks for Behavior Analysis</conf-name><conf-date>Oct 31, 2008</conf-date><pub-id pub-id-type="doi">10.1145/1461893.1461895</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Simonsen</surname><given-names>D</given-names> </name><name name-style="western"><surname>Popovic</surname><given-names>MB</given-names> </name><name name-style="western"><surname>Spaich</surname><given-names>EG</given-names> </name><name name-style="western"><surname>Andersen</surname><given-names>OK</given-names> </name></person-group><article-title>Design and test of a Microsoft Kinect-based system for delivering adaptive visual feedback to stroke patients during training of upper limb movement</article-title><source>Med Biol Eng Comput</source><year>2017</year><month>11</month><volume>55</volume><issue>11</issue><fpage>1927</fpage><lpage>1935</lpage><pub-id pub-id-type="doi">10.1007/s11517-017-1640-z</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>J</given-names> </name><name name-style="western"><surname>Yang</surname><given-names>T</given-names> 
</name><name name-style="western"><surname>He</surname><given-names>Q</given-names> </name><etal/></person-group><article-title>The impact of gamified interventions on the management of chronic obstructive pulmonary disease: systematic literature review</article-title><source>JMIR Serious Games</source><year>2025</year><month>05</month><day>30</day><volume>13</volume><issue>1</issue><fpage>e69510</fpage><pub-id pub-id-type="doi">10.2196/69510</pub-id><pub-id pub-id-type="medline">40446290</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chu</surname><given-names>CH</given-names> </name><name name-style="western"><surname>Biss</surname><given-names>RK</given-names> </name><name name-style="western"><surname>Cooper</surname><given-names>L</given-names> </name><name name-style="western"><surname>Quan</surname><given-names>AML</given-names> </name><name name-style="western"><surname>Matulis</surname><given-names>H</given-names> </name></person-group><article-title>Exergaming platform for older adults residing in long-term care homes: user-centered design, development, and usability study</article-title><source>JMIR Serious Games</source><year>2021</year><month>03</month><day>9</day><volume>9</volume><issue>1</issue><fpage>e22370</fpage><pub-id pub-id-type="doi">10.2196/22370</pub-id><pub-id pub-id-type="medline">33687337</pub-id></nlm-citation></ref></ref-list><app-group><supplementary-material id="app1"><label>Checklist 1</label><p>CONSORT-eHEALTH checklist (V 1.6.1).</p><media xlink:href="games_v13i1e75823_app1.pdf" xlink:title="PDF File, 367 KB"/></supplementary-material></app-group></back></article>