<?xml version="1.0" encoding="UTF-8"?><?xml-stylesheet type="text/xsl" href="static/style.xsl"?><OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd"><responseDate>2026-04-27T14:38:24Z</responseDate><request verb="GetRecord" identifier="oai:uvadoc.uva.es:10324/80444" metadataPrefix="dim">https://uvadoc.uva.es/oai/request</request><GetRecord><record><header><identifier>oai:uvadoc.uva.es:10324/80444</identifier><datestamp>2026-04-22T07:32:21Z</datestamp><setSpec>com_10324_1134</setSpec><setSpec>com_10324_931</setSpec><setSpec>com_10324_894</setSpec><setSpec>col_10324_1213</setSpec></header><metadata><dim:dim xmlns:dim="http://www.dspace.org/xmlns/dspace/dim" xmlns:doc="http://www.lyncode.com/xoai" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.dspace.org/xmlns/dspace/dim http://www.dspace.org/schema/dim.xsd">
<dim:field mdschema="dc" element="contributor" qualifier="author" authority="97f298028edd5d62" confidence="600" orcid_id="">Diez Hermano, Sergio</dim:field>
<dim:field mdschema="dc" element="contributor" qualifier="author" authority="5a595ce54711443c" confidence="600" orcid_id="0000-0002-8567-4826">Ganfornina Álvarez, María Dolores</dim:field>
<dim:field mdschema="dc" element="contributor" qualifier="author" authority="7a16fc1f-e22d-4680-b992-c5e99a30da9b" confidence="600" orcid_id="">Vega-Lozano, Esteban</dim:field>
<dim:field mdschema="dc" element="contributor" qualifier="author" authority="e455b9394a603299" confidence="600" orcid_id="0000-0001-6296-6773">Sánchez Romero, Diego</dim:field>
<dim:field mdschema="dc" element="date" qualifier="accessioned">2025-12-10T13:11:34Z</dim:field>
<dim:field mdschema="dc" element="date" qualifier="available">2025-12-10T13:11:34Z</dim:field>
<dim:field mdschema="dc" element="date" qualifier="issued">2020</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="citation" lang="es">Front Neurosci. 2020 Jun 4;14:516</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="uri">https://uvadoc.uva.es/handle/10324/80444</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="doi" lang="es">10.3389/fnins.2020.00516</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="publicationtitle" lang="es">Frontiers in Neuroscience</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="publicationvolume" lang="es">14</dim:field>
<dim:field mdschema="dc" element="identifier" qualifier="essn" lang="es">1662-453X</dim:field>
<dim:field mdschema="dc" element="description" lang="es">Producción Científica</dim:field>
<dim:field mdschema="dc" element="description" qualifier="abstract" lang="es">The fruit fly compound eye is a premier experimental system for modeling human&#xd;
neurodegenerative diseases. The disruption of the retinal geometry has been historically&#xd;
assessed using time-consuming and poorly reliable techniques such as histology or&#xd;
pseudopupil manual counting. Recent semiautomated quantification approaches rely&#xd;
either on manual region-of-interest delimitation or engineered features to estimate the&#xd;
extent of degeneration. This work presents a fully automated classification pipeline&#xd;
of bright-field images based on orientated gradient descriptors and machine learning&#xd;
techniques. An initial region-of-interest extraction is performed, applying morphological&#xd;
kernels and Euclidean distance-to-centroid thresholding. Image classification algorithms&#xd;
are trained on these regions (support vector machine, decision trees, random forest,&#xd;
and convolutional neural network), and their performance is evaluated on independent,&#xd;
unseen datasets. The combinations of oriented gradient + gaussian kernel Support&#xd;
Vector Machine [0.97 accuracy and 0.98 area under the curve (AUC)] and fine-tuned&#xd;
pre-trained convolutional neural network (0.98 accuracy and 0.99 AUC) yielded the best&#xd;
results overall. The proposed method provides a robust quantification framework that&#xd;
can be generalized to address the loss of regularity in biological patterns similar to the&#xd;
Drosophila eye surface and speeds up the processing of large sample batches.</dim:field>
<dim:field mdschema="dc" element="description" qualifier="project" lang="es">Ministerio de Ciencia e Innovación (MICINN), grants BFU2011-23978 and BFU2015-68149-R</dim:field>
<dim:field mdschema="dc" element="format" qualifier="mimetype" lang="es">application/pdf</dim:field>
<dim:field mdschema="dc" element="language" qualifier="iso" lang="es">eng</dim:field>
<dim:field mdschema="dc" element="rights" qualifier="accessRights" lang="es">info:eu-repo/semantics/openAccess</dim:field>
<dim:field mdschema="dc" element="rights" qualifier="uri" lang="*">http://creativecommons.org/licenses/by-nc-nd/4.0/</dim:field>
<dim:field mdschema="dc" element="rights" lang="*">Attribution-NonCommercial-NoDerivatives 4.0 Internacional</dim:field>
<dim:field mdschema="dc" element="title" lang="es">Machine Learning Representation of Loss of Eye Regularity in a Drosophila Neurodegenerative Model</dim:field>
<dim:field mdschema="dc" element="type" lang="es">info:eu-repo/semantics/article</dim:field>
<dim:field mdschema="dc" element="type" qualifier="hasVersion" lang="es">info:eu-repo/semantics/publishedVersion</dim:field>
<dim:field mdschema="dc" element="peerreviewed" lang="es">SI</dim:field>
</dim:dim></metadata></record></GetRecord></OAI-PMH>