An end-to-end system to automate the well-known Tanner--Whitehouse (TW3) clinical procedure to estimate the skeletal age in childhood is proposed. The system comprises the detailed analysis of the two most important bones in TW3: the radius and ulna wrist bones. First, a modified version of an adaptive clustering segmentation algorithm is presented to properly semi-automatically segment the contour of the bones. Second, up to 89 features are defined and extracted from bone contours and gray scale information inside the contour, followed by some well-founded feature selection mathematical criteria, based on the ideas of maximizing the classes{\textquoteright} separability. Third, bone age is estimated with the help of a Generalized Softmax Perceptron (GSP) neural network (NN) that, after supervised learning and optimal complexity estimation via the application of the recently developed Posterior Probability Model Selection (PPMS) algorithm, is able to accurately predict the different development stages in both radius and ulna from which and with the help of the TW3 methodology, we are able to conveniently score and estimate the bone age of a patient in years, in what can be understood as a multiple-class (multiple stages) pattern recognition approach with posterior probability estimation. Finally, numerical results are presented to evaluate the system performance in predicting the bone stages and the final patient bone age over a private hand image database, with the help of the pediatricians and the radiologists expert diagnoses. {\textcopyright} 2006 IEEE.

}, keywords = {Age Determination by Skeleton, Aging, Algorithms, Artificial Intelligence, Automated, Bone, Bone age assessment, Clustering algorithms, Computer-Assisted, Humans, Model selection, Neural networks, Pattern recognition, Radiographic Image Interpretation, Reproducibility of Results, Sensitivity and Specificity, Skeletal maturity, algorithm, article, artificial neural network, automation, bone age, bone maturation, childhood, instrumentation, radius, ulna}, issn = {0018-9294}, doi = {10.1109/TBME.2008.918554}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-42249094547\&partnerID=40\&md5=2cecfea5f75a61b048611f2391b00aed}, author = {Trist{\'a}n-Vega, Antonio and Arribas, J. I.} } @article {420, title = {A model selection algorithm for a posteriori probability estimation with neural networks}, journal = {IEEE Transactions on Neural Networks}, volume = {16}, year = {2005}, pages = {799--809}, abstract = {This paper proposes a novel algorithm to jointly determine the structure and the parameters of a posteriori probability model based on neural networks (NNs). It makes use of well-known ideas of pruning, splitting, and merging neural components and takes advantage of the probabilistic interpretation of these components. The algorithm, so called a posteriori probability model selection (PPMS), is applied to an NN architecture called the generalized softmax perceptron (GSP) whose outputs can be understood as probabilities although results shown can be extended to more general network architectures. Learning rules are derived from the application of the expectation-maximization algorithm to the GSP-PPMS structure. Simulation results show the advantages of the proposed algorithm with respect to other schemes. {\textcopyright} 2005 IEEE.

}, keywords = {Algorithms, Automated, Biological, Breast Neoplasms, Computer simulation, Computer-Assisted, Computing Methodologies, Decision Support Techniques, Diagnosis, Estimation, Expectation-maximization, Generalized Softmax Perceptron (GSP), Humans, Mathematical models, Model selection, Models, Neural Networks (Computer), Neural networks, Numerical Analysis, Objective function, Pattern recognition, Posterior probability, Probability, Statistical, Stochastic Processes, algorithm, article, artificial neural network, automated pattern recognition, biological model, breast tumor, classification, cluster analysis, computer analysis, computer assisted diagnosis, decision support system, evaluation, human, mathematical computing, methodology, statistical model, statistics}, issn = {1045-9227}, doi = {10.1109/TNN.2005.849826}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-23044459586\&partnerID=40\&md5=f00e7d86a625cfc466373a2a938276d0}, author = {Arribas, J. I. and Cid-Sueiro, Jes{\'u}s} } @inproceedings {415, title = {Neural network fusion strategies for identifying breast masses}, booktitle = {IEEE International Conference on Neural Networks - Conference Proceedings}, year = {2004}, address = {Budapest}, abstract = {In this work, we introduce the Perceptron Average neural network fusion strategy and implemented a number of other fusion strategies to identify breast masses in mammograms as malignant or benign with both balanced and imbalanced input features. We numerically compare various fixed and trained fusion rules, i.e., the Majority Vote, Simple Average, Weighted Average, and Perceptron Average, when applying them to a binary statistical pattern recognition problem. To judge from the experimental results, the Weighted Average approach outperforms the other fusion strategies with balanced input features, while the Perceptron Average is superior and achieves the goals with lowest standard deviation with imbalanced ensembles. 
We concretely analyze the results of above fusion strategies, state the advantages of fusing the component networks, and provide our particular broad sense perspective about information fusion in neural networks.

}, keywords = {Biological organs, Breast cancers, Component neural networks (CNN), Image segmentation, Information fusions, Learning algorithms, Linear systems, Mammography, Mathematical models, Multilayer neural networks, Pattern recognition, Posterior probabilities, Tumors}, isbn = {0780383591}, doi = {10.1109/IJCNN.2004.1381010}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-10844231826\&partnerID=40\&md5=2be794a5832413fed34152d61dd49388}, author = {Wu, Y. and He, J. and Man, Y. and Arribas, J. I.} } @inproceedings {413, title = {Fusing Output Information in Neural Networks: Ensemble Performs Better}, booktitle = {Annual International Conference of the IEEE Engineering in Medicine and Biology - Proceedings}, year = {2003}, address = {Cancun}, abstract = {A neural network ensemble is a learning paradigm where a finite number of component neural networks are trained for the same task. Previous research suggests that an ensemble as a whole is often more accurate than any of the single component networks. This paper focuses on the advantages of fusing different nature network architectures, and to determine the appropriate information fusion algorithm in component neural networks by several approaches within hard decision classifiers, when solving a binary pattern recognition problem. We numerically simulated and compared the different fusion approaches in terms of the mean-square error rate in testing data set, over synthetically generated binary Gaussian noisy data, and stated the advantages of fusing the hard outputs of different component networks to make a final hard decision classification. The results of the experiments indicate that neural network ensembles can indeed improve the overall accuracy for classification problems; in all fusion architectures tested, the ensemble correct classification rates are better than those achieved by the individual component networks. 
Finally we are nowadays comparing the above mentioned hard decision classifiers with new soft decision classifier architectures that make use of the additional continuous type intermediate network soft outputs, fulfilling probability fundamental laws (positive, and add to unity), which can be understood as the a posteriori probabilities of a given pattern to belong to a certain class.

}, keywords = {Algorithms, Backpropagation, Classification (of information), Computer simulation, Decision making, Estimation, Gaussian noise (electronic), Information fusions, Mathematical models, Medical imaging, Model selection, Multilayer neural networks, Neural network ensembles, Pattern recognition, Probability, Probability estimation, Problem solving, Regularization, Statistical methods, Statistical pattern recognition, Vectors}, doi = {10.1109/IEMBS.2003.1280254}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-1542301061\&partnerID=40\&md5=32dbadb3b6ac3c6ae1ea33d89b52c75f}, author = {Wu, Y. and Arribas, J. I.} } @article {409, title = {Cost functions to estimate a posteriori probabilities in multiclass problems}, journal = {IEEE Transactions on Neural Networks}, volume = {10}, year = {1999}, pages = {645--656}, abstract = {The problem of designing cost functions to estimate a posteriori probabilities in multiclass problems is addressed in this paper. We establish necessary and sufficient conditions that these costs must satisfy in one-class one-output networks whose outputs are consistent with probability laws. We focus our attention on a particular subset of the corresponding cost functions; those which verify two usually interesting properties: symmetry and separability (well-known cost functions, such as the quadratic cost or the cross entropy are particular cases in this subset). Finally, we present a universal stochastic gradient learning rule for single-layer networks, in the sense of minimizing a general version of these cost functions for a wide family of nonlinear activation functions.

}, keywords = {Cost functions, Estimation, Functions, Learning algorithms, Multiclass problems, Neural networks, Pattern recognition, Probability, Problem solving, Random processes, Stochastic gradient learning rule}, issn = {1045-9227}, doi = {10.1109/72.761724}, url = {http://www.scopus.com/inward/record.url?eid=2-s2.0-0032643080\&partnerID=40\&md5=d528195bd6ec84531e59ddd2ececcd46}, author = {Cid-Sueiro, Jes{\'u}s and Arribas, J. I. and Urban-Munoz, S. and Figueiras-Vidal, A. R.} }