@misc{10481/88516,
  year = {2018},
  month = {5},
  url = {https://hdl.handle.net/10481/88516},
  abstract = {The Random Forest (RF) learning algorithm is considered a classifier of reference due to its excellent performance. Its success is based on the diversity of rules generated from decision trees that are built via a procedure that randomizes instances and features. Finding additional procedures to increase the diversity of the trees is therefore an interesting task. A new split criterion, based on imprecise probabilities and general uncertainty measures, has been considered; it depends clearly on a parameter and has been shown to be more successful than the classic criteria. Using that criterion in the RF scheme, together with a random procedure to select the value of that parameter, increases both the diversity of the trees in the forest and the performance. This gives rise to a new classification algorithm, called Random Credal Random Forest (RCRF). The new method represents several improvements with respect to the classic RF: the use of a more successful split criterion, which is more robust to noise than the classic ones, and an increase in randomness, which facilitates the diversity of the rules obtained. An experimental study shows that this new algorithm is a clear enhancement of RF, especially when applied to data sets with class noise, where standard RF deteriorates notably. The overfitting problem that appears when RF classifies data sets with class noise is solved with RCRF. This new algorithm can be considered a powerful alternative for data with or without class noise.},
  organization = {This work has been supported by the Spanish “Ministerio de Economía y Competitividad” and by “Fondo Europeo de Desarrollo Regional” (FEDER) under Project TEC2015-69496-R.},
  publisher = {Elsevier},
  keywords = {Classification, Ensemble schemes, Random forest, Imprecise probabilities, Uncertainty measures},
  title = {Increasing diversity in Random Forest learning algorithm via imprecise probabilities},
  doi = {10.1016/j.eswa.2017.12.029},
  author = {Abellán Mulero, Joaquín and Mantas Ruiz, Carlos Javier and García Castellano, Francisco Javier and Moral García, Serafín}
}