@article{10481/84822,
  author       = {Moral García, Serafín and Benítez, María D.},
  title        = {Upgrading the Fusion of Imprecise Classifiers},
  journal      = {Entropy},
  year         = {2023},
  month        = jul,
  volume       = {25},
  number       = {7},
  pages        = {1088},
  publisher    = {MDPI},
  doi          = {10.3390/e25071088},
  url          = {https://hdl.handle.net/10481/84822},
  keywords     = {Imprecise classification, Credal Decision Trees, Ensembles, Bagging, Combination technique},
  organization = {UGR-FEDER funds under Project A-TIC-344-UGR20; FEDER/Junta de Andalucía-Consejería de Transformación Económica, Industria, Conocimiento y Universidades under Project P20\_00159},
  abstract     = {Imprecise classification is a relatively new task within Machine Learning. The difference with standard classification is that not only is one state of the variable under study determined, a set of states that do not have enough information against them and cannot be ruled out is determined as well. For imprecise classification, a model called an Imprecise Credal Decision Tree (ICDT) that uses imprecise probabilities and maximum of entropy as the information measure has been presented. A difficult and interesting task is to show how to combine this type of imprecise classifiers. A procedure based on the minimum level of dominance has been presented; though it represents a very strong method of combining, it has the drawback of an important risk of possible erroneous prediction. In this research, we use the second-best theory to argue that the aforementioned type of combination can be improved through a new procedure built by relaxing the constraints. The new procedure is compared with the original one in an experimental study on a large set of datasets, and shows improvement.},
}