@article{10481/49082,
  author    = {Abellán Mulero, Joaquín and García Castellano, Francisco Javier},
  title     = {Improving the Naive Bayes Classifier via a Quick Variable Selection Method Using Maximum of Entropy},
  journal   = {Entropy},
  volume    = {19},
  number    = {6},
  pages     = {247},
  year      = {2017},
  month     = {5},
  publisher = {MDPI},
  doi       = {10.3390/e19060247},
  url       = {http://hdl.handle.net/10481/49082},
  keywords  = {Variable selection; Classification; Naive Bayes; Imprecise probabilities; Uncertainty measures},
  abstract  = {Variable selection methods play an important role in the field of attribute mining. The Naive Bayes (NB) classifier is a very simple and popular classification method that yields good results in a short processing time, which makes it very appropriate for very large datasets. Its performance, however, depends strongly on the relationships between the variables. The Info-Gain (IG) measure, which is based on entropy, can be used as a quick variable selection method: it ranks the attribute variables by the information they provide about a variable under study, estimated from a dataset. Its main drawback is that it is always non-negative, so an information threshold must be set for each dataset to select the most important variables. We introduce here a new quick variable selection method that generalizes the one based on the Info-Gain measure. It uses imprecise probabilities and the maximum entropy measure to select the most informative variables without setting a threshold. Combined with the Naive Bayes classifier, this new variable selection method improves on the original one and provides a valuable tool for handling datasets with a very large number of features and a huge amount of data, where more complex methods are not computationally feasible.},
  note      = {This work has been supported by the Spanish “Ministerio de Economía y Competitividad” and by “Fondo Europeo de Desarrollo Regional” (FEDER) under Project TEC2015-69496-R.},
}
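
For context when skimming this entry, the sketch below illustrates the two measures the abstract contrasts: the classical Info-Gain ranking, and an imprecise-probability variant that scores attributes with the maximum entropy over an Imprecise Dirichlet Model (IDM) credal set. This is a minimal illustration written for this record, not the authors' code; the IDM parameter s = 1, the frequency weights, the water-filling step, and the toy data are all assumptions about how such a measure is typically computed, and the paper's exact formulation may differ.

# Illustrative sketch only (not the authors' code): classical Info-Gain,
# plus a rough imprecise-probability variant using the maximum entropy
# over an Imprecise Dirichlet Model (IDM) credal set.
from collections import Counter
from math import log2

def entropy(labels):
    """Shannon entropy H(C) of a sequence of class labels."""
    n = len(labels)
    return -sum((k / n) * log2(k / n) for k in Counter(labels).values())

def info_gain(feature_values, labels):
    """Classical IG(C; X) = H(C) - H(C | X). Always >= 0, which is the
    threshold problem the abstract points out."""
    n = len(labels)
    groups = {}
    for x, c in zip(feature_values, labels):
        groups.setdefault(x, []).append(c)
    return entropy(labels) - sum(len(g) / n * entropy(g)
                                 for g in groups.values())

def max_entropy_idm(counts, s=1.0):
    """Entropy of the maximum-entropy distribution in the IDM credal set:
    water-fill the extra mass s onto the least frequent classes, making
    the counts as uniform as the credal set allows (assumed procedure)."""
    c = sorted(float(v) for v in counts)
    m = s
    while m > 1e-12:
        lo = c[0]
        k = sum(1 for v in c if v == lo)           # classes at the minimum
        higher = [v for v in c if v > lo]
        step = (min(higher) - lo) if higher else float("inf")
        add = min(m / k, step)                     # raise the minimum group
        c = [v + add if v == lo else v for v in c]
        m -= add * k
    total = sum(counts) + s
    return -sum(p * log2(p) for p in (v / total for v in c) if p > 0)

def imprecise_info_gain(feature_values, labels, s=1.0):
    """IIG(C; X) = H*(C) - sum_x p(x) H*(C | x), with H* the maximum
    entropy on the IDM credal set. Unlike classical IG it can be negative,
    so attributes can be kept whenever IIG > 0, with no threshold."""
    n = len(labels)
    classes = sorted(set(labels))
    groups = {}
    for x, c in zip(feature_values, labels):
        groups.setdefault(x, []).append(c)
    h_star = max_entropy_idm([labels.count(cl) for cl in classes], s)
    h_cond = sum(len(g) / n *
                 max_entropy_idm([g.count(cl) for cl in classes], s)
                 for g in groups.values())
    return h_star - h_cond

# Toy usage: X1 carries no information about C, X2 determines C exactly.
X1 = ["a", "a", "a", "b", "b", "b"]
X2 = ["a", "a", "b", "a", "a", "b"]
C  = ["+", "+", "-", "+", "+", "-"]
print(info_gain(X1, C), info_gain(X2, C))                      # ~0.0    ~0.918
print(imprecise_info_gain(X1, C), imprecise_info_gain(X2, C))  # ~-0.015 ~0.198

Note how the uninformative attribute X1 scores zero under classical IG, which can never go negative, while it drops below zero under the imprecise variant: conditioning on an attribute shrinks each sample and widens the credal set, raising the conditional maximum entropy. A parameter-free "keep attributes with a positive score" rule then becomes possible, which appears to be how the method described in the abstract avoids a dataset-specific threshold.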