@misc{10481/105584,
  author       = {García Fernández, Pablo},
  title        = {Consumo energético en LLM. Medida del consumo energético en modelos de aprendizaje profundo},
  year         = {2025},
  month        = {7},
  publisher    = {Departamento ICAR. Universidad de Granada},
  organization = {Departamento de Ingeniería de Computadores, Automática y Robótica. Universidad de Granada. This work has been partially funded under projects PID2022-137461NB-C31 and PID2023-147409NB-C21, financed by MICIU/AEI/10.13039/501100011033 and by the ERDF, “a way of making Europe”.},
  url          = {https://hdl.handle.net/10481/105584},
  abstract     = {One of the biggest challenges facing today's society is reducing energy consumption in order to preserve the planet's sustainability. Existing reports claim that the electricity demand of computing is rising exponentially, so it is essential to have tools that track the energy consumption of computer equipment, both as a whole and per component, in order to find new ways of reducing it. In this project we propose to measure the energy efficiency of a selection of deep learning models, specifically Large Language Models (LLMs). Within this selection, the models are differentiated by the task they perform, such as text generation, programming code generation, or translation between languages. The models are run on different computers and servers so that the results obtained can be compared. Different energy measurement software packages (CodeCarbon, Eco2AI) and different metrics are used to evaluate the performance of these models against their energy consumption. The energy consumption and carbon footprint of the chosen models are also compared against a human reference. Finally, a series of clustering techniques is applied to the results in order to identify patterns and to support the comparisons and conclusions reached.},
}
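% Note: the abstract describes wrapping LLM runs with energy-measurement software such as
% CodeCarbon and Eco2AI. The sketch below is only an illustration of that kind of measurement,
% not the code used in the cited work; the project name, model, prompt, and output file are
% placeholders chosen here for the example.
%
% from codecarbon import EmissionsTracker      # CodeCarbon energy/emissions tracker
% from transformers import pipeline            # Hugging Face text-generation pipeline
%
% tracker = EmissionsTracker(project_name="llm-energy-demo", output_file="emissions.csv")
% generator = pipeline("text-generation", model="gpt2")  # placeholder model
%
% tracker.start()
% try:
%     output = generator("Write a short paragraph about energy efficiency.",
%                        max_new_tokens=100)
% finally:
%     emissions_kg = tracker.stop()  # estimated kg CO2-eq for the tracked block
%
% print(output[0]["generated_text"])
% print(f"Estimated emissions: {emissions_kg:.6f} kg CO2-eq")
%
% Eco2AI exposes a similar start/stop interface (eco2ai.Tracker), so the same run can be
% instrumented with either tool and the resulting CSV logs compared, as the abstract suggests.
% (Lines are prefixed with "%" so BibTeX treats this note as a comment.)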