@article{15395,
  author    = {J{\o}rgensen, Magne},
  title     = {Evaluating Probabilistic Software Development Effort Estimates Maximizing Informativeness Subject to Calibration},
  abstract  = {Context: Probabilistic effort estimates inform about the uncertainty and may give useful input to plans, budgets and investment analyses. Objective \& Method: This paper introduces, motivates and illustrates two principles on how to evaluate the accuracy and other performance criteria of probabilistic effort estimates in software development contexts. Results: The first principle emphasizes a consistency between the estimation error measure and the loss function of the chosen type of probabilistic single point effort estimates. The second principle points at the importance of not just measuring calibration, but also informativeness of estimated prediction intervals and distributions. The relevance of the evaluation principles is illustrated by a performance evaluation of estimates from twenty-eight software professionals using two different uncertainty assessment methods to estimate the effort of the same thirty software maintenance tasks.},
  journal   = {Information and Software Technology},
  volume    = {115},
  pages     = {93--96},
  year      = {2019},
  publisher = {Elsevier},
}