@misc{12847,
  keywords = {Conference},
  author   = {Carolyn Mair and Martin Shepperd and Magne J{\o}rgensen},
  title    = {Cognitive Bias Remains After De-Biasing Information},
  abstract = {Context: The role of humans is increasingly appreciated in the quest to improve software project prediction. Prediction accuracy depends on many factors, not least cognitive biases, which influence thinking and can often lead to poor decisions. Objective: The strong impact of the anchoring bias is well known. This study investigated the impact of anchoring on professional software engineers{\textquoteright} estimation accuracy after they had attended a workshop specifically aimed at addressing this bias. We hypothesised that the bias would remain despite the workshop and that location could be a confound. Method: We ran four studies with a total of 118 software engineers to explore the impact of the anchoring bias on the accuracy of productivity estimates made by professional software engineers. The participants attended a workshop introducing the concept of cognitive bias and its influence on decision making, and were introduced to strategies for de-biasing. Immediately after the workshop, participants were randomly allocated to a high or a low anchor group and asked to estimate their productivity in lines of code per hour (LOC/hr) on a recently completed project. Results: The anchor had a large effect on estimates (partial $\eta^2 \approx 0.22$; $p < 0.0001$). The median estimate of hourly productivity was 30 LOC/hr for the high anchor and 7 LOC/hr for the low anchor. Lower values might reasonably be interpreted as more realistic estimates of hourly productivity. Conclusion: The impact of bias upon expert judgement is substantial, seemingly ubiquitous, and difficult to eradicate. This study demonstrates the pervasive nature of the anchoring bias despite de-biasing information.},
  year  = {2013},
  month = {July},
}