@article{Wang_Liu_Chen_2021,
  title        = {Forecast Aggregation via Peer Prediction},
  author       = {Wang, Juntao and Liu, Yang and Chen, Yiling},
  journal      = {Proceedings of the AAAI Conference on Human Computation and Crowdsourcing},
  volume       = {9},
  number       = {1},
  pages        = {131--142},
  year         = {2021},
  month        = oct,
  doi          = {10.1609/hcomp.v9i1.18946},
  url          = {https://ojs.aaai.org/index.php/HCOMP/article/view/18946},
  abstractNote = {Crowdsourcing enables the solicitation of forecasts on a variety of prediction tasks from distributed groups of people. How to aggregate the solicited forecasts, which may vary in quality, into an accurate final prediction remains a challenging yet critical question. Studies have found that weighing expert forecasts more in aggregation can improve the accuracy of the aggregated prediction. However, this approach usually requires access to the historical performance data of the forecasters, which are often not available. In this paper, we study the problem of aggregating forecasts without having historical performance data. We propose using peer prediction methods, a family of mechanisms initially designed to truthfully elicit private information in the absence of ground truth verification, to assess the expertise of forecasters, and then using this assessment to improve forecast aggregation. We evaluate our peer-prediction-aided aggregators on a diverse collection of 14 human forecast datasets. Compared with a variety of existing aggregators, our aggregators achieve a significant and consistent improvement on aggregation accuracy measured by the Brier score and the log score. Our results reveal the effectiveness of identifying experts to improve aggregation even without historical data.}
}