@article{Luo_2019,
  author   = {Luo, Siqiang},
  title    = {Distributed {PageRank} Computation: An Improved Theoretical Study},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {33},
  number   = {1},
  pages    = {4496--4503},
  year     = {2019},
  month    = jul,
  doi      = {10.1609/aaai.v33i01.33014496},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/4363},
  abstract = {PageRank is a classic measure that effectively evaluates the node
              importance in large graphs, and has been applied in numerous
              applications ranging from data mining, Web algorithms,
              recommendation systems, load balancing, search, and identifying
              connectivity structures. Computing PageRank for large graphs is
              challenging and this has motivated the studies of distributed
              algorithms to compute PageRank. Previously, little works have been
              spent on the distributed PageRank algorithms with provably desired
              complexity and accuracy. Given a graph with $n$ nodes and if we
              model the distributed computation model as the well-known
              congested clique model, the state-of-the-art algorithm takes
              $O(\sqrt{\log n})$ communication rounds to approximate the
              PageRank value of each node in $G$, with a probability at least
              $1 - 1/n$. In this paper, we present improved distributed
              algorithms for computing PageRank. Particularly, our algorithm
              performs $O(\log\log\sqrt{n})$ rounds (a significant improvement
              compared with $O(\sqrt{\log n})$ rounds) to approximate the
              PageRank values with a probability at least $1 - 1/n$. Moreover,
              under a reasonable assumption, our algorithm also reduces the edge
              bandwidth (i.e., the maximum communication message size that can
              be exchanged through an edge during a communication round) by a
              $O(\log n)$ factor compared with the state-of-the-art algorithm.
              Finally, we show that our algorithm can be adapted to efficiently
              compute another variant of PageRank, i.e., the batch one-hop
              Personalized PageRanks, in $O(\log\log n)$ communication rounds.},
}