@article{Gupta_Shi_Gimpel_Sachan_2022,
  title    = {Deep Clustering of Text Representations for Supervision-Free Probing of Syntax},
  author   = {Gupta, Vikram and Shi, Haoyue and Gimpel, Kevin and Sachan, Mrinmaya},
  journal  = {Proceedings of the AAAI Conference on Artificial Intelligence},
  volume   = {36},
  number   = {10},
  pages    = {10720--10728},
  year     = {2022},
  month    = jun,
  doi      = {10.1609/aaai.v36i10.21317},
  url      = {https://ojs.aaai.org/index.php/AAAI/article/view/21317},
  abstract = {We explore deep clustering of multilingual text representations for unsupervised model interpretation and induction of syntax. As these representations are high-dimensional, out-of-the-box methods like K-means do not work well. Thus, our approach jointly transforms the representations into a lower-dimensional cluster-friendly space and clusters them. We consider two notions of syntax: Part of Speech Induction (POSI) and Constituency Labelling (CoLab) in this work. Interestingly, we find that Multilingual BERT (mBERT) contains surprising amount of syntactic knowledge of English; possibly even as much as English BERT (E-BERT). Our model can be used as a supervision-free probe which is arguably a less-biased way of probing. We find that unsupervised probes show benefits from higher layers as compared to supervised probes. We further note that our unsupervised probe utilizes E-BERT and mBERT representations differently, especially for POSI. We validate the efficacy of our probe by demonstrating its capabilities as a unsupervised syntax induction technique. Our probe works well for both syntactic formalisms by simply adapting the input representations. We report competitive performance of our probe on 45-tag English POSI, state-of-the-art performance on 12-tag POSI across 10 languages, and competitive results on CoLab. We also perform zero-shot syntax induction on resource impoverished languages and report strong results.},
}