@inproceedings{70360ca5ed0449be880d47c260030c84,
title = "Does BERT pay attention to cyberbullying?",
abstract = "Social media have brought threats like cyberbullying, which can lead to stress, anxiety, depression, and in some severe cases, suicide attempts. Detecting cyberbullying can help to warn/ block bullies and provide support to victims. However, very few studies have used self-attention-based language models like BERT for cyberbullying detection and they typically only report BERT's performance without examining in depth the reasons for its performance. In this work, we examine the use of BERT for cyberbullying detection on various datasets and attempt to explain its performance by analyzing its attention weights and gradient-based feature importance scores for textual and linguistic features. Our results show that attention weights do not correlate with feature importance scores and thus do not explain the model's performance. Additionally, they suggest that BERT relies on syntactical biases in the datasets to assign feature importance scores to class-related wordsrather than cyberbullying-related linguistic features.",
keywords = "cyberbullying, text classification, BERT, NLP",
author = "Fatma Elsafoury and Stamos Katsigiannis and Wilson, {Steven R.} and Naeem Ramzan",
year = "2021",
month = jul,
day = "11",
doi = "10.1145/3404835.3463029",
language = "English",
series = "SIGIR 2021 - Proceedings of the 44th International ACM SIGIR Conference on Research and Development in Information Retrieval",
publisher = "Association for Computing Machinery",
pages = "1900--1904",
booktitle = "SIGIR 2021 - Proceedings of the 44th International ACM SIGIR Conference on Research and Development in Information Retrieval",
address = "United States",
note = "44th International ACM SIGIR Conference on Research and Development in Information Retrieval, SIGIR 2021 ; Conference date: 11-07-2021 Through 15-07-2021",
}