Mental health is a crisis for learners globally, and digital support is increasingly seen as a critical resource. Concurrently, Intelligent Social Agents receive exponentially more engagement than other conversational systems, but their use in digital therapy provision is nascent. A survey of 1006 student users of the Intelligent Social Agent, Replika, investigated participants’ loneliness, perceived social support, use patterns, and beliefs about Replika. We found participants were more lonely than typical student populations but still perceived high social support. Many used Replika in multiple, overlapping ways—as a friend, a therapist, and an intellectual mirror. Many also held overlapping and often conflicting beliefs about Replika—calling it a machine, an intelligence, and a human. Critically, 3% reported that Replika halted their suicidal ideation. A comparative analysis of this group with the wider participant population is provided.
%0 Journal Article
%1 maples2024loneliness
%A Maples, Bethanie
%A Cerit, Merve
%A Vishwanath, Aditya
%A Pea, Roy
%D 2024
%J npj Mental Health Research
%K ai artificial depression health intelligence mental
%N 1
%P 4
%R 10.1038/s44184-023-00047-6
%T Loneliness and suicide mitigation for students using GPT3-enabled chatbots
%U https://doi.org/10.1038/s44184-023-00047-6
%V 3
%X Mental health is a crisis for learners globally, and digital support is increasingly seen as a critical resource. Concurrently, Intelligent Social Agents receive exponentially more engagement than other conversational systems, but their use in digital therapy provision is nascent. A survey of 1006 student users of the Intelligent Social Agent, Replika, investigated participants’ loneliness, perceived social support, use patterns, and beliefs about Replika. We found participants were more lonely than typical student populations but still perceived high social support. Many used Replika in multiple, overlapping ways—as a friend, a therapist, and an intellectual mirror. Many also held overlapping and often conflicting beliefs about Replika—calling it a machine, an intelligence, and a human. Critically, 3% reported that Replika halted their suicidal ideation. A comparative analysis of this group with the wider participant population is provided.
% Journal article (article-number-only venue: "4" is the article number, not a page range).
% Cleaned from a BibSonomy auto-export: removed internal bookkeeping fields
% (added-at, biburl, interhash, intrahash, refid, timestamp) and the url field,
% which only duplicated the DOI via the https://doi.org/ resolver prefix.
@article{maples2024loneliness,
  author   = {Maples, Bethanie and Cerit, Merve and Vishwanath, Aditya and Pea, Roy},
  title    = {Loneliness and Suicide Mitigation for Students Using {GPT3}-Enabled Chatbots},
  journal  = {npj Mental Health Research},
  year     = {2024},
  volume   = {3},
  number   = {1},
  pages    = {4},
  doi      = {10.1038/s44184-023-00047-6},
  issn     = {2731-4251},
  keywords = {ai artificial depression health intelligence mental},
  abstract = {Mental health is a crisis for learners globally, and digital support is increasingly seen as a critical resource. Concurrently, Intelligent Social Agents receive exponentially more engagement than other conversational systems, but their use in digital therapy provision is nascent. A survey of 1006 student users of the Intelligent Social Agent, Replika, investigated participants’ loneliness, perceived social support, use patterns, and beliefs about Replika. We found participants were more lonely than typical student populations but still perceived high social support. Many used Replika in multiple, overlapping ways—as a friend, a therapist, and an intellectual mirror. Many also held overlapping and often conflicting beliefs about Replika—calling it a machine, an intelligence, and a human. Critically, 3% reported that Replika halted their suicidal ideation. A comparative analysis of this group with the wider participant population is provided.},
}