@comment{This file was generated by bib2html.pl version 0.94}
@comment{written by Patrick Riley}
@comment{This file came from Gal A. Kaminka's publication pages at}
@comment{http://www.cs.biu.ac.il/~galk/publications/}

@article{frontiers22shify,
  author   = {Treger, Shify and Kaminka, Gal A.},
  title    = {Towards Computational Modeling of Human Goal Recognition},
  journal  = {Frontiers in Artificial Intelligence},
  volume   = {4},
  year     = {2022},
  issn     = {2624-8212},
  doi      = {10.3389/frai.2021.737327},
  url      = {https://www.frontiersin.org/article/10.3389/frai.2021.737327},
  abstract = {Recently, we are seeing the emergence of plan- and goal-recognition algorithms which are based on the principle of \textit{rationality}. These avoid the use of a plan library that compactly encodes all possible observable plans, and instead generate plans dynamically to match the observations. However, recent experiments by Berkovitz (\emph{Berkovitz, The effect of spatial cognition and context on robot movement legibility in human-robot collaboration, 2018}) show that in many cases, humans seem to have reached quick (correct) decisions when observing motions which were far from rational (optimal), while optimal motions were slower to be recognized. Intrigued by these findings, we experimented with a variety of rationality-based recognition algorithms on the same data. The results clearly show that none of the algorithms reported in the literature accounts for human subject decisions, even in this simple task. This is our first contribution. We hypothesize that humans utilize plan-recognition in service of goal recognition, i.e., match observations to known plans, and use the set of recognized plans to conclude as to the likely goals. To test this hypothesis, a second contribution in this paper is the introduction of a novel offline recognition algorithm. While preliminary, the algorithm accounts for the results reported by Berkovitz significantly better than the existing algorithms. Moreover, the proposed algorithm marries rationality-based and plan-library based methods seamlessly.},
}