2020
Fan, Min, Antle, Alissa N.
An English Language Learning Study with Rural Chinese Children Using an Augmented Reality App Proceedings Article
In: Proceedings of the Interaction Design and Children Conference, pp. 385–397, Association for Computing Machinery, London, United Kingdom, 2020, ISBN: 9781450379816.
@inproceedings{10.1145/3392063.3394409,
title = {An English Language Learning Study with Rural Chinese Children Using an Augmented Reality App},
author = {Min Fan and Alissa N. Antle},
url = {https://doi.org/10.1145/3392063.3394409},
doi = {10.1145/3392063.3394409},
isbn = {9781450379816},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Proceedings of the Interaction Design and Children Conference},
pages = {385–397},
publisher = {Association for Computing Machinery},
address = {London, United Kingdom},
series = {IDC '20},
abstract = {Augmented reality (AR) apps have the potential to support early English learning for children. However, few studies have investigated how children from rural low socio-economic status (SES) schools, who learn English as a foreign language (EFL), used and perceived an AR app in language learning. In this paper, we present an exploratory case study of 11 EFL children and four school teachers from a Chinese rural county who used an AR app (called AR PhonoBlocks) for one week. The goal of the app is to support children in learning the alphabetic principle of English. The key features are dynamic colour cues overlaid on 3D physical letters. We present the results, including themes related to children's interactional behaviours and motivations, and rural teachers' feedback on the opportunities and concerns around using an AR app in a rural school context. We suggest design implications and future research directions for designing AR apps to support EFL children from low SES schools in early English learning.},
keywords = {augmented reality, children, developing country, english language learning, phonoblocks, phonological awareness, school},
pubstate = {published},
tppubtype = {inproceedings}
}
Cheung, Victor, Antle, Alissa N., Sarker, Shubhra, Fan, Min, Fan, Jianyu, Pasquier, Philippe
Techniques for Augmented-Tangibles on Mobile Devices for Early Childhood Learning Proceedings Article
In: Proceedings of the Interaction Design and Children Conference, pp. 589–601, Association for Computing Machinery, London, United Kingdom, 2020, ISBN: 9781450379816.
@inproceedings{10.1145/3392063.3394412,
title = {Techniques for Augmented-Tangibles on Mobile Devices for Early Childhood Learning},
author = {Victor Cheung and Alissa N. Antle and Shubhra Sarker and Min Fan and Jianyu Fan and Philippe Pasquier},
url = {https://dl.acm.org/doi/abs/10.1145/3392063.3394412},
doi = {10.1145/3392063.3394412},
isbn = {9781450379816},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Proceedings of the Interaction Design and Children Conference},
pages = {589–601},
publisher = {Association for Computing Machinery},
address = {London, United Kingdom},
series = {IDC '20},
abstract = {Integrating physical learning materials with mobile device applications may have benefits for early childhood learning. We present three techniques for creating a hybrid tangible-augmented reality (T-AR) enabling technology platform. This platform enables researchers to develop applications that use readily available physical learning materials, such as letters, numbers, symbols or shapes. The techniques are based on visual markers; computer vision and machine learning; and capacitive touch. We describe implementation details and demonstrate these techniques through a use case of a reading tablet app that uses wooden/plastic letters for input and augmented output. Our comparative analysis revealed that the machine-learning technique sensed different physical letter sets most flexibly, but currently has variable accuracy affected by lighting, as well as tracking lag. Lastly, we demonstrate how this enabling technology can be generalized to a variety of early learning apps through a second use case with physical numbers.},
keywords = {augmented reality, early childhood learning, education mobile apps, phonoblocks, tablets, tangible interaction},
pubstate = {published},
tppubtype = {inproceedings}
}
2018
Fan, Min, Baishya, Uddipana, McLaren, Elgin-Skye, Antle, Alissa N., Sarker, Shubhra, Vincent, Amal
Block Talks: A Tangible and Augmented Reality Toolkit for Children to Learn Sentence Construction Proceedings Article
In: Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems, pp. 1–6, Association for Computing Machinery, Montreal QC, Canada, 2018, ISBN: 9781450356213.
@inproceedings{10.1145/3170427.3188576,
title = {Block Talks: A Tangible and Augmented Reality Toolkit for Children to Learn Sentence Construction},
author = {Min Fan and Uddipana Baishya and Elgin-Skye McLaren and Alissa N. Antle and Shubhra Sarker and Amal Vincent},
url = {https://doi.org/10.1145/3170427.3188576},
doi = {10.1145/3170427.3188576},
isbn = {9781450356213},
year = {2018},
date = {2018-01-01},
booktitle = {Extended Abstracts of the 2018 CHI Conference on Human Factors in Computing Systems},
pages = {1–6},
publisher = {Association for Computing Machinery},
address = {Montreal QC, Canada},
series = {CHI EA '18},
abstract = {The Block Talks toolkit combines the educational potential of tangible computing and augmented reality (AR) technologies to help children learn English sentence construction. Although examples of tangible AR reading systems for children currently exist, few focus specifically on learning sentence structure. Block Talks was developed using ordinary teaching supplies including letter tiles and blocks that can be manipulated to form words and sentences. A companion app allows children to scan these sentences to receive audio and AR feedback. Block Talks takes advantage of colour cues to draw children's attention to sentence structure patterns. This paper outlines existing tangible and AR systems for literacy learning, details the Block Talks design rationale, and concludes with a discussion of the advantages of using a combined tangible and AR approach for teaching sentence construction.},
keywords = {augmented reality, blocks, children, colour cues, sentence construction, Tangible User Interfaces},
pubstate = {published},
tppubtype = {inproceedings}
}
2016
Radu, Iulian, Antle, Alissa N.
All Creatures Great and Small: Becoming Other Organisms through the EmbodySuit Proceedings Article
In: Proceedings of the 15th International Conference on Interaction Design and Children, pp. 751–758, Association for Computing Machinery, Manchester, United Kingdom, 2016, ISBN: 9781450343138.
@inproceedings{10.1145/2930674.2955209,
title = {All Creatures Great and Small: Becoming Other Organisms through the EmbodySuit},
author = {Iulian Radu and Alissa N. Antle},
url = {https://doi.org/10.1145/2930674.2955209},
doi = {10.1145/2930674.2955209},
isbn = {9781450343138},
year = {2016},
date = {2016-01-01},
booktitle = {Proceedings of the 15th International Conference on Interaction Design and Children},
pages = {751–758},
publisher = {Association for Computing Machinery},
address = {Manchester, United Kingdom},
series = {IDC '16},
abstract = {The EmbodySuit augmented human system allows students to experience life from the perspectives of different organisms, by virtually and physically becoming birds, spiders, ants and even bacteria. Inspired by current advances in nanorobotics, Star Trek's holodeck and The Magic School Bus, EmbodySuit makes learning embodied and experiential. The student becomes a real organism, part of a real, natural ecosystem. The student's senses are adapted to those of the organism, and the student's actions map to the actions of an organism-sized robot inside a real environment. Our system is based on our projection of advances that will occur in the next 35 years in augmented reality, cybernetics and micro-robotics. By about 2050, EmbodySuit-type systems will be feasible to prototype, enabling us to address key research questions in classroom scientific inquiry; experiential and embodied learning; technology development; and design for 3D embodied cyber-systems.},
keywords = {augmented reality, children, cyborgs, design, education, embodied empathy, experiential learning, nanorobots},
pubstate = {published},
tppubtype = {inproceedings}
}
Antle, Alissa N., Matkin, Brendan, Warren, Jill
The Story of Things: Awareness through Happenstance Interaction Proceedings Article
In: Proceedings of the 15th International Conference on Interaction Design and Children, pp. 745–750, Association for Computing Machinery, Manchester, United Kingdom, 2016, ISBN: 9781450343138.
@inproceedings{10.1145/2930674.2955211,
title = {The Story of Things: Awareness through Happenstance Interaction},
author = {Alissa N. Antle and Brendan Matkin and Jill Warren},
url = {https://doi.org/10.1145/2930674.2955211},
doi = {10.1145/2930674.2955211},
isbn = {9781450343138},
year = {2016},
date = {2016-01-01},
booktitle = {Proceedings of the 15th International Conference on Interaction Design and Children},
pages = {745–750},
publisher = {Association for Computing Machinery},
address = {Manchester, United Kingdom},
series = {IDC '16},
abstract = {The Story of Things (SoT) system enables children to learn the story behind every object they touch in a typical day. Inspired by Living Media and the Internet of Things (IoT), our goal is to change children's awareness through hands-on interaction with the world they live in. A back-of-the-hand display is activated by stick-on finger sensors when a child touches an object. They can tap the display to select from a number of stories stored in a crowd-sourced database about that object: the materials it was made from; the processes used to make it; how it impacts their body; how it will be disposed of; environmental or social rights challenges associated with the object; and how they can take positive action. This information is overlaid on the world through an augmented-reality contact lens. SoT will also enable children to see a trace of each day and, in doing so, help them better understand their environmental footprint and how their actions and choices can change the world for the better or worse.},
keywords = {augmented reality, children, environmental education, hands-on interaction, happenstance interaction, sensing systems, situated learning, wearable displays},
pubstate = {published},
tppubtype = {inproceedings}
}