Abstract
3D hand reconstruction from image data is a widely-studied problem in computer vision and graphics, and has a particularly high relevance for virtual and augmented reality. Although several 3D hand reconstruction approaches leverage hand models as a strong prior to resolve ambiguities and achieve a more robust reconstruction, most existing models account only for the hand shape and poses and do not model the texture. To fill this gap, in this work we present the first parametric texture model of human hands. Our model spans several dimensions of hand appearance variability (e.g., related to gender, ethnicity, or age) and only requires a commodity camera for data acquisition. Experimentally, we demonstrate that our appearance model can be used to tackle a range of challenging problems such as 3D hand reconstruction from a single monocular image. Furthermore, our appearance model can be used to define a neural rendering layer that enables training with a self-supervised photometric loss. We make our model publicly available.
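To make the two key ingredients above concrete, the sketch below shows one common way a parametric texture model and a photometric loss fit together. It assumes a linear (PCA-style) parameterization, i.e. a mean texture plus a weighted combination of appearance basis vectors; this is an assumption for illustration, and all names (mean_texture, basis, alpha), array shapes, and the texture-space comparison are hypothetical, not the released model's API.

    import numpy as np

    # Hypothetical linear texture model: a texture is a per-texel RGB map
    # flattened to a vector. Sizes are small demo values, not the report's.
    N_TEXELS = 64 * 64 * 3   # flattened RGB texture
    N_COMPONENTS = 10        # low-dimensional appearance space

    rng = np.random.default_rng(0)
    mean_texture = rng.random(N_TEXELS).astype(np.float32)  # stand-in data
    basis = rng.standard_normal((N_TEXELS, N_COMPONENTS)).astype(np.float32)

    def synthesize_texture(alpha: np.ndarray) -> np.ndarray:
        """Evaluate the parametric model: the mean texture plus a linear
        combination of appearance basis vectors weighted by alpha."""
        return mean_texture + basis @ alpha

    def photometric_loss(alpha: np.ndarray, observed: np.ndarray,
                         mask: np.ndarray) -> float:
        """Self-supervised photometric loss: compare the synthesized
        texture against observed colors (here assumed already mapped
        into texture space), restricted to visible texels via mask."""
        diff = (synthesize_texture(alpha) - observed) * mask
        return float(np.sum(diff ** 2) / max(mask.sum(), 1))

    # Toy usage: zero parameters reproduce the mean texture.
    alpha = np.zeros(N_COMPONENTS, dtype=np.float32)
    observed = rng.random(N_TEXELS).astype(np.float32)
    mask = np.ones(N_TEXELS, dtype=np.float32)
    print(photometric_loss(alpha, observed, mask))

In an actual pipeline the comparison would happen in image space through a differentiable renderer, which is the role the report's neural rendering layer plays; the texture-space version above only illustrates the structure of the loss.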
BibTeX
@techreport{Qian_report2020,
  TITLE       = {Parametric Hand Texture Model for {3D} Hand Reconstruction and Personalization},
  AUTHOR      = {Qian, Neng and Wang, Jiayi and Mueller, Franziska and Bernard, Florian and Golyanik, Vladislav and Theobalt, Christian},
  LANGUAGE    = {eng},
  ISSN        = {0946-011X},
  NUMBER      = {MPI-I-2020-4-001},
  INSTITUTION = {Max-Planck-Institut f{\"u}r Informatik},
  ADDRESS     = {Saarbr{\"u}cken},
  YEAR        = {2020},
  ABSTRACT    = {3D hand reconstruction from image data is a widely-studied problem in computer vision and graphics, and has a particularly high relevance for virtual and augmented reality. Although several 3D hand reconstruction approaches leverage hand models as a strong prior to resolve ambiguities and achieve a more robust reconstruction, most existing models account only for the hand shape and poses and do not model the texture. To fill this gap, in this work we present the first parametric texture model of human hands. Our model spans several dimensions of hand appearance variability (e.g., related to gender, ethnicity, or age) and only requires a commodity camera for data acquisition. Experimentally, we demonstrate that our appearance model can be used to tackle a range of challenging problems such as 3D hand reconstruction from a single monocular image. Furthermore, our appearance model can be used to define a neural rendering layer that enables training with a self-supervised photometric loss. We make our model publicly available.},
  TYPE        = {Research Report},
}
Endnote
%0 Report
%A Qian, Neng
%A Wang, Jiayi
%A Mueller, Franziska
%A Bernard, Florian
%A Golyanik, Vladislav
%A Theobalt, Christian
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%+ Computer Graphics, MPI for Informatics, Max Planck Society
%T Parametric Hand Texture Model for 3D Hand Reconstruction and Personalization
%G eng
%U http://hdl.handle.net/21.11116/0000-0006-9128-9
%Y Max-Planck-Institut für Informatik
%C Saarbrücken
%D 2020
%P 37 p.
%X 3D hand reconstruction from image data is a widely-studied problem in computer vision and graphics, and has a particularly high relevance for virtual and augmented reality. Although several 3D hand reconstruction approaches leverage hand models as a strong prior to resolve ambiguities and achieve a more robust reconstruction, most existing models account only for the hand shape and poses and do not model the texture. To fill this gap, in this work we present the first parametric texture model of human hands. Our model spans several dimensions of hand appearance variability (e.g., related to gender, ethnicity, or age) and only requires a commodity camera for data acquisition. Experimentally, we demonstrate that our appearance model can be used to tackle a range of challenging problems such as 3D hand reconstruction from a single monocular image. Furthermore, our appearance model can be used to define a neural rendering layer that enables training with a self-supervised photometric loss. We make our model publicly available.
%K hand texture model, appearance modeling, hand tracking, 3D hand reconstruction
%B Research Report
%@ false