@article{10.1145/3478513.3480553,
  author     = {Briedis, Karlis Martins and Djelouah, Abdelaziz and Meyer, Mark and McGonigal, Ian and Gross, Markus and Schroers, Christopher},
  title      = {Neural Frame Interpolation for Rendered Content},
  year       = {2021},
  issue_date = {December 2021},
  publisher  = {Association for Computing Machinery},
  address    = {New York, NY, USA},
  volume     = {40},
  number     = {6},
  issn       = {0730-0301},
  url        = {https://doi.org/10.1145/3478513.3480553},
  doi        = {10.1145/3478513.3480553},
  abstract   = {The demand for creating rendered content continues to drastically grow. As it often is extremely computationally expensive and thus costly to render high-quality computer-generated images, there is a high incentive to reduce this computational burden. Recent advances in learning-based frame interpolation methods have shown exciting progress but still have not achieved the production-level quality which would be required to render fewer pixels and achieve savings in rendering times and costs. Therefore, in this paper we propose a method specifically targeted to achieve high-quality frame interpolation for rendered content. In this setting, we assume that we have full input for every n-th frame in addition to auxiliary feature buffers that are cheap to evaluate (e.g. depth, normals, albedo) for every frame. We propose solutions for leveraging such auxiliary features to obtain better motion estimates, more accurate occlusion handling, and to correctly reconstruct non-linear motion between keyframes. With this, our method is able to significantly push the state-of-the-art in frame interpolation for rendered content and we are able to obtain production-level quality results.},
  journal    = {ACM Trans. Graph.},
  month      = dec,
  articleno  = {239},
  numpages   = {13},
  keywords   = {deep learning, motion estimation, frame interpolation}
}