{ "id": "2208.14433", "version": "v1", "published": "2022-08-30T17:53:17.000Z", "updated": "2022-08-30T17:53:17.000Z", "title": "A Portable Multiscopic Camera for Novel View and Time Synthesis in Dynamic Scenes", "authors": [ "Tianjia Zhang", "Yuen-Fui Lau", "Qifeng Chen" ], "comment": "To be presented at IROS2022", "categories": [ "cs.CV", "cs.GR" ], "abstract": "We present a portable multiscopic camera system with a dedicated model for novel view and time synthesis in dynamic scenes. Our goal is to render high-quality images for a dynamic scene from any viewpoint at any time using our portable multiscopic camera. To achieve such novel view and time synthesis, we develop a physical multiscopic camera equipped with five cameras to train a neural radiance field (NeRF) in both time and spatial domains for dynamic scenes. Our model maps a 6D coordinate (3D spatial position, 1D temporal coordinate, and 2D viewing direction) to view-dependent and time-varying emitted radiance and volume density. Volume rendering is applied to render a photo-realistic image at a specified camera pose and time. To improve the robustness of our physical camera, we propose a camera parameter optimization module and a temporal frame interpolation module to promote information propagation across time. We conduct experiments on both real-world and synthetic datasets to evaluate our system, and the results show that our approach outperforms alternative solutions qualitatively and quantitatively. Our code and dataset are available at https://yuenfuilau.github.io.", "revisions": [ { "version": "v1", "updated": "2022-08-30T17:53:17.000Z" } ], "analyses": { "keywords": [ "dynamic scene", "novel view", "time synthesis", "outperforms alternative solutions", "temporal frame interpolation module" ], "note": { "typesetting": "TeX", "pages": 0, "language": "en", "license": "arXiv", "status": "editable" } } }