|  | 
|  | 1 | +<!DOCTYPE html> | 
|  | 2 | +<html lang="en"><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8"> | 
|  | 3 | + <meta name="description" content="Project Page for Infinite Nature: Perpetual View Generation"> | 
|  | 4 | + <meta name="author" content="Andrew"> | 
|  | 5 | + | 
|  | 6 | + <meta property="og:image" content="https://infinite-nature.github.io/teaser.png"> | 
|  | 7 | + <meta property="og:url" content="https://infinite-nature.github.io/"> | 
|  | 8 | + <meta property="og:type" content="website"> | 
|  | 9 | + <meta property="og:title" content="Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image"> | 
|  | 10 | + <meta property='og:video' content='https://www.youtube.com/embed/oXUf6anNAtc'/> | 
|  | 11 | + <meta property="og:description" content="Learning to generate arbitrarily long videos of aerial trajectories through nature scenes."> | 
|  | 12 | + <meta name="twitter:card" content="summary_large_image"> | 
|  | 13 | + <meta name="twitter:creator" content="@ndrewliu"> | 
|  | 14 | + <meta name="twitter:title" content="Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image"> | 
|  | 15 | + <meta name="twitter:description" content="Learning to generate arbitrarily long videos of aerial trajectories through nature scenes."> | 
 <meta name="twitter:url" content="https://infinite-nature.github.io">
|  | 17 | + <meta name="twitter:image" content="https://infinite-nature.github.io/teaser.png"> | 
|  | 18 | + | 
|  | 19 | + <title>Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image</title> | 
|  | 20 | + <link rel="canonical" href="https://infinite-nature.github.io"> | 
|  | 21 | + | 
|  | 22 | + <!-- Bootstrap core CSS --> | 
|  | 23 | + <link href="./index_files/bootstrap.min.css" rel="stylesheet"> | 
|  | 24 | + | 
 <link href="https://fonts.googleapis.com/css?family=Roboto:300" rel="stylesheet">
 <link rel="stylesheet" href="main.css">
 <script src="main.js"></script>
|  | 29 | + <!-- Global site tag (gtag.js) - Google Analytics --> | 
|  | 30 | + <script async src="https://www.googletagmanager.com/gtag/js?id=G-JM2CPK6QLP"></script> | 
|  | 31 | + <script> | 
|  | 32 | + window.dataLayer = window.dataLayer || []; | 
|  | 33 | + function gtag(){dataLayer.push(arguments);} | 
|  | 34 | + gtag('js', new Date()); | 
|  | 35 | + | 
|  | 36 | + gtag('config', 'G-JM2CPK6QLP'); | 
|  | 37 | + </script> | 
|  | 38 | + <script type="text/javascript"> | 
|  | 39 | + // Toggle Display of BibTeX | 
|  | 40 | + function toggleBibtex(articleid) { | 
|  | 41 | + var bib = document.getElementById('bib_'+articleid); | 
|  | 42 | + if (bib) { | 
|  | 43 | + if(bib.className.indexOf('bibtex') != -1) { | 
|  | 44 | + bib.className.indexOf('noshow') == -1?bib.className = 'bibtex noshow':bib.className = 'bibtex'; | 
|  | 45 | + } | 
|  | 46 | + } else { | 
|  | 47 | + return; | 
|  | 48 | + } | 
|  | 49 | + } | 
|  | 50 | + </script> | 
|  | 51 | + | 
|  | 52 | +</head> | 
|  | 53 | + | 
|  | 54 | +<style> | 
|  | 55 | +/************************************* | 
|  | 56 | + The box that contain BibTeX code | 
|  | 57 | + *************************************/ | 
|  | 58 | +div.noshow { display: none; } | 
|  | 59 | +div.bibtex { | 
|  | 60 | + margin-right: 0%; | 
|  | 61 | + margin-top: 1.2em; | 
|  | 62 | + margin-bottom: 1em; | 
|  | 63 | + border: 1px solid silver; | 
|  | 64 | + padding: 0em 1em; | 
|  | 65 | + background: #ffffee; | 
|  | 66 | +} | 
|  | 67 | +div.bibtex pre { font-size: 90%; overflow: auto; width: 100%; padding: 0em 0em;} | 
|  | 68 | + | 
|  | 69 | + | 
|  | 70 | +.main { | 
|  | 71 | + margin: 0 auto; | 
|  | 72 | + padding: 15px 14px 50px; | 
|  | 73 | + font-family: sans-serif; | 
|  | 74 | + background: #ffffff; | 
|  | 75 | + color: #222; | 
|  | 76 | + max-width: 800px; | 
|  | 77 | +} | 
|  | 78 | +p { | 
|  | 79 | + line-height: 150%; | 
|  | 80 | +} | 
|  | 81 | +h1, h2 { | 
|  | 82 | + font-weight: 200; | 
|  | 83 | +} | 
|  | 84 | +h2 a { | 
|  | 85 | + color: #000; | 
|  | 86 | +} | 
|  | 87 | + | 
|  | 88 | +h3 { | 
|  | 89 | + margin-top: 40px; | 
|  | 90 | +} | 
|  | 91 | +a { | 
|  | 92 | + text-decoration: none; | 
|  | 93 | + color: #22b; | 
|  | 94 | +} | 
|  | 95 | +a:hover { | 
|  | 96 | + text-decoration: underline; | 
|  | 97 | + color: #22b; | 
|  | 98 | +} | 
|  | 99 | +a:hover img { | 
|  | 100 | + opacity: .5; | 
|  | 101 | +} | 
|  | 102 | +.paper img { background: #888; } | 
|  | 103 | +.video img { | 
|  | 104 | + border-radius: 6px; | 
|  | 105 | + background: #000; | 
|  | 106 | +} | 
|  | 107 | +h1 span { | 
|  | 108 | + font-size: 90%; | 
|  | 109 | + line-height: 1.5em; | 
|  | 110 | +} | 
|  | 111 | +h1 span::before { content: "("; } | 
|  | 112 | +h1 span::after { content: ")"; } | 
|  | 113 | +h1 span::before, h1 span::after { | 
|  | 114 | + vertical-align: .06em; | 
|  | 115 | +} | 
|  | 116 | +h2 span { | 
|  | 117 | + line-height: 2em; | 
|  | 118 | +} | 
|  | 119 | +em { | 
|  | 120 | + color: #333; | 
|  | 121 | + font-weight: bold; | 
|  | 122 | + font-style: normal; | 
|  | 123 | +} | 
|  | 124 | +.teaser { | 
|  | 125 | + max-width: 45em; | 
|  | 126 | + margin: 40px 0 60px 0; | 
|  | 127 | + white-space: nowrap; | 
|  | 128 | + position: relative; | 
|  | 129 | +} | 
|  | 130 | +.teaser .imgs .full { | 
|  | 131 | + width: 100%; | 
|  | 132 | +} | 
|  | 133 | +.teaser .imgs .center { | 
|  | 134 | + position: absolute; | 
|  | 135 | + width: 32%; | 
|  | 136 | + top: -3%; | 
|  | 137 | + left: 34%; | 
|  | 138 | +} | 
|  | 139 | +.teaser .labels span { | 
|  | 140 | + display: inline-block; | 
|  | 141 | + width: 33%; | 
|  | 142 | + text-align: center; | 
|  | 143 | + padding-top: 10px; | 
|  | 144 | +} | 
|  | 145 | +.bibtex { | 
|  | 146 | + white-space: pre; | 
|  | 147 | + font-family: monospace; | 
|  | 148 | + line-height: 150%; | 
|  | 149 | + background: #f3f3f3; | 
|  | 150 | + padding: 10px; | 
|  | 151 | + display: inline-block; | 
|  | 152 | + border-radius: 4px; | 
|  | 153 | +} | 
|  | 154 | +.crow > * { | 
|  | 155 | + vertical-align: middle; | 
|  | 156 | + margin-right: 14px; | 
|  | 157 | +} | 
|  | 158 | +a.img { | 
|  | 159 | + display: inline-block; | 
|  | 160 | +} | 
|  | 161 | +.links { | 
|  | 162 | + display: inline-block; | 
|  | 163 | + line-height: 150%; | 
|  | 164 | + padding: 8px 0; | 
|  | 165 | +} | 
|  | 166 | +.links a { | 
|  | 167 | + padding: 0 3px; | 
|  | 168 | +} | 
|  | 169 | +.examples { | 
|  | 170 | + padding: 10px 0; | 
|  | 171 | + max-width: 50em; | 
|  | 172 | +} | 
|  | 173 | +.examples img { | 
|  | 174 | + display: inline-block; | 
|  | 175 | + width: 192px; | 
|  | 176 | + height: 108px; | 
|  | 177 | + | 
|  | 178 | +} | 
|  | 179 | +.video-container { | 
|  | 180 | + position: relative; | 
|  | 181 | + width: 80%; | 
|  | 182 | + padding-bottom: 45%; | 
|  | 183 | +} | 
|  | 184 | +.video { | 
|  | 185 | + position: absolute; | 
|  | 186 | + top: 0; | 
|  | 187 | + left: 0; | 
|  | 188 | + width: 100%; | 
|  | 189 | + height: 100%; | 
|  | 190 | + border: 0; | 
|  | 191 | +} | 
|  | 192 | +</style> | 
|  | 193 | + | 
|  | 194 | +<body onload="mainDivResize('choreo_gen')" onresize="mainDivResize()"> | 
|  | 195 | + | 
|  | 196 | +<div class="topnav" id="myTopnav"> | 
|  | 197 | + <a id="100" href="index.html" class="title">AIST++ Dataset</a> | 
|  | 198 | +<!-- <div id="challenge" class="menu-dropdown"> | 
|  | 199 | +<a href="challenge_overview.html" class="droplink">Challenge</a> | 
|  | 200 | +<div class="menu-dropdown-content" style="right:0px;"> | 
|  | 201 | + <a href="challenge_overview.html" class="subitem">Overview</a> | 
|  | 202 | + <a href="challenge2019_downloads.html" class="subitem">Downloads</a> | 
|  | 203 | + <a href="evaluation.html" class="subitem">Evaluation</a> | 
|  | 204 | + <a href="challenge2019_guidelines.html" class="subitem">Participation guidelines</a> | 
|  | 205 | + <a href="challenge2019.html" class="subitem">Past challenge: 2019</a> | 
|  | 206 | + <a href="challenge.html" class="subitem">Past challenge: 2018</a> | 
|  | 207 | +</div> | 
|  | 208 | +</div> --> | 
|  | 209 | +<!-- <a id="news" href="news.html" class="menuitem">News</a> | 
|  | 210 | +<a id="extras" href="extras.html" class="menuitem">Extras</a> | 
|  | 211 | +<a id="extended" href="extended.html" class="menuitem">Extended</a> --> | 
|  | 212 | +<a id="team" href="team.html" class="menuitem">Team</a> | 
|  | 213 | +<a id="explore" href="visualizer/index.html" class="menuitem">Explore</a> | 
|  | 214 | +<a id="download" href="download.html" class="menuitem">Download</a> | 
|  | 215 | +<a id="factsfigures" href="factsfigures.html" class="menuitem">Description</a> | 
|  | 216 | +<a id="choreo_gen" href="choreo_gen.html" class="menuitem">Dance Generation</a> | 
|  | 217 | +<a id="0" href="javascript:void(0);" style="font-size:15px;" class="icon" onclick="navbarResize()">☰</a> | 
|  | 218 | + | 
|  | 219 | +</div> | 
|  | 220 | +<div class="main" id ="main"> | 
|  | 221 | + <div id="choreo_gen_banner"> | 
|  | 222 | + </div> | 
|  | 223 | +</div> | 
|  | 224 | + | 
|  | 225 | +<div class="main"> | 
|  | 226 | +<h1 align="center">Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image</h1><br> | 
|  | 227 | + | 
<table align="center" width="100%">
<tr>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="https://andrewhliu.github.io">Andrew Liu</a><sup>*</sup></span>
  </td>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="https://research.google/people/RichardTucker/">Richard Tucker</a><sup>*</sup></span>
  </td>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="https://varunjampani.github.io/">Varun Jampani</a></span>
  </td>
</tr>
<tr>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="http://www.ameeshmakadia.com/index.html">Ameesh Makadia</a></span>
  </td>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="http://www.cs.cornell.edu/~snavely/">Noah Snavely</a></span>
  </td>
  <td align="center" width="100px">
    <span style="font-size:16px"><a href="https://people.eecs.berkeley.edu/~kanazawa/">Angjoo Kanazawa</a></span>
  </td>
</tr>
</table>
|  | 252 | +<h5 align=center style="font-size:16px;font-weight:normal" >Google Research</h5> | 
|  | 253 | +<hr> | 
|  | 254 | +<br> | 
|  | 255 | +<video autoplay loop muted playsinline width="100%"> | 
|  | 256 | + <source src="teaser_loop.mp4" type="video/mp4"> | 
|  | 257 | +</video> | 
<table width="100%">
<tr>
  <td align="center" width="42.5%">
    <span style="font-size:16px">Input</span>
  </td>
  <td align="center" width="15%">
  </td>
  <td align="center" width="42.5%">
    <span style="font-size:16px">Generated Video</span>
  </td>
</tr>
</table>
|  | 270 | +<!-- <table align=center width=100%> | 
|  | 271 | + <tr> | 
|  | 272 | + <td width=75%> | 
|  | 273 | + </td> | 
|  | 274 | + <td width=25%> | 
|  | 275 | + <video autoplay loop muted playsinline width="100%"> | 
|  | 276 | + <source src="animation0.mp4" type="video/mp4"> | 
|  | 277 | + </video> | 
|  | 278 | + </td> | 
|  | 279 | +</table> --> | 
|  | 280 | + | 
|  | 281 | +<h3>Abstract</h3> | 
|  | 282 | +<p align="justify">We introduce the problem of <i>perpetual view generation</i>—long-range generation of novel views | 
|  | 283 | + corresponding to an arbitrarily long camera trajectory given a single image. This is a | 
|  | 284 | + challenging problem that goes far beyond the capabilities of current view synthesis methods, | 
|  | 285 | + which work for a limited range of viewpoints and quickly degenerate when presented with a | 
|  | 286 | + large camera motion. Methods designed for video generation also have limited ability to produce | 
|  | 287 | + long video sequences and are often agnostic to scene geometry. We take a hybrid approach that | 
|  | 288 | + integrates both geometry and image synthesis in an iterative <i>render</i>, <i>refine</i>, and <i>repeat</i> framework, | 
|  | 289 | + allowing for long-range generation that cover large distances after hundreds of frames. Our approach | 
|  | 290 | + can be trained from a set of monocular video sequences without any manual annotation. We propose a | 
|  | 291 | + dataset of aerial footage of natural coastal scenes, and compare our method with recent view synthesis | 
|  | 292 | + and conditional video generation baselines, showing that it can generate plausible scenes for much | 
|  | 293 | + longer time horizons over large camera trajectories compared to existing methods. | 
|  | 294 | +</p> | 
|  | 295 | + | 
|  | 296 | +<h3>Paper</h3> | 
<div class="crow">
<a class="img" href="https://arxiv.org/abs/2012.09855"><img src="inf_nat_first_page.png" alt="First page of the Infinite Nature paper" height="150" style="border: 1px solid #555"></a>
<div class="links">
<em>Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image</em><br>
Andrew Liu*, Richard Tucker*, Varun Jampani,<br>
Ameesh Makadia, Noah Snavely, Angjoo Kanazawa<br><br>
arXiv<br>
[<a href="https://arxiv.org/abs/2012.09855">arXiv</a>]
</div>
</div>
|  | 307 | + | 
|  | 308 | +<h3>Video</h3> | 
<div class="video-container">
  <iframe class="video" src="https://www.youtube.com/embed/oXUf6anNAtc" title="Infinite Nature overview video" allowfullscreen></iframe>
</div>
<div class="crow">
<div class="links">
[<a href="https://youtu.be/oXUf6anNAtc">YouTube</a>]
</div>
</div>
|  | 317 | + | 
|  | 318 | +<h3>Code</h3> | 
|  | 319 | +<p>Coming Soon!</p> | 
|  | 320 | + | 
|  | 321 | +<h3>Perpetual View Generation</h3> | 
|  | 322 | + | 
|  | 323 | +<div align="center"> | 
|  | 324 | + <video autoplay loop muted playsinline controls width="32%"> | 
|  | 325 | + <source src="animation0.mp4" type="video/mp4"> | 
|  | 326 | + </video> | 
|  | 327 | + <video autoplay loop muted playsinline controls width="32%"> | 
|  | 328 | + <source src="animation1.mp4" type="video/mp4"> | 
|  | 329 | + </video> | 
|  | 330 | + <video autoplay loop muted playsinline controls width="32%"> | 
|  | 331 | + <source src="animation4.mp4" type="video/mp4"> | 
|  | 332 | + </video> | 
|  | 333 | + <br> | 
|  | 334 | + <video autoplay loop muted playsinline controls width="32%"> | 
|  | 335 | + <source src="animation2.mp4" type="video/mp4"> | 
|  | 336 | + </video> | 
|  | 337 | + <video autoplay loop muted playsinline controls width="32%"> | 
|  | 338 | + <source src="animation3.mp4" type="video/mp4"> | 
|  | 339 | + </video> | 
|  | 340 | +</div> | 
|  | 341 | + | 
|  | 342 | + | 
|  | 343 | +<h3>Aerial Coastline Imagery Dataset (ACID)</h3> | 
|  | 344 | +<p align=justify>In order to train our model, we identified thousands of aerial drone videos of different coastline and nature scenes on YouTube. We run structure-from-motion to get camera poses and release this data in the same format as <a href="https://google.github.io/realestate10k/">RealEstate10k</a>. Shown below are some randomly selected example videos that we identified.</p> | 
|  | 345 | +<br> | 
<div align="center">
<iframe src="https://www.youtube.com/embed/3gYsoFlJzMo?mute=1" title="ACID example aerial coastline video 1" width="32%" frameborder="0"></iframe>
<iframe src="https://www.youtube.com/embed/Ow1kqyTRsvY?mute=1" title="ACID example aerial coastline video 2" width="32%" frameborder="0"></iframe>
<iframe src="https://www.youtube.com/embed/B5pl4yRKmug?mute=1" title="ACID example aerial coastline video 3" width="32%" frameborder="0"></iframe>
<br>
<iframe src="https://www.youtube.com/embed/zcW7DealMxU?mute=1" title="ACID example aerial coastline video 4" width="32%" frameborder="0"></iframe>
<iframe src="https://www.youtube.com/embed/ySmTXPeF0Sg?mute=1" title="ACID example aerial coastline video 5" width="32%" frameborder="0"></iframe>
<iframe src="https://www.youtube.com/embed/xHstpLRmhWg?mute=1" title="ACID example aerial coastline video 6" width="32%" frameborder="0"></iframe>
</div>
|  | 355 | +<br> | 
|  | 356 | +[<a href="http://storage.googleapis.com/gresearch/aerial-coastline-imagery-dataset/acid_v1_release.tar.gz">Download Dataset</a>] | 
|  | 357 | + | 
|  | 358 | +<h3>BibTeX</h3> | 
<div class="bibtex">@InProceedings{infinite_nature_2020,
|  | 360 | + author = {Liu, Andrew and Tucker, Richard and Jampani, Varun and | 
|  | 361 | + Makadia, Ameesh and Snavely, Noah and Kanazawa, Angjoo}, | 
|  | 362 | + title = {Infinite Nature: Perpetual View Generation of Natural Scenes from a Single Image}, | 
|  | 363 | + booktitle = {arXiv}, | 
|  | 364 | + month = {December}, | 
|  | 365 | + year = {2020} | 
|  | 366 | +} | 
|  | 367 | +</div> | 
|  | 368 | + | 
|  | 369 | +</div> | 
|  | 370 | + | 
|  | 371 | +</body> | 
|  | 372 | +</html> | 