{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Set CUDA:0 as current device.\n", "Change working directory to /home/dengnc/dvs/data/__new/barbershop_all\n", "Load net from fovea200@snerffast4-rgb_e6_fc512x4_d1.20-6.00_s64_~p.pth ...\n", "Load net from periph200@snerffast2-rgb_e6_fc256x4_d1.20-6.00_s32_~p.pth ...\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "/home/dengnc/miniconda3/lib/python3.8/site-packages/torch/nn/functional.py:1709: UserWarning: nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\n", " warnings.warn(\"nn.functional.sigmoid is deprecated. Use torch.sigmoid instead.\")\n" ] } ], "source": [ "import sys\n", "import os\n", "import torch\n", "import torch.nn as nn\n", "\n", "rootdir = os.path.abspath(sys.path[0] + '/../')\n", "sys.path.append(rootdir)\n", "torch.cuda.set_device(0)\n", "print(\"Set CUDA:%d as current device.\" % torch.cuda.current_device())\n", "torch.autograd.set_grad_enabled(False)\n", "\n", "from configs.spherical_view_syn import SphericalViewSynConfig\n", "from utils import netio\n", "from utils import img\n", "from utils import device\n", "from utils.view import *\n", "from components.fnr import FoveatedNeuralRenderer\n", "\n", "\n", "def load_net(path):\n", " config = SphericalViewSynConfig()\n", " config.from_id(os.path.splitext(path)[0])\n", " config.sa['perturb_sample'] = False\n", " net = config.create_net().to(device.default())\n", " netio.load(path, net)\n", " return net\n", "\n", "\n", "def find_file(prefix):\n", " for path in os.listdir():\n", " if path.startswith(prefix):\n", " return path\n", " return None\n", "\n", "\n", "scenes = {\n", " 'classroom': 'classroom_all',\n", " 'stones': 'stones_all',\n", " 'barbershop': 'barbershop_all',\n", " 'lobby': 'lobby_all'\n", "}\n", "\n", "# origin\n", "fov_list = [20, 45, 110]\n", "res_list = [(256, 256), (256, 256), (400, 360)]\n", "\n", "# only fovea & mid\n", "fov_list = [20, 45, 110]\n", "res_list = [(256, 256), (256, 256), (900, 810)]\n", "\n", "# only fovea & mid, expand fovea to 30\n", "fov_list = [30, 45, 110]\n", "res_list = [(400, 400), (256, 256), (900, 810)]\n", "\n", "# only fovea & mid, expand fovea to 40\n", "fov_list = [60, 110, 110]\n", "res_list = [(800, 800), (900, 900), (900, 810)]\n", "\n", "res_full = (1600, 1440)\n", "\n", "scene = 'barbershop'\n", "os.chdir(f'{rootdir}/data/__new/{scenes[scene]}')\n", "print('Change working directory to ', os.getcwd())\n", "\n", "fovea_net = load_net(find_file('fovea'))\n", "periph_net = load_net(find_file('periph'))\n", "renderer = FoveatedNeuralRenderer(fov_list, res_list, nn.ModuleList([fovea_net, periph_net, periph_net]),\n", " res_full, device=device.default())\n", "\n", "view = Trans(torch.tensor([-0.03081111, 0.0020451, -0.01802763], device=device.default()),\n", " torch.tensor([0.998645, 0.002576269, -0.05197617,\n", " -0.001313272, 0.9997034, 0.02431908,\n", " 0.0520234, -0.02421787, 0.9983522], device=device.default()).view(3, 3))\n", "gaze = [37.55656052, 20.7297554]\n", "images = renderer(view, gaze, using_mask=False, ret_raw=True)\n", "outputdir = '../__demo/mono_f60&m110/'\n", "misc.create_dir(outputdir)\n", "img.save(images['layers_img'][0], f'{outputdir}{scene}_fovea.png')\n", "img.save(images['blended'], f'{outputdir}{scene}.png')\n", "img.save(images['blended_raw'], f'{outputdir}{scene}_noCE.png')" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [] } ], 
"metadata": { "interpreter": { "hash": "82066b63b621a9e3d15e3b7c11ca76da6238eff3834294910d715044bd0561e5" }, "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.8.5" }, "metadata": { "interpreter": { "hash": "82066b63b621a9e3d15e3b7c11ca76da6238eff3834294910d715044bd0561e5" } } }, "nbformat": 4, "nbformat_minor": 4 }