{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "source": [
    "import sys\n",
    "import os\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "rootdir = os.path.abspath(sys.path[0] + '/../')\n",
    "sys.path.append(rootdir)\n",
    "torch.cuda.set_device(0)\n",
    "print(\"Set CUDA:%d as current device.\" % torch.cuda.current_device())\n",
    "torch.autograd.set_grad_enabled(False)\n",
    "\n",
    "from configs.spherical_view_syn import SphericalViewSynConfig\n",
    "from utils import netio\n",
    "from utils import img\n",
    "from utils import device\n",
    "from utils.view import *\n",
    "from components.fnr import FoveatedNeuralRenderer\n",
    "\n",
    "\n",
    "def load_net(path):\n",
    "    config = SphericalViewSynConfig()\n",
    "    config.from_id(os.path.splitext(path)[0])\n",
    "    config.sa['perturb_sample'] = False\n",
    "    net = config.create_net().to(device.default())\n",
    "    netio.load(path, net)\n",
    "    return net\n",
    "\n",
    "\n",
    "def find_file(prefix):\n",
    "    for path in os.listdir():\n",
    "        if path.startswith(prefix):\n",
    "            return path\n",
    "    return None\n",
    "\n",
    "\n",
    "def plot_images(images):\n",
    "    plt.figure(figsize=(12, 4))\n",
    "    plt.subplot(131)\n",
    "    img.plot(images['layers_img'][0])\n",
    "    plt.subplot(132)\n",
    "    img.plot(images['layers_img'][1])\n",
    "    plt.subplot(133)\n",
    "    img.plot(images['layers_img'][2])\n",
    "    #plt.figure(figsize=(12, 12))\n",
    "    #img.plot(images['overlaid'])\n",
    "    #plt.figure(figsize=(12, 12))\n",
    "    #img.plot(images['blended_raw'])\n",
    "    plt.figure(figsize=(12, 12))\n",
    "    img.plot(images['blended'])\n",
    "\n",
    "\n",
    "scenes = {\n",
    "    'classroom': 'classroom_all',\n",
    "    'stones': 'stones_all',\n",
    "    'barbershop': 'barbershop_all',\n",
    "    'lobby': 'lobby_all'\n",
    "}\n",
    "\n",
    "fov_list = [20, 45, 110]\n",
    "res_list = [(256, 256), (256, 256), (400, 360)]\n",
    "res_full = (1600, 1440)"
   ],
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Set CUDA:0 as current device.\n"
     ]
    }
   ],
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "source": [
    "scene = 'barbershop'\n",
    "os.chdir(f'{rootdir}/data/__new/{scenes[scene]}')\n",
    "print('Change working directory to ', os.getcwd())\n",
    "\n",
    "fovea_net = load_net(find_file('fovea'))\n",
    "periph_net = load_net(find_file('periph'))\n",
    "renderer = FoveatedNeuralRenderer(fov_list, res_list, nn.ModuleList([fovea_net, periph_net, periph_net]),\n",
    "                                  res_full, device=device.default())"
   ],
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Change working directory to  /home/dengnc/dvs/data/__new/barbershop_all\n",
      "Load net from fovea200@snerffast4-rgb_e6_fc512x4_d1.20-6.00_s64_~p.pth ...\n",
      "Load net from periph200@snerffast2-rgb_e6_fc256x4_d1.20-6.00_s32_~p.pth ...\n"
     ]
    }
   ],
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "source": [
    "params = {\n",
    "    'classroom': [\n",
    "        #[0, 0, 0,   -53, 0,   0, 0],\n",
    "        \n",
    "        #For Eval\n",
    "        [0, 0, 0,   0, 0,   0, 0],\n",
    "        [0, 0, 0,   20, -20,   0, 0],\n",
    "        [-0.03, 0, 0, 0, 0, 0, -83],\n",
    "        [0.03, 0, 0, 0, 0, 0, -83],\n",
    "        [0.3, 0, 0.3, 0, 0], # For panorama (Trans)\n",
    "        [-0.3, -0.3, -0.3, 0, 0], # For panorama (Trans)\n",
    "        [0, -0.3, 0.3, 0, 10, 0, 0], # For panorama (V-D)\n",
    "        [0, 0.3, 0.3, 0, 10, 0, 0], # For panorama (V-D)\n",
    "        [0, 0.3, 0.3, 0, 10, 160, 350], # For panorama (New)\n",
    "        \n",
    "        # For fig latency-quality\n",
    "        #[0, 0, 0,   10, -13,   0, 0], \n",
    "    ],\n",
    "    'stones': [\n",
    "        #[0, 0, 0, 0, 10, -300, -50],\n",
    "        #[0, 0, 0, 0, 10, 200, -50],\n",
    "        #For Eval\n",
    "        [-0.5, -0.5, -0.5, -25, 0, 50, -230],\n",
    "        [-0.5, -0.5, -0.5, 0, 0, 280, -220],\n",
    "        [-0.5, 0, 0.0, -30, 5, 0, 0],\n",
    "    ],\n",
    "    'barbershop': [\n",
    "        #[0, 0, 0,   0, 0,   0, 0],\n",
    "        #[0, 0, 0, 20, 0, -300, 50], #For fig rendering-system\n",
    "        #[0, 0, 0, -140, -30, 150, -250],\n",
    "        #[0, 0, 0, -60, -30, 75, -125],\n",
    "        #For Teaser & Eval\n",
    "        [0, 0, 0,   20, 10,   0, 0],\n",
    "        [0, 0, 0,   -20, -10,   0, 0],\n",
    "        [0.15, 0, 0.15,   -13, -5,   0, 0],\n",
    "        [-0.15, -0.15, 0, 12, 12, 0, 0],\n",
    "        [-0.15, 0, 0.15, -35, 2, 0, 0],\n",
    "        [0, 0.15, 0.15, -13, 10, 0, 0],\n",
    "        [0.15, 0.15, 0, 43, 2, 0, 0],\n",
    "        [-0.15, 0.15, 0.15, -53, -21, 0, 0],\n",
    "        [-0.15, 0.15, 0.15, -53, -21, 200, -200]\n",
    "    ],\n",
    "    'lobby': [\n",
    "        #[0, 0, 0, 0, 0, 75, 0],\n",
    "        #[0, 0, 0, 0, 0, 5, 150],\n",
    "        #[0.5, 0, 0.5, 29, -12, 0, 0],\n",
    "        #For Eval\n",
    "        [-0.5, -0.5, -0.5, -25, 0, -150, 0],\n",
    "        [-0.5, -0.5, -0.5, 25, 25, -150, 200],\n",
    "        [-0.03, 0, 0, 0, 0, 75, -20],\n",
    "        [0.03, 0, 0, 0, 0, 71, -20]\n",
    "        #[0, 0, 0, -120, 0, 75, 50],\n",
    "    ]\n",
    "}\n",
    "\n",
    "for i, param in enumerate(params[scene]):\n",
    "    view = Trans(torch.tensor(param[:3], device=device.default()),\n",
    "                 torch.tensor(euler_to_matrix([-param[4], param[3], 0]), device=device.default()).view(3, 3))\n",
    "    images = renderer(view, param[-2:], using_mask=False, ret_raw=True)\n",
    "    images['overlaid'] = renderer.foveation.synthesis(images['layers_raw'], param[-2:], do_blend=False)\n",
    "    if True:\n",
    "        outputdir = '../__demo/mono/'\n",
    "        misc.create_dir(outputdir)\n",
    "        img.save(images['layers_img'][0], f'{outputdir}{scene}_{i}_fovea.png')\n",
    "        img.save(images['layers_img'][1], f'{outputdir}{scene}_{i}_mid.png')\n",
    "        img.save(images['layers_img'][2], f'{outputdir}{scene}_{i}_periph.png')\n",
    "        img.save(images['blended'], f'{outputdir}{scene}_{i}_blended.png')\n",
    "        #img.save(images['overlaid'], f'{outputdir}{scene}_{i}_overlaid.png')\n",
    "        #img.save(images['blended_raw'], f'{outputdir}{scene}_{i}.png')\n",
    "    else:\n",
    "        images = plot_images(images)\n"
   ],
   "outputs": [],
   "metadata": {}
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "source": [
    "def load_views(data_desc_file) -> Trans:\n",
    "    with open(data_desc_file, 'r', encoding='utf-8') as file:\n",
    "        data_desc = json.loads(file.read())\n",
    "        view_centers = torch.tensor(\n",
    "            data_desc['view_centers'], device=device.default()).view(-1, 3)\n",
    "        view_rots = torch.tensor(\n",
    "            data_desc['view_rots'], device=device.default()).view(-1, 3, 3)\n",
    "        return Trans(view_centers, view_rots)\n",
    "\n",
    "\n",
    "views = load_views('for_panorama_cvt.json')\n",
    "print('Dataset loaded.')\n",
    "for view_idx in range(views.size()[0]):\n",
    "    center = (0, 0)\n",
    "    images = renderer(views.get(view_idx), center, using_mask=True)\n",
    "    outputdir = 'panorama'\n",
    "    misc.create_dir(outputdir)\n",
    "    img.save(images['blended'], f'{outputdir}/{view_idx:04d}.png')"
   ],
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Dataset loaded.\n"
     ]
    }
   ],
   "metadata": {}
  }
 ],
 "metadata": {
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3.8.5 64-bit ('base': conda)"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  },
  "metadata": {
   "interpreter": {
    "hash": "82066b63b621a9e3d15e3b7c11ca76da6238eff3834294910d715044bd0561e5"
   }
Nianchen Deng's avatar
Nianchen Deng committed
242
243
244
  },
  "interpreter": {
   "hash": "82066b63b621a9e3d15e3b7c11ca76da6238eff3834294910d715044bd0561e5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}