{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "import os\n",
    "import torch\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# Make the project root importable when running from the notebooks folder\n",
    "rootdir = os.path.abspath(sys.path[0] + '/../')\n",
    "sys.path.append(rootdir)\n",
    "\n",
    "# NOTE(review): hardcoded GPU index — adjust for the local machine\n",
    "torch.cuda.set_device(2)\n",
    "print(\"Set CUDA:%d as current device.\" % torch.cuda.current_device())\n",
    "torch.autograd.set_grad_enabled(False)  # inference only, no gradients needed\n",
    "\n",
    "from data.spherical_view_syn import *\n",
    "from configs.spherical_view_syn import SphericalViewSynConfig\n",
    "from utils import netio\n",
    "from utils import misc\n",
    "from utils import img\n",
    "from utils import device\n",
    "from utils import view\n",
    "from components.gen_final import GenFinal\n",
    "\n",
    "\n",
    "def load_net(path):\n",
    "    # Build the network whose hyper-parameters are encoded in the\n",
    "    # checkpoint filename (strip the 4-char extension), then load weights.\n",
    "    config = SphericalViewSynConfig()\n",
    "    config.from_id(path[:-4])\n",
    "    config.sa['perturb_sample'] = False  # deterministic sampling for inference\n",
    "    #config.print()\n",
    "    net = config.create_net().to(device.default())\n",
    "    netio.load(path, net)\n",
    "    return net\n",
    "\n",
    "\n",
    "def find_file(prefix):\n",
    "    # Return the first entry in the current working directory whose name\n",
    "    # starts with `prefix`, or None if there is no match.\n",
    "    for path in os.listdir():\n",
    "        if path.startswith(prefix):\n",
    "            return path\n",
    "    return None\n",
    "\n",
    "\n",
    "def load_views(data_desc_file) -> 'tuple[view.Trans, torch.Tensor]':\n",
    "    # Parse an HMD trace file where every 7 lines describe one stereo frame:\n",
    "    # line i+1: left gaze (3 floats), i+2: right gaze (3 floats),\n",
    "    # line i+3: left view matrix (16 floats), i+4: right view matrix.\n",
    "    # Annotation is a string: built-in generic `tuple[...]` would raise\n",
    "    # TypeError at def time on the Python 3.8 kernel this notebook targets.\n",
    "    def _parse(line, *shape):\n",
    "        # Comma-separated floats -> 1-D tensor, optionally reshaped.\n",
    "        t = torch.tensor([float(s) for s in line.split(',')])\n",
    "        return t.view(*shape) if shape else t\n",
    "\n",
    "    with open(data_desc_file, 'r', encoding='utf-8') as file:\n",
    "        lines = file.readlines()\n",
    "        n = len(lines) // 7\n",
    "        gazes = torch.empty(n * 2, 3)\n",
    "        views = torch.empty(n * 2, 4, 4)\n",
    "        view_idx = 0\n",
    "        for i in range(0, len(lines), 7):\n",
    "            gazes[view_idx * 2] = _parse(lines[i + 1])\n",
    "            gazes[view_idx * 2 + 1] = _parse(lines[i + 2])\n",
    "            views[view_idx * 2] = _parse(lines[i + 3], 4, 4)\n",
    "            views[view_idx * 2 + 1] = _parse(lines[i + 4], 4, 4)\n",
    "            view_idx += 1\n",
    "        gazes = gazes.to(device.default())\n",
    "        views = views.to(device.default())\n",
    "    # Split the 4x4 matrices into translation (last column) and rotation.\n",
    "    return view.Trans(views[:, :3, 3], views[:, :3, :3]), gazes\n",
    "\n",
    "fov_list = [20, 45, 110]\n",
    "res_list = [(128, 128), (256, 256), (256, 230)]  # (192,256)]\n",
    "res_full = (1600, 1440)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Pick the dataset to render; checkpoints are looked up in this directory.\n",
    "os.chdir(os.path.join(rootdir, 'data/__0_user_study/us_gas_all_in_one'))\n",
    "#os.chdir(os.path.join(rootdir, 'data/__0_user_study/us_mc_all_in_one'))\n",
    "#os.chdir(os.path.join(rootdir, 'data/bedroom_all_in_one'))\n",
    "#os.chdir(os.path.join(rootdir, 'data/lobby_all_in_one'))\n",
    "#os.chdir(os.path.join(rootdir, 'data/gallery_all_in_one'))\n",
    "print('Change working directory to ', os.getcwd())\n",
    "\n",
    "fovea_net = load_net(find_file('fovea'))\n",
    "periph_net = load_net(find_file('periph'))\n",
    "\n",
    "# Load Dataset\n",
    "views, gazes = load_views('hmd.csv')\n",
    "print('Dataset loaded.')\n",
    "print('views:', views.size())\n",
    "\n",
    "gen = GenFinal(fov_list, res_list, res_full, fovea_net, periph_net,\n",
    "               device=device.default())\n",
    "# Project 3-D gaze directions to 2-D foveation centers on the full image plane\n",
    "gaze_centers = gen.full_cam.proj(gazes, center_as_origin=True)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# gaze_centers holds interleaved left/right pairs, so there are\n",
    "# size(0) // 2 frames. range() needs an int: floor division, not `/`\n",
    "# (true division yields a float and raises TypeError).\n",
    "for view_idx in range(gaze_centers.size(0) // 2):\n",
    "    left_center = (gaze_centers[view_idx * 2][0].item(),\n",
    "                   gaze_centers[view_idx * 2][1].item())\n",
    "    right_center = (gaze_centers[view_idx * 2 + 1][0].item(),\n",
    "                    gaze_centers[view_idx * 2 + 1][1].item())\n",
    "    left_view = views.get(view_idx * 2)\n",
    "    right_view = views.get(view_idx * 2 + 1)\n",
    "    # Monocular reference: midpoint of the two eye positions, left eye rotation\n",
    "    mono_trans = view.Trans((left_view.t + right_view.t) / 2, left_view.r)\n",
    "    left_images = gen.gen(left_center, left_view, mono_trans=mono_trans)\n",
    "    right_images = gen.gen(right_center, right_view, mono_trans=mono_trans)\n",
    "\n",
    "    os.makedirs('output/video_frames/hmd2', exist_ok=True)\n",
    "    # Concatenate left|right along the width axis into one stereo frame\n",
    "    img.save(torch.cat([left_images['blended'], right_images['blended']], -1),\n",
    "             'output/video_frames/hmd2/view%04d.png' % view_idx)\n",
    "    print('Frame %d saved' % view_idx)\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}