Commit 2824f796 authored by Nianchen Deng

sync

parent 5699ccbf
@@ -24,14 +24,19 @@
             "program": "train.py",
             "args": [
                 //"-c",
-                //"snerf_voxels",
-                "/home/dengnc/dvs/data/__new/barbershop_fovea_r360x80_t0.6/_nets/train_t0.3/snerfadvx_voxels_x4/checkpoint_10.tar",
+                //"snerf_voxels+ls+f32",
+                "/data1/dnc/dvs/data/__nerf/room/_nets/train/snerf_voxels+ls+f32/checkpoint_1.tar",
                 "--prune",
-                "100",
+                "1",
                 "--split",
-                "100"
-                //"data/__new/barbershop_fovea_r360x80_t0.6/train_t0.3.json"
+                "1",
+                "-e",
+                "100",
+                "--views",
+                "5",
+                //"data/__nerf/room/train.json"
             ],
+            "justMyCode": false,
             "console": "integratedTerminal"
         },
         {
......
{
"model": "SNeRF",
"args": {
"color": "rgb",
"n_pot_encode": 10,
"n_dir_encode": 4,
"fc_params": {
"nf": 256,
"n_layers": 8,
"activation": "relu",
"skips": [ 4 ]
},
"n_featdim": 32,
"space": "voxels",
"steps": [4, 16, 8],
"n_samples": 16,
"perturb_sample": true,
"density_regularization_weight": 1e-4,
"density_regularization_scale": 1e4
}
}
\ No newline at end of file
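This config is what train.py (later in this diff) consumes: the JSON is loaded, "model" selects the model class, "args" becomes the constructor arguments, and the dataset's bbox and depth_range are injected before mdl.create is called. A minimal, self-contained sketch of that flow; the file name is illustrative (taken from the launch.json comment above) and the fallback dict only mirrors the config shown here:

import json
from pathlib import Path

cfg_path = Path("configs/snerf_voxels+ls+f32.json")   # illustrative path
config = json.loads(cfg_path.read_text()) if cfg_path.exists() else {
    "model": "SNeRF",
    "args": {"n_featdim": 32, "space": "voxels", "steps": [4, 16, 8], "n_samples": 16},
}
model_class, model_args = config["model"], config["args"]
# train.py additionally sets model_args["bbox"] and model_args["depth_range"] from the
# dataset, then builds the model with mdl.create(model_class, model_args).
print(model_class, model_args["steps"])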
@@ -16,7 +16,7 @@ class BaseModelMeta(type):
 class BaseModel(nn.Module, metaclass=BaseModelMeta):
-    trainer = "Train"
+    TrainerClass = "Train"

     @property
     def args(self):
......
@@ -10,7 +10,7 @@ from utils.misc import masked_scatter
 class NeRF(BaseModel):
-    trainer = "TrainWithSpace"
+    TrainerClass = "TrainWithSpace"
     SamplerClass = Sampler
     RendererClass = VolumnRenderer
@@ -124,21 +124,11 @@ class NeRF(BaseModel):
         return self.pot_encoder(x)

     def encode_d(self, samples: Samples) -> torch.Tensor:
-        return self.dir_encoder(samples.dirs) if self.dir_encoder is not None else None
+        return self.dir_encoder(samples.dirs) if self.dir_encoder else None

     @torch.no_grad()
-    def get_scores(self, sampled_points: torch.Tensor, sampled_voxel_indices: torch.Tensor) -> torch.Tensor:
-        densities = self.render(Samples(sampled_points, None, None, None, sampled_voxel_indices),
-                                'density')
-        return 1 - (-densities).exp()
-
-    @torch.no_grad()
-    def pruning(self, threshold: float = 0.5, train_stats=False):
-        return self.space.pruning(self.get_scores, threshold, train_stats)
-
-    @torch.no_grad()
-    def splitting(self):
-        ret = self.space.splitting()
+    def split(self):
+        ret = self.space.split()
         if 'n_samples' in self.args0:
             self.args0['n_samples'] *= 2
         if 'voxel_size' in self.args0:
@@ -149,12 +139,10 @@ class NeRF(BaseModel):
         if 'sample_step' in self.args0:
             self.args0['sample_step'] /= 2
         self.sampler = self.SamplerClass(**self.args)
+        if self.args.get('n_featdim') and hasattr(self, "trainer"):
+            self.trainer.reset_optimizer()
         return ret

-    @torch.no_grad()
-    def double_samples(self):
-        pass
-
     @perf
     def forward(self, rays_o: torch.Tensor, rays_d: torch.Tensor, *,
                 extra_outputs: List[str] = [], **kwargs) -> torch.Tensor:
......
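The reshuffled split() above is now the only entry point for refining the representation: it splits the voxel space, scales the stored sampling hyper-parameters so the ray marcher keeps the same resolution relative to the halved voxel edge, and resets the optimizer when per-corner feature embeddings ("n_featdim") are in use, since those embeddings are re-created by the space. The visible arithmetic restated as a self-contained snippet (example values are made up; the "voxel_size" branch is collapsed in this view and presumably halves that value as well):

args0 = {"n_samples": 16, "sample_step": 0.1}   # example values, not from the repo
if "n_samples" in args0:
    args0["n_samples"] *= 2                     # voxel edge halves, so sample twice as densely
if "sample_step" in args0:
    args0["sample_step"] /= 2
print(args0)                                    # {'n_samples': 32, 'sample_step': 0.05}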
@@ -40,16 +40,8 @@ class SNeRFAdvanceX(SNeRFAdvance):
         return self.cores[chunk_id](x, d, outputs, **extras)

     @torch.no_grad()
-    def get_scores(self, sampled_points: torch.Tensor, sampled_voxel_indices: torch.Tensor) -> torch.Tensor:
-        raise NotImplementedError()
-
-    @torch.no_grad()
-    def pruning(self, threshold: float = 0.5, train_stats=False):
-        raise NotImplementedError()
-
-    @torch.no_grad()
-    def splitting(self):
-        ret = super().splitting()
+    def split(self):
+        ret = super().split()
         k = self.args["n_samples"] // self.space.steps[0].item()
         net_samples = [val * k for val in self.space.balance_cut(0, len(self.cores))]
         if len(net_samples) != len(self.cores):
......
@@ -4,10 +4,6 @@ from .snerf import *
 class SNeRFX(SNeRF):
-    trainer = "TrainWithSpace"
-    SamplerClass = SphericalSampler
-    RendererClass = VolumnRenderer
-
     def __init__(self, args0: dict, args1: dict = {}):
         """
         Initialize a multi-sphere-layer net
@@ -42,16 +38,8 @@ class SNeRFX(SNeRF):
         return self.cores[chunk_id](x, d, outputs)

     @torch.no_grad()
-    def get_scores(self, sampled_points: torch.Tensor, sampled_voxel_indices: torch.Tensor) -> torch.Tensor:
-        raise NotImplementedError()
-
-    @torch.no_grad()
-    def pruning(self, threshold: float = 0.5, train_stats=False):
-        raise NotImplementedError()
-
-    @torch.no_grad()
-    def splitting(self):
-        ret = super().splitting()
+    def split(self):
+        ret = super().split()
         k = self.args["n_samples"] // self.space.steps[0].item()
         net_samples = [
             val * k for val in self.space.balance_cut(0, len(self.cores))
......
-from math import ceil
 import torch
-import numpy as np
-from typing import List, NoReturn, Tuple, Union
+from typing import List, Tuple, Union
 from torch import nn
-from plyfile import PlyData, PlyElement
 from utils.geometry import *
 from utils.constants import *
@@ -73,11 +70,11 @@ class Space(nn.Module):
         return voxel_indices

     @torch.no_grad()
-    def pruning(self, score_fn, threshold: float = 0.5, train_stats=False):
+    def prune(self, keeps: torch.Tensor) -> Tuple[int, int]:
         raise NotImplementedError()

     @torch.no_grad()
-    def splitting(self):
+    def split(self):
         raise NotImplementedError()
@@ -108,7 +105,7 @@ class Voxels(Space):
         return self.voxels.size(0)

     @property
-    def n_corner(self) -> int:
+    def n_corners(self) -> int:
         """`int` Number of corners"""
         return self.corners.size(0)
@@ -145,12 +142,18 @@ class Voxels(Space):
         :param n_dims `int`: embedding dimension
         :return `Embedding(n_corners, n_dims)`: new embedding on voxel corners
         """
-        name = f'emb_{name}'
-        self.add_module(name, torch.nn.Embedding(self.n_corners.item(), n_dims))
-        return self.__getattr__(name)
+        if self.get_embedding(name) is not None:
+            raise KeyError(f"Embedding '{name}' already existed")
+        emb = torch.nn.Embedding(self.n_corners, n_dims, device=self.device)
+        setattr(self, f'emb_{name}', emb)
+        return emb

     def get_embedding(self, name: str = 'default') -> torch.nn.Embedding:
-        return getattr(self, f'emb_{name}')
+        return getattr(self, f'emb_{name}', None)
+
+    def set_embedding(self, weight: torch.Tensor, name: str = 'default'):
+        emb = torch.nn.Embedding(*weight.shape, _weight=weight, device=self.device)
+        setattr(self, f'emb_{name}', emb)

     def extract_embedding(self, pts: torch.Tensor, voxel_indices: torch.Tensor,
                           name: str = 'default') -> torch.Tensor:
@@ -167,9 +170,8 @@ class Voxels(Space):
             raise KeyError(f"Embedding '{name}' doesn't exist")
         voxels = self.voxels[voxel_indices]  # (N, 3)
         corner_indices = self.corner_indices[voxel_indices]  # (N, 8)
-        p = (pts - voxels) / self.voxel_size + 0.5  # (N, 3) normed-coords in voxel
-        features = emb(corner_indices).reshape(pts.size(0), 8, -1)  # (N, 8, X)
-        return trilinear_interp(p, features)
+        p = (pts - voxels) / self.voxel_size + .5  # (N, 3) normed-coords in voxel
+        return trilinear_interp(p, emb(corner_indices))

     @perf
     def ray_intersect(self, rays_o: torch.Tensor, rays_d: torch.Tensor, n_max_hits: int) -> Intersections:
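extract_embedding now passes the (N, 8, X) corner features straight to trilinear_interp. As a reading aid, here is a minimal sketch of what that helper (defined in utils.geometry, not shown in this diff) is assumed to compute; the bit ordering of the 8 corners is an assumption made only for this illustration:

import torch

def trilinear_interp_sketch(p: torch.Tensor, values: torch.Tensor) -> torch.Tensor:
    # p: (N, 3) coordinates in [0, 1] inside each voxel; values: (N, 8, X) corner features.
    w1, w0 = p, 1 - p                      # per-axis weights for the far / near corner
    out = torch.zeros(p.size(0), values.size(-1), dtype=values.dtype)
    for c in range(8):                     # corner c encoded as bits (ix, iy, iz)
        ix, iy, iz = (c >> 2) & 1, (c >> 1) & 1, c & 1
        w = (w1[:, 0] if ix else w0[:, 0]) \
            * (w1[:, 1] if iy else w0[:, 1]) \
            * (w1[:, 2] if iz else w0[:, 2])
        out += w[:, None] * values[:, c]
    return out

# Sanity check: at a corner of the voxel the result equals that corner's feature.
vals = torch.arange(8.).reshape(1, 8, 1)
print(trilinear_interp_sketch(torch.tensor([[0., 0., 0.]]), vals))  # corner 0 -> 0.
print(trilinear_interp_sketch(torch.tensor([[1., 1., 1.]]), vals))  # corner 7 -> 7.

The same routine is what split() below relies on: evaluating the old embedding at each new corner's normalized position inside its parent voxel gives the initial weights of the re-created embedding, which the notebook added by this commit checks numerically.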
@@ -220,17 +222,34 @@ class Voxels(Space):
         return voxel_indices

     @torch.no_grad()
-    def splitting(self) -> None:
+    def split(self) -> None:
         """
         Split voxels into smaller voxels with half size.
         """
-        n_voxels_before = self.n_voxels
-        self.steps *= 2
-        self.voxels = split_voxels(self.voxels, self.voxel_size, 2, align_border=False)\
+        new_steps = self.steps * 2
+        new_voxels = split_voxels(self.voxels, self.voxel_size, 2, align_border=False)\
             .reshape(-1, 3)
-        self._update_corners()
+        new_corners, new_corner_indices = get_corners(new_voxels, self.bbox, new_steps)
+
+        # Calculate new embeddings through trilinear interpolation
+        grid_indices_of_new_corners = to_flat_indices(
+            to_grid_coords(new_corners, self.bbox, steps=self.steps).min(self.steps - 1),
+            self.steps)
+        voxel_indices_of_new_corners = self.voxel_indices_in_grid[grid_indices_of_new_corners]
+        for name, _ in self.named_modules():
+            if not name.startswith("emb_"):
+                continue
+            new_emb_weight = self.extract_embedding(new_corners, voxel_indices_of_new_corners,
+                                                    name=name[4:])
+            self.set_embedding(new_emb_weight, name=name[4:])
+
+        # Apply new tensors
+        self.steps = new_steps
+        self.voxels = new_voxels
+        self.corners = new_corners
+        self.corner_indices = new_corner_indices
         self._update_voxel_indices_in_grid()
-        return n_voxels_before, self.n_voxels
+        return self.n_voxels // 8, self.n_voxels

     @torch.no_grad()
     def prune(self, keeps: torch.Tensor) -> Tuple[int, int]:
@@ -239,11 +258,6 @@ class Voxels(Space):
         self._update_voxel_indices_in_grid()
         return keeps.size(0), keeps.sum().item()

-    @torch.no_grad()
-    def pruning(self, score_fn, threshold: float = 0.5) -> None:
-        scores = self._get_scores(score_fn, lambda x: torch.max(x, -1)[0])  # (M)
-        return self.prune(scores > threshold)
-
     def n_voxels_along_dim(self, dim: int) -> torch.Tensor:
         sum_dims = [val for val in range(self.dims) if val != dim]
         return self.voxel_indices_in_grid.reshape(*self.steps).ne(-1).sum(sum_dims)
@@ -261,39 +275,30 @@ class Voxels(Space):
             part = int(cdf[i]) + 1
         return bins

-    def sample(self, bits: int, perturb: bool = False) -> Tuple[torch.Tensor, torch.Tensor]:
-        sampled_xyz = split_voxels(self.voxels, self.voxel_size, bits)
-        sampled_idx = torch.arange(self.n_voxels, device=self.device)[:, None].expand(
-            *sampled_xyz.shape[:2])
-        sampled_xyz, sampled_idx = sampled_xyz.reshape(-1, 3), sampled_idx.flatten()
-
-    @torch.no_grad()
-    def _get_scores(self, score_fn, reduce_fn=None, bits=16) -> torch.Tensor:
-        def get_scores_once(pts, idxs):
-            scores = score_fn(pts, idxs).reshape(-1, bits ** 3)  # (B, P)
-            if reduce_fn is not None:
-                scores = reduce_fn(scores)  # (B[, ...])
-            return scores
-
-        sampled_xyz, sampled_idx = self.sample(bits)
-        chunk_size = 64
-        return torch.cat([
-            get_scores_once(sampled_xyz[i:i + chunk_size], sampled_idx[i:i + chunk_size])
-            for i in range(0, self.voxels.size(0), chunk_size)
-        ], 0)  # (M[, ...])
+    def sample(self, S: int, perturb: bool = False, include_border: bool = True) -> Tuple[torch.Tensor, torch.Tensor]:
+        """
+        For each voxel, sample `S^3` points uniformly, with small perturb if `perturb` is `True`.
+
+        When `perturb` is `False`, `include_border` can specify whether to sample points from border to border or at centers of sub-voxels.
+        When `perturb` is `True`, points are sampled at centers of sub-voxels, then applying a random offset in sub-voxels.
+
+        :param S `int`: number of samples along each dim
+        :param perturb `bool?`: whether perturb samples, defaults to `False`
+        :param include_border `bool?`: whether include border, defaults to `True`
+        :return `Tensor(N*S^3, 3)`: sampled points
+        :return `Tensor(N*S^3)`: voxel indices of sampled points
+        """
+        pts = split_voxels(self.voxels, self.voxel_size, S,
+                           align_border=not perturb and include_border)  # (N, X, D)
+        voxel_indices = torch.arange(self.n_voxels, device=self.device)[:, None]\
+            .expand(*pts.shape[:-1])  # (N) -> (N, X)
+        if perturb:
+            pts += (torch.rand_like(pts) - .5) * self.voxel_size / S
+        return pts.reshape(-1, 3), voxel_indices.flatten()

     def _ray_intersect(self, rays_o: torch.Tensor, rays_d: torch.Tensor, n_max_hits: int) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
         return aabb_ray_intersect(self.voxel_size, n_max_hits, self.voxels, rays_o, rays_d)

-    def _update_corners(self):
-        """
-        Update voxel corners.
-        """
-        corners, corner_indices = get_corners(self.voxels, self.bbox, self.steps)
-        self.register_buffer("corners", corners)
-        self.register_buffer("corner_indices", corner_indices)
-
     def _update_voxel_indices_in_grid(self):
         """
         Update voxel indices in grid.
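The docstring above fully specifies the sampling pattern; the following self-contained snippet reproduces it for a single voxel (edge length 1, centred at the origin, S = 2), whereas the real method builds the same pattern for all voxels at once through split_voxels:

import torch

S, voxel_size = 2, 1.0
axis = (torch.arange(S) + 0.5) / S - 0.5          # sub-voxel centres along one axis
pts = torch.stack(torch.meshgrid(axis, axis, axis, indexing="ij"), -1).reshape(-1, 3)
print(pts.shape)                                  # torch.Size([8, 3]), i.e. (S**3, 3)
jittered = pts + (torch.rand_like(pts) - 0.5) * voxel_size / S   # the perturb=True case

With perturb=False and include_border=True, the points would instead span the voxel from border to border.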
@@ -314,7 +319,7 @@ class Voxels(Space):
         # Handle embeddings
         for name, module in self.named_modules():
             if name.startswith('emb_'):
-                setattr(self, name, torch.nn.Embedding(self.n_corners.item(), module.embedding_dim))
+                setattr(self, name, torch.nn.Embedding(self.n_corners, module.embedding_dim))


 class Octree(Voxels):
@@ -339,8 +344,8 @@ class Octree(Voxels):
         return octree_ray_intersect(self.voxel_size, n_max_hits, nodes, tree, rays_o, rays_d)

     @torch.no_grad()
-    def splitting(self):
-        ret = super().splitting()
+    def split(self):
+        ret = super().split()
         self.clear()
         return ret
......
{
"cells": [
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"from utils.voxels import *\n",
"\n",
"bbox, steps = torch.tensor([[-2, -3.14159, 1], [2, 3.14159, 0]]), torch.tensor([2, 3, 3])\n",
"voxel_size = (bbox[1] - bbox[0]) / steps\n",
"voxels = init_voxels(bbox, steps)\n",
"corners, corner_indices = get_corners(voxels, bbox, steps)\n",
"voxel_indices_in_grid = torch.arange(voxels.shape[0])\n",
"emb = torch.nn.Embedding(corners.shape[0], 3, _weight=corners)"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"torch.Size([11, 3]) tensor([ 0, -1, -1, 1, -1, -1, 2, 3, 4, -1, 5, 6, -1, 7, 8, -1, 9, 10])\n"
]
}
],
"source": [
"keeps = torch.tensor([True]*18)\n",
"keeps[torch.tensor([1,2,4,5,9,12,15])] = False\n",
"voxels = voxels[keeps]\n",
"corner_indices = corner_indices[keeps]\n",
"grid_indices, _ = to_grid_indices(voxels, bbox, steps=steps)\n",
"voxel_indices_in_grid = grid_indices.new_full([steps.prod().item()], -1)\n",
"voxel_indices_in_grid[grid_indices] = torch.arange(voxels.shape[0])\n",
"print(voxels.shape, voxel_indices_in_grid)"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"torch.Size([88, 3]) torch.Size([185, 3]) torch.Size([88, 8])\n"
]
}
],
"source": [
"new_voxels = split_voxels(voxels, (bbox[1] - bbox[0]) / steps, 2, align_border=False).reshape(-1, 3)\n",
"new_corners, new_corner_indices = get_corners(new_voxels, bbox, steps * 2)\n",
"print(new_voxels.shape, new_corners.shape, new_corner_indices.shape)"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"tensor([ 0, 0, -1, 0, 0, -1, 1, 1, -1, 1, 1, -1, 2, 2, 3, 3, 4, 4,\n",
" 4, 2, 2, 3, 3, 4, 4, 4, 2, 2, 3, 3, 4, 4, 4, 0, 0, -1,\n",
" 0, 0, -1, 1, 1, -1, 1, 1, -1, 2, 2, 3, 3, 4, 4, 4, 2, 2,\n",
" 3, 3, 4, 4, 4, 2, 2, 3, 3, 4, 4, 4, -1, -1, 5, 5, 6, 6,\n",
" 6, -1, -1, 5, 5, 6, 6, 6, -1, -1, 7, 7, 8, 8, 8, -1, -1, 7,\n",
" 7, 8, 8, 8, -1, -1, 9, 9, 10, 10, 10, -1, -1, 9, 9, 10, 10, 10,\n",
" -1, -1, 9, 9, 10, 10, 10, 5, 5, 6, 6, 6, 5, 5, 6, 6, 6, 7,\n",
" 7, 8, 8, 8, 7, 7, 8, 8, 8, 9, 9, 10, 10, 10, 9, 9, 10, 10,\n",
" 10, 9, 9, 10, 10, 10, 5, 5, 6, 6, 6, 5, 5, 6, 6, 6, 7, 7,\n",
" 8, 8, 8, 7, 7, 8, 8, 8, 9, 9, 10, 10, 10, 9, 9, 10, 10, 10,\n",
" 9, 9, 10, 10, 10])\n",
"tensor(0)\n"
]
}
],
"source": [
"voxel_indices_of_new_corner = voxel_indices_in_grid[to_flat_indices(to_grid_coords(new_corners, bbox, steps=steps).min(steps - 1), steps)]\n",
"print(voxel_indices_of_new_corner)\n",
"p_of_new_corners = (new_corners - voxels[voxel_indices_of_new_corner]) / voxel_size + .5\n",
"print(((new_corners - trilinear_interp(p_of_new_corners, emb(corner_indices[voxel_indices_of_new_corner]))) > 1e-6).sum())"
]
}
],
"metadata": {
"interpreter": {
"hash": "08b118544df3cb8970a671e5837a88fd458f4d4c799ef1fb2709465a22a45b92"
},
"kernelspec": {
"display_name": "Python 3.9.5 64-bit ('base': conda)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.5"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
@@ -38,7 +38,7 @@ from data.loader import DataLoader
 from utils.constants import HUGE_FLOAT

-RAYS_PER_BATCH = 2 ** 14
+RAYS_PER_BATCH = 2 ** 12
 DATA_LOADER_CHUNK_SIZE = 1e8
......
This source diff could not be displayed because it is too large.
AT4G28520
AT2G39730
AT5G02500
AT2G07560
AT4G33010
AT5G17920
AT5G04140
AT2G36880
AT2G26080
AT1G56070
AT3G08530
AT3G09440
AT5G53460
AT3G11130
AT3G17390
AT1G52570
AT3G03780
AT2G36530
AT5G61780
AT2G18960
AT5G56000
AT2G21390
AT4G37930
AT5G28540
AT3G09840
AT5G50920
AT2G41740
AT4G31490
AT4G30190
AT4G29060
AT2G33150
AT5G08680
AT4G31480
AT1G62020
AT3G13470
AT5G65690
AT3G52930
AT3G02090
AT1G09780
AT1G67290
AT3G12580
AT3G08590
AT3G43300
AT4G34450
AT1G23190
AT3G15730
AT2G16730
AT5G66760
AT3G19170
AT3G23810
AT3G55410
AT5G02490
AT1G70730
AT2G05710
AT3G13390
AT2G04030
AT1G55490
AT3G48870
AT5G15490
AT1G79930
AT1G07890
AT1G02500
AT2G28000
AT3G62170
AT5G57320
AT5G20890
AT2G36460
AT1G06950
AT1G59610
AT3G11710
AT3G09630
AT1G23310
AT3G04120
AT5G49910
AT4G24280
AT1G10290
AT3G28820
AT5G42270
AT5G26360
AT1G53500
AT1G13440
AT3G46520
AT3G45140
AT5G39320
AT1G53310
AT3G12780
AT1G78900
AT5G03340
AT2G47510
AT3G59970
AT1G32060
AT1G03880
AT4G37910
AT5G09810
AT1G07810
AT4G27500
AT1G68750
AT5G13990
AT1G74470
AT3G12110
AT4G14360
AT5G66570
AT1G80070
AT5G58050
AT4G38970
AT1G56410
AT5G07410
AT3G57290
AT3G62030
AT2G18020
AT4G36130
AT1G55570
AT2G36580
AT4G20360
AT1G14610
AT1G57720
AT5G57350
AT1G14830
AT4G35830
AT3G46750
AT1G68010
AT5G13630
AT3G18190
AT1G07670
AT5G20920
AT5G19580
AT3G06860
AT3G17240
AT5G35630
AT3G46970
AT2G27880
AT3G26650
AT3G62830
AT2G21330
AT2G20580
AT3G62360
AT3G51070
AT3G58610
AT1G33170
AT4G35010
AT3G28790
AT5G58170
AT1G69940
AT5G11560
AT3G58510
AT3G13290
AT4G10440
AT5G09550
AT1G78570
AT3G21180
AT1G26630
AT5G21150
AT3G44310
AT2G16600
AT1G06680
AT2G28190
AT3G05420
AT1G54560
AT2G20420
AT3G52600
AT5G43330
AT1G56190
AT4G16760
AT3G18780
AT3G13460
AT3G01500
AT2G27040
AT3G57410
AT5G08280
AT3G23300
AT1G20950
AT4G29130
AT1G04410
AT1G80410
AT5G34850
AT2G39990
AT3G02230
AT4G15980
AT5G48140
AT1G26850
AT3G46780
AT5G58290
AT5G11170
AT5G11200
AT1G03630
AT4G04910
AT1G22530
AT1G70770
AT1G47260
AT1G76030
AT3G27240
AT1G12270
AT3G53230
AT3G03960
AT2G40660
AT5G15090
AT5G19990
AT1G18070
AT4G34980
AT5G62530
AT2G40840
AT4G26530
AT4G09000
AT5G19510
AT1G18080
AT3G55800
AT1G65930
AT2G32730
AT5G37830
AT1G66410
AT5G37780
AT1G29900
AT1G79550
AT3G18130
AT5G38430
AT1G67090
AT5G56500
AT4G24800
AT5G53620
AT3G07830
AT5G35360
AT1G48630
AT5G08300
AT3G02530
AT5G24940
AT4G38510
AT3G11830
AT1G23800
AT4G27440
AT3G17820
AT1G54870
AT5G08530
AT5G19820
AT5G62690
AT5G62700
AT5G06600
AT1G15690
AT5G26570
AT4G38740
AT2G46280
AT5G07420
AT4G14880
AT2G35690
AT3G58500
AT3G02360
AT3G17940
AT3G59920
AT3G60240
AT3G25860
AT5G61720
AT3G14210
AT2G45290
AT3G01280
AT1G25350
AT5G21274
AT5G26742
AT1G78300
AT1G04170
AT2G30970
AT3G09820
AT1G18500
AT1G72960
AT4G21150
AT1G35720
AT3G54050
AT5G20720
AT4G34870
AT1G72730
AT2G13360
AT2G27860
AT2G43750
AT5G50600
AT5G50700
AT1G53750
AT3G04840
AT5G58420
AT4G04640
AT2G35840
AT4G10260
AT3G55200
AT3G55220
AT5G23060
AT1G13930
AT1G69740
AT2G17980
AT3G06650
AT3G47950
AT3G16000
AT3G11964
AT5G22800
AT1G32470
AT5G28840
AT4G19120
AT2G30490
AT2G47650
AT4G14960
AT5G05010
AT3G51160
AT2G42500
AT5G66020
AT1G35160
AT3G51800
AT1G09640
AT5G48230
AT3G15640
AT2G30050
AT1G04820
AT1G50010
AT1G50480
AT4G34670
AT1G35670
AT3G12290
AT3G28770
AT1G48090
AT1G18210
AT4G34200
AT5G23860
AT3G48560
AT2G31390
AT2G41220
AT1G11660
AT5G15650
AT3G05970
AT5G49460
AT2G19520
AT3G56190
AT3G46440
AT2G44160
AT2G29550
AT3G12915
AT1G20020
AT3G11630
AT5G11110
AT1G29150
AT5G15450
AT1G79340
AT5G20010
AT4G18480
AT5G60980
AT5G40810
AT5G57870
AT4G39330
AT1G13950
AT5G36700
AT5G36790
AT3G20050
AT1G62390
AT2G47030
AT1G64190
AT3G55440
AT1G24510
AT3G01310
AT5G65720
AT4G39800
AT4G23850
AT5G52640
AT4G25630
AT3G13860
AT5G24710
AT5G02240
AT1G02790
AT5G20290
AT5G42740
AT3G12145
AT5G61410
AT5G56680
AT2G21130
AT4G23670
AT2G21170
AT5G11880
AT4G18100
AT1G24620
AT5G60790
AT4G29840
AT1G74050
AT5G54770
AT5G06290
AT1G29880
AT4G31700
AT2G41680
AT3G53110
AT3G10370
AT2G22560
AT5G47210
AT3G63490
AT1G69410
AT3G05530
AT5G52920
AT3G22310
AT2G25110
AT2G24270
AT3G52880
AT4G34860
AT1G37130
AT5G50850
AT5G64690
AT2G21870
AT1G30350
AT1G75990
AT4G24620
AT2G14740
AT5G42790
AT4G25900
AT5G35530
AT1G04810
AT3G47520
AT5G38480
AT3G02520
AT5G13650
AT3G19450
AT1G32780
AT2G41840
AT3G01640
AT5G43940
AT5G49650
AT4G02770
AT5G41670
AT1G07370
AT3G62870
AT3G47930
AT1G65960
AT2G28680
AT1G64790
AT3G58570
AT2G38770
AT4G20890
AT1G26570
AT4G24680
AT5G16050
AT5G60160
AT1G17220
AT1G49670
AT1G30630
AT3G18000
AT3G63410
AT5G48480
AT5G66510
AT1G78820
AT1G04710
AT5G43060
AT1G30580
AT5G04430
AT3G28500
AT1G17100
AT2G01250
AT4G02150
AT5G25880
AT1G23730
AT1G68560
AT3G07100
AT3G13870
AT1G65730
AT2G41790
AT1G07750
AT5G63680
AT5G04420
AT2G31610
AT2G05920
AT4G17520
AT1G78630
AT3G07110
AT1G69830
AT1G56450
AT5G19220
AT1G12000
AT2G07050
AT1G11770
AT3G14310
AT4G31180
AT5G20980
AT3G53870
AT4G11030
AT1G78830
AT1G26480
AT4G20980
AT4G12650
AT3G26060
AT3G24830
AT2G33340
AT3G26450
AT1G03890
AT1G08360
AT1G50370
AT5G10360
AT3G17060
AT3G06580
AT2G43770
AT5G65430
AT3G56940
AT5G16840
AT1G49630
AT1G56110
AT1G06410
AT3G08943
AT2G40490
AT2G29570
AT5G35590
AT4G33070
AT5G03630
AT5G48760
AT4G39080
AT3G52960
AT2G45800
AT1G18540
AT5G61970
AT1G15750
AT1G04430
AT1G50310
AT4G26270
AT2G04842
AT5G48650
AT3G19820
AT4G18920
AT3G61760
AT5G47180
AT4G19410
AT5G39310
AT3G61240
AT4G01310
AT5G01410
AT4G11150
AT1G74000
AT5G07440
AT5G44340
AT3G22200
AT2G47730
AT1G43670
AT4G34110
AT5G65020
AT5G63570
AT2G19730
AT3G05060
AT3G06720
AT5G25100
AT5G39410
AT1G77490
AT3G25800
AT1G64520
AT2G27530
AT3G62250
AT3G02970
AT5G04180
AT3G56090
AT2G33730
AT5G06760
AT3G23940
AT4G39280
AT2G40730
AT4G20460
AT1G70310
AT1G13020
AT3G09200
AT5G06060
AT4G35860
AT5G22060
AT1G66680
AT5G45390
AT4G05530
AT1G72150
AT3G08560
AT1G77940
AT4G24190
AT1G22780
AT1G34030
AT4G09800
AT1G13320
AT1G35580
AT5G27470
AT2G42910
AT5G06460
AT1G06220
AT2G38230
AT1G56500
AT1G32200
AT1G15500
AT4G01660
AT5G22440
AT1G16350
AT1G31330
AT1G34430
AT2G44640
AT3G27190
AT5G08160
AT2G36390
AT1G09100
AT3G57150
AT1G14650
AT4G27585
AT1G54220
AT1G31690
AT2G37270
AT2G04390
AT1G67430
AT4G33650
AT2G17190
AT1G16470
AT4G33030
AT4G26780
AT3G58750
AT2G44120
AT3G19980
AT5G43010
AT1G53850
AT2G42490
AT3G63520
AT5G46290
AT4G35790
AT5G30510
AT3G55610
AT1G20010
AT1G63000
AT3G22890
AT1G02780
AT1G43710
AT1G54450
AT1G01960
AT5G49020
AT1G13060
AT3G46830
AT4G39980
AT3G13330
AT1G61580
AT4G17170
AT3G11940
AT2G46860
AT5G12250
AT5G46430
AT3G01120
AT2G18230
AT2G27710
AT3G18080
AT1G10270
AT1G02080
AT4G01690
AT4G09570
AT3G49910
AT5G37600
AT3G25660
AT1G70410
AT1G20130
AT5G16130
AT2G22475
AT4G10340
AT2G30200
AT5G36260
AT1G75350
AT4G02080
AT3G20000
AT4G38680
AT3G48420
AT2G37770
AT1G31780
AT3G13580
AT2G33040
AT2G03020
AT3G62600
AT5G64460
AT3G60600
AT1G56330
AT4G14570
AT4G36750
AT3G12800
AT1G23740
AT1G48860
AT4G17090
AT5G23900
AT1G14320
AT3G15090
AT5G27850
AT3G54470
AT4G33680
AT2G45810
AT1G78060
AT1G45000
AT5G18230
AT3G01340
AT2G05990
AT4G32260
AT1G30360
AT2G21060
AT5G40040
AT3G11250
AT5G40650
AT5G09660
AT5G63510
AT2G47470
AT2G37040
AT5G25780
AT5G16970
AT3G49720
AT4G16480
AT1G19570
AT3G02560
AT4G35630
AT5G64440
AT5G67030
AT5G08570
AT2G37190
AT2G35210
AT1G66580
AT1G78850
AT1G79040
AT2G43710
AT1G32440
AT1G14850
AT5G03940
AT2G20360
AT3G11950
AT1G48470
AT2G38280
AT5G65010
AT4G24820
AT1G48900
AT3G53430
AT5G28500
AT4G11380
AT3G18060
AT5G61170
AT2G19950
AT1G80480
AT3G06350
AT1G22920
AT1G13890
AT4G27070
AT5G40870
AT3G01480
AT1G09870
AT4G09320
AT4G25080
AT5G47200
AT5G66680
AT2G41560
AT1G29910
AT1G29920
AT1G07770
AT5G59850
AT1G29965
AT1G19580
AT3G27380
AT3G53580
AT1G80670
AT2G33800
AT1G75930
AT2G45790
AT2G01140
AT3G55280
AT5G47110
AT5G52840
AT4G30160
AT1G59900
AT3G14930
AT3G14067
AT1G78940
AT5G54430
AT4G09720
AT5G63860
AT3G04790
AT3G23400
AT1G03475
AT5G38830
AT2G22240
AT4G27700
AT4G35260
AT5G26000
AT2G34430
AT5G56350
AT3G55190
AT1G09750
AT1G17290
AT1G20330
AT2G45030
AT5G54810
AT3G16380
AT3G08940
AT1G60070
AT5G19760
AT5G41950
AT4G18360
AT4G16210
AT2G33845
AT1G02150
AT5G64760
AT4G01480
AT1G05190
AT3G60570
AT4G16720
AT2G32920
AT3G54540
AT3G09810
AT1G26830
AT2G47680
AT3G04870
AT3G10540
AT5G20080
AT3G07660
AT4G34350
AT4G37840
AT5G27980
AT5G51430
AT5G20710
AT5G16300
AT5G53400
AT4G14030
AT4G23650
AT5G13710
AT5G23540
AT2G43030
AT2G38700
AT1G29670
AT1G80560
AT5G14040
AT1G78920
AT5G13120
AT5G66140
AT4G16390
AT3G03060
AT2G37550
AT5G27030
AT3G10380
AT3G25920
AT3G26400
AT2G27600
AT4G00430
AT5G46580
AT2G45300
AT5G03650
AT5G12110
AT1G79500
AT4G05420
AT3G28715
AT5G14780
AT1G58290
AT5G50370
AT4G09670
AT2G35040
AT3G15670
AT5G14030
AT4G00110
AT2G36060
AT5G19600
AT3G23700
AT3G20320
AT1G25490
AT3G24430
AT3G55330
AT1G07140
AT2G45060
AT5G17020
AT5G45620
AT3G22110
AT3G15610
AT2G34420
AT2G41210
AT3G08580
AT3G10410
AT4G37990
AT5G27120
AT2G37760
AT3G04620
AT4G12980
AT5G65110
AT5G06140
AT4G11010
AT5G58140
AT5G67630
AT1G01620
AT5G66280
AT1G21630
AT5G17770
AT4G08870
AT4G32720
AT2G24050
AT1G52280
AT1G55480
AT3G06830
AT1G31730
AT5G26667
AT5G47010
AT1G14670
AT4G11260
AT1G31230
AT4G04040
AT5G11710
AT1G16720
AT5G60670
AT5G01600
AT3G27740
AT5G65950
AT1G20340
AT3G26340
AT2G15430
AT1G64980
AT1G30620
AT1G12840
AT1G01050
AT5G20830
AT1G78380
AT3G62010
AT4G05160
AT4G24640
AT5G42220
AT4G21100
AT4G22890
AT1G54100
AT5G20070
AT5G27600
AT1G74960
AT4G29510
AT2G36500
AT5G20990
AT1G16760
AT2G33100
AT1G71500
AT1G79250
AT1G69960
AT1G30050
AT2G43090
AT2G43040
AT1G12920
AT5G13490
AT1G47200
AT1G05010
AT3G17630
AT4G09730
AT2G27680
AT2G21490
AT4G18810
AT1G18450
AT4G30950
AT1G07410
AT3G10050
AT4G26500
AT1G47980
AT3G10670
AT5G38470
AT2G17870
AT2G34460
AT3G61430
AT3G56460
AT4G03520
AT2G21250
AT4G19210
AT3G48680
AT1G10670
AT4G20260
AT1G71860
AT4G28440
AT1G48420
AT5G63400
AT2G01720
AT1G79750
AT3G19020
AT1G14950
AT3G12490
AT1G55540
AT3G05560
AT5G58230
AT2G25140
AT5G59150
AT3G16640
AT4G17890
AT4G11860
AT1G17720
AT5G20630
AT2G43790
AT2G45070
AT1G79860
AT1G15730
AT1G11250
AT5G45280
AT4G08900
AT3G27925
AT5G63190
AT4G23900
AT5G57020
AT1G27680
AT1G74010
AT5G27770
AT5G08650
AT4G35700
AT1G60770
AT1G56360
AT2G19860
AT3G61200
AT1G22730
AT1G71780
AT2G09990
AT5G58070
AT4G13560
AT5G18380
AT1G54280
AT2G38670
AT2G01470
AT5G15550
AT3G61820
AT3G45030
AT5G62300
AT5G22480
AT1G09270
AT2G25290
AT2G37220
AT5G55230
AT1G11870
AT3G53710
AT5G19360
AT3G53900
AT5G41520
AT4G38230
AT2G13540
AT2G44530
AT5G44070
AT3G59980
AT5G20230
AT4G26510
AT2G28490
AT3G16780
AT1G71820
AT3G16100
AT3G48930
AT5G51970
AT2G31980
AT3G27300
AT5G20280
AT1G61790
AT2G45240
AT4G26410
AT5G23140
AT3G63170
AT4G39260
AT5G11670
AT4G23630
AT3G48170
AT4G32840
AT1G63180
AT1G06530
AT5G27630
AT5G10170
AT5G12180
AT3G16910
AT1G65030
AT3G13065
AT3G60820
AT5G60010
AT4G23460
AT2G39130
AT5G01530
AT3G62310
AT4G18030
AT4G18596
AT4G31340
AT3G20680
AT1G76080
AT4G27090
AT1G02280
AT2G17280
AT3G02720
AT1G69200
AT3G63150
AT2G26890
AT1G47490
AT3G47370
AT3G01540
AT4G30910
AT1G59830
AT2G06850
AT5G57330
AT3G51470
AT2G26230
AT5G57490
AT1G26910
AT2G23070
AT3G51810
AT5G06160
AT1G17470
AT5G10500
AT1G60780
AT5G58440
AT5G53480
AT5G38640
AT3G60770
AT3G48860
AT5G65260
AT5G63890
AT5G47770
AT5G64030
AT1G02560
AT4G04350
AT1G31910
AT1G02000
AT2G45820
AT2G13620
AT3G07960
AT1G59820
AT1G67930
AT3G05500
AT3G49080
AT5G40770
AT5G05780
AT2G02130
AT1G55150
AT4G27320
AT1G74850
AT1G01100
AT1G64770
AT1G61150
AT5G19620
AT2G43945
AT4G15560
AT3G53620
AT5G51660
AT3G54110
AT4G03620
AT3G05090
AT1G75680
AT2G17390
AT5G16290
AT3G55750
AT5G14220
AT5G53560
AT5G04740
AT2G33840
AT4G16830
AT4G35890
AT5G53140
AT3G18740
AT3G22850
AT2G31670
AT3G05590
AT1G76400
AT5G05570
AT2G22450
AT4G33760
AT3G63190
AT1G79570
AT2G25600
AT4G08630
AT1G04690
AT1G65540
AT5G11770
AT2G23120
AT1G16780
AT5G46750
AT5G13150
AT4G14300
AT4G36020
AT2G41530
AT1G12310
AT1G20760
AT2G40300
AT4G04770
AT3G44590
AT4G31750
AT1G20810
AT3G53970
AT5G05000
AT3G03110
AT5G10260
AT5G03740
AT1G03860
AT3G61140
AT1G41880
AT3G13227
AT5G24420
AT2G33870
AT1G26460
AT1G24020
AT5G14660
AT5G23880
AT3G56340
AT3G07630
AT5G09650
AT4G35100
AT4G29120
AT4G14230
AT1G15130
AT2G30720
AT2G26060
AT1G71880
AT3G50000
AT5G20490
AT5G64050
AT1G32990
AT1G76450
AT2G02100
AT4G20780
AT3G21200
AT3G16830
AT1G08980
AT1G48950
AT3G61230
AT5G09500
AT1G07660
AT1G07820
AT2G28740
AT3G45930
AT3G46320
AT3G53730
AT5G59690
AT5G59970
AT2G27510
AT1G61690
AT5G20690
AT4G16160
AT5G66470
AT2G02560
AT4G30440
AT1G10510
AT2G44610
AT5G59880
AT3G58700
AT4G18730
AT5G45775
AT3G52300
AT2G43910
AT1G62780
AT5G18900
AT3G07160
AT5G52340
AT2G02790
AT1G23290
AT1G44970
AT2G17800
AT4G33870
AT5G66420
AT3G26520
AT1G15140
AT1G11840
AT3G12690
AT5G19440
AT2G47400
AT3G03950
AT1G21670
AT3G10460
AT5G18100
AT4G15545
AT5G47930
AT1G49300
AT1G75200
AT3G49540
AT3G13490
AT3G46940
AT1G61770
AT4G32605
AT1G20575
AT5G19370
AT4G21860
AT3G58730
AT3G61260
AT1G78370
AT5G52360
AT2G47970
AT4G22930
AT1G48160
AT3G27820
AT3G23780
AT2G19010
AT4G20410
AT4G23400
AT2G21410
AT1G32220
AT3G18820
AT5G56280
AT4G02230
AT4G35850
AT3G55620
AT5G51030
AT2G22250
AT3G53180
AT1G50940
AT5G06110
AT2G39770
AT1G76510
AT1G52400
AT1G24400
AT3G15280
AT4G15000
AT1G16270
AT1G68200
AT4G36690
AT1G61520
AT1G54780
AT4G30530
AT1G48520
AT3G03080
AT1G76850
AT1G53580
AT5G15980
AT5G65940
AT3G56070
AT3G08970
AT3G51240
AT5G66030
AT3G66654
AT4G09650
AT1G21380
AT5G40370
AT2G35120
AT1G67700
AT1G64440
AT5G14320
AT2G33793
AT3G46040
AT5G46630
AT3G13750
AT4G36390
AT4G15093
AT5G18520
AT3G02830
AT5G05670
AT4G10840
AT2G46470
AT4G34700
AT5G57460
AT5G02770
AT1G05350
AT5G15200
AT1G31817
AT5G07470
AT5G47840
AT5G55280
AT5G59840
AT2G16440
AT3G19930
AT1G04160
AT3G49120
AT5G09840
AT4G12420
AT5G62270
AT3G16520
AT1G26110
AT4G25590
AT4G34640
AT4G37510
AT4G25890
AT4G25780
AT3G50110
AT1G58215
AT5G19290
AT3G13160
AT5G37850
AT5G16590
AT2G33220
AT3G06483
AT2G04700
AT2G16430
AT3G18490
AT3G48690
AT4G11220
AT3G07880
AT5G23740
AT2G22780
AT2G13560
AT3G27280
AT4G09520
AT3G53500
AT1G18280
AT4G35950
AT4G33945
AT3G28940
AT3G26618
AT5G27410
AT1G04760
AT4G29410
AT1G73990
AT5G16780
AT5G61500
AT1G07210
AT3G03920
AT3G20390
AT1G44900
AT4G14800
AT4G20530
AT4G20540
AT4G20550
AT4G20560
AT4G20570
AT4G20645
AT4G20670
AT3G15180
AT1G04190
AT3G51780
AT1G74970
AT3G03980
AT1G62820
AT4G26840
AT3G19230
AT3G20410
AT1G60810
AT1G16340
AT5G24850
AT1G63060
AT3G24090
AT1G03120
AT1G15340
AT1G34220
AT1G64040
AT1G35620
AT3G24350
AT4G13850
AT4G37000
AT2G21280
AT5G35620
AT5G54180
AT4G16180
AT2G37370
AT5G07490
AT2G33470
AT3G18230
AT5G57290
AT1G32380
AT1G08880
AT1G34130
AT3G10060
AT5G48180
AT2G32060
AT3G44750
AT2G28900
AT4G30800
AT3G55360
AT3G05280
AT3G16370
AT1G09830
AT3G23920
AT5G42960
AT1G23900
AT2G35500
AT1G09760
AT1G09580
AT5G24690
AT5G66740
AT4G31500
AT5G48570
AT2G07707
AT5G16440
AT4G24550
AT1G04980
AT1G74640
AT2G14750
AT3G13772
AT5G38840
AT3G26720
AT2G32600
AT4G13840
AT3G51300
AT3G53610
AT4G10790
AT5G11980
AT5G52240
AT5G09390
AT5G05730
AT4G34740
AT2G26490
AT1G51110
AT1G12570
AT3G20790
AT5G25540
AT1G16880
AT2G20610
AT1G29140
AT4G37800
AT5G39500
AT4G38710
AT1G12050
AT1G11750
AT4G14110
AT5G56640
AT1G67730
AT1G11890
AT3G46000
AT2G23080
AT1G53280
AT1G15290
AT1G79560
AT3G18580
AT5G22510
AT5G62350
AT2G20450
AT3G10700
AT4G22670
AT5G48810
AT1G16240
AT1G31850
AT1G71230
AT3G54240
AT3G20330
AT5G19940
AT4G04950
AT1G15415
AT5G58590
AT5G05170
AT4G39520
AT1G04600
AT3G05910
AT5G16450
AT3G61110
AT1G30300
AT4G34830
AT2G47250
AT3G47810
AT1G10590
AT4G14870
AT5G05520
AT4G20020
AT5G14170
AT4G04720
AT2G02740
AT2G15400
AT3G13222
AT1G61870
AT3G49240
AT1G48920
AT3G02750
AT1G72230
AT5G53530
AT5G60960
AT3G22230
AT2G38730
AT4G32150
AT4G00620
AT1G17070
AT1G56050
AT4G02520
AT3G14990
AT2G20270
AT1G73230
AT2G44650
AT1G07040
AT2G19670
AT5G58060
AT1G22450
AT2G27200
AT3G09900
AT1G04450
AT5G47880
AT4G23680
AT5G12860
AT1G28306
AT2G30740
AT4G17040
AT4G12800
AT5G51280
AT1G01610
AT1G66070
AT4G00740
AT3G54700
AT5G66120
AT2G37500
AT5G54270
AT1G14010
AT4G12590
AT2G37010
AT5G60730
AT5G23390
AT1G08130
AT5G14580
AT5G51830
AT2G03480
AT1G75950
AT4G36480
AT1G15440
AT3G12160
AT2G32230
AT5G57300
AT4G23600
AT1G20140
AT2G40620
AT5G40420
AT4G15640
AT5G60860
AT2G17265
AT1G44446
AT5G60640
AT1G09630
AT1G49970
AT5G23940
AT3G56650
AT3G10850
AT1G15950
AT4G26430
AT5G59710
AT4G20720
AT1G08830
AT2G45140
AT5G61840
AT3G47450
AT2G38550
AT1G72810
AT2G29690
AT4G16060
AT5G58710
AT3G45400
AT5G54500
AT5G32470
AT2G36660
AT2G15880
AT5G67320
AT2G40360
AT3G01800
AT3G03100
AT1G48620
AT1G77120
AT4G29830
AT2G20530
AT4G13230
AT3G59890
AT3G60245
AT1G01910
AT3G22360
AT1G60710
AT5G59240
AT5G35700
AT2G43950
AT5G19680
AT4G03080
AT1G16920
AT2G32080
AT5G63220
AT5G14060
AT1G49600
AT2G42220
AT2G25870
AT3G12600
AT5G36890
AT4G18970
AT2G30860
AT2G42790
AT5G41700
AT3G19590
AT3G17900
AT4G37200
AT2G37790
AT5G59160
AT2G03680
AT2G40290
AT5G13280
AT5G05970
AT2G37690
AT3G58840
AT3G11270
AT3G52950
AT2G44760
AT1G23780
AT2G29080
AT5G01750
AT3G22650
AT1G01780
AT2G28080
AT5G08380
AT5G55160
AT1G72710
AT1G22740
AT1G21720
AT3G63095
AT5G64040
AT5G66720
AT1G26690
AT5G18700
AT5G10160
AT2G34560
AT1G03160
AT2G22480
AT2G21620
AT5G05850
AT5G67510
AT5G43470
AT3G06850
AT3G45640
AT4G32410
AT3G08910
AT1G48130
AT5G39870
AT3G61470
AT3G23490
AT1G21660
AT4G01395
AT3G04940
AT5G51570
AT2G24765
AT4G09020
AT4G12400
AT3G13410
AT2G46000
AT4G28450
AT1G04040
AT4G03550
AT5G27560
AT5G63490
AT1G21690
AT3G52610
AT5G14200
AT2G19000
AT3G48890
AT3G58560
AT1G79850
AT5G52820
AT5G51260
AT2G29960
AT4G24510
AT5G59950
AT1G12070
AT3G25770
AT4G18800
AT4G03560
AT2G46170
AT5G67490
AT4G28660
AT2G20060
AT5G57440
AT1G51570
AT1G29940
AT4G34660
AT2G02400
AT5G24490
AT3G56290
AT3G09500
AT5G18400
AT4G14950
AT1G53070
AT4G05400
AT3G60210
AT5G46860
AT3G45770
AT1G42550
AT4G20960
AT3G60740
AT3G01570
AT1G75220
AT1G31800
AT3G47800
AT3G16400
AT4G38130
AT4G02510
AT4G08520
AT3G18600
AT3G07330
AT3G21280
AT2G31170
AT3G22630
AT3G62720
AT3G20290
AT3G54840
AT4G39890
AT1G27390
AT3G05190
AT3G56490
AT2G45440
AT5G49880
AT1G76090
AT4G01150
AT5G12290
AT5G62440
AT1G48020
AT5G49930
AT1G63680
AT1G02130
AT1G06700
AT3G26370
AT2G20890
AT5G45950
AT5G04830
AT2G29450
AT1G20440
AT2G01540
AT1G10390
AT3G59990
AT3G08710
AT1G05510
AT4G13370
AT3G25070
AT4G26570
AT5G59300
AT2G39420
AT4G21710
AT3G62370
AT1G21130
AT4G04020
AT4G34050
AT5G09890
AT2G30870
AT4G16450
AT3G02710
AT1G69840
AT3G12390
AT4G39710
AT4G00400
AT1G76180
AT4G18060
AT3G23820
AT4G30890
AT5G24165
AT4G03110
AT2G43980
AT1G73050
AT1G55090
AT1G65980
AT1G02140
AT3G23620
AT1G13690
AT2G20810
AT1G67360
AT5G38435
AT1G25260
AT4G00170
AT4G29350
AT3G10660
AT4G27160
AT5G64670
AT3G50360
AT5G54310
AT3G45980
AT2G38540
AT3G53020
AT4G27270
AT1G09930
AT1G05270
AT2G32810
AT5G46180
AT5G02890
AT5G39650
AT1G07320
AT4G29520
AT3G46010
AT1G30470
AT1G63780
AT1G03680
AT1G70540
AT2G43420
AT3G58460
AT5G40760
AT5G17620
AT4G01900
AT4G24770
AT1G48480
AT2G29700
AT5G62750
AT5G05620
AT2G32260
AT2G39390
AT1G60000
AT4G12250
AT5G41770
AT5G62550
AT3G22060
AT4G39990
AT3G09580
AT4G17300
AT5G47870
AT5G04290
AT1G29530
AT4G39230
AT5G22100
AT2G19830
AT1G72340
AT5G57810
AT5G15680
AT5G22830
AT1G72610
AT5G39850
AT1G32580
AT5G47760
AT1G74210
AT2G35490
AT3G03570
AT1G13110
AT1G44790
AT1G18850
AT5G64990
AT3G01680
AT1G02305
AT5G54910
AT3G18750
AT5G02610
AT2G36620
AT4G36630
AT1G73720
AT5G42190
AT1G18190
AT5G67385
AT1G15390
AT4G39120
AT2G47640
AT1G08220
AT2G15270
AT2G39840
AT1G71260
AT4G30840
AT1G11930
AT3G53990
AT1G22275
AT5G02960
AT1G77140
AT1G36730
AT1G47290
AT1G72640
AT1G19150
AT2G38740
AT4G25140
AT1G30530
AT1G54530
AT1G04640
AT3G12130
AT3G60540
AT1G21480
AT1G52930
AT4G28780
AT3G60830
AT3G10360
AT3G56240
AT5G62930
AT5G58220
AT4G14080
AT1G55900
AT1G11260
AT5G44780
AT1G29250
AT3G13940
AT2G47240
AT1G43860
AT2G46820
AT1G02090
AT5G27840
AT1G54500
AT4G02840
AT4G00752
AT2G34630
AT2G30620
AT1G09700
AT1G76200
AT4G16360
AT1G54520
AT4G27800
AT3G12170
AT3G48460
AT5G02940
AT4G17510
AT3G12640
AT4G17260
AT1G31440
AT2G18040
AT3G26740
AT4G24490
AT5G39590
AT3G23830
AT1G53900
AT3G30841
AT3G56080
AT4G21110
AT5G44500
AT2G46100
AT3G57050
AT1G71440
AT1G80790
AT2G22360
AT5G04600
AT5G04900
AT2G40190
AT5G11810
AT2G17700
AT5G11440
AT5G04510
AT4G13670
AT4G02530
AT3G04780
AT2G45640
AT2G38270
AT1G48830
AT1G49750
AT4G27170
AT3G16470
AT1G69620
AT5G44610
AT5G50320
AT1G43130
AT3G22500
AT2G26210
AT3G02630
AT5G47600
AT5G14240
AT4G30580
AT3G25220
AT4G34120
AT2G25840
AT1G15910
AT5G52190
AT1G06550
AT3G51130
AT5G52780
AT1G22840
AT1G32160
AT3G10350
AT4G30690
AT1G74070
AT4G23640
AT3G55920
AT4G32130
AT1G22270
AT2G24940
AT1G71680
AT3G02880
AT3G03630
AT1G08490
AT5G39493
AT4G34620
AT3G18220
AT4G31420
AT5G62200
AT3G22320
AT1G65290
AT5G45550
AT4G01990
AT5G20140
AT1G11480
AT3G54640
AT3G50270
AT3G05000
AT3G61650
AT1G67350
AT2G01650
AT2G22230
AT2G46580
AT5G52440
AT1G32080
AT5G02100
AT2G35010
AT1G09590
AT1G09690
AT1G69340
AT1G34000
AT5G51600
AT2G30880
AT2G43130
AT1G13030
AT3G15010
AT3G17465
AT2G05830
AT3G02770
AT5G67270
AT1G23860
AT5G58470
AT4G05090
AT5G66750
AT3G04880
AT5G06830
AT4G39220
AT4G23570
AT1G27190
AT2G19080
AT3G06550
AT4G05390
AT3G52390
AT3G57350
AT1G32550
AT2G21150
AT3G26380
AT1G22150
AT5G54740
AT4G00860
AT4G01610
AT1G06400
AT5G67590
AT3G49250
AT5G16760
AT2G43235
AT2G44540
AT2G41600
AT3G43520
AT5G42980
AT2G44310
AT3G59010
AT4G18465
AT2G47330
AT5G51750
AT1G26880
AT5G64350
AT3G58480
AT1G15710
AT2G02850
AT3G06790
AT2G17972
AT1G18170
AT3G09300
AT3G21400
AT1G27650
AT5G15470
AT5G15870
AT3G19130
AT4G11820
AT5G18110
AT2G25830
AT1G23140
AT3G01250
AT4G32470
AT1G70330
AT5G20250
AT5G25060
AT5G22740
AT4G28200
AT4G21770
AT2G33410
AT4G12770
AT2G05520
AT1G24460
AT1G18480
AT4G27780
AT4G27230
AT3G05100
AT1G28290
AT2G42130
AT5G06910
AT1G66130
AT4G35230
AT1G08700
AT5G22030
AT1G70790
AT5G05270
AT4G23920
AT3G51420
AT1G02410
AT3G01520
AT1G31070
AT5G67530
AT3G11730
AT5G16390
AT5G12410
AT5G13610
AT4G39150
AT5G61760
AT4G17560
AT1G80360
AT2G06530
AT4G20830
AT3G58580
AT5G02160
AT1G50900
AT1G77840
AT1G35220
AT4G32760
AT3G45300
AT4G02140
AT3G06730
AT2G39930
AT1G49820
AT1G20580
AT1G08640
AT4G02340
AT1G79870
AT5G42820
AT1G63160
AT1G07250
AT1G71310
AT1G12910
AT3G59650
AT3G28630
AT1G51100
AT2G47580
AT5G23040
AT5G46940
AT2G31970
AT1G27130
AT2G45740
AT5G64960
AT2G42120
AT5G08330
AT2G47710
AT3G48250
AT3G25585
AT4G26720
AT3G02180
AT1G24610
AT5G25265
AT1G24050
AT1G76680
AT1G31660
AT2G39010
AT1G06820
AT2G25280
AT1G64750
AT2G19330
AT3G28150
AT2G24450
AT3G60810
AT4G25840
AT2G16640
AT5G18200
AT1G10522
AT1G04680
AT5G14530
AT5G23535
AT4G31140
AT2G29340
AT2G20840
AT4G28360
AT4G35610
AT3G15790
AT1G09300
AT2G28120
AT4G03430
AT2G34590
AT4G38690
AT1G65150
AT4G39670
AT2G39140
AT1G50670
AT3G03120
AT5G48030
AT5G47520
AT1G80750
AT3G16170
AT3G06610
AT3G48820
AT1G73980
AT3G29075
AT1G10500
AT1G08520
AT1G60550
AT1G02170
AT4G28700
AT3G29200
AT1G18630
AT4G17870
AT1G18640
AT4G39730
AT2G21960
AT1G07645
AT2G18030
AT1G70670
AT3G23390
AT4G14320
AT1G19880
AT1G07170
AT2G30000
AT2G25800
AT1G26550
AT1G07330
AT2G42680
AT1G58360
AT2G33070
AT5G40580
AT1G77750
AT3G44100
AT2G16370
AT3G01510
AT3G11820
AT4G17650
AT1G18990
AT5G18850
AT5G15770
AT2G37470
AT2G40170
AT3G13120
AT5G12210
AT2G06510
AT5G63380
AT3G27700
AT2G45000
AT1G10310
AT1G32330
AT1G13330
AT5G50960
AT3G47470
AT4G31350
AT4G36810
AT4G15130
AT3G06430
AT5G65650
AT5G14910
AT3G55260
AT4G00300
AT1G77440
AT5G66100
AT2G02120
AT2G19740
AT2G41040
AT1G50240
AT1G13090
AT1G26761
AT1G29810
AT2G43400
AT5G03280
AT3G27020
AT2G36835
AT1G08410
AT1G04110
AT3G15660
AT1G63500
AT2G26930
AT5G19150
AT2G35800
AT1G03900
AT5G18480
AT5G64370
AT3G51010
AT3G54300
AT1G05460
AT2G35100
AT4G10040
AT3G08010
AT2G34840
AT1G74310
AT5G64290
AT5G03050
AT5G58870
AT5G55130
AT3G06540
AT2G30930
AT5G27450
AT2G20930
AT3G01590
AT1G11680
AT1G12640
AT5G47320
AT5G56710
AT2G44230
AT3G07320
AT2G31960
AT1G54320
AT2G41490
AT4G38200
AT1G67440
AT1G01370
AT3G22480
AT4G33350
AT4G04320
AT1G28060
AT5G04870
AT1G76690
AT1G07960
AT4G29810
AT5G19460
AT4G23430
AT5G33320
AT1G51160
AT5G09420
AT1G51590
AT2G35410
AT3G10940
AT1G05850
AT2G21470
AT4G08790
AT3G02900
AT1G56580
AT1G54570
AT1G22620
AT2G43020
AT5G58260
AT3G21220
AT4G24530
AT4G13050
AT5G27670
AT1G50510
AT2G15900
AT4G25340
AT1G67540
AT1G79720
AT4G02290
AT5G58240
AT1G27435
AT3G61560
AT4G00585
AT5G38880
AT2G16460
AT5G03910
AT3G18590
AT4G16700
AT2G13290
AT4G31930
AT1G14380
AT3G53700
AT3G25545
AT1G24095
AT3G61070
AT3G54690
AT2G15790
AT5G58030
AT2G24490
AT2G34700
AT3G27890
AT3G02730
AT3G16310
AT4G16800
AT5G50100
AT5G10780
AT4G30610
AT3G54560
AT1G29700
AT5G12190
AT5G52210
AT1G07440
AT1G23100
AT4G35760
AT1G47380
AT2G21600
AT5G42570
AT5G15530
AT5G47780
AT5G27700
AT3G08950
AT1G17350
AT4G26760
AT2G31305
AT1G73250
AT3G18570
AT1G06130
AT4G34180
AT1G65650
AT5G30495
AT2G44050
AT3G09880
AT3G16410
AT4G32400
AT2G32090
AT1G08750
AT4G33625
AT2G43520
AT3G50520
AT5G08740
AT5G09260
AT5G37590
AT3G01380
AT1G17745
AT1G77090
AT3G27120
AT3G53890
AT2G07725
AT2G46560
AT2G24180
AT3G12380
AT1G66900
AT3G22845
AT4G22000
AT4G15930
AT3G26600
AT3G42790
AT4G36580
AT3G51550
AT2G33620
AT3G18940
AT5G24260
AT5G51010
AT3G55180
AT3G15710
AT5G10090
AT1G60650
AT2G36020
AT5G18430
AT3G17910
AT2G29290
AT4G21445
AT1G75660
AT2G31410
AT5G26030
AT2G17010
AT1G13730
AT2G17410
AT4G15520
AT4G31860
AT1G72460
AT1G78870
AT5G12980
AT2G46900
AT2G20120
AT1G20560
AT2G40100
AT5G56760
AT1G77420
AT3G26860
AT2G16280
AT2G30390
AT3G01290
AT3G28900
AT1G50710
AT5G46250
AT5G07510
AT5G59810
AT5G41600
AT2G35605
AT3G52720
AT3G02920
AT1G08780
AT2G48120
AT4G00380
AT3G12140
AT2G03640
AT3G08600
AT1G11400
AT3G60360
AT1G14900
AT3G20550
AT5G40200
AT1G29590
AT3G60340
AT1G78240
AT3G45280
AT5G15350
AT3G25290
AT2G26460
AT1G12970
AT5G47390
AT4G11240
AT5G20520
AT5G17590
AT4G15940
AT5G20590
AT5G22220
AT3G05030
AT3G21370
AT1G08660
AT3G02220
AT2G17560
AT3G28730
AT5G39800
AT2G01350
AT3G12010
AT5G08180
AT3G54890
AT3G05130
AT1G68310
AT4G28980
AT1G51510
AT1G29060
AT1G73620
AT1G26940
AT3G02320
AT5G22470
AT3G09970
AT4G21940
AT3G51510
AT3G12760
AT4G29470
AT1G47820
AT1G73030
AT5G10560
AT1G16890
AT5G58110
AT3G28700
AT2G44200
AT5G54110
AT2G21160
AT4G24805
AT4G34920
AT3G50960
AT5G50930
AT1G35470
AT4G22310
AT4G27140
AT2G39900
AT5G16010
AT1G04645
AT3G25680
AT3G13640
AT3G13230
AT1G74560
AT1G17050
AT3G27050
AT1G74320
AT2G31750
AT5G37770
AT4G26750
AT4G26470
AT4G15550
AT3G12410
AT3G27660
AT4G31130
AT5G06530
AT5G63830
AT3G08630
AT1G20850
AT1G67830
AT3G17410
AT3G62130
AT3G14920
AT1G52260
AT1G17270
AT1G75460
AT2G36830
AT3G09570
AT2G35830
AT1G04900
AT1G68020
AT3G19770
AT4G28860
AT2G44860
AT3G16060
AT5G43810
AT1G65090
AT3G03590
AT2G15690
AT1G60950
AT5G45010
AT2G33600
AT3G12020
AT3G04290
AT1G16280
AT4G01130
AT3G03600
AT5G49720
AT3G12260
AT3G10330
AT2G26910
AT1G01820
AT1G11475
AT1G10200
AT2G38910
AT5G60620
AT5G41940
AT1G03050
AT1G79090
AT2G36310
AT5G08050
AT2G16930
AT5G50870
AT1G17210
AT5G43500
AT2G42210
AT5G35660
AT1G49430
AT4G10120
AT2G02040
AT2G32900
AT3G07640
AT3G15820
AT3G11740
AT1G75850
AT1G50400
AT4G38350
AT4G24930
AT3G47940
AT1G80500
AT3G10530
AT1G14510
AT5G67560
AT5G27520
AT5G56260
AT5G66410
AT1G04650
AT3G57530
AT5G37810
AT5G25530
AT2G38710
AT1G51660
AT5G14310
AT4G34260
AT5G65900
AT3G07480
AT4G30010
AT1G11765
AT5G04280
AT4G32960
AT2G28355
AT1G45145
AT5G49510
AT4G36800
AT2G31035
AT3G25480
AT2G30710
AT3G57220
AT1G29260
AT1G32250
AT5G11480
AT5G24460
AT5G11680
AT1G78510
AT2G18700
AT5G47630
AT4G25050
AT3G11070
AT4G00560
AT4G10750
AT5G61310
AT5G66090
AT5G40480
AT5G63910
AT4G31200
AT2G26000
AT3G60500
AT4G39910
AT5G07860
AT1G17840
AT2G35340
AT2G24090
AT4G21895
AT2G41950
AT4G19003
AT1G49480
AT5G39950
AT5G25900
AT3G18410
AT1G30890
AT2G01490
AT1G29850
AT4G16444
AT4G15955
AT1G35340
AT3G18500
AT4G04850
AT4G17920
AT4G24860
AT4G29960
AT5G56950
AT2G44690
AT3G16090
AT1G17190
AT5G24130
AT1G72170
AT5G16400
AT5G47435
AT1G70520
AT1G21850
AT4G22140
AT2G23610
AT1G74730
AT2G18600
AT1G17540
AT2G18390
AT1G16740
AT3G50980
AT4G32120
AT4G17760
AT4G17770
AT5G52110
AT1G68590
AT5G66055
AT3G02480
AT1G03870
AT3G09980
AT4G27560
AT2G02160
AT5G48335
AT3G47610
AT4G17420
AT1G06230
AT4G27130
AT5G50580
AT5G50680
AT5G56660
AT4G37270
AT1G62010
AT1G77390
AT3G23580
AT2G40400
AT1G13190
AT4G13510
AT2G48150
AT1G80910
AT4G22570
AT5G46230
AT5G59845
AT5G47890
AT5G26600
AT2G33775
AT3G54490
AT5G63800
AT2G26430
AT3G21820
AT1G13870
AT2G40890
AT5G57950
AT1G54410
AT5G22950
AT5G02280
AT3G21670
AT1G52300
AT4G11980
AT4G32070
AT5G42130
AT3G57280
AT3G02110
AT3G10220
AT1G19360
AT4G02620
AT3G28490
AT1G21190
AT2G22570
AT2G24310
AT4G16990
AT2G23940
AT3G11770
AT3G01440
AT5G48540
AT1G24490
AT1G21600
AT1G03150
AT3G28460
AT2G22900
AT1G53840
AT3G61980
AT3G19810
AT4G30780
AT3G01450
AT5G22370
AT4G25370
AT2G26100
AT1G24120
AT1G80510
AT5G55140
AT1G26170
AT1G04970
AT2G33830
AT1G51610
AT1G73940
AT3G46230
AT4G24130
AT1G44820
AT2G41670
AT3G45010
AT3G18760
AT2G22610
AT2G39850
AT5G37130
AT3G56010
AT4G33670
AT5G04910
AT4G29680
AT1G06670
AT5G11780
AT2G44790
AT4G08230
AT1G62600
AT3G01740
AT1G08860
AT1G24267
AT5G24350
AT3G03490
AT1G11040
AT5G63870
AT4G25440
AT1G31860
AT2G07727
AT1G03400
AT5G51150
AT1G48170
AT3G24320
AT1G50450
AT5G13020
AT2G36130
AT2G03630
AT5G03440
AT5G20040
AT3G15850
AT1G08160
AT2G30530
AT5G23395
AT2G19790
AT5G17250
AT4G29870
AT3G27670
AT4G33640
AT1G11572
AT1G52780
AT1G28340
AT1G43900
AT5G54390
AT3G26030
AT5G39080
AT2G19385
AT1G12140
AT3G52730
AT5G59140
AT2G19750
AT4G29390
AT5G56670
AT1G62850
AT5G62310
AT1G48440
AT1G30480
AT3G04950
AT1G65320
AT4G16440
AT5G09250
AT5G12970
AT1G30460
AT4G17340
AT2G16860
AT2G41475
AT5G07430
AT5G45810
AT1G47830
AT2G16505
AT1G80350
AT4G12640
AT3G14960
AT1G13820
AT5G24314
AT1G20840
AT1G68540
AT4G26455
AT2G03980
AT2G34640
AT2G02760
AT5G05370
AT3G61620
AT5G20090
AT5G47810
AT1G25480
AT5G03080
AT2G44730
AT3G16980
AT3G54630
AT4G21590
AT4G34510
AT4G27180
AT5G25310
AT1G78140
AT5G57550
AT2G35320
AT1G27760
AT5G52960
AT1G54070
AT5G16150
AT3G04020
AT4G31770
AT2G10950
AT1G52530
AT3G60730
AT4G25100
AT3G21055
AT2G31570
AT1G02830
AT5G51720
AT5G20930
AT3G21380
AT3G18140
AT1G76260
AT1G16380
AT5G15880
AT5G67470
AT5G15802
AT1G76405
AT2G47120
AT1G72010
AT5G41880
AT3G58680
AT4G08350
AT4G38500
AT1G48270
AT5G49550
AT2G46090
AT4G17486
AT1G65410
AT5G15860
AT2G01460
AT5G47030
AT3G27080
AT1G03550
AT1G15700
AT3G12280
AT5G23670
AT5G07460
AT5G54010
AT3G49670
AT2G27385
AT3G01660
AT2G46800
AT3G49100
AT4G34940
AT2G26280
AT5G40260
AT5G54580
AT1G17330
AT2G38010
AT5G10300
AT4G25680
AT3G25170
AT5G64090
AT5G58800
AT5G38980
AT5G62410
AT3G13224
AT1G20270
AT5G43970
AT5G06660
AT5G55470
AT1G24100
AT4G36100
AT3G15380
AT5G54800
AT2G19450
AT5G49570
AT5G46020
AT5G59870
AT5G09640
AT2G28600
AT3G11200
AT1G76490
AT3G17780
AT2G21640
AT5G64140
AT1G49140
AT5G18620
AT3G05070
AT5G65520
AT5G64540
AT1G26220
AT2G03820
AT1G67840
AT2G17720
AT4G05450
AT1G17520
AT2G26900
AT3G60960
AT1G78180
AT5G48840
AT3G12930
AT2G47420
AT5G62290
AT1G60170
AT3G07010
AT2G14530
AT5G14050
AT2G15240
AT1G28270
AT1G51540
AT1G48240
AT1G70710
AT3G25165
AT1G79490
AT3G62550
AT2G27260
AT4G05150
AT3G18050
AT3G11750
AT1G49330
AT3G25400
AT3G07070
AT1G52980
AT5G51130
AT1G51770
AT4G14145
AT4G09160
AT3G22930
AT1G11390
AT4G36720
AT1G51400
AT5G23440
AT3G55830
AT2G34810
AT1G64710
AT4G31160
AT5G03470
AT1G74530
AT2G41430
AT4G01560
AT5G14010
AT1G50610
AT4G33400
AT1G31220
AT1G11530
AT3G51600
AT5G56150
AT5G21100
AT3G14090
AT1G11760
AT1G30960
AT5G27720
AT2G18630
AT1G73190
AT5G15050
AT2G42800
AT4G24970
AT5G50030
AT4G17010
AT1G15270
AT1G22460
AT3G45050
AT1G33680
AT4G27450
AT5G41685
AT5G57980
AT4G19040
AT2G26680
AT3G28450
AT1G26670
AT3G08640
AT5G13310
AT4G02610
AT1G50320
AT3G55270
AT1G77930
AT5G12130
AT4G26770
AT5G55980
AT1G67230
AT5G11760
AT5G55920
AT2G18170
AT1G63310
AT2G01060
AT2G20700
AT3G56720
AT5G17790
AT1G02880
AT3G18430
AT1G24110
AT5G39790
AT3G15110
AT1G78670
AT2G26570
AT3G01370
AT1G77860
AT2G42400
AT5G58090
AT1G22170
AT3G13080
AT2G36180
AT2G42070
AT1G67460
AT4G20330
AT1G78340
AT1G54830
AT1G70220
AT5G20650
AT4G33060
AT1G43620
AT1G70890
AT2G43240
AT2G39970
AT4G26020
AT5G07560
AT1G16860
AT5G51640
AT3G15810
AT5G19980
AT5G40740
AT3G27000
AT4G34020
AT5G12390
AT2G01070
AT3G05410
AT4G26190
AT5G16120
AT4G04670
AT5G65770
AT5G48470
AT1G15240
AT3G59140
AT4G14270
AT1G27460
AT5G10050
AT5G11280
AT1G47750
AT5G38110
AT1G74740
AT5G12030
AT3G02950
AT1G52630
AT5G59250
AT1G01470
AT2G33700
AT3G60620
AT5G63140
AT1G43700
AT5G47100
AT4G37760
AT3G63160
AT5G67290
AT1G02335
AT1G51650
AT1G07970
AT1G11780
AT4G27420
AT5G64330
AT2G16920
AT1G29990
AT5G05070
AT2G26300
AT1G68000
AT5G01700
AT3G61600
AT1G64170
AT3G16990
AT1G65700
AT4G32390
AT4G31850
AT5G45490
AT4G37880
AT1G10230
AT3G12620
AT1G50520
AT5G03070
AT1G04010
AT4G32714
AT5G36950
AT1G78020
AT1G62930
AT1G54610
AT5G53370
AT1G65010
AT4G04570
AT1G79820
AT1G29790
AT3G29280
AT5G17160
AT1G71170
AT5G39530
AT3G57420
AT5G13740
AT4G37330
AT5G58430
AT4G13630
AT1G76990
AT1G15720
AT1G01970
AT4G04925
AT4G15010
AT5G14140
AT3G12030
AT5G56360
AT4G13800
AT5G02050
AT1G55360
AT3G56700
AT1G17140
AT5G14210
AT4G06599
AT5G46570
AT3G52210
AT4G21960
AT1G69380
AT5G56090
AT4G34138
AT5G50900
AT3G01650
AT2G29730
AT5G14450
AT1G16010
AT4G21540
AT3G15356
AT5G42670
AT4G16650
AT2G18730
AT5G03860
AT1G76670
AT1G65840
AT2G25880
AT3G07570
AT5G12100
AT3G10260
AT1G71040
AT1G64680
AT4G38380
AT1G17130
AT3G47430
AT4G38400
AT1G08030
AT3G05020
AT2G26660
AT1G03250
AT1G18650
AT3G46510
AT5G64910
AT3G54870
AT5G47640
AT2G04550
AT5G13970
AT5G20130
AT1G61630
AT4G26000
AT1G78280
AT4G33430
AT5G05660
AT4G29380
AT5G06340
AT1G79350
AT5G23820
AT3G47833
AT1G45688
AT5G22050
AT2G15820
AT3G54960
AT1G23440
AT3G26990
AT1G67900
AT3G09250
AT3G03990
AT3G07390
AT4G27640
AT5G46390
AT2G35470
AT3G54080
AT2G01590
AT3G55520
AT3G19800
AT1G09160
AT3G16175
AT5G59730
AT3G55960
AT3G51450
AT2G40980
AT1G22610
AT3G11620
AT2G26380
AT1G34340
AT4G31840
AT2G31800
AT5G02830
AT2G26970
AT5G42870
AT5G03730
AT1G58060
AT1G59760
AT4G20280
AT1G27535
AT3G03040
AT2G03670
AT2G20280
AT1G22690
AT4G01700
AT1G57990
AT3G17030
AT2G35350
AT1G48760
AT2G35960
AT3G61580
AT1G68910
AT5G39990
AT4G29650
AT2G38760
AT4G01920
AT5G09920
AT1G48880
AT5G42340
AT1G47640
AT1G03910
AT5G04540
AT1G63290
AT3G08850
AT3G03160
AT5G41010
AT1G01580
AT5G64620
AT3G42860
AT3G60840
AT5G47020
AT2G32540
AT1G01730
AT2G47600
AT3G49590
AT4G24730
AT1G47240
AT3G58170
AT1G54850
AT4G10280
AT5G60190
AT1G55805
AT2G41160
AT4G30400
AT4G15820
AT5G07770
AT5G16890
AT1G10280
AT1G28670
AT1G73410
AT5G32440
AT3G16565
AT1G51730
AT2G23560
AT3G58180
AT2G38530
AT3G23190
AT2G41900
AT5G05760
AT5G40090
AT2G30150
AT5G42890
AT2G36300
AT5G27395
AT3G21630
AT3G12610
AT5G48870
AT1G62960
AT2G22370
AT2G24390
AT2G03290
AT1G13790
AT3G08490
AT2G47900
AT4G08320
AT1G49740
AT1G07840
AT2G01680
AT1G15540
AT1G72820
AT3G04660
AT2G45520
AT5G51020
AT1G69430
AT4G29240
AT4G37630
AT5G38670
AT2G22600
AT2G04360
AT3G53170
AT1G33900
AT2G47980
AT5G23920
AT3G52290
AT1G13600
AT1G20230
AT3G01550
AT4G15070
AT3G14650
AT3G55080
AT1G16520
AT5G22500
AT3G46260
AT5G59320
AT5G08720
AT5G55640
AT3G04430
AT5G40060
AT1G01940
AT1G16640
AT2G25520
AT1G43640
AT3G54950
AT5G47800
AT5G18690
AT1G53550
AT5G25430
AT4G08691
AT1G73100
AT1G19690
AT1G45063
AT2G17120
AT1G72175
AT1G75710
AT2G38440
AT3G18295
AT4G21260
AT1G55290
AT5G27710
AT1G12280
AT1G73080
AT4G31640
AT3G50100
AT5G61700
AT4G27510
AT1G59600
AT5G04070
AT1G76920
AT4G29720
AT1G32750
AT3G27180
AT4G37100
AT5G08490
AT4G23530
AT1G24290
AT2G18850
AT5G20580
AT2G28890
AT2G25770
AT5G57815
AT5G11960
AT2G36360
AT2G33610
@@ -13,8 +13,9 @@ from data.loader import DataLoader
 from utils.misc import list_epochs, print_and_log

-RAYS_PER_BATCH = 2 ** 16
+RAYS_PER_BATCH = 2 ** 12
 DATA_LOADER_CHUNK_SIZE = 1e8
+root_dir = Path.cwd()

 parser = argparse.ArgumentParser()
@@ -68,7 +69,7 @@ if args.mdl_path:
     model_args = model.args
 else:
     # Create model from specified configuration
-    with Path(f'{sys.path[0]}/configs/{args.config}.json').open() as fp:
+    with Path(f'{root_dir}/configs/{args.config}.json').open() as fp:
         config = json.load(fp)
     model_name = args.config
     model_class = config['model']
@@ -76,7 +77,7 @@ else:
     model_args['bbox'] = dataset.bbox
     model_args['depth_range'] = dataset.depth_range
     model, states = mdl.create(model_class, model_args), None
-model.to(device.default()).train()
+model.to(device.default())

 run_dir = Path(f"_nets/{dataset.name}/{model_name}")
 run_dir.mkdir(parents=True, exist_ok=True)
......
@@ -22,5 +22,5 @@ def get_class(class_name: str) -> type:

 def get_trainer(model: BaseModel, **kwargs) -> base.Train:
-    train_class = get_class(model.trainer)
+    train_class = get_class(model.TrainerClass)
     return train_class(model, **kwargs)
@@ -42,8 +42,9 @@ class Train(object, metaclass=BaseTrainMeta):
         self.iters = 0
         self.run_dir = run_dir

+        self.model.trainer = self
         self.model.train()
-        self.optimizer = torch.optim.Adam(self.model.parameters(), lr=5e-4)
+        self.reset_optimizer()

         if states:
             if 'epoch' in states:
@@ -58,6 +59,9 @@ class Train(object, metaclass=BaseTrainMeta):
         if self.perf_mode:
             enable_perf()

+    def reset_optimizer(self):
+        self.optimizer = torch.optim.Adam(self.model.parameters(), lr=5e-4)
+
     def train(self, data_loader: DataLoader, max_epochs: int):
         self.data_loader = data_loader
         self.iters_per_epoch = self.perf_frames or len(data_loader)
......
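The new reset_optimizer() matters because split() re-creates the emb_* modules: parameters created after Adam was constructed are not tracked by the old optimizer instance, and its state would point at stale tensors anyway. A generic, self-contained illustration of the problem (plain PyTorch, not repo code):

import torch

model = torch.nn.Module()
model.emb_default = torch.nn.Embedding(10, 4)
opt = torch.optim.Adam(model.parameters(), lr=5e-4)
model.emb_default = torch.nn.Embedding(80, 4)        # re-created, as after a split
stale = opt.param_groups[0]["params"][0]
print(stale is model.emb_default.weight)             # False: Adam still holds the old weight
opt = torch.optim.Adam(model.parameters(), lr=5e-4)  # what reset_optimizer() does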
...@@ -20,18 +20,15 @@ class TrainWithSpace(Train): ...@@ -20,18 +20,15 @@ class TrainWithSpace(Train):
if self.splitting_loop == 1 or self.epoch % self.splitting_loop == 1: if self.splitting_loop == 1 or self.epoch % self.splitting_loop == 1:
try: try:
with torch.no_grad(): with torch.no_grad():
before, after = self.model.splitting() before, after = self.model.split()
print_and_log( print_and_log(f"Splitting done: {before} -> {after}")
f"Splitting done. # of voxels before: {before}, after: {after}")
except NotImplementedError: except NotImplementedError:
print_and_log( print_and_log(
"Note: The space does not support splitting operation. Just skip it.") "Note: The space does not support splitting operation. Just skip it.")
if self.pruning_loop == 1 or self.epoch % self.pruning_loop == 1: if self.pruning_loop == 1 or self.epoch % self.pruning_loop == 1:
try: try:
with torch.no_grad(): with torch.no_grad():
#before, after = self.model.pruning() # self._prune_voxels_by_densities()
# print(f"Pruning by voxel densities done. # of voxels before: {before}, after: {after}")
# self._prune_inner_voxels()
self._prune_voxels_by_weights() self._prune_voxels_by_weights()
except NotImplementedError: except NotImplementedError:
print_and_log( print_and_log(
...@@ -39,26 +36,26 @@ class TrainWithSpace(Train): ...@@ -39,26 +36,26 @@ class TrainWithSpace(Train):
super()._train_epoch() super()._train_epoch()
def _prune_inner_voxels(self): def _prune_voxels_by_densities(self):
space: Voxels = self.model.space space: Voxels = self.model.space
voxel_access_counts = torch.zeros(space.n_voxels, dtype=torch.long, threshold = .5
device=space.voxels.device) bits = 16
iters_in_epoch = 0
batch_size = self.data_loader.batch_size @torch.no_grad()
self.data_loader.batch_size = 2 ** 14 def get_scores(sampled_points: torch.Tensor, sampled_voxel_indices: torch.Tensor) -> torch.Tensor:
for _, rays_o, rays_d, _ in self.data_loader: densities = self.model.render(
self.model(rays_o, rays_d, Samples(sampled_points, None, None, None, sampled_voxel_indices),
raymarching_early_stop_tolerance=0.01, 'density')
raymarching_chunk_size_or_sections=[1], return 1 - (-densities).exp()
perturb_sample=False,
voxel_access_counts=voxel_access_counts, sampled_xyz, sampled_idx = space.sample(bits)
voxel_access_tolerance=0) chunk_size = 64
iters_in_epoch += 1 scores = torch.cat([
percent = iters_in_epoch / len(self.data_loader) * 100 for i in range(0, space.voxels.size(0), chunk_size)
sys.stdout.write(f'Pruning inner voxels...{percent:.1f}% \r') .reshape(-1, bits ** 3), -1)[0]
self.data_loader.batch_size = batch_size for i in range(0, self.voxels.size(0), chunk_size)
before, after = space.prune(voxel_access_counts > 0) ], 0) # (M[, ...])
print(f"Prune inner voxels: {before} -> {after}") return space.prune(scores > threshold)
def _prune_voxels_by_weights(self): def _prune_voxels_by_weights(self):
space: Voxels = self.model.space space: Voxels = self.model.space
......
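For reference, the new _prune_voxels_by_densities scores each voxel by the maximum opacity of sample points drawn inside it, processes the voxels in chunks to bound memory, and prunes those whose score stays below the threshold. A standalone sketch of that chunked-scoring pattern — the uniform sampling and the score function here are toy stand-ins for space.sample and the density network:

import torch

def prune_mask_by_scores(voxel_centers: torch.Tensor, density_fn, bits: int = 16,
                         chunk_size: int = 64, threshold: float = 0.5) -> torch.Tensor:
    """Return a boolean keep-mask: True for voxels whose densest sample exceeds the threshold."""
    scores = []
    for i in range(0, voxel_centers.size(0), chunk_size):
        centers = voxel_centers[i:i + chunk_size]                    # (C, 3)
        # bits**3 random sample points per voxel (toy uniform sampling)
        offsets = torch.rand(centers.size(0), bits ** 3, 3) - 0.5
        points = (centers[:, None] + offsets).reshape(-1, 3)
        opacity = 1 - torch.exp(-density_fn(points))                 # alpha = 1 - exp(-sigma)
        scores.append(opacity.reshape(-1, bits ** 3).max(-1)[0])     # per-voxel maximum
    return torch.cat(scores, 0) > threshold

# Toy usage: keep voxels whose densest sample has opacity above 0.5
keep = prune_mask_by_scores(torch.rand(100, 3), lambda p: p.norm(dim=-1), bits=4)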
...@@ -57,10 +57,11 @@ def meshgrid(*size: int, normalize: bool = False, swap_dim: bool = False) -> tor ...@@ -57,10 +57,11 @@ def meshgrid(*size: int, normalize: bool = False, swap_dim: bool = False) -> tor
""" """
if len(size) == 1: if len(size) == 1:
size = (size[0], size[0]) size = (size[0], size[0])
y, x = torch.meshgrid(torch.arange(0, size[0]), torch.arange(0, size[1])) y, x = torch.meshgrid(torch.arange(size[0]), torch.arange(size[1]), indexing='ij')
if swap_dim: if normalize:
return torch.stack([y / (size[0] - 1.), x / (size[1] - 1.)], 2) if normalize else torch.stack([y, x], 2) x = x / (size[1] - 1.)
return torch.stack([x / (size[1] - 1.), y / (size[0] - 1.)], 2) if normalize else torch.stack([x, y], 2) y = y / (size[0] - 1.)
return torch.stack([y, x], 2) if swap_dim else torch.stack([x, y], 2)
def get_angle(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor: def get_angle(x: torch.Tensor, y: torch.Tensor) -> torch.Tensor:
......
...@@ -13,6 +13,13 @@ def get_grid_steps(bbox: torch.Tensor, step_size: Union[torch.Tensor, float]) -> ...@@ -13,6 +13,13 @@ def get_grid_steps(bbox: torch.Tensor, step_size: Union[torch.Tensor, float]) ->
return ((bbox[1] - bbox[0]) / step_size).ceil().long() return ((bbox[1] - bbox[0]) / step_size).ceil().long()
def to_flat_indices(grid_coords: torch.Tensor, steps: torch.Tensor) -> torch.Tensor:
indices = grid_coords[..., 0]
for i in range(1, grid_coords.shape[-1]):
indices = indices * steps[i] + grid_coords[..., i]
return indices
def to_grid_coords(pts: torch.Tensor, bbox: torch.Tensor, *, def to_grid_coords(pts: torch.Tensor, bbox: torch.Tensor, *,
step_size: Union[torch.Tensor, float] = None, step_size: Union[torch.Tensor, float] = None,
steps: torch.Tensor = None) -> torch.Tensor: steps: torch.Tensor = None) -> torch.Tensor:
...@@ -55,20 +62,7 @@ def to_grid_indices(pts: torch.Tensor, bbox: torch.Tensor, *, ...@@ -55,20 +62,7 @@ def to_grid_indices(pts: torch.Tensor, bbox: torch.Tensor, *,
steps = get_grid_steps(bbox, step_size) # (D) steps = get_grid_steps(bbox, step_size) # (D)
grid_coords = to_grid_coords(pts, bbox, step_size=step_size, steps=steps) # (N..., D) grid_coords = to_grid_coords(pts, bbox, step_size=step_size, steps=steps) # (N..., D)
outside_mask = torch.logical_or(grid_coords < 0, grid_coords >= steps).any(-1) # (N...) outside_mask = torch.logical_or(grid_coords < 0, grid_coords >= steps).any(-1) # (N...)
if pts.size(-1) == 1: grid_indices = to_flat_indices(grid_coords, steps)
grid_indices = grid_coords[..., 0]
elif pts.size(-1) == 2:
grid_indices = grid_coords[..., 0] * steps[1] + grid_coords[..., 1]
elif pts.size(-1) == 3:
grid_indices = grid_coords[..., 0] * steps[1] * steps[2] \
+ grid_coords[..., 1] * steps[2] + grid_coords[..., 2]
elif pts.size(-1) == 4:
grid_indices = grid_coords[..., 0] * steps[1] * steps[2] * steps[3] \
+ grid_coords[..., 1] * steps[2] * steps[3] \
+ grid_coords[..., 2] * steps[3] \
+ grid_coords[..., 3]
else:
raise NotImplementedError("The function does not support D>4")
return grid_indices, outside_mask return grid_indices, outside_mask
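The per-dimension unrolled indexing removed above is replaced by the generic to_flat_indices loop; for a 3-D grid with steps (S1, S2, S3) it reproduces the old formula x*S2*S3 + y*S3 + z. A quick illustrative check using the helper defined above:

import torch

steps = torch.tensor([4, 5, 6])
coords = torch.tensor([[1, 2, 3], [0, 0, 0], [3, 4, 5]])
flat = to_flat_indices(coords, steps)
# 1*5*6 + 2*6 + 3 = 45,  0,  3*5*6 + 4*6 + 5 = 119
assert flat.tolist() == [45, 0, 119]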
...@@ -76,7 +70,7 @@ def init_voxels(bbox: torch.Tensor, steps: torch.Tensor): ...@@ -76,7 +70,7 @@ def init_voxels(bbox: torch.Tensor, steps: torch.Tensor):
""" """
Initialize voxels. Initialize voxels.
""" """
x, y, z = torch.meshgrid(*[torch.arange(steps[i]) for i in range(3)]) x, y, z = torch.meshgrid(*[torch.arange(steps[i]) for i in range(3)], indexing="ij")
return to_voxel_centers(torch.stack([x, y, z], -1).reshape(-1, 3), bbox, steps=steps) return to_voxel_centers(torch.stack([x, y, z], -1).reshape(-1, 3), bbox, steps=steps)
...@@ -96,7 +90,7 @@ def to_voxel_centers(grid_coords: torch.Tensor, bbox: torch.Tensor, *, ...@@ -96,7 +90,7 @@ def to_voxel_centers(grid_coords: torch.Tensor, bbox: torch.Tensor, *,
:param steps `Tensor(1|D)`: (optional) steps alone every dim :param steps `Tensor(1|D)`: (optional) steps alone every dim
:return `Tensor(N..., D)`: discretized grid coordinates :return `Tensor(N..., D)`: discretized grid coordinates
""" """
grid_coords = grid_coords.float() + 0.5 grid_coords = grid_coords.float() + .5
if step_size is not None: if step_size is not None:
return grid_coords * step_size + bbox[0] return grid_coords * step_size + bbox[0]
return grid_coords / steps * (bbox[1] - bbox[0]) + bbox[0] return grid_coords / steps * (bbox[1] - bbox[0]) + bbox[0]
...@@ -121,8 +115,8 @@ def split_voxels_local(voxel_size: Union[torch.Tensor, float], n: int, align_bor ...@@ -121,8 +115,8 @@ def split_voxels_local(voxel_size: Union[torch.Tensor, float], n: int, align_bor
dtype = like.dtype dtype = like.dtype
device = like.device device = like.device
c = torch.arange(1 - n, n, 2, dtype=dtype, device=device) c = torch.arange(1 - n, n, 2, dtype=dtype, device=device)
offset = torch.stack(torch.meshgrid([c] * dims), -1).flatten(0, -2) * voxel_size / 2 /\ offset = torch.stack(torch.meshgrid([c] * dims, indexing='ij'), -1).flatten(0, -2)\
(n - 1 if align_border else n) * voxel_size * .5 / (n - 1 if align_border else n)
return offset return offset
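As a concrete check of the reformatted offset expression: with n = 2 and voxel_size = 1, split_voxels_local should yield the eight sub-voxel centers at ±0.25 per axis, or the eight corners at ±0.5 when align_border is set. A standalone reproduction of just that arithmetic, with dims fixed to 3 for illustration:

import torch

n, voxel_size, align_border = 2, 1.0, False
c = torch.arange(1 - n, n, 2, dtype=torch.float)                      # [-1., 1.]
offset = torch.stack(torch.meshgrid([c] * 3, indexing='ij'), -1).flatten(0, -2) \
    * voxel_size * .5 / (n - 1 if align_border else n)
print(offset)   # 8 rows, every entry +-0.25; with align_border=True it becomes +-0.5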
...@@ -144,7 +138,7 @@ def split_voxels(voxel_centers: torch.Tensor, voxel_size: Union[torch.Tensor, fl ...@@ -144,7 +138,7 @@ def split_voxels(voxel_centers: torch.Tensor, voxel_size: Union[torch.Tensor, fl
def get_corners(voxel_centers: torch.Tensor, bbox: torch.Tensor, steps: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: def get_corners(voxel_centers: torch.Tensor, bbox: torch.Tensor, steps: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]:
half_voxel_size = (bbox[1] - bbox[0]) / steps * 0.5 half_voxel_size = (bbox[1] - bbox[0]) / steps * 0.5
expand_bbox = bbox expand_bbox = bbox.clone()
expand_bbox[0] -= 0.5 * half_voxel_size expand_bbox[0] -= 0.5 * half_voxel_size
expand_bbox[1] += 0.5 * half_voxel_size expand_bbox[1] += 0.5 * half_voxel_size
double_grid_coords = to_grid_coords(voxel_centers, expand_bbox, step_size=half_voxel_size) double_grid_coords = to_grid_coords(voxel_centers, expand_bbox, step_size=half_voxel_size)
......
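The .clone() added in get_corners matters because the in-place -=/+= that follows would otherwise mutate the caller's bbox on every call. A tiny illustration of the aliasing issue being fixed:

import torch

bbox = torch.tensor([[0., 0., 0.], [1., 1., 1.]])
alias = bbox            # previous behaviour: expand_bbox shares storage with bbox
alias[0] -= 0.1         # ...so this also shifts bbox[0]
copy = bbox.clone()     # new behaviour: an independent copy
copy[0] -= 0.1          # bbox stays untouched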