replica.py
import glob
import os
from typing import Optional

import numpy as np
import torch
from natsort import natsorted

from src.datasets.basedataset import GradSLAMDataset


class ReplicaSemanticDataset(GradSLAMDataset):
    """Replica dataset loader with per-frame semantic class maps."""

    def __init__(
        self,
        config_dict,
        basedir,
        sequence,
        stride: Optional[int] = None,
        start: Optional[int] = 0,
        end: Optional[int] = -1,
        desired_height: Optional[int] = 480,
        desired_width: Optional[int] = 640,
        load_embeddings: Optional[bool] = False,
        embedding_dir: Optional[str] = "embeddings",
        embedding_dim: Optional[int] = 512,
        **kwargs,
    ):
        print("Loading Replica dataset")
        self.input_folder = os.path.join(basedir, sequence)
        # Camera-to-world trajectory: one flattened 4x4 matrix per line.
        self.pose_path = os.path.join(self.input_folder, "traj.txt")
        super().__init__(
            config_dict,
            stride=stride,
            start=start,
            end=end,
            desired_height=desired_height,
            desired_width=desired_width,
            load_embeddings=load_embeddings,
            embedding_dir=embedding_dir,
            embedding_dim=embedding_dim,
            **kwargs,
        )

    def get_filepaths(self):
        """Collect naturally sorted RGB, depth, semantic, and (optional) embedding paths."""
        color_paths = natsorted(glob.glob(f"{self.input_folder}/results/frame*.jpg"))
        depth_paths = natsorted(glob.glob(f"{self.input_folder}/results/depth*.png"))
        object_paths = natsorted(glob.glob(f"{self.input_folder}/semantic_class/semantic_class_*.png"))
        embedding_paths = None
        if self.load_embeddings:
            embedding_paths = natsorted(glob.glob(f"{self.input_folder}/{self.embedding_dir}/*.pt"))
        return color_paths, depth_paths, object_paths, embedding_paths

    def load_poses(self):
        """Read one camera-to-world pose per frame from traj.txt."""
        poses = []
        with open(self.pose_path, "r") as f:
            lines = f.readlines()
        for i in range(self.num_imgs):
            line = lines[i]
            c2w = np.array(list(map(float, line.split()))).reshape(4, 4)
            c2w = torch.from_numpy(c2w).float()
            poses.append(c2w)
        return poses

    def read_embedding_from_file(self, embedding_file_path):
        # Stored embeddings are (1, embedding_dim, H, W); return channels-last.
        embedding = torch.load(embedding_file_path)
        return embedding.permute(0, 2, 3, 1)  # (1, H, W, embedding_dim)
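

# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal example of constructing the dataset. The directory layout
# ("data/Replica/room0"), the sequence name, and the config_dict keys below
# (camera_params with fx/fy/cx/cy, image size, and png_depth_scale) are
# assumptions based on the usual GradSLAM-style dataset configs; the exact
# schema expected by GradSLAMDataset lives in this repo's config files.
if __name__ == "__main__":
    config_dict = {
        "dataset_name": "replica",
        "camera_params": {  # hypothetical intrinsics, for illustration only
            "image_height": 680,
            "image_width": 1200,
            "fx": 600.0,
            "fy": 600.0,
            "cx": 599.5,
            "cy": 339.5,
            "png_depth_scale": 6553.5,
        },
    }
    dataset = ReplicaSemanticDataset(
        config_dict,
        basedir="data/Replica",  # assumed download location
        sequence="room0",
        desired_height=480,
        desired_width=640,
    )
    # GradSLAMDataset subclasses behave like torch datasets indexed by frame.
    print(f"Loaded {dataset.num_imgs} frames")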