utils.py
import random

import numpy as np
import torch
import torch.distributed as dist
from torch.utils.data import DistributedSampler as _DistributedSampler


def readlines(filename):
    """Read all the lines in a text file and return them as a list."""
    with open(filename, 'r') as f:
        lines = f.read().splitlines()
    return lines


def normalize_image(x):
    """Rescale image pixels to span the range [0, 1]."""
    ma = float(x.max().cpu().data)
    mi = float(x.min().cpu().data)
    # guard against division by zero for constant-valued images
    d = ma - mi if ma != mi else 1e5
    return (x - mi) / d
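

# Hypothetical usage sketch: normalize_image is typically applied before
# logging tensors whose value range is unknown, e.g. a disparity map sent
# to TensorBoard. `writer` (a SummaryWriter) and `disp` are placeholder
# names, not defined in this file:
#
#     writer.add_image("disp/0", normalize_image(disp[0]), global_step)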


def sec_to_hm(t):
    """Convert time in seconds to (hours, minutes, seconds),
    e.g. 10239 -> (2, 50, 39).
    """
    t = int(t)
    s = t % 60
    t //= 60
    m = t % 60
    t //= 60
    return t, m, s


def sec_to_hm_str(t):
    """Convert time in seconds to a nice string,
    e.g. 10239 -> '02h50m39s'.
    """
    h, m, s = sec_to_hm(t)
    return "{:02d}h{:02d}m{:02d}s".format(h, m, s)


def seed_worker(worker_id):
    """Re-seed numpy and random inside each DataLoader worker so that
    augmentations are reproducible; the seed is derived from torch's
    per-worker initial seed.
    """
    worker_seed = torch.initial_seed() % 2**32
    np.random.seed(worker_seed)
    random.seed(worker_seed)
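

# Hypothetical usage sketch: seed_worker is meant to be passed to a
# DataLoader as worker_init_fn. The dataset, batch size, and worker count
# below are placeholders, not values taken from this repository.
def _example_seeded_loader(dataset, batch_size=12):
    """Build a DataLoader whose workers are deterministically seeded."""
    g = torch.Generator()
    g.manual_seed(0)  # fixed base seed; each worker derives its own from it
    return torch.utils.data.DataLoader(
        dataset,
        batch_size=batch_size,
        shuffle=True,
        num_workers=4,
        worker_init_fn=seed_worker,  # re-seed numpy/random in every worker
        generator=g,
    )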


def get_dist_info(return_gpu_per_machine=False):
    """Return (rank, world_size), plus the local GPU count if requested,
    falling back to single-process defaults when torch.distributed is not
    initialized.
    """
    if torch.__version__ < '1.0':
        # very old torch exposed initialization state as a private flag
        initialized = dist._initialized
    else:
        initialized = dist.is_available() and dist.is_initialized()
    if initialized:
        rank = dist.get_rank()
        world_size = dist.get_world_size()
    else:
        rank = 0
        world_size = 1
    if return_gpu_per_machine:
        gpu_per_machine = torch.cuda.device_count()
        return rank, world_size, gpu_per_machine
    return rank, world_size


class DistributedSampler(_DistributedSampler):
    """Distributed sampler with deterministic, epoch-dependent shuffling.

    Call set_epoch() before each epoch so every replica draws the same
    permutation while the shuffle still changes between epochs.
    """

    def __init__(self, dataset, num_replicas=None, rank=None, shuffle=True):
        super().__init__(dataset, num_replicas=num_replicas, rank=rank)
        self.shuffle = shuffle

    def __iter__(self):
        if self.shuffle:
            # seed with the epoch so all replicas generate the same order
            g = torch.Generator()
            g.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = torch.arange(len(self.dataset)).tolist()
        # pad so the indices split evenly across replicas (mirrors the
        # built-in DistributedSampler); without this, some ranks receive
        # fewer samples when the dataset size is not divisible
        indices += indices[:(self.total_size - len(indices))]
        # each rank takes an interleaved slice of the padded index list
        indices = indices[self.rank:self.total_size:self.num_replicas]
        return iter(indices)
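

# Hypothetical usage sketch: in DDP training this sampler replaces
# DataLoader-level shuffling, and set_epoch() must be called once per
# epoch. The dataset, batch size, and epoch count are placeholders.
def _example_distributed_loop(dataset, num_epochs=20):
    rank, world_size = get_dist_info()
    sampler = DistributedSampler(dataset, num_replicas=world_size, rank=rank)
    loader = torch.utils.data.DataLoader(
        dataset, batch_size=12, sampler=sampler, num_workers=4,
        worker_init_fn=seed_worker)
    for epoch in range(num_epochs):
        sampler.set_epoch(epoch)  # reshuffle deterministically per epoch
        for _batch in loader:
            pass  # training step goes here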