-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathutils.py
More file actions
123 lines (94 loc) · 3.08 KB
/
utils.py
File metadata and controls
123 lines (94 loc) · 3.08 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import torch, pdb, math, numpy, os, atexit, logging, time
from datetime import datetime
import matplotlib.pyplot as plt
from tensorboardX import SummaryWriter
from multiprocessing import Process, Queue
class Timer:
    """Simple stopwatch: measures wall-clock seconds between calls."""

    def __init__(self):
        # Reference point for the first tic().
        self.t = time.time()

    def tic(self):
        """Return seconds elapsed since construction or the previous tic()."""
        now = time.time()
        elapsed = now - self.t
        self.t = now
        return elapsed
class Tensorboard:
    """Thin wrapper around tensorboardX ``SummaryWriter`` that auto-increments
    a per-tag step index when the caller does not supply one.

    The duplicated counter logic from log_scalar/log_histogram is factored
    into ``_next_index``; observable behavior is unchanged.
    """

    def __init__(self, experiment, log_dir="tensorboard_logs"):
        # Only the last path component of `experiment` names the run directory.
        experiment_name = experiment.split("/")[-1]
        save_dir = os.path.join(log_dir, experiment_name)
        self.writer = SummaryWriter(save_dir)
        # tag name -> last auto-assigned step index (first auto index is 1)
        self.index_dict = dict()

    def _next_index(self, name, index):
        # Sentinel -1 means "auto-increment"; an explicit index is passed
        # through untouched and does NOT advance the per-tag counter
        # (matching the original behavior).
        if index != -1:
            return index
        self.index_dict[name] = self.index_dict.get(name, 0) + 1
        return self.index_dict[name]

    def log_scalar(self, name, value, index=-1):
        """Log a scalar under `name`; index=-1 auto-increments the tag's step."""
        self.writer.add_scalar(name, value, self._next_index(name, index))

    def log_histogram(self, name, value, bins, index=-1):
        """Log a histogram under `name`; index=-1 auto-increments the tag's step."""
        self.writer.add_histogram(name, value, self._next_index(name, index), bins)
def compute_uncertainty(s_pred, method='max'):
    """Scalar disagreement measure across a set of predictions.

    s_pred: tensor of predictions stacked along dim 0 (one row per
        ensemble member / sample); the 'max' branch assumes the squeezed
        tensor is 2-D -- TODO confirm intended shape with callers.
    method: 'max' -> largest pairwise squared L2 distance between rows;
            'var' -> sum of per-dimension variances across rows.
    Returns a 0-dim tensor.
    Raises ValueError for an unknown method (the original fell through
    and hit a confusing UnboundLocalError on `u`).
    """
    if method == 'max':
        # Broadcasting computes diff[i, j] = ||s_pred[j] - s_pred[i]||^2
        # (squared distances summed over the feature dimension).
        diff = torch.sum((s_pred.squeeze().unsqueeze(0) - s_pred.unsqueeze(1))**2, 2)
        return diff.max()
    if method == 'var':
        return torch.var(s_pred, 0).sum()
    raise ValueError(f"unknown uncertainty method: {method!r}")
def one_hot(i, d, cuda=True, unsqueeze=True):
    """Return a length-`d` one-hot float vector with position `i` set to 1.

    cuda=True moves the tensor to the GPU; unsqueeze=True prepends a
    batch dimension (shape (1, d) instead of (d,)).
    """
    vec = torch.zeros(d)
    vec[i] = 1
    if cuda:
        vec = vec.cuda()
    return vec.unsqueeze(0) if unsqueeze else vec
def grad_norm(model):
    """Sum of the L2 norms of all parameter gradients.

    Parameters without a gradient (e.g. before any backward pass) are
    skipped; returns 0 when no parameter has a gradient.
    """
    return sum(
        p.grad.norm().detach().item()
        for p in model.parameters()
        if p.grad is not None
    )
def count_parameters(model):
    """Total number of scalar elements across all of the model's parameters."""
    return sum(p.numel() for p in model.parameters())
def precision_at_k(scores, labels, k=5, dim=1):
    """Fraction of rows whose true label is among the top-k scored indices.

    scores: (b, c) score matrix; labels: (b,) integer class indices.
    Returns a Python float in [0, 1].
    """
    batch = scores.size(0)
    # Indices of the k highest scores per row, cast to float so they can
    # be compared element-wise against the expanded labels.
    top_idx = torch.topk(scores, k=k, dim=dim, largest=True)[1].float()
    target = labels.view(-1, 1).expand(batch, k)
    hits = (target == top_idx).float()
    return hits.sum().item() / batch
# simple logger
class SimpleLogger():
    """Appends messages to a text file (optionally datetime-prefixed) and
    echoes each message to stdout."""

    def __init__(self, fname):
        self.fname = fname
        # Create the parent directory portably. The original shelled out to
        # `mkdir -p {dir}`, which breaks on paths containing spaces or shell
        # metacharacters and is injection-prone.
        parent = os.path.dirname(fname)
        if parent:
            os.makedirs(parent, exist_ok=True)

    def log(self, s, date=True):
        """Append `s` to the log file and print it; date=True prefixes
        the current local datetime."""
        if date:
            s = f'{str(datetime.now())}: {s}'
        # `with` guarantees the handle is closed even if write() raises
        # (the original left the file open on an exception).
        with open(self.fname, 'a') as f:
            f.write(s + '\n')
        print(s)
def logtxt(fname, s, date=True):
    """Append `s` to `fname` (optionally prefixed with the current local
    datetime) and print it. Creates the parent directory if needed.

    Standalone counterpart of SimpleLogger.log for one-off writes.
    """
    # Portable replacement for the original `os.system(f'mkdir -p ...')`,
    # which breaks on paths with spaces and is shell-injection-prone.
    parent = os.path.dirname(fname)
    if parent:
        os.makedirs(parent, exist_ok=True)
    if date:
        s = f'{str(datetime.now())}: {s}'
    # `with` closes the handle even if write() raises.
    with open(fname, 'a') as f:
        f.write(s + '\n')
    print(s)