utils.py (forked from LynnHo/DCGAN-LSGAN-WGAN-GP-DRAGAN-Pytorch)
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import shutil
import torch


def mkdir(paths):
    """Create every directory in `paths` (a single path or a list/tuple of paths) if it does not already exist."""
    if not isinstance(paths, (list, tuple)):
        paths = [paths]
    for path in paths:
        if not os.path.isdir(path):
            os.makedirs(path)


def cuda_devices(gpu_ids):
    """Make only the given GPU ids visible by setting CUDA_VISIBLE_DEVICES."""
    gpu_ids = [str(i) for i in gpu_ids]
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(gpu_ids)


def cuda(xs):
    """Move a tensor/module (or a list/tuple of them) to the GPU when CUDA is
    available; otherwise return the input unchanged."""
    if torch.cuda.is_available():
        if not isinstance(xs, (list, tuple)):
            return xs.cuda()
        else:
            return [x.cuda() for x in xs]
    return xs


def save_checkpoint(state, save_path, is_best=False, max_keep=None):
    """Save `state` to `save_path`, record it in a 'latest_checkpoint' list file,
    keep at most `max_keep` recent checkpoints, and optionally copy the best model."""
    # save checkpoint
    torch.save(state, save_path)

    # update the list of recent checkpoints (newest first)
    save_dir = os.path.dirname(save_path)
    list_path = os.path.join(save_dir, 'latest_checkpoint')
    save_name = os.path.basename(save_path)
    if os.path.exists(list_path):
        with open(list_path) as f:
            ckpt_list = f.readlines()
        ckpt_list = [save_name + '\n'] + ckpt_list
    else:
        ckpt_list = [save_name + '\n']

    # remove checkpoints beyond max_keep
    if max_keep is not None:
        for ckpt in ckpt_list[max_keep:]:
            ckpt = os.path.join(save_dir, ckpt[:-1])
            if os.path.exists(ckpt):
                os.remove(ckpt)
        ckpt_list[max_keep:] = []

    with open(list_path, 'w') as f:
        f.writelines(ckpt_list)

    # copy best (use the full checkpoint path, not just the file name)
    if is_best:
        shutil.copyfile(save_path, os.path.join(save_dir, 'best_model.ckpt'))


def load_checkpoint(ckpt_dir_or_file, map_location=None, load_best=False):
    """Load a checkpoint from a file path, or from a checkpoint directory using
    either 'best_model.ckpt' or the newest entry in its 'latest_checkpoint' file."""
    if os.path.isdir(ckpt_dir_or_file):
        if load_best:
            ckpt_path = os.path.join(ckpt_dir_or_file, 'best_model.ckpt')
        else:
            with open(os.path.join(ckpt_dir_or_file, 'latest_checkpoint')) as f:
                ckpt_path = os.path.join(ckpt_dir_or_file, f.readline()[:-1])
    else:
        ckpt_path = ckpt_dir_or_file
    ckpt = torch.load(ckpt_path, map_location=map_location)
    print(' [*] Loading checkpoint from %s succeeded!' % ckpt_path)
    return ckpt
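

# Usage sketch (illustrative only, not part of the original module): the
# 'checkpoints' directory, the state-dict keys, and the `netG` object below
# are hypothetical placeholders.
#
#   mkdir('checkpoints')
#   save_checkpoint({'epoch': epoch, 'G': netG.state_dict()},
#                   'checkpoints/Epoch_(%d).ckpt' % epoch,
#                   max_keep=2)
#   ckpt = load_checkpoint('checkpoints')
#   netG.load_state_dict(ckpt['G'])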