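"""Training experiments on the house-price dataset using the alias_utils
builder APIs: a single small model, a small group of models, and a large
sweep over network depths and widths."""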
import os
import time

import torch.nn as nn
import torch.optim as optim
from matplotlib import pyplot as plt

from alias_utils.data import house
from alias_utils.model import RunnerBuilder as Builder
from alias_utils.group import GroupBuilder
from alias_utils.loss import MSELoss
from alias_utils.metrics import (
    Loss as LossMetric,
    RSquared as RSquaredMetric,
    DesignMatNorm,
    TestLoss,
)


def basic_model():
    # Read in the data
    data = house.data()

    # Construct a basic three-layer model
    wrapper = (
        Builder()
        .name("Basic 3-Layer")
        .loss(MSELoss())
        .optimizer(optim.Adam)
        .steps(
            nn.Linear(13, 8),
            nn.Linear(8, 8),
            nn.Linear(8, 1),
        )
        .with_metric(LossMetric())
        .with_metric(DesignMatNorm())
        .build()
    )

    # Train the model and report wall-clock time
    start = time.perf_counter()
    wrapper.train(data, n_epochs=1000)
    print(f"Training took {time.perf_counter() - start:.2f} seconds.")

    # Plot the recorded metrics; alternative two-metric plots are kept
    # below for reference.
    wrapper.plot(log=False, normalize=True)
    # wrapper.plot_two(LossMetric, RSquaredMetric)
    # wrapper.plot_two(LossMetric, RSquaredMetric, log=True)
    # wrapper.plot_two(LossMetric, DesignMatNorm, log=True)
    # wrapper.plot_two(0, 1)
    plt.tight_layout()
    plt.show()
    print("Success")


def group_model():
    # Read in the data
    data = house.data()

    # Set up several models of the same depth but increasing width
    specs = [(13, 8, 8, 1), (13, 16, 16, 1), (13, 32, 32, 1)]
    group = (
        GroupBuilder()
        .name("Basic 3-Layer Group")
        .loss(MSELoss())
        .optimizer(optim.Adam)
        .specs(*specs)
        .with_metric(LossMetric)
        .with_metric(DesignMatNorm)
        .copies(3)
        .load()
    )

    # Train the group
    group.train(data, n_epochs=500)


def giant_model_house(
    min_depth=1,
    max_depth=10,
    min_width_pow=3,
    max_width_pow=5,
    optimizers=(optim.Adam,),
    batch_size=(32,),
):
    # Build a spec tuple for a model of the given depth and width:
    # 13 input features, `depth` hidden layers of `width` units, 1 output.
    def model(depth: int, width: int):
        return (13, *([width] * depth), 1)

    # Widths are powers of four: 2 ** (2 * w) for w in the given range.
    specs = [
        model(d, 2 ** (2 * w))
        for d in range(min_depth, max_depth + 1)
        for w in range(min_width_pow, max_width_pow + 1)
    ]
    # Train the smallest models first
    specs = sorted(specs, key=sum)

    for bs in batch_size:
        # A falsy batch size means full-batch training
        if bs:
            train_data, test_data = house.data(train_percent=0.8, batch_size=bs)
        else:
            train_data, test_data = house.data(train_percent=0.8)
        for o in optimizers:
            # if o == optim.Adam and bs == 0:
            #     continue
            print(f"Running on {len(specs)} models with {o.__name__} optimizer and batch size {bs}.")
            n = f"Giant House Group {o.__name__} {bs}"

            # Skip any spec for which a checkpoint already exists for one of
            # its copies; the for-else appends only when no checkpoint is found.
            filtered_specs = []
            for steps in specs:
                for i in range(8):
                    s_name = n + " " + "-".join(str(s) for s in steps + (i,))
                    if os.path.exists(f"checkpoints/{s_name}"):
                        break
                else:
                    filtered_specs.append(steps)
            print(f"Found {len(filtered_specs)} models to train.")

            giant = (
                GroupBuilder()
                .name(n)
                .loss(MSELoss())
                .optimizer(o)
                .specs(*filtered_specs)
                .with_metric(LossMetric)
                .with_metric(DesignMatNorm)
                .with_metric(RSquaredMetric)
                .with_metric(TestLoss, data=test_data, loss=MSELoss())
                .copies(8)
                .build()
            )
            giant.train(train_data, n_epochs=1000)


if __name__ == "__main__":
    giant_model_house()
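    # basic_model() and group_model() run the smaller experiments and can be
    # swapped in here.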