-
Notifications
You must be signed in to change notification settings - Fork 0
/
genre_classification.py
95 lines (73 loc) · 3.21 KB
/
genre_classification.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
import json
import numpy as np
from sklearn.model_selection import train_test_split
from tensorflow import keras
import matplotlib.pyplot as plt
# Path to the JSON file produced by the MFCC-extraction preprocessing step.
# Expected keys: "mfcc" (per-segment feature matrices) and "labels"
# (integer genre ids) — see load_data below.
DATASET_PATH = "data.json"

# Pipeline overview:
#   1. load data
#   2. split the data into train and test
#   3. build the network architecture
#   4. compile network
#   5. train network and plot history
def load_data(dataset_path):
    """Load MFCC features and genre labels from a JSON dataset file.

    :param dataset_path: path to a JSON file with "mfcc" and "labels" keys
    :return: tuple ``(inputs, targets)`` of numpy arrays — the feature
        matrices and their integer genre labels
    """
    with open(dataset_path, "r") as json_file:
        dataset = json.load(json_file)

    # JSON stores plain nested lists; convert to numpy arrays for Keras.
    features = np.array(dataset["mfcc"])
    labels = np.array(dataset["labels"])
    return features, labels
def plot_history(history):
    """Plot training vs. validation accuracy and loss over the epochs.

    :param history: Keras ``History`` object returned by ``model.fit``;
        its ``history`` dict must contain "accuracy", "val_accuracy",
        "loss" and "val_loss" series.
    """
    fig, (acc_ax, err_ax) = plt.subplots(2)

    # Top subplot: accuracy curves.
    acc_ax.plot(history.history["accuracy"], label="train accuracy")
    acc_ax.plot(history.history["val_accuracy"], label="test accuracy")
    acc_ax.set_ylabel("Accuracy")
    acc_ax.legend(loc="lower right")
    acc_ax.set_title("Accuracy eval")

    # Bottom subplot: loss ("error") curves.
    err_ax.plot(history.history["loss"], label="train error")
    err_ax.plot(history.history["val_loss"], label="test error")
    err_ax.set_ylabel("Error")
    err_ax.set_xlabel("Epoch")
    err_ax.legend(loc="upper right")
    err_ax.set_title("Error eval")

    plt.show()
if __name__ == "__main__":
    # Load the preprocessed MFCC dataset.
    # inputs is 3-D — presumably (segments, frames, mfcc_coefficients);
    # dimension 0 is the sample axis and is not part of the model input.
    inputs, targets = load_data(DATASET_PATH)

    # Hold out 30% of the samples for validation.
    inputs_train, inputs_test, targets_train, targets_test = train_test_split(
        inputs, targets, test_size=0.3)

    # Fully-connected (MLP) classifier. L2 regularization and dropout on
    # every hidden layer were added to combat overfitting.
    model = keras.Sequential([
        # Flatten each (frames, coeffs) matrix into one feature vector.
        keras.layers.Flatten(input_shape=(inputs.shape[1], inputs.shape[2])),
        # 1st hidden layer
        keras.layers.Dense(512, activation="relu",
                           kernel_regularizer=keras.regularizers.l2(0.001)),
        keras.layers.Dropout(0.3),
        # 2nd hidden layer
        keras.layers.Dense(256, activation="relu",
                           kernel_regularizer=keras.regularizers.l2(0.001)),
        keras.layers.Dropout(0.3),
        # 3rd hidden layer
        keras.layers.Dense(64, activation="relu",
                           kernel_regularizer=keras.regularizers.l2(0.001)),
        keras.layers.Dropout(0.3),
        # Output: one softmax unit per genre (10 classes).
        keras.layers.Dense(10, activation="softmax"),
    ])

    # Sparse categorical cross-entropy because labels are integer class
    # ids, not one-hot vectors.
    adam = keras.optimizers.Adam(learning_rate=0.0001)
    model.compile(optimizer=adam,
                  loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    model.summary()  # print layer-by-layer architecture and parameter counts

    # Train while tracking metrics on the held-out split.
    history = model.fit(inputs_train, targets_train,
                        validation_data=(inputs_test, targets_test),
                        epochs=50,
                        batch_size=32)

    # Visualize accuracy and error across the epochs.
    plot_history(history)