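"""Train and evaluate a time-series classifier selected on the command line.

Usage:
    python train.py <model-name>

Supported model names (see the __main__ block at the bottom of this file):
lstm, hivecotev, knn-euclidean, knn-dtw, knn-dtw-sakoe, knn-dtw-soft,
knn-dtw-itakura.
"""
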
import json
import sys
import numpy as np
import joblib
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sktime.classification.deep_learning import LSTMFCNClassifier
from sktime.classification.distance_based import (
    KNeighborsTimeSeriesClassifier,
    KNeighborsTimeSeriesClassifierTslearn,
)
from sktime.classification.hybrid import HIVECOTEV2
from sktime.dists_kernels import FlatDist, ScipyDist
from utils import (
    HEICLOUD_DATA,
    TIME_INTERVAL_CONFIG,
    TS_TYPE,
    fdr,
    fpr,
    fttar,
    load_dataset,
)


def train(name, clf):
    """Fit ``clf`` for every TS_TYPE / TIME_INTERVAL_CONFIG combination and
    record test and production metrics in ``result_<interval>.json``."""
    for ts in TS_TYPE:
        for ti in TIME_INTERVAL_CONFIG:
            result = dict()
            result["time_interval"] = ti["time_interval_name"]
            result["ts_type"] = ts
            result["model"] = name

            print(f"Run analysis on data: {ti['time_interval_name']} for {ts}")

            X, y, _, _ = load_dataset(
                ti["time_interval_name"], ts_type=ts, max_size=300
            )
            # for i in range(len(HEICLOUD_DATA)):
            #     X_new, y_new, _, _ = load_dataset(
            #         ti["time_interval_name"], ts_type=ts, data=[HEICLOUD_DATA[i]], max_size=10000
            #     )
            #     X = np.append(X, X_new, axis=0)
            #     y = np.append(y, y_new, axis=0)
            #     break
            print(f"Data has shape: {X.shape}")

            # Hold out 20% of the data, stratified by class label.
            X_train, X_test, y_train, y_test = train_test_split(
                X, y, test_size=0.2, random_state=42, stratify=y
            )

            clf.fit(X_train, y_train)

            # Persist the fitted model.
            joblib.dump(
                clf,
                f"/mnt/data/models/model_{name}_{ti['time_interval_name']}_{ts}.pickle",
            )

            # Evaluate on the held-out test split.
            print("Predicting test set...")
            result["test"] = {}

            y_pred = clf.predict(X_test)

            report = classification_report(y_test, y_pred, output_dict=True)
            fttar_test = fttar(y_test, y_pred)
            fpr_test = fpr(y_test, y_pred)
            fdr_test = fdr(y_test, y_pred)

            result["test"]["report"] = report
            result["test"]["fttar"] = fttar_test
            result["test"]["fpr"] = fpr_test
            result["test"]["fdr"] = fdr_test

            print(report)
            print(f"FTTAR: {fttar_test}")
            print(f"False Positive Rate: {fpr_test}")
            print(f"False Discovery Rate: {fdr_test}")

            # Evaluate on the HEICLOUD_DATA production set.
            print("Predicting production set...")
            result["prod"] = {}

            X_new, y_new, _, _ = load_dataset(
                ti["time_interval_name"], ts_type=ts, data=HEICLOUD_DATA
            )
            y_pred = clf.predict(X_new)

            report = classification_report(y_new, y_pred, output_dict=True)
            fttar_prod = fttar(y_new, y_pred)
            fpr_prod = fpr(y_new, y_pred)
            fdr_prod = fdr(y_new, y_pred)

            result["prod"]["report"] = report
            result["prod"]["fttar"] = fttar_prod
            result["prod"]["fpr"] = fpr_prod
            result["prod"]["fdr"] = fdr_prod

            print(report)
            print(f"FTTAR: {fttar_prod}")
            print(f"False Positive Rate: {fpr_prod}")
            print(f"False Discovery Rate: {fdr_prod}")

            # Append this run's metrics as one JSON line per model and ts_type.
            with open(f"result_{ti['time_interval_name']}.json", "a+") as f:
                f.write(json.dumps(result) + "\n")


if __name__ == "__main__":
    name = sys.argv[1]

    # Select the classifier from the command-line argument.
    match name:
        case "lstm":
            clf = LSTMFCNClassifier(verbose=1, n_epochs=100)
        case "hivecotev":
            clf = HIVECOTEV2(n_jobs=-1, verbose=1)
        case "knn-euclidean":
            # k-NN over flattened series with a SciPy (Euclidean) distance.
            eucl_dist = FlatDist(ScipyDist())
            clf = KNeighborsTimeSeriesClassifier(
                n_neighbors=2, n_jobs=-1, distance=eucl_dist
            )
        case "knn-dtw":
            clf = KNeighborsTimeSeriesClassifier(
                n_neighbors=2, n_jobs=-1, distance="dtw"
            )
        case "knn-dtw-sakoe":
            # DTW restricted to a Sakoe-Chiba band of radius 3.
            clf = KNeighborsTimeSeriesClassifierTslearn(
                n_neighbors=2,
                verbose=1,
                n_jobs=-1,
                metric="dtw",
                metric_params={
                    "global_constraint": "sakoe_chiba",
                    "sakoe_chiba_radius": 3,
                },
            )
        case "knn-dtw-soft":
            clf = KNeighborsTimeSeriesClassifierTslearn(
                n_neighbors=2,
                verbose=1,
                n_jobs=-1,
                metric="softdtw",
            )
        case "knn-dtw-itakura":
            # DTW restricted to an Itakura parallelogram with max slope 2.0.
            clf = KNeighborsTimeSeriesClassifierTslearn(
                n_neighbors=2,
                n_jobs=-1,
                verbose=1,
                metric="dtw",
                metric_params={
                    "global_constraint": "itakura",
                    "itakura_max_slope": 2.0,
                },
            )
        case _:
            raise NotImplementedError(f"{name} not found")

    train(name, clf)