Logistic_Regressor.py · 69 lines (45 loc) · 1.19 KB
import numpy as np


class LogisticClassifier:
    def __init__(self, rate=0.1, tol=1e-4, max_iter=1000):
        self.rate = rate        # gradient-descent learning rate
        self.tol = tol          # convergence tolerance on the change in loss
        self.iter = max_iter    # iteration budget; fit() overwrites this with the count used
        self.intercept = True   # prepend a column of ones to the design matrix
        self.center = True      # subtract the training-set column means
        self.scale = True       # divide by the training-set column standard deviations
        self.hist = []          # loss recorded at each iteration of fit()

    def matrix_design(self, X):
        # Standardize the features and add the intercept column.
        if self.center:
            X = X - self.means
        if self.scale:
            X = X / self.standard_error
        if self.intercept:
            X = np.hstack([np.ones((X.shape[0], 1)), X])
        return X

    def fit_center_scale(self, X):
        # Store the training-set statistics used to standardize new data.
        self.means = X.mean(axis=0)
        self.standard_error = np.std(X, axis=0)

    @staticmethod
    def sigmoid(z):
        return 1 / (1 + np.exp(-z))

    def fit(self, X, y):
        self.fit_center_scale(X)
        n, k = X.shape
        X = self.matrix_design(X)
        prev_loss = float('inf')
        self.conv = False
        self.beta = np.zeros(k + (1 if self.intercept else 0))
        for i in range(self.iter):
            y_hat = self.sigmoid(X @ self.beta)
            # Mean negative log-likelihood (binary cross-entropy).
            self.loss = np.mean(-y * np.log(y_hat) - (1 - y) * np.log(1 - y_hat))
            self.hist.append(self.loss)
            if abs(prev_loss - self.loss) < self.tol:
                self.conv = True
                break
            prev_loss = self.loss
            # Gradient of the mean cross-entropy with respect to beta.
            rem = (y_hat - y).reshape((n, 1))
            grad = (X * rem).mean(axis=0)
            self.beta -= self.rate * grad
        self.iter = i + 1

    def predict_prob(self, X):
        X = self.matrix_design(X)
        return self.sigmoid(X @ self.beta)

    def predict(self, X):
        return (self.predict_prob(X) > 0.5).astype(int)
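

# A minimal usage sketch, not part of the original file: the synthetic dataset
# and hyperparameter choices below are assumptions for illustration only.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.normal(size=(200, 2))
    # Labels determined by a linear rule, so logistic regression should fit well.
    y = (X[:, 0] + X[:, 1] > 0).astype(int)

    clf = LogisticClassifier(rate=0.5, max_iter=5000)
    clf.fit(X, y)
    print("converged:", clf.conv, "after", clf.iter, "iterations")
    print("training accuracy:", (clf.predict(X) == y).mean())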