forked from AtulSingh72/Perceptron-Model
mobile.py
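"""Perceptron model for the mobile-phone dataset.

Standardizes a set of handset features, binarizes the `is_liked` target
around a fixed threshold, and trains a simple perceptron with a sigmoid
activation to predict whether a phone is liked.
"""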
# Importing libraries
import numpy as np
import pandas as pd
import math
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Cut-off on the raw "is_liked" score; targets at or above it are labelled 1
threshold = 0.844
# Importing data
dataset = pd.read_csv("mobile_cleaned-1549119762886.csv")
features = ["aperture", "price", "battery_capacity", "internal_memory", "stand_by_time", "screen_size", "processor_rank"]
X = dataset[features]
Y = dataset[["is_liked"]]
class Preprocessing:
    """Standardize the features, binarize the target and split the data."""
    def __init__(self, X, Y):
        self.X = X
        self.Y = Y
        self.X_test = None
        self.X_train = None
        self.y_test = None
        self.y_train = None

    def binarize(self, X, n):
        # Map each value to 1 if it is at least n, otherwise to 0
        Y = []
        if isinstance(X, pd.DataFrame):
            X = np.asarray(X)
        for x in X:
            if x >= n:
                Y.append(1)
            else:
                Y.append(0)
        return np.asarray(Y)

    def scaling(self, t):
        # Standardize the features and the target, then binarize the target
        # around the threshold t expressed in standardized units
        self.X = StandardScaler().fit_transform(self.X)
        scaler = StandardScaler().fit(self.Y)
        self.Y = scaler.transform(self.Y)
        # StandardScaler divides by scale_ (the standard deviation), not var_
        normalized_threshold = (t - scaler.mean_) / scaler.scale_
        self.Y = self.binarize(self.Y, normalized_threshold[0])

    def TTS(self):
        # Stratified 80/20 train/test split
        self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(
            self.X, self.Y, test_size=0.2, random_state=512, stratify=self.Y)

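# Scale the features, binarize the target and split into train/test sets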
preprocess = Preprocessing(X, Y)
preprocess.scaling(threshold)
preprocess.TTS()
class Perceptron:
    """Single perceptron whose sigmoid activation is thresholded at the bias b."""
    def __init__(self):
        self.w = None
        self.b = None

    def model(self, X):
        # Sigmoid of the weighted sum, thresholded against the learned bias
        sigmoid = 1 / (1 + math.exp(-np.dot(self.w, X)))
        if sigmoid >= self.b:
            return 1
        else:
            return 0
        # Alternative hard-threshold rule kept from the original version:
        # if np.dot(self.w, X) >= self.b:
        #     return 1
        # else:
        #     return 0

    def predict(self, X):
        Y = []
        for x in X:
            Y.append(self.model(x))
        return Y

    def grad_w(self, X, Y, y_pred):
        # Update direction for one weight: prediction error times the input
        return (y_pred - Y) * X

    def grad_b(self, Y, y_pred):
        # Update direction for the bias: the prediction error
        return (y_pred - Y)

    def loss(self, y, y_pred):
        return (y - y_pred) ** 2

    def fit(self, X, Y, epochs, lr):
        self.w = np.ones(X.shape[1])
        self.b = 0
        accuracy = {}
        max_acc = 0
        wt_matrix = []
        loss_matrix = []
        for i in range(epochs):
            loss = 0
            for x, y in zip(X, Y):
                y_pred = self.model(x)
                for j in range(X.shape[1]):
                    self.w[j] = self.w[j] - lr * self.grad_w(x[j], y, y_pred)
                self.b = self.b - lr * self.grad_b(y, y_pred)
                loss = loss + self.loss(y, y_pred)
            # Copy the weights so later in-place updates do not overwrite history
            wt_matrix.append(self.w.copy())
            loss_matrix.append(loss)
            accuracy[i] = accuracy_score(self.predict(X), Y)
            # Checkpoint the best weights and bias seen so far
            if accuracy[i] > max_acc:
                max_acc = accuracy[i]
                chkptw = self.w.copy()
                chkptb = self.b
        # Restore the best checkpoint before returning the per-epoch losses
        self.w = chkptw
        self.b = chkptb
        return loss_matrix

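# Train the perceptron on the training split and report accuracy on both splits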
per = Perceptron()
loss = per.fit(preprocess.X_train, preprocess.y_train, 100, 0.24)
prediction2 = per.predict(preprocess.X_train)
print("Accuracy Score for train:"+str(accuracy_score(prediction2, preprocess.y_train))+"\n")
prediction1 = per.predict(preprocess.X_test)
print("Accuracy Score for test:"+str(accuracy_score(prediction1, preprocess.y_test))+"\n")