I have a data set of 8 rows out of which seven rows are predicted and I need to predict the 8th-row data input. I am not getting where my logic is getting wrong, please let me know where I am getting wrong.{what ever predicted label present before i am getting wrong labels for that too)
flag=0
while flag==0:
cost=0
for i in range(rows):
if lDict.get(i)!=None:
dp=dotproduct(w,x[i])
#print("dotproduct",dp)
sig=sigmoid(dp)
#print("sigmoid",sig)
deviation[i]=lDict.get(i)-sig
#print("deviation",deviation)
step1=lDict.get(i)*sig
#print("step1",step1)
step2=(1-lDict.get(i))*(1-sig)
#print("step2",step2)
cost=cost+(-step1-step2)
print("cost",cost)
for i in range(cols):
for j in range(rows):
if lDict.get(j)!=None:
delf[i]=delf[i]+(deviation[j]*x[j][i])
#w[i]=w[i]-(eta*delf[i])
#print("new_w",w)
print("delf[i]",delf[i])
#delw[i]=delw[i]+eta*delf[i]
#print("delw",delw)
for i in range(cols):
w[i]=w[i]-(eta*delf[i])
print("updated w is ",w)
newcost=0
#print("newcost",newcost)
for i in range(rows):
if lDict.get(i)!=None:
#print("w",w)
dp=dotproduct(w,x[i])
#print("newdot",dp)
sig=sigmoid(dp)
#print("newsig",sig)
step1=lDict.get(i)*sig
#print("newstep",step1)
step2=(1-lDict.get(i))*(1-sig)
#print("newstep2",step2)
cost=newcost
newcost=newcost+(-step1-step2)
print("new cost is",newcost)
print("old cost",cost)
if newcost-cost<stop:
flag=1
print("flag",flag)
for i in range(rows):
if lDict.get(i)!=None:
dp=sigmoid(dotproduct(w,x[i]))
print("dp",dp)
print("weights",w)
#sig=sigmoid(dp)
#print("sig",sig)
if sig<0.5:
print(0,"",i)
else:
print(1,"",i)`