# Perceptron and logistic-regression demo.
'''
Part 1: a hand-rolled perceptron trained on 2-D points, plotting the
learned decision boundary and a loss curve (squared count of
misclassified samples per iteration).
'''
'''
Part 2: the same kind of binary classification task solved with a
PyTorch logistic-regression model (linear layer + sigmoid, BCE loss).
'''
import matplotlib.pyplot as plt
import numpy as np
# Perceptron training data: 2-D points with labels in {-1, +1}.
# The two classes ({(1,1),(2,2),(0,0)} vs {(5,5),(4,5),(6,4)}) are
# linearly separable, so the perceptron loop below terminates.
X = [[1, 1], [2, 2], [0, 0], [5, 5], [4, 5], [6, 4]]
y = [-1, -1, -1, 1, 1, 1]
def filer(X, y, W, b):
    """Collect the samples misclassified by the hyperplane W.x + b = 0.

    Args:
        X: list of 2-D points ``[[x1, x2], ...]``.
        y: list of labels in {-1, +1}, aligned with X.
        W: weight vector ``[w1, w2]``.
        b: bias term.

    Returns:
        The misclassified samples as ``[x1, x2, label]`` triples.
    """
    # A sample is misclassified when y_i * (W.x_i + b) <= 0.  The original
    # used a strict '<', which wrongly treated points lying exactly on the
    # hyperplane as correctly classified.
    M = [[x1, x2, label]
         for (x1, x2), label in zip(X, y)
         if (x1 * W[0] + x2 * W[1] + b) * label <= 0]
    print(M)  # debug trace of the current misclassified set
    return M
def update(W, b, M, a):
    """Apply one batch of perceptron updates for the misclassified set M.

    For every misclassified sample, performs the perceptron rule
    ``W += a * label * x`` and ``b += a * label``.  W is mutated in
    place; the (immutable) bias is rebound locally and returned.

    The original version also accumulated ``W1``/``W2``/``b_update``
    totals that were never read — that dead code is removed here.

    Args:
        W: weight vector ``[w1, w2]`` (mutated in place).
        b: bias term.
        M: misclassified samples as ``[x1, x2, label]`` triples
           (as produced by ``filer``).
        a: learning rate.

    Returns:
        Tuple ``(W, b)`` after the updates (``W`` unchanged, ``b``
        unchanged when ``M`` is empty).
    """
    for x1, x2, label in M:
        W[0] += a * x1 * label
        W[1] += a * x2 * label
        b += a * label
    return W, b
W = [2, 2]   # initial weight vector
b = 1        # initial bias
loss1 = []   # squared count of misclassified samples after each update

# Train until no sample is misclassified.  The original called
# filer(X, y, W, b) three times per iteration (loop condition, update
# argument, loss bookkeeping), recomputing and re-printing the same
# misclassified set; here it is computed once per iteration.
M = filer(X, y, W, b)
while M:
    W, b = update(W, b, M, 0.1)
    print(W)
    print(b)
    M = filer(X, y, W, b)
    loss1.append(len(M) ** 2)

# Scatter the training points and draw the learned decision boundary
# W[0]*x + W[1]*y + b = 0, i.e. y = -(W[0]*x + b) / W[1].
for point in X:
    plt.scatter(point[0], point[1], color='r')
ax = np.linspace(0, 6, 100)
ay = [-(W[0] * xv + b) / W[1] for xv in ax]
plt.plot(ax, ay, color='b')
plt.show()

# Loss curve: how the (squared) misclassification count evolved.
plt.plot(loss1, label='loss1')
plt.legend(loc=0)
plt.show()
# Decision boundary: W[0] * x + W[1] * y + b = 0 separates the plane; a point
# (x, y) is classified by the sign of W[0]*x + W[1]*y + b.
# Stopping criterion: the loop runs while len(filer(X, y, W, b)) != 0,
# i.e. until the misclassified set is empty.
'''
Question: what happens when the labeling is not cleanly separable?
For reference, the separable perceptron data used above were:
X = [[1, 1], [2, 2], [0, 0], [5, 5], [4, 5], [6, 4]]
y = [-1, -1, -1, 1, 1, 1]
'''
from torch import nn
from torch import optim as optimizer
import torch
from torch.autograd import Variable
# Dataset for the PyTorch model: 2-D points with binary {0, 1} labels
# (one label per row, shaped for BCELoss).
# NOTE(review): the labels do not follow the cluster structure used in
# part 1 — (5,5) is labeled 0 while (0,0) and (0.1,0.1) are labeled 1.
# Presumably deliberate (a harder, non-separable labeling); confirm.
X = [[1, 1], [2, 2], [0.1, 0.1], [0, 0], [5, 5], [4, 5], [6, 4]]
y = [[0], [0], [1], [1], [0], [1], [1]]
class Model(nn.Module):
    """Logistic regression: one linear layer followed by a sigmoid."""

    def __init__(self):
        super(Model, self).__init__()
        # Two input features mapped to a single logit, then squashed
        # into a probability in (0, 1).
        self.lr = nn.Linear(2, 1)
        self.sm = nn.Sigmoid()

    def forward(self, x):
        """Map a (batch, 2) tensor to (batch, 1) probabilities."""
        return self.sm(self.lr(x))
# Convert the data to tensors.  The inputs need no gradients — only the
# model parameters are optimized — and torch.autograd.Variable has been
# deprecated (a no-op) since PyTorch 0.4, so plain tensors are used.
X = torch.tensor(X, dtype=torch.float32)
y = torch.tensor(y, dtype=torch.float32)

model_sgd = Model()
model_ad = Model()  # NOTE(review): constructed but never trained or used below
loss_function = torch.nn.BCELoss()
sgd = optimizer.SGD(model_sgd.parameters(), lr=0.01)

loss2 = []  # BCE loss recorded per epoch
for epoch in range(1000):
    out = model_sgd(X)            # forward pass: predicted probabilities
    sgd.zero_grad()               # clear gradients from the previous step
    loss = loss_function(out, y)
    loss.backward()               # backpropagate
    sgd.step()                    # gradient-descent parameter update
    print('epoch={}, loss={}'.format(epoch, loss.data.numpy()))
    loss2.append(loss.data.numpy())

# Training-loss curve for the SGD-trained model.
plt.plot(loss2, label='loss2')
plt.legend(loc=0)
plt.show()