Program code
import numpy as np
import matplotlib.pyplot as plt
# Data
X = np.array([[1, 1], [1, 2], [2, 2], [2, 3]])
y = np.dot(X, np.array([1, 2])) + 3
# Linear regression
def linear_regression(X, y, epochs=20, lr=0.01):
    # Randomly initialized weights and bias
    w = np.random.randn(X.shape[1])
    b = np.random.randn()
    # Gradient descent, recording the loss at each epoch for plotting
    losses = []
    for _ in range(epochs):
        y_pred = np.dot(X, w) + b
        error = y_pred - y
        w_grad = 2 * np.dot(X.T, error)
        b_grad = 2 * np.sum(error)
        w -= lr * w_grad
        b -= lr * b_grad
        loss = np.mean(error ** 2)
        losses.append(loss)
    return w, b, losses
# Second-degree polynomial regression
def polynomial_regression(X, y, epochs=20, lr=0.01):
    # Randomly initialized weights and bias
    w1 = np.random.randn()
    w2 = np.random.randn()
    b = np.random.randn()
    # Gradient descent, recording the loss at each epoch for plotting
    losses = []
    for _ in range(epochs):
        y_pred = w1 * X[:, 0]**2 + w2 * X[:, 1] + b
        error = y_pred - y
        w1_grad = 2 * np.dot(X[:, 0]**2, error)
        w2_grad = 2 * np.dot(X[:, 1], error)
        b_grad = 2 * np.sum(error)
        w1 -= lr * w1_grad
        w2 -= lr * w2_grad
        b -= lr * b_grad
        loss = np.mean(error ** 2)
        losses.append(loss)
    return w1, w2, b, losses
# Linear regression
w, b, losses_linear = linear_regression(X, y)
print("Linear regression:")
print("w:", w, "b:", b)

# Second-degree polynomial regression
w1, w2, b, losses_poly = polynomial_regression(X, y)
print("\nSecond-degree polynomial regression:")
print("w1:", w1, "w2:", w2, "b:", b)
# Loss curves
plt.figure(figsize=(12, 6))
plt.plot(losses_linear, label='Linear regression')
plt.plot(losses_poly, label='Second-degree polynomial regression')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('Loss curves')
plt.legend()
plt.show()
Result
3. Building an algorithm and a program for solving a classification problem with Logistic regression in machine learning.
To solve a classification problem with Logistic regression in machine learning, we can build the following algorithm and program:
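Below is a minimal sketch of what such a program might look like: logistic regression trained from scratch with gradient descent on the cross-entropy loss, written in the same NumPy style as the regression examples above. The dataset, learning rate, and number of epochs here are illustrative assumptions, not values taken from the original text.

import numpy as np

# Illustrative two-class dataset (assumed for this sketch)
X = np.array([[1, 1], [1, 2], [2, 2], [2, 3], [4, 5], [5, 5], [5, 6], [6, 7]])
y = np.array([0, 0, 0, 0, 1, 1, 1, 1])

def sigmoid(z):
    return 1 / (1 + np.exp(-z))

# Logistic regression trained with gradient descent on the cross-entropy loss
def logistic_regression(X, y, epochs=1000, lr=0.1):
    w = np.zeros(X.shape[1])
    b = 0.0
    n = len(y)
    losses = []
    for _ in range(epochs):
        y_pred = sigmoid(np.dot(X, w) + b)        # predicted probabilities
        error = y_pred - y
        w -= lr * np.dot(X.T, error) / n          # gradient of cross-entropy w.r.t. w
        b -= lr * np.sum(error) / n               # gradient of cross-entropy w.r.t. b
        loss = -np.mean(y * np.log(y_pred + 1e-12) + (1 - y) * np.log(1 - y_pred + 1e-12))
        losses.append(loss)
    return w, b, losses

w, b, losses = logistic_regression(X, y)
print("Logistic regression:")
print("w:", w, "b:", b)
# Classify by thresholding the predicted probability at 0.5
print("Predictions:", (sigmoid(np.dot(X, w) + b) >= 0.5).astype(int))

As with the regression examples above, the recorded losses can be plotted with matplotlib to check convergence, and class labels are obtained by thresholding the sigmoid output at 0.5.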