Chiziqli regressiya tushunchasi. y=wx funksiyadagi Gradient (og'irlik) qiymatini topish. Gradient pastlash grafigi va Loss grafigini hosil qilish.
import random
def get_loss(x, y, massa):
    """Mean squared error of the linear model y = massa * x.

    x, y  -- equal-length sequences of sample inputs and targets
    massa -- the weight (slope) being evaluated
    Returns the average of the squared residuals over all samples.
    """
    squared_errors = [(yi - xi * massa) ** 2 for xi, yi in zip(x, y)]
    return sum(squared_errors) / len(x)
# --- Read the experiment settings from the user ---
qiymat = int(input("Funksiya qiymatini kiriting: "))   # number of training samples
a = float(input("a= "))                                # learning rate
xatolik = float(input("Xatolikni kiriting: "))         # loss tolerance (stop threshold)

# Start from a random weight in [0, 2).
massa = random.uniform(0, 2)
print("w1=", massa)

# Collect the training pairs (x_i, y_i) interactively.
x = []
y = []
for i in range(qiymat):
    x.append(float(input(f"Please, create {i+1}-x: ")))
    y.append(float(input(f"Please, create {i+1}-y: ")))

# Gradient descent on w for the model y = w*x, until the MSE drops
# below the requested tolerance.
while get_loss(x, y, massa) > xatolik:
    print(f"Loss: {get_loss(x, y, massa)} massa: {massa}")
    # Sum of residual * input over all samples.
    residual_sum = sum((yi - massa * xi) * xi for xi, yi in zip(x, y))
    # (2/n) * residual_sum is the NEGATIVE gradient of the MSE w.r.t. w,
    # so adding a * step moves the weight downhill on the loss surface.
    step = (2 * residual_sum) / qiymat
    massa = massa + a * step

print("Finished => ", massa)
Natija:
import numpy as np
import matplotlib.pyplot as plt
# Sample points the quadratic model is evaluated against.
x = np.array([1, 2, 3, 4, 5])
y = np.array([1.4, 2.5, 7.2, 12.8, 21.3])


def quadratic_function(x, w1, w2):
    """Model prediction w1*x + w2*x**2 (quadratic through the origin)."""
    linear_part = w1 * x
    curved_part = w2 * x**2
    return linear_part + curved_part


def loss_function(w1, w2):
    """Sum of squared errors of the quadratic model on the global x, y."""
    residuals = quadratic_function(x, w1, w2) - y
    return np.sum(residuals**2)
# Evaluate the loss on a 10x10 grid of (w1, w2) candidate weights.
w1_vals = np.linspace(-5, 5, 10)
w2_vals = np.linspace(-5, 5, 10)
W1, W2 = np.meshgrid(w1_vals, w2_vals)
loss_vals = np.zeros_like(W1)
for j, w2 in enumerate(w2_vals):
    for i, w1 in enumerate(w1_vals):
        # meshgrid's default 'xy' indexing puts w2 along rows, w1 along columns.
        loss_vals[j, i] = loss_function(w1, w2)

# Plot the loss surface
fig = plt.figure(figsize=(6, 4))
ax = fig.add_subplot(projection='3d')
ax.plot_surface(W1, W2, loss_vals, cmap='cool')
ax.set_xlabel('w1')
ax.set_ylabel('w2')
ax.set_zlabel('loss')
plt.show()

print(f"w1: {w1_vals}")
print(f"w2: {w2_vals}")
print(f"loss: {loss_vals}")
Natija:
Tayyorladi: Abdumajidov Abdumo’min
Tekshirdi: Qo'chqarov Muslimjon
Dostları ilə paylaş: |