Topshiriq
Neyron tarmog‘i. Eng oddiy neyron tarmoq qurish texnologiyasi
Ishdan maqsad. Talabalar neyron tarmog‘i, ularning ishlash prinsiplari va ishlash texnologiyalarini o‘rganish hamda ular asosida sodda neyron tarmog‘ini ishlab chiqish.
Vazifalar:
Neyron tarmoq tushunchasini o‘rganish
Oddiy neyron tarmoq ishlab chiqish
Topshiriqni baholash. MAX 5 ball.
Berilgan topshiriqqa optimal yondashish – 2 ball;
Xulosa va foydalanilgan adabiyotlarning yoritilganligi – 1 ball;
Topshiriqni himoya qilish – 2 ball.
import pandas as pd

# BUG FIX: the original unconditionally executed `pd.read_csv('data.csv')`,
# which raises FileNotFoundError whenever the file is absent — yet `data_set`
# is never used anywhere below (the script defines its training data inline).
# Load the CSV only if it actually exists so the script can run standalone.
import os

if os.path.exists('data.csv'):
    data_set = pd.read_csv('data.csv')
import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
# Training data: each row is [feature1, feature2, feature3, class_id].
# There are THREE classes (0, 1, 2), so this is a multi-class problem.
data = [
    [200, 150, 350, 0],
    [205, 165, 340, 0],
    [210, 160, 330, 0],
    [110, 100, 50, 1],
    [100, 90, 40, 1],
    [90, 90, 40, 1],
    [40, 35, 10, 2],
    [35, 35, 9, 2],
    [30, 25, 7, 2],
]

torch.manual_seed(0)  # reproducible weight initialization and training

inputs = torch.tensor([row[:3] for row in data], dtype=torch.float32)
# BUG FIX: CrossEntropyLoss expects integer class indices (dtype long),
# not float targets as the original BCE setup used.
labels = torch.tensor([row[3] for row in data], dtype=torch.long)

# Standardize features: raw values are O(100), which makes plain SGD with
# lr=0.01 unstable; zero-mean/unit-variance inputs train reliably.
feat_mean = inputs.mean(dim=0)
feat_std = inputs.std(dim=0)
inputs_norm = (inputs - feat_mean) / feat_std


class SimpleNN(nn.Module):
    """Minimal linear classifier: 3 input features -> 3 class logits."""

    def __init__(self):
        super().__init__()
        # BUG FIX: the original used nn.Linear(3, 1) with BCEWithLogitsLoss,
        # a *binary* loss — with labels {0, 1, 2} the training target was
        # wrong and sigmoid+round could never predict class 2.
        # Emit one logit per class instead.
        self.fc = nn.Linear(3, 3)

    def forward(self, x):
        return self.fc(x)


model = SimpleNN()
# CrossEntropyLoss (= log-softmax + NLL) is the correct multi-class loss.
criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(model.parameters(), lr=0.01)

num_epochs = 1000
for epoch in range(num_epochs):
    # NOTE: torch.autograd.Variable is deprecated; plain tensors carry
    # autograd state directly, so the original per-epoch re-wrapping is gone.
    outputs = model(inputs_norm)
    loss = criterion(outputs, labels)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()
    # Report the loss every 100 epochs.
    if (epoch + 1) % 100 == 0:
        print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.4f}')

# Evaluate the model on one unseen sample (scaled with the training stats).
with torch.no_grad():
    test_data = torch.tensor([[150, 210, 350]], dtype=torch.float32)
    test_norm = (test_data - feat_mean) / feat_std
    # argmax over the 3 logits yields the predicted class index (0, 1 or 2).
    predicted_class = model(test_norm).argmax(dim=1).item()
    print(f'Predicted Class: {predicted_class}')