• Linear regression Java code
  • Group: 214-21. Completed by: Yuldoshov Dilshodbek




    TASHKENT UNIVERSITY OF INFORMATION TECHNOLOGIES
    NAMED AFTER MUHAMMAD AL-KHWARIZMI

    Assignment 1

    Group: 214-21
    Completed by: Yuldoshov Dilshodbek
    Checked by: Abdug‘aniyev Muxriddin

    Tashkent 2024

    Linear regression Java code:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Scanner;

    public class Main {

        public static void main(String[] args) {
            Scanner scanner = new Scanner(System.in);
            // Typed lists (List<Double>) are required here; the raw List type
            // would not compile with the arithmetic in linearRegression
            List<Double> xValues = new ArrayList<>();
            List<Double> yValues = new ArrayList<>();
            char continueInput = 'y';

            // Read (x, y) pairs until the user answers 'n'
            while (continueInput == 'y' || continueInput == 'Y') {
                System.out.print("Enter the X value: ");
                double xInput = scanner.nextDouble();
                System.out.print("Enter the Y value: ");
                double yInput = scanner.nextDouble();

                xValues.add(xInput);
                yValues.add(yInput);

                System.out.print("Do you want to add more data? (y/n): ");
                continueInput = scanner.next().charAt(0);
            }
            scanner.close();

            double[] regressionResult = linearRegression(xValues, yValues);
            double slope = regressionResult[0];
            double intercept = regressionResult[1];

            System.out.println("Linear regression: y = " + slope + "x + " + intercept);
        }

        // Ordinary least squares for one feature: returns {slope, intercept}
        public static double[] linearRegression(List<Double> x, List<Double> y) {
            int n = x.size();
            double sumX = 0, sumY = 0, sumXY = 0, sumXX = 0;

            for (int i = 0; i < n; ++i) {
                sumX += x.get(i);
                sumY += y.get(i);
                sumXY += x.get(i) * y.get(i);
                sumXX += x.get(i) * x.get(i);
            }

            double xMean = sumX / n;
            double yMean = sumY / n;

            double slope = (sumXY - n * xMean * yMean) / (sumXX - n * xMean * xMean);
            double intercept = yMean - slope * xMean;

            return new double[]{slope, intercept};
        }
    }
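    For reference, the slope and intercept computed in linearRegression follow the
    standard least-squares closed form (spelled out here for clarity; the derivation
    is not part of the original text):

    $$\hat{a} = \frac{\sum_i x_i y_i - n\,\bar{x}\,\bar{y}}{\sum_i x_i^2 - n\,\bar{x}^2},
      \qquad \hat{b} = \bar{y} - \hat{a}\,\bar{x}$$

    where \hat{a} is the slope, \hat{b} the intercept, and \bar{x}, \bar{y} the sample
    means (xMean and yMean in the code).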

    Linear regression Python code:

    import numpy as np
    import matplotlib.pyplot as plt

    x = np.array([11, 7, 23, 9, 2, 17, 11, 9, 4, 11, 14, 9, 6])
    y = np.array([99, 86, 89, 88, 101, 86, 103, 87, 95, 78, 77, 85, 86])

    def linear_regression(X, y):
        # Prepend a bias column of ones, then solve the normal equations
        X = np.column_stack((np.ones_like(X), X))
        w = np.linalg.inv(X.T @ X) @ X.T @ y
        return w
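    The function above solves the normal equations of ordinary least squares; written
    out (a standard identity, added here for reference):

    $$w = (X^\top X)^{-1} X^\top y$$

    where X is the design matrix whose first column is all ones (the bias term).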

    def predict(X, w):
        # Build the same design matrix and apply the learned weights
        X = np.column_stack((np.ones_like(X), X))
        y_pred = X @ w
        return y_pred

    def cost_function(X, y, w):
        # Mean squared error with the conventional 1/(2m) factor;
        # X is expected to already contain the bias column
        m = len(y)
        J = np.sum((X @ w - y) ** 2) / (2 * m)
        return J
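    In symbols, the cost implemented above is (added for reference):

    $$J(w) = \frac{1}{2m} \sum_{i=1}^{m} (x_i^\top w - y_i)^2$$

    which is minimized exactly by the normal-equation solution returned by
    linear_regression.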

    w = linear_regression(x, y)

    # Sweep w1 around its fitted value (holding w0 fixed) to visualize the cost curve
    w1_values = np.linspace(w[1] - 10, w[1] + 10, 100)
    J_values = [cost_function(np.column_stack((np.ones_like(x), x)), y, np.array([w[0], w1]))
                for w1 in w1_values]

    # plt.scatter(w1_values, J_values, color='blue')
    plt.plot(w1_values, J_values, color='red')
    plt.xlabel('w1')
    plt.ylabel('J(w)')
    plt.title('Cost Function')
    plt.grid(True)
    plt.show()

    print("Parameters of the linear regression model:")


    print("w0:", w[0])
    print("w1:", w[1])
    y_pred = predict(x, w)
    J = cost_function(np.column_stack((np.ones_like(x), x)), y, w)
    print("Cost (Mean Squared Error):", J)
    plt.scatter(x, y, color='blue', label='Data points')
    plt.plot(x, y_pred, color='red', label='Linear Regression Line')
    plt.xlabel('x')
    plt.ylabel('y')
    plt.title('Linear Regression')
    plt.legend()
    plt.grid(True)
    plt.show()
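    A note on the design choice: inverting X.T @ X works for this small example, but
    np.linalg.lstsq solves the same least-squares problem through an SVD-based LAPACK
    routine and is numerically safer when the columns of X are nearly collinear. A
    minimal sketch on the same data (not part of the original assignment):

    import numpy as np

    x = np.array([11, 7, 23, 9, 2, 17, 11, 9, 4, 11, 14, 9, 6])
    y = np.array([99, 86, 89, 88, 101, 86, 103, 87, 95, 78, 77, 85, 86])

    # Same design matrix as above: a bias column of ones next to x
    X = np.column_stack((np.ones_like(x), x))

    # lstsq minimizes ||X @ w - y||^2 without forming the explicit inverse
    w, residuals, rank, sv = np.linalg.lstsq(X, y, rcond=None)
    print("w0:", w[0], "w1:", w[1])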



