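# Polynomial regression on noisy sin(x) samples: a hand-rolled
# normal-equations solver (Gaussian elimination) compared against
# scikit-learn's PolynomialFeatures + LinearRegression pipeline.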
import random

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import PolynomialFeatures

def function(x, coefficients):
    # Evaluate the polynomial c0 + c1*x + ... + cn*x^n pointwise.
    y = coefficients[0]
    for i in range(1, len(coefficients)):
        y += coefficients[i] * x ** i
    return y

def f(x, coefficients):
    # Same polynomial, vectorised: the columns of x are [x^1, x^2, ..., x^n].
    return x.dot(coefficients[1:]) + coefficients[0]

def fun_name(coefficients):
    # Build a printable formula string from the coefficient vector.
    name = f'f(x)={np.round(coefficients[0], 2)}'
    for i in range(1, len(coefficients)):
        if coefficients[i] >= 0:
            name += '+'  # explicit sign for non-negative terms
        name += f'{np.round(coefficients[i], 2)}*x^{i}'
    return name
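# For example, fun_name([1.0, -2.0]) returns 'f(x)=1.0-2.0*x^1'.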

def mean_squared_error(actual, predicted):
    # MSE between two sequences; ravel() guards against (n, 1) vs (n,) shapes.
    actual = np.ravel(actual)
    predicted = np.ravel(predicted)
    return float(np.mean((actual - predicted) ** 2))
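# Note: sklearn.metrics provides a mean_squared_error as well; the local
# definition above is used for both the manual and the sklearn fits below,
# so the two pipelines are scored by the exact same function.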

def gauss(A, b):
    # Solve A x = b by Gaussian elimination without pivoting, followed by
    # back substitution. Note that A and b are modified in place.
    n = A.shape[0]
    x = np.zeros((n, 1))
    for i in range(n):
        # Eliminate column i below the diagonal.
        for j in range(i + 1, n):
            factor = A[j, i] / A[i, i]
            b[j] -= factor * b[i]
            for k in range(n):
                A[j, k] -= factor * A[i, k]
    for i in range(n - 1, -1, -1):
        # Back substitution, last row upwards.
        x[i] = b[i]
        for j in range(i + 1, n):
            x[i] -= A[i, j] * x[j]
        x[i] /= A[i, i]
    return x.ravel().tolist()
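# An equivalent (and, for ill-conditioned systems, numerically safer) call,
# assuming b has shape (n, 1):
#   x = np.linalg.solve(A, b).ravel().tolist()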

def least_squares_polynomial_regression(X, y):
    # Prepend a bias column, then solve the normal equations for the weights.
    X = np.hstack((np.ones((X.shape[0], 1)), X))
    A = np.dot(X.T, X)
    b = np.dot(X.T, y)
    coefficients = gauss(A, b)
    return coefficients
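# Least squares minimises ||Xw - y||^2; setting the gradient 2*X^T(Xw - y)
# to zero yields the normal equations (X^T X) w = X^T y solved above.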

# Keep a single feature column of iris.csv and drop the rest.
iris = pd.read_csv('iris.csv')
data = iris.drop(labels=[iris.columns[-1], iris.columns[1], iris.columns[0],
                         iris.columns[3], iris.columns[4]], axis=1)
data = np.copy(data)

# Synthetic target: sin(x) plus uniform noise in [-0.3, 0.3].
target = np.sin(data)
for i in range(len(target)):
    target[i] += random.uniform(-0.3, 0.3)
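# The task below: recover f(x) = sin(x) from these noisy samples with
# polynomials of increasing degree p, first via the manual solver.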

# Start with a linear fit (degree p = 1). The manual design matrix holds the
# columns [x^1, ..., x^p]; the bias column is added inside the solver.
degree = 1
data_poly_manual = np.hstack([data ** i for i in range(1, degree + 1)])
X_train, X_test, y_train, y_test = train_test_split(data_poly_manual, target, test_size=0.3, random_state=42)
coefficients = least_squares_polynomial_regression(X_train, y_train)
y_pred = f(X_test, coefficients)

plt.figure(figsize=(10, 7))
X = np.linspace(np.min(data), np.max(data), 100)
plt.scatter(X_train[:, 0], y_train[:, 0], c='blue', label='Training set')
plt.scatter(X_test[:, 0], y_test[:, 0], c='red', label='Test set')
plt.plot(X, np.sin(X), c='black', label='f(x)=sin(x)', lw=2)
plt.legend()
plt.xlabel('x')
plt.ylabel('y')
plt.title('Training and test set points')
plt.show()

mse = mean_squared_error(y_test, y_pred)
print(f"Mean Squared Error: {mse}\n")

# Side-by-side table of test inputs, true targets and predictions.
match_df = pd.DataFrame({'x_test': X_test[:, 0],
                         'y_test': y_test[:, 0],
                         'y_test_pred': y_pred})
print(match_df)
print(f'\n{fun_name(coefficients)} ; p = {degree}')

plt.figure(figsize=(10, 7))
plt.scatter(X_train[:, 0], y_train[:, 0], c='blue', label='Training set')
plt.scatter(X_test[:, 0], y_test[:, 0], c='red', label='Test set')
plt.plot(X, np.sin(X), c='black', label='f(x)=sin(x)', lw=2)
plt.plot(X, function(X, coefficients), label=f'p={degree}')
plt.legend()
plt.xlabel('x')
plt.ylabel('y')
plt.show()

# Sweep polynomial degrees p = 1..20 and record train/test MSE for each fit.
plt.figure(figsize=(15, 10))
plt.scatter(X_train[:, 0], y_train[:, 0], c='blue', label='Training set')
plt.scatter(X_test[:, 0], y_test[:, 0], c='red', label='Test set')
plt.plot(X, np.sin(X), c='black', label='f(x)=sin(x)', lw=2)
mse_df = pd.DataFrame({'MSE_Train': [], 'MSE_Test': []})
for p in range(1, 21):
    degree = p
    data_poly_manual = np.hstack([data ** i for i in range(1, degree + 1)])
    X_train, X_test, y_train, y_test = train_test_split(data_poly_manual, target, test_size=0.3, random_state=42)
    coefficients = least_squares_polynomial_regression(X_train, y_train)
    y_pred = f(X_test, coefficients)
    mse_test = mean_squared_error(y_test, y_pred)
    mse_train = mean_squared_error(y_train, f(X_train, coefficients))
    mse_df.loc[p] = [mse_train, mse_test]
    if p < 6:
        # Only draw the first few fits to keep the plot readable.
        plt.plot(X, function(X, coefficients), label=f'p={degree}')
    print(f'{fun_name(coefficients)} ; p = {degree}')
plt.legend()
plt.xlabel('x')
plt.ylabel('y')
plt.show()

mse_df.index.names = ['p']
print(mse_df)

plt.figure(figsize=(10, 7))
plt.plot(mse_df.index, mse_df['MSE_Train'], label='MSE_Train')
plt.plot(mse_df.index, mse_df['MSE_Test'], label='MSE_Test')
plt.xlabel('p')
plt.ylabel('MSE')
plt.legend()
plt.show()
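# Training MSE keeps shrinking as p grows, while test MSE typically bottoms
# out and then climbs again: the usual overfitting signature. The working
# degree is therefore chosen as the p that minimises the test MSE.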

# Refit the manual model at the degree with the lowest test MSE.
mse_df_min = mse_df.idxmin()
print(mse_df_min)
degree = int(mse_df_min.loc['MSE_Test'])
data_poly_manual = np.hstack([data ** i for i in range(1, degree + 1)])
X_train, X_test, y_train, y_test = train_test_split(data_poly_manual, target, test_size=0.3, random_state=42)
coefficients = least_squares_polynomial_regression(X_train, y_train)
my_coefficients = coefficients.copy()
y_pred = f(X_test, coefficients)
mse = mean_squared_error(y_test, y_pred)
print(f'\n{fun_name(coefficients)} ; p = {degree}')

plt.figure(figsize=(10, 7))
plt.scatter(X_train[:, 0], y_train[:, 0], c='blue', label='Training set')
plt.scatter(X_test[:, 0], y_test[:, 0], c='red', label='Test set')
plt.plot(X, np.sin(X), c='black', label='f(x)=sin(x)', lw=2)
plt.plot(X, function(X, coefficients), label=f'p={degree}')
plt.legend()
plt.xlabel('x')
plt.ylabel('y')
plt.show()

# Repeat the degree sweep with scikit-learn for comparison.
mse_sk_df = pd.DataFrame({'MSE_Train_sk': [], 'MSE_Test_sk': []})
for p in range(1, 21):
    x = data
    y = target
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=42)
    poly_features = PolynomialFeatures(degree=p)
    x_train_poly = poly_features.fit_transform(x_train)
    x_test_poly = poly_features.transform(x_test)
    model = LinearRegression()
    model.fit(x_train_poly, y_train)
    y_pred = model.predict(x_test_poly)
    test_error1 = mean_squared_error(y_test, y_pred)
    y_train_pred = model.predict(x_train_poly)
    train_error1 = mean_squared_error(y_train, y_train_pred)
    mse_sk_df.loc[p] = [train_error1, test_error1]
mse_sk_df.index.names = ['p']

mse_concat = pd.concat([mse_df, mse_sk_df], axis=1)
print(mse_concat)
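# The manual fit and sklearn should agree closely at small p. At larger p the
# power-basis features make X^T X ill-conditioned, so the unpivoted Gaussian
# elimination above may drift from sklearn's lstsq-based solution.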

mse_df_min_match = mse_concat.idxmin()
print(mse_df_min_match)
degree_sk = int(mse_df_min_match.loc['MSE_Test_sk'])

# Refit sklearn at its best degree and predict along a dense grid for plotting.
x = data
y = target
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=42)
poly_features = PolynomialFeatures(degree=degree_sk)
x_train_poly = poly_features.fit_transform(x_train)
x_test_poly = poly_features.transform(x_test)
model = LinearRegression()
model.fit(x_train_poly, y_train)
y_pred = model.predict(x_test_poly)
x_plot = X
x_plot_poly = poly_features.transform(x_plot.reshape(-1, 1))
y_plot = model.predict(x_plot_poly)

# PolynomialFeatures emits a bias column whose weight LinearRegression leaves
# at zero (the constant lives in intercept_ instead), so copy the intercept
# into slot 0 before pretty-printing the formula.
coefficients = model.coef_
intercept = model.intercept_
coefficients[0, 0] = intercept[0]
coefficients = coefficients[0]
print(f'\n{fun_name(coefficients)} ; p = {degree_sk}')
print(f'{fun_name(my_coefficients)} ; p = {degree}')

plt.figure(figsize=(10, 7))
plt.title('Comparison with sklearn')
plt.scatter(X_train[:, 0], y_train[:, 0], c='blue', label='Training set')
plt.scatter(X_test[:, 0], y_test[:, 0], c='red', label='Test set')
plt.plot(X, np.sin(X), c='black', label='f(x)=sin(x)', lw=2)
plt.plot(x_plot, y_plot, label=f'p_sk={degree_sk}')
plt.plot(X, function(X, my_coefficients), label=f'p={degree}')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()

plt.figure(figsize=(10, 7))
plt.title('Comparison with sklearn')
plt.plot(mse_concat.index, mse_concat['MSE_Train'], label='MSE_Train')
plt.plot(mse_concat.index, mse_concat['MSE_Test'], label='MSE_Test')
plt.plot(mse_concat.index, mse_concat['MSE_Train_sk'], label='MSE_Train_sk')
plt.plot(mse_concat.index, mse_concat['MSE_Test_sk'], label='MSE_Test_sk')
plt.xlabel('p')
plt.ylabel('MSE')
plt.legend()
plt.show()