import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures

def scatter_plot():
    data = datasets.load_diabetes()
    X, y = data.data[:, np.newaxis, 2], data.target  # Using one feature for visualization
    plt.scatter(X, y, color='blue')
    plt.title("Scatter Plot of Diabetes Dataset")
    plt.xlabel("Feature")
    plt.ylabel("Target")
    plt.show()
def simple_linear_regression():
    data = datasets.load_diabetes()
    X, y = data.data[:, np.newaxis, 2], data.target
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    model = LinearRegression()
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)

    # Sort test points by feature value so the regression line is drawn left to right
    order = np.argsort(X_test[:, 0])
    plt.scatter(X_test, y_test, color='blue', label='Actual')
    plt.plot(X_test[order], y_pred[order], color='red', linewidth=2, label='Predicted')
    plt.title("Simple Linear Regression")
    plt.legend()
    plt.show()
def polynomial_regression(degree=2):
    data = datasets.load_diabetes()
    X, y = data.data[:, np.newaxis, 2], data.target
    poly = PolynomialFeatures(degree=degree)
    X_poly = poly.fit_transform(X)
    X_train, X_test, y_train, y_test = train_test_split(X_poly, y, test_size=0.2, random_state=42)
    model = LinearRegression()
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)

    # Column 0 of X_poly is the bias term, so column 1 holds the original feature values
    plt.scatter(X, y, color='blue', label='Actual')
    plt.scatter(X_test[:, 1], y_pred, color='red', label='Predicted')
    plt.title(f"Polynomial Regression (Degree {degree})")
    plt.legend()
    plt.show()
def multiple_linear_regression():
    data = datasets.load_diabetes()
    X = data.data[:, :3]  # Select the first 3 features for simplicity
    y = data.target
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    model = LinearRegression()
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)

    print("Coefficients:", model.coef_)
    print("Intercept:", model.intercept_)
    print("Mean Squared Error:", np.mean((y_pred - y_test) ** 2))
    print("R^2 Score:", model.score(X_test, y_test))

    # Perfect predictions would fall on the red diagonal y = x
    plt.scatter(y_test, y_pred, color='blue')
    plt.plot([y_test.min(), y_test.max()], [y_test.min(), y_test.max()], color='red', linewidth=2)
    plt.xlabel("Actual Values")
    plt.ylabel("Predicted Values")
    plt.title("Multiple Linear Regression: Actual vs Predicted")
    plt.show()
def line_chart():
    data = datasets.load_diabetes()
    X, y = data.data[:, np.newaxis, 2], data.target
    sorted_indices = np.argsort(X[:, 0])
    X_sorted, y_sorted = X[sorted_indices], y[sorted_indices]

    plt.plot(X_sorted, y_sorted, color='green', marker='o', linestyle='dashed')
    plt.title("Line Chart of Diabetes Dataset")
    plt.xlabel("Feature")
    plt.ylabel("Target")
    plt.show()
# Uncomment the functions you want to run:
# scatter_plot()
# simple_linear_regression()
# polynomial_regression(degree=3)
# multiple_linear_regression()
# line_chart()
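
# A minimal sketch of an entry point: when the script is executed directly,
# run each demo defined above in sequence. The order and the degree argument
# are illustrative choices; adjust the calls to taste.
if __name__ == "__main__":
    scatter_plot()
    simple_linear_regression()
    polynomial_regression(degree=2)
    polynomial_regression(degree=3)
    multiple_linear_regression()
    line_chart()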