Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
- import numpy as np
- import matplotlib.pyplot as plt
- import pandas as pd
- from sklearn import datasets
- from sklearn.model_selection import train_test_split
- from sklearn.linear_model import LinearRegression
- from sklearn.preprocessing import PolynomialFeatures
def scatter_plot():
    """Display a scatter plot of a single diabetes feature against the target.

    Loads the scikit-learn diabetes dataset, keeps the feature at column
    index 2 as a 2-D column vector, and shows feature vs. target points.
    """
    diabetes = datasets.load_diabetes()
    # np.newaxis keeps the slice 2-D (n_samples, 1), matching sklearn's layout.
    feature = diabetes.data[:, np.newaxis, 2]
    target = diabetes.target
    plt.scatter(feature, target, color='blue')
    plt.title("Scatter Plot of Diabetes Dataset")
    plt.xlabel("Feature")
    plt.ylabel("Target")
    plt.show()
def simple_linear_regression():
    """Fit a one-feature linear regression on the diabetes data and plot it.

    Uses an 80/20 train/test split (fixed random_state=42), fits
    LinearRegression on the training portion, then overlays the predicted
    line on the actual test points.
    """
    diabetes = datasets.load_diabetes()
    feature = diabetes.data[:, np.newaxis, 2]
    target = diabetes.target
    X_tr, X_te, y_tr, y_te = train_test_split(
        feature, target, test_size=0.2, random_state=42
    )
    regressor = LinearRegression()
    regressor.fit(X_tr, y_tr)
    predictions = regressor.predict(X_te)
    plt.scatter(X_te, y_te, color='blue', label='Actual')
    # All predicted points lie on one straight line, so plot order is irrelevant.
    plt.plot(X_te, predictions, color='red', linewidth=2, label='Predicted')
    plt.title("Simple Linear Regression")
    plt.legend()
    plt.show()
def polynomial_regression(degree=2):
    """Fit a polynomial regression of the given degree on one diabetes feature.

    Expands the single feature (column index 2) with PolynomialFeatures,
    trains LinearRegression on an 80/20 split (random_state=42), and
    scatters predicted test values over the full actual data.

    Parameters
    ----------
    degree : int, default 2
        Degree of the polynomial feature expansion.
    """
    diabetes = datasets.load_diabetes()
    feature = diabetes.data[:, np.newaxis, 2]
    target = diabetes.target
    # PolynomialFeatures is stateless, so expanding before the split leaks nothing.
    expanded = PolynomialFeatures(degree=degree).fit_transform(feature)
    X_tr, X_te, y_tr, y_te = train_test_split(
        expanded, target, test_size=0.2, random_state=42
    )
    regressor = LinearRegression()
    regressor.fit(X_tr, y_tr)
    predictions = regressor.predict(X_te)
    plt.scatter(feature, target, color='blue', label='Actual')
    # Column 1 of the expanded matrix is the original degree-1 feature;
    # column 0 is the constant bias term.
    plt.scatter(X_te[:, 1], predictions, color='red', label='Predicted')
    plt.title(f"Polynomial Regression (Degree {degree})")
    plt.legend()
    plt.show()
def multiple_linear_regression():
    """Fit a linear model on three diabetes features and report its quality.

    Trains LinearRegression on the first three feature columns with an
    80/20 split (random_state=42), prints coefficients, intercept, MSE,
    and R^2, then plots predicted vs. actual test targets with an
    identity reference line.
    """
    diabetes = datasets.load_diabetes()
    features = diabetes.data[:, :3]  # first three columns, kept simple on purpose
    target = diabetes.target
    X_tr, X_te, y_tr, y_te = train_test_split(
        features, target, test_size=0.2, random_state=42
    )
    regressor = LinearRegression()
    regressor.fit(X_tr, y_tr)
    predictions = regressor.predict(X_te)
    print("Coefficients:", regressor.coef_)
    print("Intercept:", regressor.intercept_)
    print("Mean Squared Error:", np.mean((predictions - y_te) ** 2))
    print("R^2 Score:", regressor.score(X_te, y_te))
    plt.scatter(y_te, y_pred := predictions, color='blue')
    # Identity line: a perfect model would place every point on it.
    lo, hi = y_te.min(), y_te.max()
    plt.plot([lo, hi], [lo, hi], color='red', linewidth=2)
    plt.xlabel("Actual Values")
    plt.ylabel("Predicted Values")
    plt.title("Multiple Linear Regression: Actual vs Predicted")
    plt.show()
def line_chart():
    """Draw a dashed, marker-annotated line chart of one diabetes feature vs. target.

    Sorts the samples by the feature at column index 2 so the connecting
    line sweeps left to right instead of zig-zagging in dataset order.
    """
    diabetes = datasets.load_diabetes()
    feature = diabetes.data[:, np.newaxis, 2]
    target = diabetes.target
    order = np.argsort(feature[:, 0])
    plt.plot(
        feature[order], target[order],
        color='green', marker='o', linestyle='dashed',
    )
    plt.title("Line Chart of Diabetes Dataset")
    plt.xlabel("Feature")
    plt.ylabel("Target")
    plt.show()
- # Uncomment the functions you want to run:
- # scatter_plot()
- # simple_linear_regression()
- # polynomial_regression(degree=3)
- # multiple_linear_regression()
- # line_chart()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement