import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
# --- Build the toy dataset --------------------------------------------------
# Three numeric predictors plus one target column; replace these lists with a
# real dataset (e.g. pd.read_csv) to reuse the rest of the script unchanged.
raw_columns = {
    'Feature1': [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
    'Feature2': [1, 2, 1, 3, 5, 6, 4, 8, 9, 10],
    'Feature3': [5, 7, 6, 8, 6, 7, 8, 9, 10, 11],
    'Target': [5, 7, 6, 9, 11, 13, 14, 17, 19, 21],
}
df = pd.DataFrame(raw_columns)

# Split the frame into the design matrix X and the target vector y.
feature_names = ['Feature1', 'Feature2', 'Feature3']
X = df[feature_names]
y = df['Target']
# --- Fit and evaluate an ordinary least-squares model -----------------------
# Hold out 20% of the rows (2 of 10) for testing; random_state pins the split
# so every run sees the same train/test partition.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42
)

# fit() returns the estimator itself, so construction and training chain.
model = LinearRegression().fit(X_train, y_train)

# Score the fit on the held-out rows.
y_pred = model.predict(X_test)
mse = mean_squared_error(y_test, y_pred)
r2 = r2_score(y_test, y_pred)
print(f"Mean Squared Error: {mse}")
print(f"R-squared: {r2}")

# Learned parameters: one coefficient per feature, plus the intercept.
print(f"Coefficients: {model.coef_}")
print(f"Intercept: {model.intercept_}")
# --- Optional 3D visualization of actual vs. predicted targets --------------
# Importing Axes3D registers the '3d' projection with matplotlib; required on
# older matplotlib versions, harmless (automatic) on newer ones.
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
# Only Feature1/Feature2 fit on the x/y axes of a 3D plot; Feature3 is
# necessarily omitted from this view.
ax.scatter(X_test['Feature1'], X_test['Feature2'], y_test, color='blue', label='Actual data')
ax.scatter(X_test['Feature1'], X_test['Feature2'], y_pred, color='red', label='Predicted data')
ax.set_xlabel('Feature1')
ax.set_ylabel('Feature2')
ax.set_zlabel('Target')
plt.legend()
plt.show()