Merge pull request #1157 from PrasDev4/main
Increasing model performance using hyperparameter tuning
UTSAVS26 authored Jan 24, 2025
2 parents 60cd103 + eae95cd commit 592dbbd
Showing 1 changed file with 22 additions and 6 deletions.
@@ -2,6 +2,8 @@
import numpy as np
import pandas as pd
import pickle
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.ensemble import RandomForestClassifier

# Loading the dataset
df = pd.read_csv('kaggle_diabetes.csv')
@@ -20,17 +22,31 @@
df_copy['Insulin'] = df_copy['Insulin'].fillna(df_copy['Insulin'].median())
df_copy['BMI'] = df_copy['BMI'].fillna(df_copy['BMI'].median())

# Splitting the data into features and target variable
X = df_copy.drop(columns='Outcome')
y = df_copy['Outcome']

# Splitting into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20, random_state=0)

# Hyperparameter tuning using GridSearchCV
param_grid = {
'n_estimators': [10, 50, 100],
'max_depth': [None, 10, 20],
'min_samples_split': [2, 5, 10]
}
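# 3 x 3 x 3 = 27 parameter combinations; with cv=5, GridSearchCV fits 135
# models (plus one final refit on the full training set), so the search is cheap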

grid_search = GridSearchCV(estimator=RandomForestClassifier(), param_grid=param_grid, cv=5)
grid_search.fit(X_train, y_train)

# Best parameters found
best_params = grid_search.best_params_
print(f"Best hyperparameters: {best_params}")

# Building Random Forest Model with best hyperparameters
classifier = RandomForestClassifier(**best_params)
classifier.fit(X_train, y_train)
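
# Note: grid_search.best_estimator_ is already refit on X_train (refit=True is
# the GridSearchCV default), so retraining above is equivalent but explicit.

# A quick sanity check, not part of the original commit: score the tuned model
# on the held-out test set (accuracy is assumed adequate for this dataset)
from sklearn.metrics import accuracy_score
y_pred = classifier.predict(X_test)
print(f"Test accuracy: {accuracy_score(y_test, y_pred):.3f}")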

# Creating a pickle file for the classifier
filename = 'diabetes-prediction-rfc-model.pkl'
pickle.dump(classifier, open(filename, 'wb'))
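
For reference, a minimal sketch of how the pickled model could be loaded and queried elsewhere (not part of this commit; the sample values are hypothetical and assume the dataset's 8-feature column order):

import pickle

model = pickle.load(open('diabetes-prediction-rfc-model.pkl', 'rb'))
# Hypothetical input: Pregnancies, Glucose, BloodPressure, SkinThickness,
# Insulin, BMI, DiabetesPedigreeFunction, Age
sample = [[2, 120, 70, 20, 80, 25.0, 0.5, 33]]
print(model.predict(sample))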
