Binary classification using ensemble models

InvesTime

Machine learning models:
Binary Classification Models in Machi...

Data Set:
https://www.kaggle.com/datasets/fedes...
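
The code below assumes the train/validation split (X_train, X_val, Y_train, Y_val) already exists. A minimal loading sketch, where the file name and target column are placeholders to adapt to the actual Kaggle CSV:

import pandas as pd
from sklearn.model_selection import train_test_split

# Hypothetical file and column names -- replace with the real ones from the dataset.
df = pd.read_csv('data.csv')
X = df.drop(columns=['target'])
Y = df['target']
X_train, X_val, Y_train, Y_val = train_test_split(
    X, Y, test_size=0.2, random_state=42, stratify=Y)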

Ensemble Model Code:
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.metrics import (accuracy_score, precision_score, recall_score,
                             roc_auc_score, classification_report,
                             confusion_matrix)

# Dictionaries that accumulate one metric per ensemble, keyed by model name
accuracy_ensemble, precision_ensemble, recall_ensemble = {}, {}, {}
models_ensemble = {}

def evaluate(model, X_train, X_val, Y_train, Y_val, key):

    # Fit on the training split
    model.fit(X_train, Y_train)

    # Prediction on the validation split
    predictions = model.predict(X_val)

    # Calculate Accuracy, Precision and Recall metrics (true labels first, predictions second)
    accuracy_ensemble[key] = accuracy_score(Y_val, predictions)
    precision_ensemble[key] = precision_score(Y_val, predictions)
    recall_ensemble[key] = recall_score(Y_val, predictions)

    # AUC from the predicted labels, so it also works for hard voting (no predict_proba)
    auc = roc_auc_score(Y_val, predictions)
    print(f'AUC: {auc:.4f}')
    print('Classification Report:')
    print(classification_report(Y_val, predictions, digits=4))

    # Confusion matrix heatmap
    sns.heatmap(confusion_matrix(Y_val, predictions), annot=True, fmt='d')
    plt.title('Confusion Matrix')
    plt.xlabel('Predicted Labels')
    plt.ylabel('True Labels')
    plt.show()
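
Each evaluate call stores one entry per metric dictionary under key, which feeds the summary table at the end. As an optional smoke test (not part of the original code), the helper can be tried on a plain baseline first:

from sklearn.linear_model import LogisticRegression

# Hypothetical smoke test -- note the 'Baseline' row will also appear in the
# final summary table unless the metric dictionaries are cleared afterwards.
evaluate(LogisticRegression(max_iter=1000), X_train, X_val, Y_train, Y_val, 'Baseline')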

from sklearn.ensemble import BaggingClassifier
from xgboost import XGBClassifier

# Bagging: fit 40 XGBoost copies on bootstrap samples and combine their votes.
# In scikit-learn >= 1.2 the keyword is `estimator`; older versions use `base_estimator`.
tree = XGBClassifier()
models_ensemble['Bagging'] = BaggingClassifier(estimator=tree, n_estimators=40, random_state=0)

evaluate(models_ensemble['Bagging'], X_train, X_val, Y_train, Y_val, 'Bagging')

from sklearn.ensemble import AdaBoostClassifier

# AdaBoost: sequentially reweights the samples that earlier weak learners misclassified
models_ensemble['AdaBoostClassifier'] = AdaBoostClassifier(n_estimators=10)
evaluate(models_ensemble['AdaBoostClassifier'], X_train, X_val, Y_train, Y_val, 'AdaBoostClassifier')

from sklearn.ensemble import GradientBoostingClassifier

# Gradient boosting: each of the 100 trees fits the residual errors of the previous ones
models_ensemble['Gradient Boost'] = GradientBoostingClassifier(n_estimators=100, random_state=42)
evaluate(models_ensemble['Gradient Boost'], X_train, X_val, Y_train, Y_val, 'Gradient Boost')

from sklearn.ensemble import VotingClassifier, ExtraTreesClassifier, RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier

# Soft voting: averages the four estimators' predicted class probabilities
clf1 = ExtraTreesClassifier()
clf2 = RandomForestClassifier()
clf3 = XGBClassifier()
clf4 = DecisionTreeClassifier()
models_ensemble['Soft Voting'] = VotingClassifier(
    estimators=[('ExTrees', clf1), ('Random Forest', clf2), ('XGB', clf3), ('Decision Tree', clf4)],
    voting='soft')
evaluate(models_ensemble['Soft Voting'], X_train, X_val, Y_train, Y_val, 'Soft Voting')

# Hard voting: same estimators, but the ensemble takes a majority vote over
# predicted labels instead of averaging probabilities (VotingClassifier clones
# its estimators on fit, so the clf objects can be reused)
models_ensemble['Hard Voting'] = VotingClassifier(
    estimators=[('ExTrees', clf1), ('Random Forest', clf2), ('XGB', clf3), ('Decision Tree', clf4)],
    voting='hard')
evaluate(models_ensemble['Hard Voting'], X_train, X_val, Y_train, Y_val, 'Hard Voting')

from sklearn.ensemble import StackingClassifier
from sklearn.svm import LinearSVC

# Stacking: a LinearSVC meta-learner is trained on the base models' predictions.
# `models` is presumably the dictionary of base classifiers built in the earlier
# "Binary Classification Models" video linked above.
models_ensemble['Stacked'] = StackingClassifier(
    estimators=[('m1', models['Xgboost']),
                ('m2', models['Extra Tree Classifier']),
                ('m3', models['Random Forest']),
                ('m4', models['Decision Trees'])],
    final_estimator=LinearSVC())

evaluate(models_ensemble['Stacked'], X_train, X_val, Y_train, Y_val, 'Stacked')

import pandas as pd

# Collect the per-model metrics into one summary table
# (dicts preserve insertion order, so the rows line up with the index keys)
df_model_ensemble = pd.DataFrame(index=models_ensemble.keys(), columns=['Accuracy', 'Precision', 'Recall'])
df_model_ensemble['Accuracy'] = accuracy_ensemble.values()
df_model_ensemble['Precision'] = precision_ensemble.values()
df_model_ensemble['Recall'] = recall_ensemble.values()

df_model_ensemble.round(4)
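
To rank the ensembles by a single metric, one option (not in the original code):

# Sort so the strongest ensemble by validation accuracy comes first
print(df_model_ensemble.sort_values('Accuracy', ascending=False).round(4))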

# Bar chart comparing the three metrics across ensembles
ax = df_model_ensemble.plot.bar(rot=45)
ax.legend(ncol=len(models_ensemble.keys()), bbox_to_anchor=(0, 1), loc='lower left', prop={'size': 14})
plt.tight_layout()
plt.show()