Python Implementation of Logistic Regression: A Hands-On Case Study (full code and sample data are available via the Baidu Netdisk link at the bottom of the article)





# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
from sklearn.model_selection import KFold
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression
# Read the sample data from the 'logist_model' sheet of the Excel workbook
# (a raw string avoids backslash-escape problems in the Windows path)
data_lr = pd.read_excel(r'D:\python原始数据\logist_model.xlsx', 'logist_model')
print(data_lr.shape)
print(data_lr.head(10))
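If you do not have the Excel file from the Baidu Netdisk link, a small synthetic DataFrame with the same shape lets the rest of the code run end to end. This is only a stand-in sketch: the column names (id, group, x1, x2, x3, y) and the way the label is generated are assumptions inferred from the slicing used below, not the real sample data.

# Hypothetical stand-in for logist_model.xlsx: two leading identifier columns,
# three numeric features, and a 0/1 label in the last column (assumed layout).
rng = np.random.default_rng(0)
n = 291
data_lr = pd.DataFrame({
    'id': np.arange(n),                 # hypothetical identifier column
    'group': rng.integers(0, 3, n),     # hypothetical second column
    'x1': rng.normal(size=n),
    'x2': rng.normal(size=n),
    'x3': rng.normal(size=n),
})
# Hypothetical label generated from the features so the model has signal to fit
data_lr['y'] = (data_lr['x1'] + 0.5 * data_lr['x2'] - 0.3 * data_lr['x3']
                + rng.normal(scale=0.5, size=n) > 0).astype(int)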
# Convert to a NumPy array and split by row position: the first 200 rows form
# the training set, the remaining 91 rows the test set; columns 2-4 are the
# features and column 5 is the label
array = data_lr.values
X_train = array[0:200, 2:5]
Y_train = array[0:200, 5]
X_test = array[200:291, 2:5]
Y_test = array[200:291, 5]
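The split above simply takes the first 200 rows for training and the last 91 for testing. As an alternative sketch, scikit-learn's train_test_split can draw a shuffled, stratified split of roughly the same proportion; the test_size and random_state values here are arbitrary choices, not from the original article.

from sklearn.model_selection import train_test_split

# Shuffled, stratified split of about 31% (91 of 291 rows) for testing,
# instead of taking the last rows in file order
X = array[:, 2:5]
Y = array[:, 5]
X_train, X_test, Y_train, Y_test = train_test_split(
    X, Y, test_size=0.31, stratify=Y, random_state=42)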
# Fit the logistic regression model on the training set
model = LogisticRegression()
model.fit(X_train, Y_train)
print("Intercept", model.intercept_)
print("Coefficients", model.coef_)
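To see what the printed intercept and coefficients mean, the fitted probability can be reconstructed by hand: for binary labels, scikit-learn's predicted probability of class 1 is the sigmoid of intercept_ + coef_ · x. The check below assumes the labels are 0/1 and that the feature columns are numeric.

# Rebuild p(y=1 | x) = 1 / (1 + exp(-(intercept + coef . x))) from the fitted parameters
X_num = np.asarray(X_test, dtype=float)
z = X_num @ model.coef_.ravel() + model.intercept_[0]
p_manual = 1.0 / (1.0 + np.exp(-z))

# Should agree (up to floating point) with scikit-learn's own probabilities
p_sklearn = model.predict_proba(X_test)[:, 1]
print(np.allclose(p_manual, p_sklearn))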
# Accuracy via 10-fold cross-validation on the training set
scores = cross_val_score(model, X_train, Y_train, cv=10)
print("Accuracy", np.mean(scores), scores)

# Confusion matrix on the test set
from sklearn.metrics import confusion_matrix
predicted = model.predict(X_test)
matrix = confusion_matrix(Y_test, predicted)
classes = ['0', '1']
dataframe = pd.DataFrame(data=matrix, index=classes, columns=classes)
print(dataframe)

# ROC curve and AUC on the test set
from sklearn.metrics import roc_curve, auc
predictions = model.predict_proba(X_test)
fpr, tpr, thresholds = roc_curve(Y_test, predictions[:, 1])
roc_auc = auc(fpr, tpr)
import matplotlib.pyplot as plt
plt.plot(fpr, tpr, 'b', label='auc=%0.2f' % roc_auc)
plt.legend(loc='lower right')
plt.plot([0, 1], [0, 1], 'r--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.0])
plt.xlabel("fpr")
plt.ylabel("tpr")
plt.show()
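Beyond accuracy, the confusion matrix, and AUC, a couple of extra checks are easy to add from the same objects. The sketch below is optional: classification_report gives per-class precision, recall, and F1, and the KFold class imported at the top (but not otherwise used) can make the cross-validation splits explicit; the n_splits and random_state values here are arbitrary.

from sklearn.metrics import classification_report

# Per-class precision, recall, and F1 on the test set
print(classification_report(Y_test, predicted, digits=3))

# Use the KFold import to shuffle and fix the cross-validation splits
kfold = KFold(n_splits=10, shuffle=True, random_state=7)
scores_kf = cross_val_score(model, X_train, Y_train, cv=kfold)
print("10-fold accuracy (shuffled splits)", np.mean(scores_kf))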




