pythonƒƒjƒ…[

‹@ŠBŠwK?

# Train a logistic-regression classifier on a synthetic two-feature
# dataset, print the learned parameters and test predictions, then plot
# the data points together with the linear decision boundary.
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import make_classification
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split

import japanize_matplotlib  # enables Japanese glyphs in matplotlib text

# 100 samples, 2 features, none redundant; random_state fixes the dataset.
X, y = make_classification(
    n_samples=100, n_features=2, n_redundant=0, random_state=0
)

# Default split ratio (75% train / 25% test), reproducible via random_state.
train_x, test_x, train_y, test_y = train_test_split(X, y, random_state=42)

model = LogisticRegression()
model.fit(train_x, train_y)
print("coefficient(係数や傾き): ", model.coef_)
print("intercept(切片): ", model.intercept_)
pred_y = model.predict(test_x)
print("predict（予測値）: ", pred_y)

# Scatter the raw data, colored by class label.
# NOTE: matplotlib.cm.get_cmap() was deprecated in matplotlib 3.7 and
# removed in 3.9 — passing the colormap name directly is the supported form.
plt.scatter(X[:, 0], X[:, 1], c=y, cmap="bwr", alpha=0.7)

# Decision boundary of a 2-feature logistic regression:
#   w0*x + w1*y + b = 0  =>  y = -(w0*x + b) / w1
Xi = np.linspace(-10, 10)
w0, w1 = model.coef_[0]
b = model.intercept_
Y = -(w0 * Xi + b) / w1
plt.plot(Xi, Y)

# Clamp the axes to the data range so the long boundary line
# does not stretch the view.
plt.xlim(X[:, 0].min() - 1, X[:, 0].max() + 1)
plt.ylim(X[:, 1].min() - 1, X[:, 1].max() + 1)

plt.title("ロジスティック回帰")  # "Logistic regression"
plt.xlabel("x-軸")  # "x-axis"
plt.ylabel("y-軸")  # "y-axis"
plt.show()
coefficient(ŒW”‚βŒX‚«):  [[2.25567275 0.35071038]]
intercept(Ψ•Π):  [0.52004691]
predicti—\‘ͺ’lj:  [0 0 0 1 0 1 1 1 0 0 0 1 0 0 0 1 1 1 0 0 0 1 1 0 0]