
In [1]:
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
In [2]:
# Goal: classify whether a customer will purchase, based on age and estimated salary.
In [3]:
# Load the Social Network Ads dataset
# (columns: User ID, Gender, Age, EstimatedSalary, Purchased — see df.head() output below)
df = pd.read_csv("../data/Social_Network_Ads.csv")
In [4]:
# Peek at the first five rows to confirm the columns loaded as expected
df.head()
Out[4]:
User ID | Gender | Age | EstimatedSalary | Purchased | |
---|---|---|---|---|---|
0 | 15624510 | Male | 19 | 19000 | 0 |
1 | 15810944 | Male | 35 | 20000 | 0 |
2 | 15668575 | Female | 26 | 43000 | 0 |
3 | 15603246 | Female | 27 | 57000 | 0 |
4 | 15804002 | Male | 19 | 76000 | 0 |
In [5]:
df["Purchased"].unique()
Out[5]:
array([0, 1], dtype=int64)
In [6]:
# Feature matrix: the Age and EstimatedSalary columns (.loc label slice is inclusive)
X = df.loc[:, "Age":"EstimatedSalary"]
In [7]:
# Display the feature frame (400 rows x 2 columns)
X
Out[7]:
Age | EstimatedSalary | |
---|---|---|
0 | 19 | 19000 |
1 | 35 | 20000 |
2 | 26 | 43000 |
3 | 27 | 57000 |
4 | 19 | 76000 |
... | ... | ... |
395 | 46 | 41000 |
396 | 51 | 23000 |
397 | 50 | 20000 |
398 | 36 | 33000 |
399 | 49 | 36000 |
400 rows × 2 columns
In [8]:
y = df["Purchased"]
In [9]:
# Display the target series (400 values)
y
Out[9]:
0 0 1 0 2 0 3 0 4 0 .. 395 1 396 1 397 1 398 0 399 1 Name: Purchased, Length: 400, dtype: int64
In [10]:
# Logistic regression benefits from feature scaling, so scale the features first.
In [11]:
from sklearn.preprocessing import MinMaxScaler
In [12]:
# Min-max scaler: maps each feature into the [0, 1] range
scaler_X = MinMaxScaler()
In [13]:
# Preview the scaled feature values.
# NOTE(review): this fit_transform result is discarded; the scaler is fit again
# in a later cell, so this cell is display-only and could be removed.
scaler_X.fit_transform(X)
Out[13]:
array([[0.02380952, 0.02962963], [0.4047619 , 0.03703704], [0.19047619, 0.20740741], [0.21428571, 0.31111111], [0.02380952, 0.45185185], [0.21428571, 0.31851852], [0.21428571, 0.51111111], [0.33333333, 1. ], [0.16666667, 0.13333333], [0.4047619 , 0.37037037], [0.19047619, 0.48148148], [0.19047619, 0.27407407], [0.04761905, 0.52592593], [0.33333333, 0.02222222], [0. , 0.4962963 ], [0.26190476, 0.48148148], [0.69047619, 0.07407407], [0.64285714, 0.08148148], [0.66666667, 0.0962963 ], [0.71428571, 0.1037037 ], [0.64285714, 0.05185185], [0.69047619, 0.25185185], [0.71428571, 0.19259259], [0.64285714, 0.05185185], [0.66666667, 0.05925926], [0.69047619, 0.03703704], [0.73809524, 0.0962963 ], [0.69047619, 0.11111111], [0.26190476, 0.20740741], [0.30952381, 0.02222222], [0.30952381, 0.43703704], [0.21428571, 0.9037037 ], [0.07142857, 0.00740741], [0.23809524, 0.21481481], [0.21428571, 0.55555556], [0.4047619 , 0.08888889], [0.35714286, 0.0962963 ], [0.28571429, 0.25185185], [0.19047619, 0.42222222], [0.21428571, 0.11851852], [0.21428571, 0.01481481], [0.35714286, 0.26666667], [0.4047619 , 0.68888889], [0.28571429, 0. ], [0.23809524, 0.51111111], [0.11904762, 0.03703704], [0.16666667, 0.47407407], [0.21428571, 0.28888889], [0.28571429, 0.88888889], [0.30952381, 0.54814815], [0.14285714, 0.12592593], [0. , 0.21481481], [0.26190476, 0.5037037 ], [0.4047619 , 0.05925926], [0.21428571, 0.31851852], [0.14285714, 0.2962963 ], [0.11904762, 0.24444444], [0.23809524, 0.47407407], [0.0952381 , 0.02222222], [0.33333333, 0.75555556], [0.21428571, 0.03703704], [0.16666667, 0.53333333], [0.11904762, 0.37777778], [0.33333333, 0.77777778], [0.97619048, 0.5037037 ], [0.14285714, 0.31851852], [0.14285714, 0.02962963], [0.11904762, 0.4962963 ], [0.0952381 , 0.35555556], [0.30952381, 0.39259259], [0.16666667, 0.48148148], [0.14285714, 0.08888889], [0.04761905, 0.05925926], [0.35714286, 0.72592593], [0.33333333, 0.02222222], [0.38095238, 0.71851852], [0. 
, 0.27407407], [0.0952381 , 0.08888889], [0.23809524, 0.53333333], [0.19047619, 0.01481481], [0.28571429, 0.48148148], [0.5 , 0.2 ], [0.04761905, 0.25185185], [0.4047619 , 0.54074074], [0.28571429, 0.34814815], [0.30952381, 0.76296296], [0.14285714, 0.2962963 ], [0.23809524, 0.51851852], [0.19047619, 0.48888889], [0.4047619 , 0.25925926], [0.0952381 , 0.48888889], [0.28571429, 0.74814815], [0.19047619, 0. ], [0.26190476, 0.0962963 ], [0.26190476, 0.5037037 ], [0.4047619 , 0.21481481], [0.4047619 , 0.07407407], [0.23809524, 0.8 ], [0.4047619 , 0.42962963], [0.23809524, 0.16296296], [0.21428571, 0.54074074], [0.23809524, 0.32592593], [0.33333333, 0.52592593], [0.35714286, 0.99259259], [0.02380952, 0.04444444], [0.07142857, 0.42222222], [0.19047619, 0.14814815], [0.21428571, 0.54814815], [0.19047619, 0.52592593], [0.47619048, 0.48148148], [0.5 , 0.41481481], [0.45238095, 0.41481481], [0.47619048, 0.34074074], [0.45238095, 0.2962963 ], [0.57142857, 0.48148148], [0.52380952, 0.31111111], [0.4047619 , 0.44444444], [0.42857143, 0.27407407], [0.52380952, 0.32592593], [0.54761905, 0.32592593], [0.42857143, 0.44444444], [0.45238095, 0.42222222], [0.52380952, 0.44444444], [0.4047619 , 0.28148148], [0.54761905, 0.26666667], [0.5 , 0.34074074], [0.57142857, 0.37037037], [0.19047619, 0.12592593], [0.28571429, 0.01481481], [0.19047619, 0.51111111], [0.30952381, 0.31851852], [0.35714286, 0.11851852], [0.28571429, 0.53333333], [0.07142857, 0.39259259], [0.23809524, 0.2962963 ], [0.11904762, 0.35555556], [0.04761905, 0.4962963 ], [0.28571429, 0.68148148], [0.23809524, 0.32592593], [0.02380952, 0.07407407], [0.02380952, 0.51851852], [0. , 0.39259259], [0.4047619 , 0.32592593], [0.28571429, 0.54814815], [0.38095238, 0.07407407], [0.14285714, 0.54814815], [0.21428571, 0.6 ], [0.54761905, 0.11111111], [0.26190476, 0.34074074], [0.04761905, 0.43703704], [0.19047619, 0. 
], [0.54761905, 0.22222222], [0.30952381, 0.45185185], [0.42857143, 0.25925926], [0.52380952, 0.23703704], [0.30952381, 0. ], [0.66666667, 0.32592593], [0.26190476, 0.44444444], [0.19047619, 0.11111111], [0.33333333, 0.88888889], [0.33333333, 0.62962963], [0.16666667, 0.55555556], [0.45238095, 0.13333333], [0.4047619 , 0.17037037], [0.35714286, 0.4 ], [0. , 0.52592593], [0.0952381 , 0.2962963 ], [0.4047619 , 0.41481481], [0.26190476, 0.98518519], [0.26190476, 0.23703704], [0.07142857, 0.54074074], [0.38095238, 0.74074074], [0.19047619, 0.76296296], [0.38095238, 0.20740741], [0.38095238, 0.42222222], [0.11904762, 0.0962963 ], [0.4047619 , 0.23703704], [0.16666667, 0.05185185], [0.14285714, 0.05925926], [0.30952381, 0.14074074], [0.19047619, 0.00740741], [0.30952381, 0.41481481], [0.33333333, 0.75555556], [0.35714286, 0.20740741], [0.35714286, 0.33333333], [0.30952381, 0.37777778], [0.04761905, 0.4962963 ], [0.35714286, 0.19259259], [0.4047619 , 0.42222222], [0.23809524, 0.12592593], [0.14285714, 0.51111111], [0.02380952, 0.08148148], [0.26190476, 0.20740741], [0.02380952, 0.40740741], [0.23809524, 0.54814815], [0.38095238, 0.20740741], [0.28571429, 0.47407407], [0.04761905, 0.15555556], [0.19047619, 0.48148148], [0.4047619 , 0.05185185], [0.4047619 , 0.17777778], [0.73809524, 0.43703704], [0.5 , 0.88148148], [0.54761905, 0.41481481], [0.95238095, 0.63703704], [0.69047619, 0.23703704], [0.88095238, 0.85185185], [0.80952381, 0.73333333], [0.52380952, 0.94074074], [0.66666667, 0.05185185], [0.71428571, 0.6 ], [0.80952381, 1. ], [0.97619048, 0.2 ], [0.4047619 , 0.31851852], [0.69047619, 0.20740741], [1. , 0.68888889], [0.73809524, 0.37037037], [0.52380952, 0.46666667], [0.66666667, 0.6 ], [0.97619048, 0.94814815], [0.54761905, 0.48148148], [0.4047619 , 0.56296296], [0.45238095, 0.95555556], [1. 
, 0.64444444], [0.4047619 , 0.33333333], [0.45238095, 0.28148148], [0.42857143, 0.82222222], [0.9047619 , 0.87407407], [0.52380952, 0.42222222], [0.57142857, 0.48148148], [0.4047619 , 0.97777778], [0.5 , 0.2 ], [0.52380952, 0.68148148], [0.73809524, 0.52592593], [0.47619048, 0.71851852], [0.66666667, 0.47407407], [0.52380952, 0.31111111], [0.45238095, 0.48148148], [0.66666667, 0.4962963 ], [0.83333333, 0.94814815], [0.57142857, 0.99259259], [0.47619048, 0.32592593], [0.76190476, 0.54074074], [0.9047619 , 0.65925926], [0.54761905, 0.42222222], [0.78571429, 0.97037037], [0.4047619 , 0.25925926], [0.92857143, 0.79259259], [0.54761905, 0.27407407], [0.4047619 , 0.60740741], [0.61904762, 0.17777778], [0.45238095, 0.27407407], [0.71428571, 0.88148148], [0.45238095, 0.97037037], [0.76190476, 0.21481481], [0.80952381, 0.55555556], [0.54761905, 0.42222222], [0.52380952, 0.31111111], [0.95238095, 0.59259259], [0.64285714, 0.85925926], [0.4047619 , 0.45925926], [0.42857143, 0.95555556], [0.88095238, 0.81481481], [0.4047619 , 0.42222222], [0.71428571, 0.55555556], [0.57142857, 0.68888889], [0.52380952, 0.44444444], [0.45238095, 0.43703704], [0.69047619, 0.95555556], [0.52380952, 0.34074074], [0.5952381 , 0.87407407], [0.97619048, 0.45185185], [1. 
, 0.2 ], [0.5 , 0.67407407], [0.92857143, 0.08148148], [0.92857143, 0.43703704], [0.47619048, 0.41481481], [0.73809524, 0.54074074], [0.80952381, 0.17037037], [0.76190476, 0.15555556], [0.97619048, 0.54074074], [0.4047619 , 0.34074074], [0.45238095, 0.40740741], [0.80952381, 0.04444444], [0.71428571, 0.93333333], [0.45238095, 0.57777778], [0.45238095, 0.34814815], [0.71428571, 0.91111111], [0.54761905, 0.47407407], [0.45238095, 0.46666667], [0.5 , 0.88148148], [0.73809524, 0.54814815], [0.88095238, 0.17777778], [0.45238095, 0.45925926], [0.4047619 , 0.31111111], [0.42857143, 0.35555556], [0.57142857, 0.42962963], [0.5952381 , 0.71851852], [0.64285714, 0.47407407], [0.66666667, 0.75555556], [0.95238095, 0.17037037], [0.71428571, 0.43703704], [0.45238095, 0.9037037 ], [0.45238095, 0.47407407], [0.52380952, 0.33333333], [0.57142857, 0.28888889], [0.78571429, 0.88148148], [0.69047619, 0.72592593], [0.42857143, 0.81481481], [0.47619048, 0.25925926], [0.57142857, 0.40740741], [0.5 , 0.6 ], [0.47619048, 0.25925926], [0.73809524, 0.93333333], [0.5 , 0.47407407], [0.5 , 0.44444444], [0.85714286, 0.65925926], [0.4047619 , 0.2962963 ], [0.64285714, 0.12592593], [0.42857143, 0.33333333], [0.80952381, 0.91111111], [0.83333333, 0.4962963 ], [0.54761905, 0.27407407], [0.71428571, 0.11111111], [0.71428571, 0.85925926], [0.54761905, 0.33333333], [0.54761905, 0.42222222], [0.57142857, 0.44444444], [0.42857143, 0.76296296], [0.69047619, 0.68148148], [0.47619048, 0.26666667], [0.71428571, 0.77037037], [0.57142857, 0.37037037], [0.52380952, 0.37037037], [0.92857143, 0.33333333], [0.42857143, 0.28888889], [0.95238095, 0.95555556], [0.4047619 , 0.47407407], [0.47619048, 0.2962963 ], [0.5 , 0.79259259], [0.83333333, 0.65925926], [0.4047619 , 0.44444444], [0.47619048, 0.37037037], [0.69047619, 0.26666667], [0.69047619, 0.66666667], [0.54761905, 0.35555556], [0.83333333, 0.42222222], [0.85714286, 0.68888889], [0.5 , 0.45925926], [0.47619048, 0.34074074], [0.47619048, 0.72592593], 
[0.45238095, 0.44444444], [0.57142857, 0.55555556], [0.45238095, 0.31111111], [0.42857143, 0.62222222], [1. , 0.14074074], [0.85714286, 0.40740741], [0.54761905, 0.42222222], [0.52380952, 0.41481481], [0.57142857, 0.28888889], [0.5952381 , 0.84444444], [0.83333333, 0.14074074], [0.69047619, 0.25925926], [0.57142857, 0.47407407], [0.57142857, 0.65925926], [0.97619048, 0.1037037 ], [0.95238095, 0.23703704], [0.66666667, 0.54074074], [0.47619048, 0.41481481], [0.85714286, 0.08148148], [1. , 0.22962963], [1. , 0.5037037 ], [0.5 , 0.42962963], [0.97619048, 0.85185185], [0.45238095, 0.48148148], [0.66666667, 0.12592593], [0.66666667, 0.43703704], [0.57142857, 0.28148148], [0.54761905, 0.53333333], [0.95238095, 0.05925926], [0.57142857, 0.36296296], [0.71428571, 0.13333333], [0.61904762, 0.91851852], [0.73809524, 0.0962963 ], [0.92857143, 0.13333333], [0.9047619 , 0.33333333], [0.73809524, 0.17777778], [0.5 , 0.41481481], [0.69047619, 0.14074074], [0.71428571, 0.14814815], [0.71428571, 0.13333333], [0.69047619, 0.05925926], [0.64285714, 0.22222222], [1. , 0.2 ], [0.5 , 0.32592593], [0.66666667, 0.19259259], [0.78571429, 0.05925926], [0.76190476, 0.03703704], [0.42857143, 0.13333333], [0.73809524, 0.15555556]])
In [14]:
# Overwrite X with the scaled features (X is a numpy ndarray from here on).
# NOTE(review): the scaler is fit on the FULL dataset before the train/test
# split — mild data leakage; fitting on X_train only would be stricter.
X = scaler_X.fit_transform(X)
In [15]:
# Target is unchanged by feature scaling
y
Out[15]:
0 0 1 0 2 0 3 0 4 0 .. 395 1 396 1 397 1 398 0 399 1 Name: Purchased, Length: 400, dtype: int64
In [16]:
from sklearn.model_selection import train_test_split
In [17]:
# Hold out 25% of the data for testing (100 of 400 rows); fixed seed for reproducibility
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=1)
In [ ]:
In [19]:
# Build a model that predicts whether a customer will purchase or not.
# Buy vs. not-buy => a classification problem!
In [18]:
from sklearn.linear_model import LogisticRegression
In [19]:
# Logistic-regression classifier; fixed random_state for reproducible results
classifier = LogisticRegression(random_state=1) # the variable name alone makes its role obvious
In [20]:
# Train the model on the training split
classifier.fit(X_train,y_train)
Out[20]:
LogisticRegression(random_state=1)In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook.
On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.
LogisticRegression(random_state=1)
In [21]:
# Predicted class labels (0/1) for the held-out test set
y_pred = classifier.predict(X_test)
In [22]:
# Show the test-set predictions
y_pred
Out[22]:
array([0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0], dtype=int64)
In [23]:
# Predicted class probabilities for each test sample
classifier.predict_proba(X_test) # left column: P(class 0), right column: P(class 1)
Out[23]:
array([[0.86288917, 0.13711083], [0.70574277, 0.29425723], [0.49025346, 0.50974654], [0.38383672, 0.61616328], [0.75287835, 0.24712165], [0.71129842, 0.28870158], [0.97386557, 0.02613443], [0.28966461, 0.71033539], [0.93977209, 0.06022791], [0.12665048, 0.87334952], [0.76998121, 0.23001879], [0.64784915, 0.35215085], [0.74892811, 0.25107189], [0.1531759 , 0.8468241 ], [0.2308496 , 0.7691504 ], [0.06437334, 0.93562666], [0.1158091 , 0.8841909 ], [0.91090318, 0.08909682], [0.8275719 , 0.1724281 ], [0.07924665, 0.92075335], [0.61066233, 0.38933767], [0.2708696 , 0.7291304 ], [0.19380091, 0.80619909], [0.58666188, 0.41333812], [0.72241711, 0.27758289], [0.96740026, 0.03259974], [0.26765906, 0.73234094], [0.36227221, 0.63772779], [0.4643787 , 0.5356213 ], [0.14241022, 0.85758978], [0.7849406 , 0.2150594 ], [0.94528387, 0.05471613], [0.93955256, 0.06044744], [0.1842331 , 0.8157669 ], [0.85196752, 0.14803248], [0.81646998, 0.18353002], [0.97780003, 0.02219997], [0.75572999, 0.24427001], [0.62540997, 0.37459003], [0.98159252, 0.01840748], [0.13217047, 0.86782953], [0.09975728, 0.90024272], [0.56412277, 0.43587723], [0.93144621, 0.06855379], [0.25669316, 0.74330684], [0.63123437, 0.36876563], [0.60149068, 0.39850932], [0.23204368, 0.76795632], [0.61703901, 0.38296099], [0.47107607, 0.52892393], [0.9352704 , 0.0647296 ], [0.78282372, 0.21717628], [0.52722181, 0.47277819], [0.68932852, 0.31067148], [0.56554415, 0.43445585], [0.74711787, 0.25288213], [0.24478251, 0.75521749], [0.80527561, 0.19472439], [0.11231343, 0.88768657], [0.25176908, 0.74823092], [0.83748287, 0.16251713], [0.90512579, 0.09487421], [0.46580477, 0.53419523], [0.8290771 , 0.1709229 ], [0.59409164, 0.40590836], [0.32543863, 0.67456137], [0.9501836 , 0.0498164 ], [0.58175782, 0.41824218], [0.23548422, 0.76451578], [0.77437752, 0.22562248], [0.33052572, 0.66947428], [0.84705661, 0.15294339], [0.46031003, 0.53968997], [0.65786581, 0.34213419], [0.9290781 , 0.0709219 ], [0.13085527, 0.86914473], [0.85663321, 
0.14336679], [0.96669606, 0.03330394], [0.79214651, 0.20785349], [0.79545105, 0.20454895], [0.94031341, 0.05968659], [0.73979099, 0.26020901], [0.23687211, 0.76312789], [0.9329054 , 0.0670946 ], [0.91628607, 0.08371393], [0.87151489, 0.12848511], [0.61203631, 0.38796369], [0.80254888, 0.19745112], [0.68829919, 0.31170081], [0.95322984, 0.04677016], [0.51162714, 0.48837286], [0.27891764, 0.72108236], [0.56460496, 0.43539504], [0.75448865, 0.24551135], [0.86390626, 0.13609374], [0.30839502, 0.69160498], [0.2942329 , 0.7057671 ], [0.67473982, 0.32526018], [0.72758505, 0.27241495], [0.86402151, 0.13597849]])
In [25]:
# Predictions again, for side-by-side comparison with y_test below
y_pred
Out[25]:
array([0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0], dtype=int64)
In [26]:
# Ground-truth labels as a plain array, to eyeball against y_pred above
y_test.values
Out[26]:
array([0, 0, 1, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0], dtype=int64)
In [27]:
# 100 test samples, 2 features
X_test.shape
Out[27]:
(100, 2)
In [ ]:
Confusion Matrix¶
For two-class classification, the matrix has the following form.¶
In [28]:
from sklearn.metrics import confusion_matrix
In [30]:
# Binary classification: both y_test and y_pred take values 0 and 1.
# In the confusion matrix, the true labels (y_test) index the rows
# and the predicted labels (y_pred) index the columns.
cm = confusion_matrix(y_test, y_pred) # argument order (y_true, y_pred) is worth memorizing
In [31]:
# rows: true 0/1; columns: predicted 0/1 — diagonal (52+28) correct, off-diagonal (6+14) wrong
cm
Out[31]:
array([[52, 6], [14, 28]], dtype=int64)
In [34]:
# Manual accuracy check: correct predictions lie on the confusion-matrix
# diagonal, so trace(cm) / cm.sum() gives the same 0.8 as the hand-typed
# (52+28) / 100, but stays correct if the data or the split changes.
cm.trace() / cm.sum()
Out[34]:
0.8
In [35]:
from sklearn.metrics import accuracy_score # accuracy metric helper
In [36]:
# Accuracy via sklearn — matches the manual confusion-matrix computation (0.8)
accuracy_score(y_test, y_pred)
Out[36]:
0.8
In [37]:
from sklearn.metrics import classification_report
In [39]:
# Per-class precision / recall / F1 summary
print(classification_report(y_test, y_pred)) # print() renders the newlines; without it only the raw string repr is shown
precision recall f1-score support 0 0.79 0.90 0.84 58 1 0.82 0.67 0.74 42 accuracy 0.80 100 macro avg 0.81 0.78 0.79 100 weighted avg 0.80 0.80 0.80 100
In [ ]:
The code below can be reused as-is.¶
In [ ]:
# The plot below visualises the prediction results on the test data — consistent with the confusion matrix.
In [40]:
# Visualising the Test set results
from matplotlib.colors import ListedColormap

# Decision-region plot: predict on a dense grid spanning the (scaled) feature
# space, colour the regions, then overlay the actual test points per class.
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.figure(figsize=[10,7])
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
# One scatter call per class. Use the `color` keyword (not `c`) for a single
# RGBA value — passing it via `c` triggers the matplotlib warning seen below.
for i, j in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                color = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Classifier (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
*c* argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with *x* & *y*. Please use the *color* keyword-argument or provide a 2D array with a single row if you intend to specify the same RGB or RGBA value for all points. *c* argument looks like a single numeric RGB or RGBA sequence, which should be avoided as value-mapping will have precedence in case its length matches with *x* & *y*. Please use the *color* keyword-argument or provide a 2D array with a single row if you intend to specify the same RGB or RGBA value for all points.
In [ ]:
In [ ]:
# The plot below shows the data that was used for training.
In [ ]:
# Visualising the Training set results
from matplotlib.colors import ListedColormap

# Same decision-region plot as above, but overlaying the training points
# so the fit on seen data can be compared with the test-set figure.
X_set, y_set = X_train, y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.figure(figsize=[10,7])
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
# One scatter call per class. Use the `color` keyword (not `c`) for a single
# RGBA value — passing it via `c` triggers a matplotlib warning.
for i, j in enumerate(np.unique(y_set)):
    plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                color = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Logistic Regression (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
In [ ]:
'DataScience > MachineLearning' 카테고리의 다른 글
Machine [supervised{Classification(Support Vector Machine)}] (0) | 2022.12.02 |
---|---|
Machine Logistic Regression 데이터의 결점보완(0,nan), 데이터의 불균형 up sampling 기법, 결과를 히트맵으로 표현 (0) | 2022.12.02 |
Machine 예측 모델 실습, 배포를 위한 저장 (0) | 2022.12.01 |
Machine Multiple Linear Regression (0) | 2022.12.01 |
Machine [supervised{Prediction(Linear Regression)}] (0) | 2022.12.01 |