Auto MPG 데이터셋을 사용하여 1970년대 후반과 1980년대 초반의 자동차 연비를 예측하는 모델을 만듭니다. 이 정보에는 실린더 수, 배기량, 마력(horsepower), 공차 중량 같은 속성이 포함됩니다.
In [1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
In [ ]:
# 구글 드라이브 마운트
In [ ]:
In [2]:
# Mount Google Drive so the dataset stored under MyDrive is reachable in Colab.
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
In [ ]:
# Working Directory 설정
In [3]:
import os
In [4]:
# Change the working directory to the data folder so relative file names
# (e.g. 'auto-mpg.csv' below) resolve without a full path.
os.chdir('/content/drive/MyDrive/Colab Notebooks/ml_plus/data')
In [5]:
pwd
Out[5]:
'/content/drive/MyDrive/Colab Notebooks/ml_plus/data'
In [ ]:
In [ ]:
파일은 auto-mpg.csv 입니다.
In [6]:
# Load the Auto MPG dataset (392 rows x 9 columns per the output below).
df = pd.read_csv('auto-mpg.csv')
In [7]:
df
Out[7]:
mpg | cyl | displ | hp | weight | accel | yr | origin | name | |
---|---|---|---|---|---|---|---|---|---|
0 | 18.0 | 8 | 307.0 | 130 | 3504 | 12.0 | 70 | 1 | chevrolet chevelle malibu |
1 | 15.0 | 8 | 350.0 | 165 | 3693 | 11.5 | 70 | 1 | buick skylark 320 |
2 | 18.0 | 8 | 318.0 | 150 | 3436 | 11.0 | 70 | 1 | plymouth satellite |
3 | 16.0 | 8 | 304.0 | 150 | 3433 | 12.0 | 70 | 1 | amc rebel sst |
4 | 17.0 | 8 | 302.0 | 140 | 3449 | 10.5 | 70 | 1 | ford torino |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
387 | 27.0 | 4 | 140.0 | 86 | 2790 | 15.6 | 82 | 1 | ford mustang gl |
388 | 44.0 | 4 | 97.0 | 52 | 2130 | 24.6 | 82 | 2 | vw pickup |
389 | 32.0 | 4 | 135.0 | 84 | 2295 | 11.6 | 82 | 1 | dodge rampage |
390 | 28.0 | 4 | 120.0 | 79 | 2625 | 18.6 | 82 | 1 | ford ranger |
391 | 31.0 | 4 | 119.0 | 82 | 2720 | 19.4 | 82 | 1 | chevy s-10 |
392 rows × 9 columns
In [8]:
# 1. Check for missing values — the output shows 0 for every column.
df.isna().sum()
Out[8]:
mpg 0 cyl 0 displ 0 hp 0 weight 0 accel 0 yr 0 origin 0 name 0 dtype: int64
In [9]:
df.describe()
Out[9]:
mpg | cyl | displ | hp | weight | accel | yr | origin | |
---|---|---|---|---|---|---|---|---|
count | 392.000000 | 392.000000 | 392.000000 | 392.000000 | 392.000000 | 392.000000 | 392.000000 | 392.000000 |
mean | 23.445918 | 5.471939 | 194.411990 | 104.469388 | 2977.584184 | 15.541327 | 75.979592 | 1.576531 |
std | 7.805007 | 1.705783 | 104.644004 | 38.491160 | 849.402560 | 2.758864 | 3.683737 | 0.805518 |
min | 9.000000 | 3.000000 | 68.000000 | 46.000000 | 1613.000000 | 8.000000 | 70.000000 | 1.000000 |
25% | 17.000000 | 4.000000 | 105.000000 | 75.000000 | 2225.250000 | 13.775000 | 73.000000 | 1.000000 |
50% | 22.750000 | 4.000000 | 151.000000 | 93.500000 | 2803.500000 | 15.500000 | 76.000000 | 1.000000 |
75% | 29.000000 | 8.000000 | 275.750000 | 126.000000 | 3614.750000 | 17.025000 | 79.000000 | 2.000000 |
max | 46.600000 | 8.000000 | 455.000000 | 230.000000 | 5140.000000 | 24.800000 | 82.000000 | 3.000000 |
In [ ]:
# 2. Drop rows with missing data, if any exist.
#    (A no-op for this dataset: step 1 above found no missing values.)
df.dropna(inplace = True)
In [ ]:
# 3. X , y 셋팅
In [10]:
df.head(1)
Out[10]:
mpg | cyl | displ | hp | weight | accel | yr | origin | name | |
---|---|---|---|---|---|---|---|---|---|
0 | 18.0 | 8 | 307.0 | 130 | 3504 | 12.0 | 70 | 1 | chevrolet chevelle malibu |
In [11]:
# Target variable: fuel efficiency in miles per gallon.
y = df['mpg']
In [12]:
# Features: every column from 'cyl' through 'origin' (label-based slice is
# inclusive at both ends), dropping the 'mpg' target and the 'name' string column.
X = df.loc[:, 'cyl' : 'origin' ]
In [ ]:
# 4. 카테고리컬 데이터 처리
# Origin 컬럼은 다음과 같다. (1. American, 2. European,3. Japanese).
# 숫자든 문자든, 원핫 인코딩 진행
In [14]:
# 'origin' is categorical with 3 distinct codes (1=American, 2=European, 3=Japanese).
X['origin'].nunique()
Out[14]:
3
In [15]:
from sklearn.preprocessing import OneHotEncoder
from sklearn.compose import ColumnTransformer
In [18]:
# One-hot encode column index 6 ('origin' — X's columns run cyl..origin);
# all remaining columns are passed through unchanged.
# NOTE(review): with remainder='passthrough' the passthrough columns come
# AFTER the encoded ones, so the transformed array's column order differs
# from the original X.
ct = ColumnTransformer( [( 'encoder',OneHotEncoder(), [6])],remainder= 'passthrough' )
In [19]:
# Apply the transformer; .values strips column labels. Result has 9 columns:
# 3 one-hot 'origin' columns followed by the 6 passthrough numeric columns.
X = ct.fit_transform(X.values)
In [ ]:
# 5. X 만 피처 스케일링 하시오 (차트 확인을 위해, y는 하지 않습니다.)
In [22]:
from sklearn.preprocessing import MinMaxScaler
In [23]:
# Keep the fitted scaler in its own variable so the identical transform
# could be re-applied (or inverted) on new data later.
scaler_X = MinMaxScaler()
In [24]:
# Rescale every feature column to the [0, 1] range.
# (y is intentionally left unscaled — see the note below — so the loss/MAE
# values stay in interpretable mpg units.)
X = scaler_X.fit_transform(X)
In [ ]:
# y는 벨리데이션을 보여주기 위해 피처스케일링을 하지않는다.(원래는 해야되는것)
In [ ]:
# 5. 트레인 / 테스트용 셋으로 분리
In [26]:
from sklearn.model_selection import train_test_split
In [27]:
# 80/20 train/test split; fixed random_state makes the split reproducible.
X_train, X_test,y_train, y_test = train_test_split(X,y,test_size=0.2, random_state= 7)
In [ ]:
# 딥러닝 모델링
In [28]:
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
In [29]:
X_train.shape
Out[29]:
(313, 9)
In [ ]:
In [35]:
def build_model(input_dim=9, learning_rate=0.001):
    """Build and compile a dense regression network for MPG prediction.

    Architecture: two hidden Dense layers (64 units, ReLU) followed by a
    single linear output unit. Compiled with the Adam optimizer, MSE loss,
    and MSE/MAE metrics.

    Parameters
    ----------
    input_dim : int, default 9
        Number of input features. Defaults to 9, matching X after the
        one-hot encoding of 'origin' (3 dummies + 6 numeric columns).
    learning_rate : float, default 0.001
        Adam step size ("stride" of each weight update).

    Returns
    -------
    A compiled ``tf.keras`` Sequential model ready for ``fit``.
    """
    model = Sequential()
    model.add(Dense(64, activation='relu', input_shape=(input_dim,)))
    model.add(Dense(64, activation='relu'))
    # Linear output: this is a regression problem, so no squashing activation.
    model.add(Dense(1, activation='linear'))
    model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
                  loss='mse', metrics=['mse', 'mae'])
    return model
In [ ]:
# 만약 줄여서 쓰고싶다면
from tensorflow.keras.optimizers import Adam  # lets you write optimizer=Adam(learning_rate=0.001) instead of spelling out tf.keras.optimizers.Adam(learning_rate=0.001)
In [54]:
# Instantiate a freshly compiled model.
model = build_model()
In [55]:
model.summary()
Model: "sequential_4" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= dense_12 (Dense) (None, 64) 640 dense_13 (Dense) (None, 64) 4160 dense_14 (Dense) (None, 1) 65 ================================================================= Total params: 4,865 Trainable params: 4,865 Non-trainable params: 0 _________________________________________________________________
In [56]:
# Train for 200 epochs. validation_split=0.2 holds out 20% of
# X_train/y_train; at the end of every epoch the model is evaluated on that
# held-out slice, which is never used for the weight updates themselves.
epoch_history = model.fit(X_train,y_train, epochs = 200, validation_split=0.2 )
Epoch 1/200 8/8 [==============================] - 1s 28ms/step - loss: 600.2812 - mse: 600.2812 - mae: 23.2224 - val_loss: 548.5940 - val_mse: 548.5940 - val_mae: 21.9811 Epoch 2/200 8/8 [==============================] - 0s 6ms/step - loss: 582.0087 - mse: 582.0087 - mae: 22.8072 - val_loss: 529.6600 - val_mse: 529.6600 - val_mae: 21.5226 Epoch 3/200 8/8 [==============================] - 0s 5ms/step - loss: 561.7009 - mse: 561.7009 - mae: 22.3329 - val_loss: 506.7584 - val_mse: 506.7584 - val_mae: 20.9554 Epoch 4/200 8/8 [==============================] - 0s 6ms/step - loss: 535.9139 - mse: 535.9139 - mae: 21.7138 - val_loss: 476.5693 - val_mse: 476.5693 - val_mae: 20.1805 Epoch 5/200 8/8 [==============================] - 0s 7ms/step - loss: 501.2526 - mse: 501.2526 - mae: 20.8628 - val_loss: 436.8453 - val_mse: 436.8453 - val_mae: 19.0989 Epoch 6/200 8/8 [==============================] - 0s 6ms/step - loss: 456.5540 - mse: 456.5540 - mae: 19.6968 - val_loss: 387.5936 - val_mse: 387.5936 - val_mae: 17.6471 Epoch 7/200 8/8 [==============================] - 0s 7ms/step - loss: 402.1339 - mse: 402.1339 - mae: 18.1325 - val_loss: 330.0593 - val_mse: 330.0593 - val_mae: 15.7657 Epoch 8/200 8/8 [==============================] - 0s 7ms/step - loss: 340.7202 - mse: 340.7202 - mae: 16.1262 - val_loss: 267.7518 - val_mse: 267.7518 - val_mae: 13.4309 Epoch 9/200 8/8 [==============================] - 0s 6ms/step - loss: 275.0967 - mse: 275.0967 - mae: 13.7394 - val_loss: 208.0983 - val_mse: 208.0983 - val_mae: 11.2632 Epoch 10/200 8/8 [==============================] - 0s 8ms/step - loss: 212.0328 - mse: 212.0328 - mae: 11.7630 - val_loss: 160.6423 - val_mse: 160.6423 - val_mae: 9.8321 Epoch 11/200 8/8 [==============================] - 0s 6ms/step - loss: 163.3562 - mse: 163.3562 - mae: 10.4158 - val_loss: 131.9553 - val_mse: 131.9553 - val_mae: 9.2272 Epoch 12/200 8/8 [==============================] - 0s 6ms/step - loss: 132.1801 - mse: 132.1801 - mae: 9.6547 - 
val_loss: 118.6204 - val_mse: 118.6204 - val_mae: 9.2000 Epoch 13/200 8/8 [==============================] - 0s 8ms/step - loss: 116.8609 - mse: 116.8609 - mae: 9.3054 - val_loss: 109.4010 - val_mse: 109.4010 - val_mae: 9.0666 Epoch 14/200 8/8 [==============================] - 0s 7ms/step - loss: 103.6123 - mse: 103.6123 - mae: 8.7580 - val_loss: 95.7948 - val_mse: 95.7948 - val_mae: 8.5211 Epoch 15/200 8/8 [==============================] - 0s 6ms/step - loss: 90.4434 - mse: 90.4434 - mae: 8.1498 - val_loss: 81.9542 - val_mse: 81.9542 - val_mae: 7.8625 Epoch 16/200 8/8 [==============================] - 0s 9ms/step - loss: 78.6266 - mse: 78.6266 - mae: 7.5646 - val_loss: 69.6861 - val_mse: 69.6861 - val_mae: 7.2268 Epoch 17/200 8/8 [==============================] - 0s 10ms/step - loss: 68.2263 - mse: 68.2263 - mae: 7.0259 - val_loss: 58.5859 - val_mse: 58.5859 - val_mae: 6.5840 Epoch 18/200 8/8 [==============================] - 0s 9ms/step - loss: 58.7619 - mse: 58.7619 - mae: 6.4882 - val_loss: 49.5735 - val_mse: 49.5735 - val_mae: 6.0265 Epoch 19/200 8/8 [==============================] - 0s 6ms/step - loss: 51.2554 - mse: 51.2554 - mae: 6.0529 - val_loss: 41.5763 - val_mse: 41.5763 - val_mae: 5.4710 Epoch 20/200 8/8 [==============================] - 0s 6ms/step - loss: 44.9645 - mse: 44.9645 - mae: 5.6639 - val_loss: 35.2950 - val_mse: 35.2950 - val_mae: 5.0694 Epoch 21/200 8/8 [==============================] - 0s 8ms/step - loss: 39.6852 - mse: 39.6852 - mae: 5.3057 - val_loss: 30.1549 - val_mse: 30.1549 - val_mae: 4.6935 Epoch 22/200 8/8 [==============================] - 0s 6ms/step - loss: 35.5639 - mse: 35.5639 - mae: 4.9672 - val_loss: 25.7421 - val_mse: 25.7421 - val_mae: 4.3096 Epoch 23/200 8/8 [==============================] - 0s 6ms/step - loss: 32.1462 - mse: 32.1462 - mae: 4.6604 - val_loss: 22.4336 - val_mse: 22.4336 - val_mae: 3.9878 Epoch 24/200 8/8 [==============================] - 0s 6ms/step - loss: 29.4771 - mse: 29.4771 - mae: 4.3984 
- val_loss: 19.9340 - val_mse: 19.9340 - val_mae: 3.7254 Epoch 25/200 8/8 [==============================] - 0s 7ms/step - loss: 27.4005 - mse: 27.4005 - mae: 4.1993 - val_loss: 17.8824 - val_mse: 17.8824 - val_mae: 3.4900 Epoch 26/200 8/8 [==============================] - 0s 7ms/step - loss: 25.6445 - mse: 25.6445 - mae: 4.0150 - val_loss: 16.3180 - val_mse: 16.3180 - val_mae: 3.2840 Epoch 27/200 8/8 [==============================] - 0s 8ms/step - loss: 24.2372 - mse: 24.2372 - mae: 3.8755 - val_loss: 15.0795 - val_mse: 15.0795 - val_mae: 3.1125 Epoch 28/200 8/8 [==============================] - 0s 6ms/step - loss: 23.0716 - mse: 23.0716 - mae: 3.7536 - val_loss: 14.1471 - val_mse: 14.1471 - val_mae: 2.9664 Epoch 29/200 8/8 [==============================] - 0s 6ms/step - loss: 22.0323 - mse: 22.0323 - mae: 3.6398 - val_loss: 13.4545 - val_mse: 13.4545 - val_mae: 2.8609 Epoch 30/200 8/8 [==============================] - 0s 6ms/step - loss: 21.1013 - mse: 21.1013 - mae: 3.5444 - val_loss: 12.8383 - val_mse: 12.8383 - val_mae: 2.7601 Epoch 31/200 8/8 [==============================] - 0s 7ms/step - loss: 20.3228 - mse: 20.3228 - mae: 3.4779 - val_loss: 12.4134 - val_mse: 12.4134 - val_mae: 2.6923 Epoch 32/200 8/8 [==============================] - 0s 6ms/step - loss: 19.6565 - mse: 19.6565 - mae: 3.4327 - val_loss: 12.0432 - val_mse: 12.0432 - val_mae: 2.6385 Epoch 33/200 8/8 [==============================] - 0s 8ms/step - loss: 19.0720 - mse: 19.0720 - mae: 3.3762 - val_loss: 11.8186 - val_mse: 11.8186 - val_mae: 2.6162 Epoch 34/200 8/8 [==============================] - 0s 6ms/step - loss: 18.5258 - mse: 18.5258 - mae: 3.3385 - val_loss: 11.5757 - val_mse: 11.5757 - val_mae: 2.5858 Epoch 35/200 8/8 [==============================] - 0s 6ms/step - loss: 17.9254 - mse: 17.9254 - mae: 3.2920 - val_loss: 11.3411 - val_mse: 11.3411 - val_mae: 2.5684 Epoch 36/200 8/8 [==============================] - 0s 7ms/step - loss: 17.4252 - mse: 17.4252 - mae: 3.2459 - 
val_loss: 11.1440 - val_mse: 11.1440 - val_mae: 2.5518 Epoch 37/200 8/8 [==============================] - 0s 6ms/step - loss: 17.0420 - mse: 17.0420 - mae: 3.2074 - val_loss: 10.9728 - val_mse: 10.9728 - val_mae: 2.5317 Epoch 38/200 8/8 [==============================] - 0s 6ms/step - loss: 16.7063 - mse: 16.7063 - mae: 3.1887 - val_loss: 10.8182 - val_mse: 10.8182 - val_mae: 2.5171 Epoch 39/200 8/8 [==============================] - 0s 6ms/step - loss: 16.3407 - mse: 16.3407 - mae: 3.1514 - val_loss: 10.6949 - val_mse: 10.6949 - val_mae: 2.5029 Epoch 40/200 8/8 [==============================] - 0s 6ms/step - loss: 16.0283 - mse: 16.0283 - mae: 3.1246 - val_loss: 10.5125 - val_mse: 10.5125 - val_mae: 2.4859 Epoch 41/200 8/8 [==============================] - 0s 6ms/step - loss: 15.7217 - mse: 15.7217 - mae: 3.0989 - val_loss: 10.3997 - val_mse: 10.3997 - val_mae: 2.4821 Epoch 42/200 8/8 [==============================] - 0s 6ms/step - loss: 15.4969 - mse: 15.4969 - mae: 3.0724 - val_loss: 10.2970 - val_mse: 10.2970 - val_mae: 2.4678 Epoch 43/200 8/8 [==============================] - 0s 8ms/step - loss: 15.2317 - mse: 15.2317 - mae: 3.0492 - val_loss: 10.1783 - val_mse: 10.1783 - val_mae: 2.4604 Epoch 44/200 8/8 [==============================] - 0s 6ms/step - loss: 14.9751 - mse: 14.9751 - mae: 3.0277 - val_loss: 10.0554 - val_mse: 10.0554 - val_mae: 2.4461 Epoch 45/200 8/8 [==============================] - 0s 6ms/step - loss: 14.7595 - mse: 14.7595 - mae: 3.0008 - val_loss: 9.9548 - val_mse: 9.9548 - val_mae: 2.4305 Epoch 46/200 8/8 [==============================] - 0s 8ms/step - loss: 14.5308 - mse: 14.5308 - mae: 2.9715 - val_loss: 9.8454 - val_mse: 9.8454 - val_mae: 2.4247 Epoch 47/200 8/8 [==============================] - 0s 6ms/step - loss: 14.3639 - mse: 14.3639 - mae: 2.9643 - val_loss: 9.7349 - val_mse: 9.7349 - val_mae: 2.4221 Epoch 48/200 8/8 [==============================] - 0s 6ms/step - loss: 14.1758 - mse: 14.1758 - mae: 2.9470 - val_loss: 
9.6079 - val_mse: 9.6079 - val_mae: 2.4082 Epoch 49/200 8/8 [==============================] - 0s 7ms/step - loss: 13.9411 - mse: 13.9411 - mae: 2.9030 - val_loss: 9.4548 - val_mse: 9.4548 - val_mae: 2.3791 Epoch 50/200 8/8 [==============================] - 0s 6ms/step - loss: 13.7606 - mse: 13.7606 - mae: 2.8777 - val_loss: 9.2842 - val_mse: 9.2842 - val_mae: 2.3621 Epoch 51/200 8/8 [==============================] - 0s 8ms/step - loss: 13.5741 - mse: 13.5741 - mae: 2.8513 - val_loss: 9.1379 - val_mse: 9.1379 - val_mae: 2.3453 Epoch 52/200 8/8 [==============================] - 0s 7ms/step - loss: 13.3688 - mse: 13.3688 - mae: 2.8317 - val_loss: 9.0423 - val_mse: 9.0423 - val_mae: 2.3404 Epoch 53/200 8/8 [==============================] - 0s 6ms/step - loss: 13.2084 - mse: 13.2084 - mae: 2.8211 - val_loss: 8.9547 - val_mse: 8.9547 - val_mae: 2.3364 Epoch 54/200 8/8 [==============================] - 0s 6ms/step - loss: 13.0132 - mse: 13.0132 - mae: 2.7941 - val_loss: 8.7855 - val_mse: 8.7855 - val_mae: 2.3098 Epoch 55/200 8/8 [==============================] - 0s 6ms/step - loss: 12.8419 - mse: 12.8419 - mae: 2.7634 - val_loss: 8.6634 - val_mse: 8.6634 - val_mae: 2.2941 Epoch 56/200 8/8 [==============================] - 0s 6ms/step - loss: 12.7083 - mse: 12.7083 - mae: 2.7422 - val_loss: 8.5366 - val_mse: 8.5366 - val_mae: 2.2742 Epoch 57/200 8/8 [==============================] - 0s 7ms/step - loss: 12.5117 - mse: 12.5117 - mae: 2.7173 - val_loss: 8.4392 - val_mse: 8.4392 - val_mae: 2.2751 Epoch 58/200 8/8 [==============================] - 0s 7ms/step - loss: 12.3800 - mse: 12.3800 - mae: 2.7078 - val_loss: 8.3229 - val_mse: 8.3229 - val_mae: 2.2625 Epoch 59/200 8/8 [==============================] - 0s 6ms/step - loss: 12.2179 - mse: 12.2179 - mae: 2.6776 - val_loss: 8.1562 - val_mse: 8.1562 - val_mae: 2.2300 Epoch 60/200 8/8 [==============================] - 0s 6ms/step - loss: 12.0414 - mse: 12.0414 - mae: 2.6511 - val_loss: 8.0641 - val_mse: 8.0641 - 
val_mae: 2.2201 Epoch 61/200 8/8 [==============================] - 0s 10ms/step - loss: 11.9166 - mse: 11.9166 - mae: 2.6333 - val_loss: 7.9448 - val_mse: 7.9448 - val_mae: 2.2003 Epoch 62/200 8/8 [==============================] - 0s 6ms/step - loss: 11.7692 - mse: 11.7692 - mae: 2.6157 - val_loss: 7.8488 - val_mse: 7.8488 - val_mae: 2.1910 Epoch 63/200 8/8 [==============================] - 0s 6ms/step - loss: 11.6402 - mse: 11.6402 - mae: 2.6053 - val_loss: 7.7920 - val_mse: 7.7920 - val_mae: 2.1857 Epoch 64/200 8/8 [==============================] - 0s 9ms/step - loss: 11.5001 - mse: 11.5001 - mae: 2.5842 - val_loss: 7.6608 - val_mse: 7.6608 - val_mae: 2.1651 Epoch 65/200 8/8 [==============================] - 0s 6ms/step - loss: 11.3667 - mse: 11.3667 - mae: 2.5573 - val_loss: 7.5360 - val_mse: 7.5360 - val_mae: 2.1465 Epoch 66/200 8/8 [==============================] - 0s 6ms/step - loss: 11.2112 - mse: 11.2112 - mae: 2.5360 - val_loss: 7.4298 - val_mse: 7.4298 - val_mae: 2.1321 Epoch 67/200 8/8 [==============================] - 0s 6ms/step - loss: 11.0826 - mse: 11.0826 - mae: 2.5175 - val_loss: 7.3195 - val_mse: 7.3195 - val_mae: 2.1139 Epoch 68/200 8/8 [==============================] - 0s 10ms/step - loss: 10.9920 - mse: 10.9920 - mae: 2.5013 - val_loss: 7.2253 - val_mse: 7.2253 - val_mae: 2.0956 Epoch 69/200 8/8 [==============================] - 0s 6ms/step - loss: 10.8526 - mse: 10.8526 - mae: 2.4811 - val_loss: 7.1145 - val_mse: 7.1145 - val_mae: 2.0764 Epoch 70/200 8/8 [==============================] - 0s 6ms/step - loss: 10.7798 - mse: 10.7798 - mae: 2.4688 - val_loss: 7.0747 - val_mse: 7.0747 - val_mae: 2.0701 Epoch 71/200 8/8 [==============================] - 0s 6ms/step - loss: 10.6429 - mse: 10.6429 - mae: 2.4527 - val_loss: 6.9464 - val_mse: 6.9464 - val_mae: 2.0478 Epoch 72/200 8/8 [==============================] - 0s 6ms/step - loss: 10.5642 - mse: 10.5642 - mae: 2.4294 - val_loss: 6.8630 - val_mse: 6.8630 - val_mae: 2.0305 Epoch 73/200 
8/8 [==============================] - 0s 6ms/step - loss: 10.4302 - mse: 10.4302 - mae: 2.4193 - val_loss: 6.8311 - val_mse: 6.8311 - val_mae: 2.0246 Epoch 74/200 8/8 [==============================] - 0s 6ms/step - loss: 10.3346 - mse: 10.3346 - mae: 2.4047 - val_loss: 6.7394 - val_mse: 6.7394 - val_mae: 2.0163 Epoch 75/200 8/8 [==============================] - 0s 10ms/step - loss: 10.2831 - mse: 10.2831 - mae: 2.3784 - val_loss: 6.6192 - val_mse: 6.6192 - val_mae: 1.9883 Epoch 76/200 8/8 [==============================] - 0s 6ms/step - loss: 10.1074 - mse: 10.1074 - mae: 2.3586 - val_loss: 6.6084 - val_mse: 6.6084 - val_mae: 1.9848 Epoch 77/200 8/8 [==============================] - 0s 10ms/step - loss: 10.0267 - mse: 10.0267 - mae: 2.3571 - val_loss: 6.5580 - val_mse: 6.5580 - val_mae: 1.9802 Epoch 78/200 8/8 [==============================] - 0s 9ms/step - loss: 9.9327 - mse: 9.9327 - mae: 2.3413 - val_loss: 6.4456 - val_mse: 6.4456 - val_mae: 1.9686 Epoch 79/200 8/8 [==============================] - 0s 6ms/step - loss: 9.8233 - mse: 9.8233 - mae: 2.3180 - val_loss: 6.3512 - val_mse: 6.3512 - val_mae: 1.9495 Epoch 80/200 8/8 [==============================] - 0s 7ms/step - loss: 9.7689 - mse: 9.7689 - mae: 2.2986 - val_loss: 6.2961 - val_mse: 6.2961 - val_mae: 1.9299 Epoch 81/200 8/8 [==============================] - 0s 6ms/step - loss: 9.6877 - mse: 9.6877 - mae: 2.2895 - val_loss: 6.2810 - val_mse: 6.2810 - val_mae: 1.9395 Epoch 82/200 8/8 [==============================] - 0s 6ms/step - loss: 9.6032 - mse: 9.6032 - mae: 2.2802 - val_loss: 6.2073 - val_mse: 6.2073 - val_mae: 1.9148 Epoch 83/200 8/8 [==============================] - 0s 6ms/step - loss: 9.4767 - mse: 9.4767 - mae: 2.2576 - val_loss: 6.1728 - val_mse: 6.1728 - val_mae: 1.9114 Epoch 84/200 8/8 [==============================] - 0s 6ms/step - loss: 9.4954 - mse: 9.4954 - mae: 2.2657 - val_loss: 6.0836 - val_mse: 6.0836 - val_mae: 1.8985 Epoch 85/200 8/8 [==============================] - 0s 
7ms/step - loss: 9.3628 - mse: 9.3628 - mae: 2.2375 - val_loss: 6.0494 - val_mse: 6.0494 - val_mae: 1.8828 Epoch 86/200 8/8 [==============================] - 0s 7ms/step - loss: 9.2544 - mse: 9.2544 - mae: 2.2252 - val_loss: 6.0265 - val_mse: 6.0265 - val_mae: 1.8832 Epoch 87/200 8/8 [==============================] - 0s 6ms/step - loss: 9.1847 - mse: 9.1847 - mae: 2.2162 - val_loss: 5.9676 - val_mse: 5.9676 - val_mae: 1.8775 Epoch 88/200 8/8 [==============================] - 0s 6ms/step - loss: 9.1218 - mse: 9.1218 - mae: 2.2075 - val_loss: 5.9174 - val_mse: 5.9174 - val_mae: 1.8649 Epoch 89/200 8/8 [==============================] - 0s 6ms/step - loss: 9.0781 - mse: 9.0781 - mae: 2.1954 - val_loss: 5.8470 - val_mse: 5.8470 - val_mae: 1.8564 Epoch 90/200 8/8 [==============================] - 0s 6ms/step - loss: 9.0174 - mse: 9.0174 - mae: 2.1825 - val_loss: 5.8151 - val_mse: 5.8151 - val_mae: 1.8392 Epoch 91/200 8/8 [==============================] - 0s 6ms/step - loss: 8.9762 - mse: 8.9762 - mae: 2.1870 - val_loss: 5.8164 - val_mse: 5.8164 - val_mae: 1.8372 Epoch 92/200 8/8 [==============================] - 0s 6ms/step - loss: 8.9189 - mse: 8.9189 - mae: 2.1670 - val_loss: 5.7187 - val_mse: 5.7187 - val_mae: 1.8136 Epoch 93/200 8/8 [==============================] - 0s 7ms/step - loss: 8.7993 - mse: 8.7993 - mae: 2.1555 - val_loss: 5.7728 - val_mse: 5.7728 - val_mae: 1.8385 Epoch 94/200 8/8 [==============================] - 0s 7ms/step - loss: 8.7586 - mse: 8.7586 - mae: 2.1536 - val_loss: 5.6917 - val_mse: 5.6917 - val_mae: 1.8218 Epoch 95/200 8/8 [==============================] - 0s 7ms/step - loss: 8.6763 - mse: 8.6763 - mae: 2.1400 - val_loss: 5.6459 - val_mse: 5.6459 - val_mae: 1.8167 Epoch 96/200 8/8 [==============================] - 0s 7ms/step - loss: 8.6165 - mse: 8.6165 - mae: 2.1307 - val_loss: 5.5880 - val_mse: 5.5880 - val_mae: 1.7949 Epoch 97/200 8/8 [==============================] - 0s 6ms/step - loss: 8.6050 - mse: 8.6050 - mae: 2.1375 - 
val_loss: 5.6026 - val_mse: 5.6026 - val_mae: 1.7986 Epoch 98/200 8/8 [==============================] - 0s 7ms/step - loss: 8.4997 - mse: 8.4997 - mae: 2.1140 - val_loss: 5.5159 - val_mse: 5.5159 - val_mae: 1.7733 Epoch 99/200 8/8 [==============================] - 0s 6ms/step - loss: 8.4762 - mse: 8.4762 - mae: 2.1046 - val_loss: 5.4963 - val_mse: 5.4963 - val_mae: 1.7763 Epoch 100/200 8/8 [==============================] - 0s 6ms/step - loss: 8.3945 - mse: 8.3945 - mae: 2.1016 - val_loss: 5.5243 - val_mse: 5.5243 - val_mae: 1.7808 Epoch 101/200 8/8 [==============================] - 0s 7ms/step - loss: 8.3632 - mse: 8.3632 - mae: 2.0998 - val_loss: 5.4536 - val_mse: 5.4536 - val_mae: 1.7666 Epoch 102/200 8/8 [==============================] - 0s 6ms/step - loss: 8.3374 - mse: 8.3374 - mae: 2.0905 - val_loss: 5.4284 - val_mse: 5.4284 - val_mae: 1.7657 Epoch 103/200 8/8 [==============================] - 0s 7ms/step - loss: 8.2962 - mse: 8.2962 - mae: 2.0993 - val_loss: 5.5061 - val_mse: 5.5061 - val_mae: 1.7999 Epoch 104/200 8/8 [==============================] - 0s 6ms/step - loss: 8.2331 - mse: 8.2331 - mae: 2.0880 - val_loss: 5.3945 - val_mse: 5.3945 - val_mae: 1.7636 Epoch 105/200 8/8 [==============================] - 0s 6ms/step - loss: 8.2164 - mse: 8.2164 - mae: 2.0768 - val_loss: 5.3215 - val_mse: 5.3215 - val_mae: 1.7411 Epoch 106/200 8/8 [==============================] - 0s 7ms/step - loss: 8.1463 - mse: 8.1463 - mae: 2.0685 - val_loss: 5.3771 - val_mse: 5.3771 - val_mae: 1.7526 Epoch 107/200 8/8 [==============================] - 0s 6ms/step - loss: 8.1879 - mse: 8.1879 - mae: 2.0771 - val_loss: 5.3468 - val_mse: 5.3468 - val_mae: 1.7370 Epoch 108/200 8/8 [==============================] - 0s 6ms/step - loss: 8.0962 - mse: 8.0962 - mae: 2.0802 - val_loss: 5.4160 - val_mse: 5.4160 - val_mae: 1.7677 Epoch 109/200 8/8 [==============================] - 0s 6ms/step - loss: 8.1169 - mse: 8.1169 - mae: 2.0760 - val_loss: 5.3278 - val_mse: 5.3278 - val_mae: 
1.7607 Epoch 110/200 8/8 [==============================] - 0s 7ms/step - loss: 8.0257 - mse: 8.0257 - mae: 2.0582 - val_loss: 5.2782 - val_mse: 5.2782 - val_mae: 1.7364 Epoch 111/200 8/8 [==============================] - 0s 12ms/step - loss: 7.9873 - mse: 7.9873 - mae: 2.0547 - val_loss: 5.3990 - val_mse: 5.3990 - val_mae: 1.7604 Epoch 112/200 8/8 [==============================] - 0s 8ms/step - loss: 7.9490 - mse: 7.9490 - mae: 2.0517 - val_loss: 5.2961 - val_mse: 5.2961 - val_mae: 1.7354 Epoch 113/200 8/8 [==============================] - 0s 6ms/step - loss: 7.9287 - mse: 7.9287 - mae: 2.0464 - val_loss: 5.2761 - val_mse: 5.2761 - val_mae: 1.7263 Epoch 114/200 8/8 [==============================] - 0s 7ms/step - loss: 7.9386 - mse: 7.9386 - mae: 2.0496 - val_loss: 5.2549 - val_mse: 5.2549 - val_mae: 1.7332 Epoch 115/200 8/8 [==============================] - 0s 7ms/step - loss: 7.8934 - mse: 7.8934 - mae: 2.0520 - val_loss: 5.4480 - val_mse: 5.4480 - val_mae: 1.7851 Epoch 116/200 8/8 [==============================] - 0s 6ms/step - loss: 7.9678 - mse: 7.9678 - mae: 2.0709 - val_loss: 5.3737 - val_mse: 5.3737 - val_mae: 1.7758 Epoch 117/200 8/8 [==============================] - 0s 6ms/step - loss: 7.8283 - mse: 7.8283 - mae: 2.0306 - val_loss: 5.2198 - val_mse: 5.2198 - val_mae: 1.7055 Epoch 118/200 8/8 [==============================] - 0s 10ms/step - loss: 7.8668 - mse: 7.8668 - mae: 2.0385 - val_loss: 5.2783 - val_mse: 5.2783 - val_mae: 1.7303 Epoch 119/200 8/8 [==============================] - 0s 7ms/step - loss: 7.8609 - mse: 7.8609 - mae: 2.0512 - val_loss: 5.4249 - val_mse: 5.4249 - val_mae: 1.7750 Epoch 120/200 8/8 [==============================] - 0s 6ms/step - loss: 7.7273 - mse: 7.7273 - mae: 2.0290 - val_loss: 5.2000 - val_mse: 5.2000 - val_mae: 1.7142 Epoch 121/200 8/8 [==============================] - 0s 6ms/step - loss: 7.8087 - mse: 7.8087 - mae: 2.0311 - val_loss: 5.1893 - val_mse: 5.1893 - val_mae: 1.7004 Epoch 122/200 8/8 
[==============================] - 0s 7ms/step - loss: 7.7263 - mse: 7.7263 - mae: 2.0249 - val_loss: 5.3257 - val_mse: 5.3257 - val_mae: 1.7562 Epoch 123/200 8/8 [==============================] - 0s 7ms/step - loss: 7.7329 - mse: 7.7329 - mae: 2.0353 - val_loss: 5.3145 - val_mse: 5.3145 - val_mae: 1.7491 Epoch 124/200 8/8 [==============================] - 0s 6ms/step - loss: 7.7504 - mse: 7.7504 - mae: 2.0221 - val_loss: 5.2133 - val_mse: 5.2133 - val_mae: 1.7193 Epoch 125/200 8/8 [==============================] - 0s 7ms/step - loss: 7.6766 - mse: 7.6766 - mae: 2.0212 - val_loss: 5.2776 - val_mse: 5.2776 - val_mae: 1.7475 Epoch 126/200 8/8 [==============================] - 0s 6ms/step - loss: 7.6747 - mse: 7.6747 - mae: 2.0262 - val_loss: 5.2029 - val_mse: 5.2029 - val_mae: 1.7246 Epoch 127/200 8/8 [==============================] - 0s 6ms/step - loss: 7.8005 - mse: 7.8005 - mae: 2.0428 - val_loss: 5.2781 - val_mse: 5.2781 - val_mae: 1.7466 Epoch 128/200 8/8 [==============================] - 0s 7ms/step - loss: 7.6288 - mse: 7.6288 - mae: 2.0164 - val_loss: 5.1645 - val_mse: 5.1645 - val_mae: 1.6910 Epoch 129/200 8/8 [==============================] - 0s 6ms/step - loss: 7.6841 - mse: 7.6841 - mae: 2.0137 - val_loss: 5.1747 - val_mse: 5.1747 - val_mae: 1.7010 Epoch 130/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5985 - mse: 7.5985 - mae: 2.0089 - val_loss: 5.3551 - val_mse: 5.3551 - val_mae: 1.7593 Epoch 131/200 8/8 [==============================] - 0s 7ms/step - loss: 7.6257 - mse: 7.6257 - mae: 2.0211 - val_loss: 5.3000 - val_mse: 5.3000 - val_mae: 1.7508 Epoch 132/200 8/8 [==============================] - 0s 9ms/step - loss: 7.6045 - mse: 7.6045 - mae: 2.0152 - val_loss: 5.1491 - val_mse: 5.1491 - val_mae: 1.7127 Epoch 133/200 8/8 [==============================] - 0s 8ms/step - loss: 7.5685 - mse: 7.5685 - mae: 2.0027 - val_loss: 5.2157 - val_mse: 5.2157 - val_mae: 1.7228 Epoch 134/200 8/8 [==============================] - 0s 
7ms/step - loss: 7.5749 - mse: 7.5749 - mae: 2.0116 - val_loss: 5.3104 - val_mse: 5.3104 - val_mae: 1.7473 Epoch 135/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5392 - mse: 7.5392 - mae: 2.0098 - val_loss: 5.1909 - val_mse: 5.1909 - val_mae: 1.7250 Epoch 136/200 8/8 [==============================] - 0s 7ms/step - loss: 7.5674 - mse: 7.5674 - mae: 2.0175 - val_loss: 5.2822 - val_mse: 5.2822 - val_mae: 1.7520 Epoch 137/200 8/8 [==============================] - 0s 7ms/step - loss: 7.5423 - mse: 7.5423 - mae: 2.0141 - val_loss: 5.1791 - val_mse: 5.1791 - val_mae: 1.7145 Epoch 138/200 8/8 [==============================] - 0s 7ms/step - loss: 7.5252 - mse: 7.5252 - mae: 2.0064 - val_loss: 5.2644 - val_mse: 5.2644 - val_mae: 1.7403 Epoch 139/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5676 - mse: 7.5676 - mae: 2.0084 - val_loss: 5.0988 - val_mse: 5.0988 - val_mae: 1.6901 Epoch 140/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5011 - mse: 7.5011 - mae: 2.0065 - val_loss: 5.3162 - val_mse: 5.3162 - val_mae: 1.7513 Epoch 141/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5071 - mse: 7.5071 - mae: 2.0078 - val_loss: 5.2578 - val_mse: 5.2578 - val_mae: 1.7439 Epoch 142/200 8/8 [==============================] - 0s 6ms/step - loss: 7.6120 - mse: 7.6120 - mae: 2.0172 - val_loss: 5.1242 - val_mse: 5.1242 - val_mae: 1.7012 Epoch 143/200 8/8 [==============================] - 0s 7ms/step - loss: 7.6647 - mse: 7.6647 - mae: 2.0374 - val_loss: 5.6246 - val_mse: 5.6246 - val_mae: 1.8307 Epoch 144/200 8/8 [==============================] - 0s 7ms/step - loss: 7.4455 - mse: 7.4455 - mae: 2.0162 - val_loss: 5.2171 - val_mse: 5.2171 - val_mae: 1.7450 Epoch 145/200 8/8 [==============================] - 0s 6ms/step - loss: 7.5381 - mse: 7.5381 - mae: 2.0125 - val_loss: 5.1568 - val_mse: 5.1568 - val_mae: 1.7149 Epoch 146/200 8/8 [==============================] - 0s 6ms/step - loss: 7.4362 - mse: 7.4362 - 
mae: 2.0064 - val_loss: 5.4422 - val_mse: 5.4422 - val_mae: 1.7791 Epoch 147/200 8/8 [==============================] - 0s 6ms/step - loss: 7.4335 - mse: 7.4335 - mae: 2.0122 - val_loss: 5.2511 - val_mse: 5.2511 - val_mae: 1.7368 Epoch 148/200 8/8 [==============================] - 0s 6ms/step - loss: 7.4064 - mse: 7.4064 - mae: 1.9947 - val_loss: 5.1757 - val_mse: 5.1757 - val_mae: 1.7117 Epoch 149/200 8/8 [==============================] - 0s 7ms/step - loss: 7.4036 - mse: 7.4036 - mae: 1.9976 - val_loss: 5.1927 - val_mse: 5.1927 - val_mae: 1.7231 Epoch 150/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3787 - mse: 7.3787 - mae: 1.9940 - val_loss: 5.2643 - val_mse: 5.2643 - val_mae: 1.7391 Epoch 151/200 8/8 [==============================] - 0s 6ms/step - loss: 7.4437 - mse: 7.4437 - mae: 2.0074 - val_loss: 5.2503 - val_mse: 5.2503 - val_mae: 1.7345 Epoch 152/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3954 - mse: 7.3954 - mae: 1.9979 - val_loss: 5.1797 - val_mse: 5.1797 - val_mae: 1.7247 Epoch 153/200 8/8 [==============================] - 0s 8ms/step - loss: 7.4609 - mse: 7.4609 - mae: 2.0032 - val_loss: 5.3922 - val_mse: 5.3922 - val_mae: 1.7778 Epoch 154/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3377 - mse: 7.3377 - mae: 1.9984 - val_loss: 5.2482 - val_mse: 5.2482 - val_mae: 1.7431 Epoch 155/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3692 - mse: 7.3692 - mae: 1.9948 - val_loss: 5.2056 - val_mse: 5.2056 - val_mae: 1.7268 Epoch 156/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3463 - mse: 7.3463 - mae: 1.9831 - val_loss: 5.3444 - val_mse: 5.3444 - val_mae: 1.7586 Epoch 157/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3494 - mse: 7.3494 - mae: 1.9955 - val_loss: 5.2355 - val_mse: 5.2355 - val_mae: 1.7296 Epoch 158/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3199 - mse: 7.3199 - mae: 1.9872 - val_loss: 5.2230 - val_mse: 
5.2230 - val_mae: 1.7307 Epoch 159/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3185 - mse: 7.3185 - mae: 1.9879 - val_loss: 5.2142 - val_mse: 5.2142 - val_mae: 1.7208 Epoch 160/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3030 - mse: 7.3030 - mae: 1.9883 - val_loss: 5.2813 - val_mse: 5.2813 - val_mae: 1.7433 Epoch 161/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3239 - mse: 7.3239 - mae: 1.9840 - val_loss: 5.1925 - val_mse: 5.1925 - val_mae: 1.7295 Epoch 162/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3912 - mse: 7.3912 - mae: 2.0108 - val_loss: 5.4174 - val_mse: 5.4174 - val_mae: 1.7808 Epoch 163/200 8/8 [==============================] - 0s 7ms/step - loss: 7.2620 - mse: 7.2620 - mae: 1.9863 - val_loss: 5.1431 - val_mse: 5.1431 - val_mae: 1.7151 Epoch 164/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3283 - mse: 7.3283 - mae: 1.9852 - val_loss: 5.1802 - val_mse: 5.1802 - val_mae: 1.7154 Epoch 165/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2842 - mse: 7.2842 - mae: 1.9832 - val_loss: 5.3665 - val_mse: 5.3665 - val_mae: 1.7613 Epoch 166/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2775 - mse: 7.2775 - mae: 1.9863 - val_loss: 5.2955 - val_mse: 5.2955 - val_mae: 1.7482 Epoch 167/200 8/8 [==============================] - 0s 7ms/step - loss: 7.2907 - mse: 7.2907 - mae: 1.9868 - val_loss: 5.1956 - val_mse: 5.1956 - val_mae: 1.7320 Epoch 168/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2661 - mse: 7.2661 - mae: 1.9873 - val_loss: 5.3678 - val_mse: 5.3678 - val_mae: 1.7696 Epoch 169/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2817 - mse: 7.2817 - mae: 1.9929 - val_loss: 5.2355 - val_mse: 5.2355 - val_mae: 1.7360 Epoch 170/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2781 - mse: 7.2781 - mae: 1.9837 - val_loss: 5.1549 - val_mse: 5.1549 - val_mae: 1.7201 Epoch 171/200 8/8 
[==============================] - 0s 7ms/step - loss: 7.2251 - mse: 7.2251 - mae: 1.9757 - val_loss: 5.2590 - val_mse: 5.2590 - val_mae: 1.7354 Epoch 172/200 8/8 [==============================] - 0s 7ms/step - loss: 7.4377 - mse: 7.4377 - mae: 2.0142 - val_loss: 5.5835 - val_mse: 5.5835 - val_mae: 1.8181 Epoch 173/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2890 - mse: 7.2890 - mae: 1.9916 - val_loss: 5.2235 - val_mse: 5.2235 - val_mae: 1.7442 Epoch 174/200 8/8 [==============================] - 0s 10ms/step - loss: 7.2730 - mse: 7.2730 - mae: 1.9964 - val_loss: 5.3122 - val_mse: 5.3122 - val_mae: 1.7516 Epoch 175/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2134 - mse: 7.2134 - mae: 1.9816 - val_loss: 5.3066 - val_mse: 5.3066 - val_mae: 1.7485 Epoch 176/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2066 - mse: 7.2066 - mae: 1.9819 - val_loss: 5.2990 - val_mse: 5.2990 - val_mae: 1.7464 Epoch 177/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2252 - mse: 7.2252 - mae: 1.9850 - val_loss: 5.2220 - val_mse: 5.2220 - val_mae: 1.7252 Epoch 178/200 8/8 [==============================] - 0s 8ms/step - loss: 7.2359 - mse: 7.2359 - mae: 1.9815 - val_loss: 5.3346 - val_mse: 5.3346 - val_mae: 1.7539 Epoch 179/200 8/8 [==============================] - 0s 7ms/step - loss: 7.2021 - mse: 7.2021 - mae: 1.9772 - val_loss: 5.2072 - val_mse: 5.2072 - val_mae: 1.7219 Epoch 180/200 8/8 [==============================] - 0s 8ms/step - loss: 7.2100 - mse: 7.2100 - mae: 1.9802 - val_loss: 5.3328 - val_mse: 5.3328 - val_mae: 1.7623 Epoch 181/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2275 - mse: 7.2275 - mae: 1.9890 - val_loss: 5.2823 - val_mse: 5.2823 - val_mae: 1.7590 Epoch 182/200 8/8 [==============================] - 0s 6ms/step - loss: 7.1615 - mse: 7.1615 - mae: 1.9766 - val_loss: 5.1960 - val_mse: 5.1960 - val_mae: 1.7169 Epoch 183/200 8/8 [==============================] - 0s 
6ms/step - loss: 7.1862 - mse: 7.1862 - mae: 1.9708 - val_loss: 5.2362 - val_mse: 5.2362 - val_mae: 1.7278 Epoch 184/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1935 - mse: 7.1935 - mae: 1.9708 - val_loss: 5.2727 - val_mse: 5.2727 - val_mae: 1.7419 Epoch 185/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1632 - mse: 7.1632 - mae: 1.9800 - val_loss: 5.1797 - val_mse: 5.1797 - val_mae: 1.7284 Epoch 186/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3413 - mse: 7.3413 - mae: 2.0018 - val_loss: 5.4138 - val_mse: 5.4138 - val_mae: 1.7779 Epoch 187/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2222 - mse: 7.2222 - mae: 1.9770 - val_loss: 5.1247 - val_mse: 5.1247 - val_mae: 1.7119 Epoch 188/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1695 - mse: 7.1695 - mae: 1.9708 - val_loss: 5.3322 - val_mse: 5.3322 - val_mae: 1.7635 Epoch 189/200 8/8 [==============================] - 0s 6ms/step - loss: 7.2121 - mse: 7.2121 - mae: 1.9775 - val_loss: 5.1964 - val_mse: 5.1964 - val_mae: 1.7224 Epoch 190/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1344 - mse: 7.1344 - mae: 1.9700 - val_loss: 5.1846 - val_mse: 5.1846 - val_mae: 1.7249 Epoch 191/200 8/8 [==============================] - 0s 6ms/step - loss: 7.1343 - mse: 7.1343 - mae: 1.9704 - val_loss: 5.3413 - val_mse: 5.3413 - val_mae: 1.7643 Epoch 192/200 8/8 [==============================] - 0s 6ms/step - loss: 7.1355 - mse: 7.1355 - mae: 1.9787 - val_loss: 5.2403 - val_mse: 5.2403 - val_mae: 1.7498 Epoch 193/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1945 - mse: 7.1945 - mae: 1.9781 - val_loss: 5.2530 - val_mse: 5.2530 - val_mae: 1.7332 Epoch 194/200 8/8 [==============================] - 0s 7ms/step - loss: 7.3127 - mse: 7.3127 - mae: 2.0071 - val_loss: 5.5224 - val_mse: 5.5224 - val_mae: 1.8115 Epoch 195/200 8/8 [==============================] - 0s 6ms/step - loss: 7.0523 - mse: 7.0523 - 
mae: 1.9701 - val_loss: 5.2339 - val_mse: 5.2339 - val_mae: 1.7526 Epoch 196/200 8/8 [==============================] - 0s 6ms/step - loss: 7.3432 - mse: 7.3432 - mae: 2.0016 - val_loss: 5.0968 - val_mse: 5.0968 - val_mae: 1.7005 Epoch 197/200 8/8 [==============================] - 0s 7ms/step - loss: 7.2678 - mse: 7.2678 - mae: 1.9938 - val_loss: 5.5358 - val_mse: 5.5358 - val_mae: 1.8204 Epoch 198/200 8/8 [==============================] - 0s 6ms/step - loss: 7.4028 - mse: 7.4028 - mae: 1.9872 - val_loss: 5.1030 - val_mse: 5.1030 - val_mae: 1.7060 Epoch 199/200 8/8 [==============================] - 0s 6ms/step - loss: 7.1859 - mse: 7.1859 - mae: 1.9885 - val_loss: 5.4726 - val_mse: 5.4726 - val_mae: 1.8021 Epoch 200/200 8/8 [==============================] - 0s 7ms/step - loss: 7.1609 - mse: 7.1609 - mae: 1.9737 - val_loss: 5.2368 - val_mse: 5.2368 - val_mae: 1.7444
In [ ]:
######## val == validation #########
# 학습은 train 검증은 test
# 밸리데이션이란, 에포크가 한번 끝날때 마다, 학습에 사용하지 않은 데이터로
# 시험을 보는 것을 말한다.
# 학습에 사용하지 않은 데이터는 어디서 얻을수 있느냐,
# 기존에 있던 X_train, y_train 에서 validation_split=0.2 , 즉 20%로 다시 나눠서 사용
# 테스트란, 인공지능이 완전히 학습이 다 끝난 상태에서 평가하는것을 테스트라고 하고,
# 밸리데이션은, 학습 중에, 에포크 끝날때 마다 평가하는 것을 말한다.(딥러닝에서만 있는것.)
# 일종의 중간시험
# 결과물이 정상적으로 나오는지 확인하기 위함, 완성된 후에 확인하면 잘못될 가능성도 있기때문이다.
In [ ]:
In [ ]:
In [39]:
import seaborn as sb
import matplotlib.pyplot as plt
In [40]:
plt.plot(epoch_history.history['loss']) # loss barely changes after ~epoch 25 — i.e. the model has hit its limit
plt.show()
In [ ]:
## epoch_history 객체에 저장된 통계치를 사용해 모델의 훈련 과정을 시각화!
In [53]:
import matplotlib.pyplot as plt
def plot_history(history):
    """Visualize a Keras training run: MAE and MSE curves over epochs.

    Draws two stacked panels — mean absolute error (top) and mean squared
    error (bottom) — each showing the training metric against the
    validation metric so over/under-fitting is easy to spot.

    Parameters
    ----------
    history : keras History object (as returned by model.fit), assumed to
        carry 'mae', 'val_mae', 'mse', 'val_mse' in history.history and
        the epoch indices in history.epoch.
    """
    hist = pd.DataFrame(history.history)
    hist['epoch'] = history.epoch

    plt.figure(figsize=(8, 12))

    # Top panel: mean absolute error (train vs. validation).
    plt.subplot(2, 1, 1)
    plt.xlabel('Epoch')  # fixed typo: was 'Eopoch'
    plt.ylabel('Mean Abs Error [MPG]')
    plt.plot(hist['epoch'], hist['mae'], label='Train Error')
    plt.plot(hist['epoch'], hist['val_mae'], label='Val Error')
    plt.ylim([0, 5])
    plt.legend()

    # Bottom panel: mean squared error (train vs. validation).
    plt.subplot(2, 1, 2)
    plt.xlabel('Epoch')  # fixed typo: was 'Eopoch'
    plt.ylabel('Mean Squared Error [MPG]')
    plt.plot(hist['epoch'], hist['mse'], label='Train Error')
    plt.plot(hist['epoch'], hist['val_mse'], label='Val Error')
    plt.ylim([0, 20])
    plt.legend()
    plt.show()
In [57]:
# Visualize the training curves recorded during the 200-epoch fit above.
plot_history(epoch_history)
이 그래프(validation)를 보면 수 백번 에포크를 진행한 이후에는 모델이 거의 향상되지 않는 것 같습니다. model.fit 메서드를 수정하여 검증 점수가 향상되지 않으면 자동으로 훈련을 멈추도록 만들어 보죠. 에포크마다 훈련 상태를 점검하기 위해 EarlyStopping 콜백(callback)을 사용하겠습니다. 지정된 에포크 횟수 동안 성능 향상이 없으면 자동으로 훈련이 멈춥니다.
이 콜백에 대해 더 자세한 내용은 여기를 참고하세요. (링크 텍스트)
In [ ]:
# callbacks이란, 내가 만든 함수를, 프레임워크가 실행시켜주는 것!
# 안드로이드는 전부 callbacks 함수에서 실행하는것
In [58]:
model = build_model() # a function I wrote and call myself when I choose == NOT a callback function
In [59]:
# patience=10: stop training if there is no improvement for 10 consecutive epochs.
early_stop = tf.keras.callbacks.EarlyStopping(monitor='val_loss',patience=10 ) # monitoring 'val_loss' works here because validation_split=0.2 is passed to fit(); patience=10 means "stop when val_loss has not improved for 10 epochs"
In [60]:
epoch_history = model.fit(X_train,y_train ,epochs=1000000, validation_split=0.2, callbacks=[early_stop]) # several callbacks can be passed in this list; the huge epoch count is intentional — EarlyStopping terminates the run
Epoch 1/1000000 8/8 [==============================] - 1s 28ms/step - loss: 605.3304 - mse: 605.3304 - mae: 23.3304 - val_loss: 553.1509 - val_mse: 553.1509 - val_mae: 22.0846 Epoch 2/1000000 8/8 [==============================] - 0s 6ms/step - loss: 584.7609 - mse: 584.7609 - mae: 22.8683 - val_loss: 531.3099 - val_mse: 531.3099 - val_mae: 21.5568 Epoch 3/1000000 8/8 [==============================] - 0s 7ms/step - loss: 560.9420 - mse: 560.9420 - mae: 22.3107 - val_loss: 504.9069 - val_mse: 504.9069 - val_mae: 20.8938 Epoch 4/1000000 8/8 [==============================] - 0s 6ms/step - loss: 531.1887 - mse: 531.1887 - mae: 21.5954 - val_loss: 471.5186 - val_mse: 471.5186 - val_mae: 20.0184 Epoch 5/1000000 8/8 [==============================] - 0s 8ms/step - loss: 493.2675 - mse: 493.2675 - mae: 20.6347 - val_loss: 428.5940 - val_mse: 428.5940 - val_mae: 18.8238 Epoch 6/1000000 8/8 [==============================] - 0s 6ms/step - loss: 445.2752 - mse: 445.2752 - mae: 19.3199 - val_loss: 375.6982 - val_mse: 375.6982 - val_mae: 17.2231 Epoch 7/1000000 8/8 [==============================] - 0s 6ms/step - loss: 386.3196 - mse: 386.3196 - mae: 17.5760 - val_loss: 314.4095 - val_mse: 314.4095 - val_mae: 15.1170 Epoch 8/1000000 8/8 [==============================] - 0s 6ms/step - loss: 319.4677 - mse: 319.4677 - mae: 15.3469 - val_loss: 249.5599 - val_mse: 249.5599 - val_mae: 12.6919 Epoch 9/1000000 8/8 [==============================] - 0s 7ms/step - loss: 250.7768 - mse: 250.7768 - mae: 13.0134 - val_loss: 191.2442 - val_mse: 191.2442 - val_mae: 10.9073 Epoch 10/1000000 8/8 [==============================] - 0s 6ms/step - loss: 193.1672 - mse: 193.1672 - mae: 11.3887 - val_loss: 149.5983 - val_mse: 149.5983 - val_mae: 9.7771 Epoch 11/1000000 8/8 [==============================] - 0s 6ms/step - loss: 149.5127 - mse: 149.5127 - mae: 10.2032 - val_loss: 129.2927 - val_mse: 129.2927 - val_mae: 9.4505 Epoch 12/1000000 8/8 [==============================] - 0s 6ms/step - 
loss: 127.2863 - mse: 127.2863 - mae: 9.6251 - val_loss: 119.2709 - val_mse: 119.2709 - val_mae: 9.3678 Epoch 13/1000000 8/8 [==============================] - 0s 6ms/step - loss: 112.9564 - mse: 112.9564 - mae: 9.1341 - val_loss: 107.5244 - val_mse: 107.5244 - val_mae: 8.9939 Epoch 14/1000000 8/8 [==============================] - 0s 6ms/step - loss: 100.0515 - mse: 100.0515 - mae: 8.5816 - val_loss: 92.2121 - val_mse: 92.2121 - val_mae: 8.3169 Epoch 15/1000000 8/8 [==============================] - 0s 8ms/step - loss: 87.4673 - mse: 87.4673 - mae: 7.9835 - val_loss: 78.1044 - val_mse: 78.1044 - val_mae: 7.6417 Epoch 16/1000000 8/8 [==============================] - 0s 6ms/step - loss: 75.5417 - mse: 75.5417 - mae: 7.3977 - val_loss: 66.1553 - val_mse: 66.1553 - val_mae: 7.0088 Epoch 17/1000000 8/8 [==============================] - 0s 6ms/step - loss: 65.5123 - mse: 65.5123 - mae: 6.8624 - val_loss: 56.0064 - val_mse: 56.0064 - val_mae: 6.4088 Epoch 18/1000000 8/8 [==============================] - 0s 6ms/step - loss: 57.3969 - mse: 57.3969 - mae: 6.4111 - val_loss: 47.2093 - val_mse: 47.2093 - val_mae: 5.8512 Epoch 19/1000000 8/8 [==============================] - 0s 6ms/step - loss: 50.1026 - mse: 50.1026 - mae: 5.9642 - val_loss: 39.9961 - val_mse: 39.9961 - val_mae: 5.3740 Epoch 20/1000000 8/8 [==============================] - 0s 9ms/step - loss: 44.7218 - mse: 44.7218 - mae: 5.6067 - val_loss: 33.9779 - val_mse: 33.9779 - val_mae: 4.9527 Epoch 21/1000000 8/8 [==============================] - 0s 6ms/step - loss: 39.8221 - mse: 39.8221 - mae: 5.2456 - val_loss: 29.2459 - val_mse: 29.2459 - val_mae: 4.5809 Epoch 22/1000000 8/8 [==============================] - 0s 6ms/step - loss: 36.0439 - mse: 36.0439 - mae: 4.9419 - val_loss: 25.4219 - val_mse: 25.4219 - val_mae: 4.2369 Epoch 23/1000000 8/8 [==============================] - 0s 8ms/step - loss: 32.8964 - mse: 32.8964 - mae: 4.6590 - val_loss: 22.4249 - val_mse: 22.4249 - val_mae: 3.9446 Epoch 24/1000000 
8/8 [==============================] - 0s 9ms/step - loss: 30.3968 - mse: 30.3968 - mae: 4.4191 - val_loss: 20.1006 - val_mse: 20.1006 - val_mae: 3.6941 Epoch 25/1000000 8/8 [==============================] - 0s 7ms/step - loss: 28.4247 - mse: 28.4247 - mae: 4.2261 - val_loss: 18.2116 - val_mse: 18.2116 - val_mae: 3.4766 Epoch 26/1000000 8/8 [==============================] - 0s 7ms/step - loss: 26.9197 - mse: 26.9197 - mae: 4.0693 - val_loss: 16.6741 - val_mse: 16.6741 - val_mae: 3.2816 Epoch 27/1000000 8/8 [==============================] - 0s 6ms/step - loss: 25.2584 - mse: 25.2584 - mae: 3.9061 - val_loss: 15.5086 - val_mse: 15.5086 - val_mae: 3.1161 Epoch 28/1000000 8/8 [==============================] - 0s 6ms/step - loss: 24.2090 - mse: 24.2090 - mae: 3.7939 - val_loss: 14.5276 - val_mse: 14.5276 - val_mae: 2.9693 Epoch 29/1000000 8/8 [==============================] - 0s 6ms/step - loss: 22.9322 - mse: 22.9322 - mae: 3.6632 - val_loss: 13.8862 - val_mse: 13.8862 - val_mae: 2.8762 Epoch 30/1000000 8/8 [==============================] - 0s 6ms/step - loss: 22.0180 - mse: 22.0180 - mae: 3.5710 - val_loss: 13.2805 - val_mse: 13.2805 - val_mae: 2.7965 Epoch 31/1000000 8/8 [==============================] - 0s 6ms/step - loss: 21.1483 - mse: 21.1483 - mae: 3.4978 - val_loss: 12.7900 - val_mse: 12.7900 - val_mae: 2.7263 Epoch 32/1000000 8/8 [==============================] - 0s 6ms/step - loss: 20.3630 - mse: 20.3630 - mae: 3.4349 - val_loss: 12.3845 - val_mse: 12.3845 - val_mae: 2.6650 Epoch 33/1000000 8/8 [==============================] - 0s 7ms/step - loss: 19.6723 - mse: 19.6723 - mae: 3.3849 - val_loss: 12.0135 - val_mse: 12.0135 - val_mae: 2.6025 Epoch 34/1000000 8/8 [==============================] - 0s 6ms/step - loss: 19.0808 - mse: 19.0808 - mae: 3.3416 - val_loss: 11.7055 - val_mse: 11.7055 - val_mae: 2.5487 Epoch 35/1000000 8/8 [==============================] - 0s 7ms/step - loss: 18.5532 - mse: 18.5532 - mae: 3.3036 - val_loss: 11.4776 - val_mse: 
11.4776 - val_mae: 2.5260 Epoch 36/1000000 8/8 [==============================] - 0s 6ms/step - loss: 17.9937 - mse: 17.9937 - mae: 3.2446 - val_loss: 11.2325 - val_mse: 11.2325 - val_mae: 2.5024 Epoch 37/1000000 8/8 [==============================] - 0s 6ms/step - loss: 17.5448 - mse: 17.5448 - mae: 3.2069 - val_loss: 11.0631 - val_mse: 11.0631 - val_mae: 2.4874 Epoch 38/1000000 8/8 [==============================] - 0s 6ms/step - loss: 17.1010 - mse: 17.1010 - mae: 3.1694 - val_loss: 10.8920 - val_mse: 10.8920 - val_mae: 2.4745 Epoch 39/1000000 8/8 [==============================] - 0s 7ms/step - loss: 16.7930 - mse: 16.7930 - mae: 3.1549 - val_loss: 10.7577 - val_mse: 10.7577 - val_mae: 2.4630 Epoch 40/1000000 8/8 [==============================] - 0s 8ms/step - loss: 16.3973 - mse: 16.3973 - mae: 3.1225 - val_loss: 10.6262 - val_mse: 10.6262 - val_mae: 2.4545 Epoch 41/1000000 8/8 [==============================] - 0s 6ms/step - loss: 16.0654 - mse: 16.0654 - mae: 3.0904 - val_loss: 10.5042 - val_mse: 10.5042 - val_mae: 2.4440 Epoch 42/1000000 8/8 [==============================] - 0s 6ms/step - loss: 15.7853 - mse: 15.7853 - mae: 3.0670 - val_loss: 10.3856 - val_mse: 10.3856 - val_mae: 2.4327 Epoch 43/1000000 8/8 [==============================] - 0s 6ms/step - loss: 15.5185 - mse: 15.5185 - mae: 3.0356 - val_loss: 10.3259 - val_mse: 10.3259 - val_mae: 2.4276 Epoch 44/1000000 8/8 [==============================] - 0s 6ms/step - loss: 15.2255 - mse: 15.2255 - mae: 3.0097 - val_loss: 10.2149 - val_mse: 10.2149 - val_mae: 2.4121 Epoch 45/1000000 8/8 [==============================] - 0s 7ms/step - loss: 15.0026 - mse: 15.0026 - mae: 2.9850 - val_loss: 10.1022 - val_mse: 10.1022 - val_mae: 2.4031 Epoch 46/1000000 8/8 [==============================] - 0s 7ms/step - loss: 14.7486 - mse: 14.7486 - mae: 2.9616 - val_loss: 10.0230 - val_mse: 10.0230 - val_mae: 2.3979 Epoch 47/1000000 8/8 [==============================] - 0s 6ms/step - loss: 14.5255 - mse: 14.5255 - 
mae: 2.9424 - val_loss: 9.9251 - val_mse: 9.9251 - val_mae: 2.3901 Epoch 48/1000000 8/8 [==============================] - 0s 6ms/step - loss: 14.3286 - mse: 14.3286 - mae: 2.9229 - val_loss: 9.8394 - val_mse: 9.8394 - val_mae: 2.3871 Epoch 49/1000000 8/8 [==============================] - 0s 6ms/step - loss: 14.1168 - mse: 14.1168 - mae: 2.9004 - val_loss: 9.7418 - val_mse: 9.7418 - val_mae: 2.3783 Epoch 50/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.9404 - mse: 13.9404 - mae: 2.8744 - val_loss: 9.6517 - val_mse: 9.6517 - val_mae: 2.3727 Epoch 51/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.7495 - mse: 13.7495 - mae: 2.8538 - val_loss: 9.5510 - val_mse: 9.5510 - val_mae: 2.3667 Epoch 52/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.6067 - mse: 13.6067 - mae: 2.8392 - val_loss: 9.4492 - val_mse: 9.4492 - val_mae: 2.3661 Epoch 53/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.4129 - mse: 13.4129 - mae: 2.8197 - val_loss: 9.3211 - val_mse: 9.3211 - val_mae: 2.3541 Epoch 54/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.2448 - mse: 13.2448 - mae: 2.7935 - val_loss: 9.2381 - val_mse: 9.2381 - val_mae: 2.3472 Epoch 55/1000000 8/8 [==============================] - 0s 6ms/step - loss: 13.0863 - mse: 13.0863 - mae: 2.7735 - val_loss: 9.1407 - val_mse: 9.1407 - val_mae: 2.3352 Epoch 56/1000000 8/8 [==============================] - 0s 7ms/step - loss: 12.9425 - mse: 12.9425 - mae: 2.7579 - val_loss: 9.0625 - val_mse: 9.0625 - val_mae: 2.3308 Epoch 57/1000000 8/8 [==============================] - 0s 7ms/step - loss: 12.7996 - mse: 12.7996 - mae: 2.7431 - val_loss: 8.9771 - val_mse: 8.9771 - val_mae: 2.3262 Epoch 58/1000000 8/8 [==============================] - 0s 7ms/step - loss: 12.6629 - mse: 12.6629 - mae: 2.7227 - val_loss: 8.8658 - val_mse: 8.8658 - val_mae: 2.3093 Epoch 59/1000000 8/8 [==============================] - 0s 7ms/step - loss: 12.5486 
- mse: 12.5486 - mae: 2.7056 - val_loss: 8.7839 - val_mse: 8.7839 - val_mae: 2.3082 Epoch 60/1000000 8/8 [==============================] - 0s 7ms/step - loss: 12.4078 - mse: 12.4078 - mae: 2.6890 - val_loss: 8.6590 - val_mse: 8.6590 - val_mae: 2.2949 Epoch 61/1000000 8/8 [==============================] - 0s 6ms/step - loss: 12.2759 - mse: 12.2759 - mae: 2.6763 - val_loss: 8.6083 - val_mse: 8.6083 - val_mae: 2.2918 Epoch 62/1000000 8/8 [==============================] - 0s 6ms/step - loss: 12.1361 - mse: 12.1361 - mae: 2.6588 - val_loss: 8.4741 - val_mse: 8.4741 - val_mae: 2.2766 Epoch 63/1000000 8/8 [==============================] - 0s 6ms/step - loss: 12.0256 - mse: 12.0256 - mae: 2.6456 - val_loss: 8.4097 - val_mse: 8.4097 - val_mae: 2.2687 Epoch 64/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.8915 - mse: 11.8915 - mae: 2.6246 - val_loss: 8.3086 - val_mse: 8.3086 - val_mae: 2.2563 Epoch 65/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.8092 - mse: 11.8092 - mae: 2.6126 - val_loss: 8.2528 - val_mse: 8.2528 - val_mae: 2.2520 Epoch 66/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.6786 - mse: 11.6786 - mae: 2.5970 - val_loss: 8.1672 - val_mse: 8.1672 - val_mae: 2.2444 Epoch 67/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.5843 - mse: 11.5843 - mae: 2.5834 - val_loss: 8.0760 - val_mse: 8.0760 - val_mae: 2.2309 Epoch 68/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.4636 - mse: 11.4636 - mae: 2.5617 - val_loss: 8.0269 - val_mse: 8.0269 - val_mae: 2.2249 Epoch 69/1000000 8/8 [==============================] - 0s 7ms/step - loss: 11.3359 - mse: 11.3359 - mae: 2.5475 - val_loss: 7.9817 - val_mse: 7.9817 - val_mae: 2.2163 Epoch 70/1000000 8/8 [==============================] - 0s 7ms/step - loss: 11.2480 - mse: 11.2480 - mae: 2.5358 - val_loss: 7.9093 - val_mse: 7.9093 - val_mae: 2.2056 Epoch 71/1000000 8/8 [==============================] - 0s 6ms/step 
- loss: 11.1631 - mse: 11.1631 - mae: 2.5231 - val_loss: 7.7891 - val_mse: 7.7891 - val_mae: 2.1913 Epoch 72/1000000 8/8 [==============================] - 0s 6ms/step - loss: 11.0645 - mse: 11.0645 - mae: 2.5110 - val_loss: 7.7385 - val_mse: 7.7385 - val_mae: 2.1833 Epoch 73/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.9938 - mse: 10.9938 - mae: 2.4976 - val_loss: 7.6522 - val_mse: 7.6522 - val_mae: 2.1700 Epoch 74/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.8935 - mse: 10.8935 - mae: 2.4815 - val_loss: 7.6189 - val_mse: 7.6189 - val_mae: 2.1657 Epoch 75/1000000 8/8 [==============================] - 0s 7ms/step - loss: 10.8259 - mse: 10.8259 - mae: 2.4676 - val_loss: 7.5397 - val_mse: 7.5397 - val_mae: 2.1508 Epoch 76/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.6963 - mse: 10.6963 - mae: 2.4517 - val_loss: 7.4634 - val_mse: 7.4634 - val_mae: 2.1404 Epoch 77/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.6242 - mse: 10.6242 - mae: 2.4428 - val_loss: 7.4396 - val_mse: 7.4396 - val_mae: 2.1347 Epoch 78/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.6044 - mse: 10.6044 - mae: 2.4390 - val_loss: 7.4424 - val_mse: 7.4424 - val_mae: 2.1424 Epoch 79/1000000 8/8 [==============================] - 0s 7ms/step - loss: 10.5004 - mse: 10.5004 - mae: 2.4208 - val_loss: 7.3107 - val_mse: 7.3107 - val_mae: 2.1071 Epoch 80/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.4023 - mse: 10.4023 - mae: 2.4003 - val_loss: 7.2547 - val_mse: 7.2547 - val_mae: 2.1029 Epoch 81/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.3249 - mse: 10.3249 - mae: 2.3917 - val_loss: 7.2194 - val_mse: 7.2194 - val_mae: 2.1019 Epoch 82/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.2200 - mse: 10.2200 - mae: 2.3743 - val_loss: 7.1231 - val_mse: 7.1231 - val_mae: 2.0845 Epoch 83/1000000 8/8 
[==============================] - 0s 6ms/step - loss: 10.1508 - mse: 10.1508 - mae: 2.3627 - val_loss: 7.0546 - val_mse: 7.0546 - val_mae: 2.0715 Epoch 84/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.0804 - mse: 10.0804 - mae: 2.3517 - val_loss: 7.0395 - val_mse: 7.0395 - val_mae: 2.0761 Epoch 85/1000000 8/8 [==============================] - 0s 6ms/step - loss: 10.0314 - mse: 10.0314 - mae: 2.3436 - val_loss: 6.9552 - val_mse: 6.9552 - val_mae: 2.0594 Epoch 86/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.9485 - mse: 9.9485 - mae: 2.3358 - val_loss: 6.9313 - val_mse: 6.9313 - val_mae: 2.0576 Epoch 87/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.8974 - mse: 9.8974 - mae: 2.3284 - val_loss: 6.8412 - val_mse: 6.8412 - val_mae: 2.0431 Epoch 88/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.8392 - mse: 9.8392 - mae: 2.3165 - val_loss: 6.8458 - val_mse: 6.8458 - val_mae: 2.0509 Epoch 89/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.7424 - mse: 9.7424 - mae: 2.3074 - val_loss: 6.7615 - val_mse: 6.7615 - val_mae: 2.0275 Epoch 90/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.6783 - mse: 9.6783 - mae: 2.2927 - val_loss: 6.6995 - val_mse: 6.6995 - val_mae: 2.0185 Epoch 91/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.6328 - mse: 9.6328 - mae: 2.2854 - val_loss: 6.6758 - val_mse: 6.6758 - val_mae: 2.0128 Epoch 92/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.5734 - mse: 9.5734 - mae: 2.2815 - val_loss: 6.6341 - val_mse: 6.6341 - val_mae: 1.9984 Epoch 93/1000000 8/8 [==============================] - 0s 8ms/step - loss: 9.5030 - mse: 9.5030 - mae: 2.2736 - val_loss: 6.6025 - val_mse: 6.6025 - val_mae: 1.9973 Epoch 94/1000000 8/8 [==============================] - 0s 9ms/step - loss: 9.5452 - mse: 9.5452 - mae: 2.2649 - val_loss: 6.5194 - val_mse: 6.5194 - val_mae: 1.9748 Epoch 95/1000000 8/8 
[==============================] - 0s 6ms/step - loss: 9.3830 - mse: 9.3830 - mae: 2.2495 - val_loss: 6.4962 - val_mse: 6.4962 - val_mae: 1.9780 Epoch 96/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.3768 - mse: 9.3768 - mae: 2.2604 - val_loss: 6.5596 - val_mse: 6.5596 - val_mae: 1.9971 Epoch 97/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.2852 - mse: 9.2852 - mae: 2.2461 - val_loss: 6.4629 - val_mse: 6.4629 - val_mae: 1.9756 Epoch 98/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.2196 - mse: 9.2196 - mae: 2.2342 - val_loss: 6.4174 - val_mse: 6.4174 - val_mae: 1.9650 Epoch 99/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.2060 - mse: 9.2060 - mae: 2.2231 - val_loss: 6.3347 - val_mse: 6.3347 - val_mae: 1.9301 Epoch 100/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.1877 - mse: 9.1877 - mae: 2.2337 - val_loss: 6.4189 - val_mse: 6.4189 - val_mae: 1.9655 Epoch 101/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.0935 - mse: 9.0935 - mae: 2.2148 - val_loss: 6.2853 - val_mse: 6.2853 - val_mae: 1.9304 Epoch 102/1000000 8/8 [==============================] - 0s 6ms/step - loss: 9.0322 - mse: 9.0322 - mae: 2.1993 - val_loss: 6.2873 - val_mse: 6.2873 - val_mae: 1.9493 Epoch 103/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.9756 - mse: 8.9756 - mae: 2.2022 - val_loss: 6.2435 - val_mse: 6.2435 - val_mae: 1.9410 Epoch 104/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.9284 - mse: 8.9284 - mae: 2.2016 - val_loss: 6.1633 - val_mse: 6.1633 - val_mae: 1.9260 Epoch 105/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.8816 - mse: 8.8816 - mae: 2.1850 - val_loss: 6.1232 - val_mse: 6.1232 - val_mae: 1.9188 Epoch 106/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.8398 - mse: 8.8398 - mae: 2.1809 - val_loss: 6.0920 - val_mse: 6.0920 - val_mae: 1.9098 Epoch 107/1000000 8/8 
[==============================] - 0s 6ms/step - loss: 8.8265 - mse: 8.8265 - mae: 2.1848 - val_loss: 6.0940 - val_mse: 6.0940 - val_mae: 1.9236 Epoch 108/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.7640 - mse: 8.7640 - mae: 2.1715 - val_loss: 6.0610 - val_mse: 6.0610 - val_mae: 1.9060 Epoch 109/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.7224 - mse: 8.7224 - mae: 2.1662 - val_loss: 6.0541 - val_mse: 6.0541 - val_mae: 1.9112 Epoch 110/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.6910 - mse: 8.6910 - mae: 2.1696 - val_loss: 6.1437 - val_mse: 6.1437 - val_mae: 1.9240 Epoch 111/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.6511 - mse: 8.6511 - mae: 2.1704 - val_loss: 6.0158 - val_mse: 6.0158 - val_mae: 1.9025 Epoch 112/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.6292 - mse: 8.6292 - mae: 2.1474 - val_loss: 5.9508 - val_mse: 5.9508 - val_mae: 1.8778 Epoch 113/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.6114 - mse: 8.6114 - mae: 2.1461 - val_loss: 6.0427 - val_mse: 6.0427 - val_mae: 1.8938 Epoch 114/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.5831 - mse: 8.5831 - mae: 2.1602 - val_loss: 6.0419 - val_mse: 6.0419 - val_mae: 1.9017 Epoch 115/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.5452 - mse: 8.5452 - mae: 2.1361 - val_loss: 5.9237 - val_mse: 5.9237 - val_mae: 1.8610 Epoch 116/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.4662 - mse: 8.4662 - mae: 2.1218 - val_loss: 5.9197 - val_mse: 5.9197 - val_mae: 1.8758 Epoch 117/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.5355 - mse: 8.5355 - mae: 2.1472 - val_loss: 6.0528 - val_mse: 6.0528 - val_mae: 1.9083 Epoch 118/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.4083 - mse: 8.4083 - mae: 2.1386 - val_loss: 5.9042 - val_mse: 5.9042 - val_mae: 1.8812 Epoch 119/1000000 
8/8 [==============================] - 0s 6ms/step - loss: 8.3949 - mse: 8.3949 - mae: 2.1215 - val_loss: 5.8084 - val_mse: 5.8084 - val_mae: 1.8533 Epoch 120/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.3528 - mse: 8.3528 - mae: 2.1110 - val_loss: 5.8486 - val_mse: 5.8486 - val_mae: 1.8559 Epoch 121/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.3250 - mse: 8.3250 - mae: 2.1250 - val_loss: 5.9246 - val_mse: 5.9246 - val_mae: 1.8784 Epoch 122/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.2986 - mse: 8.2986 - mae: 2.1247 - val_loss: 5.8186 - val_mse: 5.8186 - val_mae: 1.8560 Epoch 123/1000000 8/8 [==============================] - 0s 8ms/step - loss: 8.2734 - mse: 8.2734 - mae: 2.1152 - val_loss: 5.8367 - val_mse: 5.8367 - val_mae: 1.8539 Epoch 124/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.4258 - mse: 8.4258 - mae: 2.1183 - val_loss: 5.7235 - val_mse: 5.7235 - val_mae: 1.8236 Epoch 125/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.1923 - mse: 8.1923 - mae: 2.0936 - val_loss: 5.8179 - val_mse: 5.8179 - val_mae: 1.8510 Epoch 126/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.1941 - mse: 8.1941 - mae: 2.1091 - val_loss: 5.9638 - val_mse: 5.9638 - val_mae: 1.8873 Epoch 127/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.1723 - mse: 8.1723 - mae: 2.1099 - val_loss: 5.7563 - val_mse: 5.7563 - val_mae: 1.8393 Epoch 128/1000000 8/8 [==============================] - 0s 10ms/step - loss: 8.1336 - mse: 8.1336 - mae: 2.0889 - val_loss: 5.7177 - val_mse: 5.7177 - val_mae: 1.8285 Epoch 129/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.1087 - mse: 8.1087 - mae: 2.0860 - val_loss: 5.7552 - val_mse: 5.7552 - val_mae: 1.8368 Epoch 130/1000000 8/8 [==============================] - 0s 8ms/step - loss: 8.0991 - mse: 8.0991 - mae: 2.0898 - val_loss: 5.7808 - val_mse: 5.7808 - val_mae: 1.8487 Epoch 
131/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.0629 - mse: 8.0629 - mae: 2.0842 - val_loss: 5.6608 - val_mse: 5.6608 - val_mae: 1.8128 Epoch 132/1000000 8/8 [==============================] - 0s 7ms/step - loss: 8.0331 - mse: 8.0331 - mae: 2.0713 - val_loss: 5.6491 - val_mse: 5.6491 - val_mae: 1.8162 Epoch 133/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.0057 - mse: 8.0057 - mae: 2.0733 - val_loss: 5.7308 - val_mse: 5.7308 - val_mae: 1.8352 Epoch 134/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.9893 - mse: 7.9893 - mae: 2.0758 - val_loss: 5.6930 - val_mse: 5.6930 - val_mae: 1.8259 Epoch 135/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.0091 - mse: 8.0091 - mae: 2.0671 - val_loss: 5.5845 - val_mse: 5.5845 - val_mae: 1.8013 Epoch 136/1000000 8/8 [==============================] - 0s 6ms/step - loss: 8.0114 - mse: 8.0114 - mae: 2.0781 - val_loss: 5.7329 - val_mse: 5.7329 - val_mae: 1.8386 Epoch 137/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.9422 - mse: 7.9422 - mae: 2.0646 - val_loss: 5.5616 - val_mse: 5.5616 - val_mae: 1.7928 Epoch 138/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.9279 - mse: 7.9279 - mae: 2.0602 - val_loss: 5.5905 - val_mse: 5.5905 - val_mae: 1.8037 Epoch 139/1000000 8/8 [==============================] - 0s 7ms/step - loss: 7.9096 - mse: 7.9096 - mae: 2.0570 - val_loss: 5.5992 - val_mse: 5.5992 - val_mae: 1.8031 Epoch 140/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.9045 - mse: 7.9045 - mae: 2.0559 - val_loss: 5.5833 - val_mse: 5.5833 - val_mae: 1.8009 Epoch 141/1000000 8/8 [==============================] - 0s 8ms/step - loss: 7.8974 - mse: 7.8974 - mae: 2.0628 - val_loss: 5.7161 - val_mse: 5.7161 - val_mae: 1.8403 Epoch 142/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.8588 - mse: 7.8588 - mae: 2.0644 - val_loss: 5.5998 - val_mse: 5.5998 - val_mae: 1.8133 
Epoch 143/1000000 8/8 [==============================] - 0s 7ms/step - loss: 7.8071 - mse: 7.8071 - mae: 2.0451 - val_loss: 5.4817 - val_mse: 5.4817 - val_mae: 1.7809 Epoch 144/1000000 8/8 [==============================] - 0s 8ms/step - loss: 7.8430 - mse: 7.8430 - mae: 2.0430 - val_loss: 5.4700 - val_mse: 5.4700 - val_mae: 1.7729 Epoch 145/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.9178 - mse: 7.9178 - mae: 2.0467 - val_loss: 5.5165 - val_mse: 5.5165 - val_mae: 1.7827 Epoch 146/1000000 8/8 [==============================] - 0s 8ms/step - loss: 7.8002 - mse: 7.8002 - mae: 2.0629 - val_loss: 5.7450 - val_mse: 5.7450 - val_mae: 1.8398 Epoch 147/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.8035 - mse: 7.8035 - mae: 2.0618 - val_loss: 5.5874 - val_mse: 5.5874 - val_mae: 1.8121 Epoch 148/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.7408 - mse: 7.7408 - mae: 2.0420 - val_loss: 5.4925 - val_mse: 5.4925 - val_mae: 1.7884 Epoch 149/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.8221 - mse: 7.8221 - mae: 2.0413 - val_loss: 5.4456 - val_mse: 5.4456 - val_mae: 1.7811 Epoch 150/1000000 8/8 [==============================] - 0s 7ms/step - loss: 7.7186 - mse: 7.7186 - mae: 2.0430 - val_loss: 5.6643 - val_mse: 5.6643 - val_mae: 1.8215 Epoch 151/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.7565 - mse: 7.7565 - mae: 2.0427 - val_loss: 5.5452 - val_mse: 5.5452 - val_mae: 1.7965 Epoch 152/1000000 8/8 [==============================] - 0s 9ms/step - loss: 7.7227 - mse: 7.7227 - mae: 2.0389 - val_loss: 5.4819 - val_mse: 5.4819 - val_mae: 1.7930 Epoch 153/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6789 - mse: 7.6789 - mae: 2.0294 - val_loss: 5.5544 - val_mse: 5.5544 - val_mae: 1.8045 Epoch 154/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6771 - mse: 7.6771 - mae: 2.0350 - val_loss: 5.5747 - val_mse: 5.5747 - val_mae: 
1.8077 Epoch 155/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6633 - mse: 7.6633 - mae: 2.0302 - val_loss: 5.5368 - val_mse: 5.5368 - val_mae: 1.8041 Epoch 156/1000000 8/8 [==============================] - 0s 7ms/step - loss: 7.6499 - mse: 7.6499 - mae: 2.0333 - val_loss: 5.5256 - val_mse: 5.5256 - val_mae: 1.8021 Epoch 157/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6654 - mse: 7.6654 - mae: 2.0240 - val_loss: 5.4533 - val_mse: 5.4533 - val_mae: 1.7915 Epoch 158/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6375 - mse: 7.6375 - mae: 2.0224 - val_loss: 5.5145 - val_mse: 5.5145 - val_mae: 1.7989 Epoch 159/1000000 8/8 [==============================] - 0s 6ms/step - loss: 7.6126 - mse: 7.6126 - mae: 2.0258 - val_loss: 5.5094 - val_mse: 5.5094 - val_mae: 1.7911
In [61]:
# Plot the curves for the early-stopped run (terminated well before the epoch limit).
plot_history( epoch_history )
In [ ]:
## 요약 : 이방법은 더 진행하는 의미가 없을때 중단하는것이 목적이다.