■ 금융지표와 뉴스를 활용한 멀티모달학습 기반의 당기순이익(NIM) 예측 모델¶


  • 2018.1.1 ~ 2024.8.31 : 은행의 NIM 일별 수치데이터

  • 2018.1.1 ~ 2024.8.31 : 한국은행의 통계지표 일별 수치데이터

  • 2018.1.1 ~ 2024.10.21 : 금융권관련 뉴스 일별 텍스트데이터

  • Multi-Modal 및 LSTM을 이용한 시계열 예측 모델


1. 환경구성¶

  • 1-1. 개발환경 버전확인

In [1]:
################################################################################
# 랜덤 시드 값 설정
################################################################################
import os
# os.environ['PYTHONHASHSEED'] = '0'
os.environ['CUDA_VISIBLE_DEVICES'] = ''

# import numpy as np
# import tensorflow as tf
# import random
# seed_value = 42
# np.random.seed(seed_value)
# tf.random.set_seed(seed_value)
# random.seed(seed_value)
################################################################################
In [2]:
cuda_version = 'Cuda not installed'

try:
    import pycuda.driver as cuda
    import pycuda.autoinit

    # CUDA 장치가 있는지 확인 후 초기화
    cuda.init()
    if cuda.Device.count() > 0:
        version = cuda.get_version()
        cuda_version = f'CUDA Version: {version[0]}.{version[1]}'
    else:
        cuda_version = 'No CUDA-capable device found'
except ImportError:
    cuda_version = 'pycuda not installed'
except Exception as e:
    cuda_version = f'{str(e)}'
    
In [3]:
import sys
import keras
import tensorflow as tf
import numpy as np
import matplotlib

print("-"*80)
print(f"Python version : {sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}")
print("Keras version : " + keras.__version__)
print("Tensorflow version : " + tf.__version__)
print(f"CUDA version : {cuda_version}")
print(f"Numpy version : {np.__version__}")
print("Matplotlib version: " + matplotlib.__version__)
print("-"*80)
2024-11-05 00:58:26.172063: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.
2024-11-05 00:58:26.195489: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
2024-11-05 00:58:26.195515: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
2024-11-05 00:58:26.196109: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
2024-11-05 00:58:26.200207: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
2024-11-05 00:58:26.703024: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
--------------------------------------------------------------------------------
Python version : 3.11.10
Keras version : 2.15.0
Tensorflow version : 2.15.0
CUDA version : cuInit failed: no CUDA-capable device is detected
Numpy version : 1.26.4
Matplotlib version: 3.9.2
--------------------------------------------------------------------------------
  • 1-2. 패키지 의존성확인

pip freeze > requirements.txt
pip install -r requirements.txt

# requirements.txt 파일에서 file:// 경로를 가진 줄을 삭제하는 스크립트
with open('requirements.txt', 'r') as f:
    lines = f.readlines()

with open('requirements_clean.txt', 'w') as f:
    for line in lines:
        if 'file://' not in line:
            f.write(line)
In [4]:
pip freeze
absl-py==2.1.0
accelerate==1.0.0
aggdraw==1.3.19
aiohappyeyeballs==2.4.3
aiohttp==3.10.9
aiosignal==1.3.1
ann_visualizer==2.5
anyio==4.6.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1698341106958/work
astunparse==1.6.3
async-lru==2.0.4
attrs==24.2.0
babel==2.16.0
beautifulsoup4==4.12.3
bleach==6.1.0
cachetools==5.5.0
certifi==2024.8.30
cffi==1.17.1
chardet==3.0.4
charset-normalizer==3.3.2
click==8.1.7
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1710320294760/work
contourpy==1.3.0
cycler==0.12.1
datasets==3.0.1
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1725269156501/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1641555617451/work
defusedxml==0.7.1
dill==0.3.8
entrypoints==0.4
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1720869315914/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1725214404607/work
fastjsonschema==2.20.0
fasttext==0.9.3
filelock==3.16.1
flatbuffers==24.3.25
fonttools==4.53.1
fqdn==1.5.1
frozenlist==1.4.1
fsspec==2024.6.1
gast==0.6.0
gensim==4.3.3
google-auth==2.34.0
google-auth-oauthlib==1.2.1
google-pasta==0.2.0
googletrans==4.0.0rc1
graphviz==0.20.3
grpcio==1.66.1
h11==0.14.0
h2==3.2.0
h5py==3.11.0
hpack==3.0.0
hstspreload==2024.9.1
httpcore==1.0.6
httpx==0.27.2
huggingface-hub==0.25.1
hyperframe==5.2.0
idna==2.10
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1726082825846/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1725050136642/work
ipywidgets==8.1.5
isoduration==20.11.0
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1696326070614/work
Jinja2==3.1.4
joblib==1.4.2
JPype1==1.5.0
json5==0.9.25
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.10.0
jupyter-lsp==2.2.5
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1716472197302/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1710257359434/work
jupyter_server==2.14.2
jupyter_server_terminals==0.5.3
jupyterlab==4.2.5
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kagglehub==0.3.0
keras==2.15.0
keras-nlp==0.15.0
keras-tuner==1.4.7
kiwisolver==1.4.7
kobert-transformers==0.6.0
konlpy==0.6.0
kt-legacy==1.0.5
libclang==18.1.1
lxml==5.3.0
Mako==1.3.5
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==2.1.5
matplotlib==3.9.2
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1713250518406/work
mdurl==0.1.2
mecab-python3==1.0.9
mistune==0.8.4
ml-dtypes==0.2.0
mpmath==1.3.0
multidict==6.1.0
multiprocess==0.70.16
namex==0.0.8
nbclient==0.10.0
nbconvert==5.6.1
nbformat==5.10.4
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1705850609492/work
networkx==3.3
nltk==3.9.1
notebook==7.2.2
notebook_shim==0.2.4
numpy==1.26.4
nvidia-cublas-cu12==12.1.3.1
nvidia-cuda-cupti-cu12==12.1.105
nvidia-cuda-nvrtc-cu12==12.1.105
nvidia-cuda-runtime-cu12==12.1.105
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.0.2.54
nvidia-curand-cu12==10.3.2.106
nvidia-cusolver-cu12==11.4.5.107
nvidia-cusparse-cu12==12.1.0.106
nvidia-nccl-cu12==2.20.5
nvidia-nvjitlink-cu12==12.6.77
nvidia-nvtx-cu12==12.1.105
oauthlib==3.2.2
opt-einsum==3.3.0
optree==0.12.1
overrides==7.7.0
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1718189413536/work
pandas==2.2.2
pandocfilters==1.5.1
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1712320355065/work
patsy==0.5.6
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1706113125309/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1602536217715/work
pillow==10.4.0
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/platformdirs_1726315398971/work
prometheus_client==0.21.0
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1718047967974/work
propcache==0.2.0
protobuf==4.25.4
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1725737916418/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1609419310487/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1721585709575/work
pyarrow==17.0.0
pyasn1==0.6.1
pyasn1_modules==0.4.1
pybind11==2.13.6
pycparser==2.22
pycuda==2024.1.2
pydot @ file:///home/conda/feedstock_root/build_artifacts/pydot_1726737228028/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1714846767233/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1724616129934/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1709299778482/work
python-json-logger==2.0.7
pytools==2024.1.14
pytz==2024.2
PyYAML==6.0.2
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1725448927736/work
referencing==0.35.1
regex==2024.9.11
requests==2.32.3
requests-oauthlib==2.0.0
rfc3339-validator==0.1.4
rfc3986==1.5.0
rfc3986-validator==0.1.1
rich==13.8.1
rouge_score==0.1.2
rpds-py==0.20.0
rsa==4.9
safetensors==0.4.5
scikit-learn==1.5.2
scipy==1.13.1
seaborn==0.13.2
Send2Trash==1.8.3
sentence-transformers==3.1.1
sentencepiece==0.2.0
six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
smart-open==7.0.5
sniffio==1.3.1
soupsieve==2.6
stack-data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1669632077133/work
statsmodels==0.14.4
sympy==1.13.3
tensorboard==2.15.2
tensorboard-data-server==0.7.2
tensorflow==2.15.0
tensorflow-estimator==2.15.0
tensorflow-hub==0.16.1
tensorflow-io-gcs-filesystem==0.37.1
tensorflow-text==2.15.0
termcolor==2.4.0
terminado==0.18.1
testpath==0.6.0
textblob==0.18.0.post0
tf_keras==2.15.1
threadpoolctl==3.5.0
tinycss2==1.3.0
tokenizers==0.20.0
torch==2.4.1
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1724956126282/work
tqdm==4.66.5
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1713535121073/work
transformers==4.45.2
triton==3.0.0
types-python-dateutil==2.9.0.20241003
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/typing_extensions_1717802530399/work
tzdata==2024.1
uri-template==1.3.0
urllib3==2.2.3
visualkeras==0.1.3
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1704731205417/work
webcolors==24.8.0
webencodings==0.5.1
websocket-client==1.8.0
Werkzeug==3.0.4
widgetsnbextension==4.0.13
wordcloud==1.9.3
wrapt==1.14.1
xxhash==3.5.0
yarl==1.14.0
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1726248574750/work
Note: you may need to restart the kernel to use updated packages.
  • 1-3. Matplot & Numpy 환경설정

sudo apt-get install fonts-nanum* # 폰트 설치
sudo fc-cache -fv # 캐시 제거
sudo fc-list | grep nanum # 폰트 설치 확인
rm -rf ~/.cache/matplotlib/* # matplotlib 캐시 제거
In [5]:
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm

import numpy as np

# NumPy 출력 설정: 모든 값이 출력되도록 설정
np.set_printoptions(threshold=np.inf)
# NumPy 출력 설정: 배열이 한 줄로 출력되도록 설정
np.set_printoptions(linewidth=np.inf)  # 출력 라인의 길이를 무한대로 설정

# Pandas 옵션 설정: 텍스트 생략 없이 출력
pd.set_option('display.max_colwidth', None)
pd.set_option('display.max_rows', 1000)  
pd.set_option('display.max_columns', None)  
pd.options.display.float_format = '{:.8f}'.format

# 나눔 폰트 설정
plt.rc('font', family='NanumGothic') 
mpl.rcParams['axes.unicode_minus'] = False  # 유니코드 마이너스를 일반 마이너스로 변경

# 폰트가 제대로 설정되었는지 확인
print([f.name for f in fm.fontManager.ttflist if 'Nanum' in f.name])
['NanumMyeongjo', 'NanumGothic', 'NanumMyeongjo', 'NanumSquareRound', 'NanumBarunpen', 'NanumBarunpen', 'NanumMyeongjo YetHangul', 'Nanum Brush Script', 'NanumSquare_ac', 'NanumGothicCoding', 'NanumGothic', 'NanumGothic', 'NanumBarunGothic YetHangul', 'NanumSquare_ac', 'NanumBarunGothic', 'NanumSquare', 'NanumSquareRound', 'NanumGothic Eco', 'NanumBarunGothic', 'NanumMyeongjo Eco', 'NanumMyeongjo Eco', 'NanumMyeongjo', 'NanumGothicCoding', 'NanumBarunGothic', 'NanumGothic Eco', 'NanumSquareRound', 'NanumSquare', 'NanumSquare', 'NanumSquare_ac', 'NanumSquareRound', 'NanumGothic Eco', 'NanumSquare_ac', 'NanumMyeongjo Eco', 'NanumSquare', 'Nanum Pen Script', 'NanumGothic', 'NanumMyeongjo Eco', 'NanumGothic Eco', 'NanumBarunGothic']

6. Network 설계¶

  • 6-1. 예측모델 시계열데이터셋 정의(Keras timeseries_dataset_from_array)

In [6]:
import numpy as np

# nim_date
nim_date = np.load('data/numpy/nim_date.npy', allow_pickle=True)
print(f"nim_date loaded: data/numpy/nim_date.npy")
print(f"nim_date[:10]: {nim_date[:10]}")
print('-'*80)

# target_values(표준화된 NIM값 차분:훈련용 타겟변수)
target_values = np.load('data/numpy/target_values.npy')
print(f"target_values loaded: data/numpy/target_values.npy")
print(f"target_values[:10]: {target_values[:10]}")
print('-'*80)

# 변수값
nim_variables = np.load('data/numpy/nim_variables.npy')
print(f"nim_variables loaded: data/numpy/nim_variables.npy")

total_size = int(nim_variables[0])
train_size = int(nim_variables[1])
val_size   = int(nim_variables[2])
test_size  = int(nim_variables[3])
nim_train_mean = nim_variables[4]
nim_train_std  = nim_variables[5]
target_train_mean = nim_variables[6]
target_train_std  = nim_variables[7]
correct_threshold = nim_variables[8]

print(f"total_size: {total_size}, {total_size/24}일")
print(f"train_size: {train_size}, {train_size/24}일")
print(f"val_size  : {val_size}, {val_size/24}일")
print(f"test_size : {test_size}, {test_size/24}일")
print(f"nim_train_mean: {nim_train_mean:+.8f}")
print(f"nim_train_std : {nim_train_std:+.8f}")
print(f"target_train_mean: {target_train_mean:+.8f}")
print(f"target_train_std : {target_train_std:+.8f}")
print(f"correct_threshold : {correct_threshold:+.8f}")
print('-'*80)

# nim_ibks_data(Standardization)
nim_ibks_data = np.load('data/numpy/nim_ibks_data.npy')
print(f"nim_ibks_data loaded: data/numpy/nim_ibks_data.npy")
print(f"nim_ibks_data.shape: {nim_ibks_data.shape}")  
print(nim_ibks_data[:10, :]) 
print('-'*80)

# nim_news_data(Standardization)
nim_boks_data = np.load('data/numpy/nim_boks_data.npy')
print(f"nim_boks_data loaded: data/numpy/nim_boks_data.npy")
print(f"nim_boks_data.shape: {nim_boks_data.shape}")  
print(nim_boks_data[:10, :]) 
print('-'*80)

# nim_news_data(Standardization)
nim_news_data = np.load('data/numpy/nim_news_data.npy')
print(f"nim_news_data loaded: data/numpy/nim_news_data.npy")
print(f"nim_news_data.shape: {nim_news_data.shape}")  
print(nim_news_data[:10, :]) 
print('-'*80)
nim_date loaded: data/numpy/nim_date.npy
nim_date[:10]: ['2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02']
--------------------------------------------------------------------------------
target_values loaded: data/numpy/target_values.npy
target_values[:10]: [2.57046127 2.57044797 2.57042241 2.57043058 2.57045744 2.57041822 2.57046363 2.57042967 2.57041355 2.57045421]
--------------------------------------------------------------------------------
nim_variables loaded: data/numpy/nim_variables.npy
total_size: 39144, 1631.0일
train_size: 27384, 1141.0일
val_size  : 7824, 326.0일
test_size : 3936, 164.0일
nim_train_mean: +1.69604715
nim_train_std : +0.18829625
target_train_mean: +0.00075180
target_train_std : +0.03153061
correct_threshold : +0.02288361
--------------------------------------------------------------------------------
nim_ibks_data loaded: data/numpy/nim_ibks_data.npy
nim_ibks_data.shape: (39144, 23)
[[ 0.78164967  0.98740584  0.40947392  1.16488858  0.41430169  0.63326199  0.08316508  1.72308162  1.15192274  0.09237283  0.60186645 -0.09195605  0.36697266  0.39283116  0.72249536  0.89143847  0.46964555  0.63428204  0.93241689  0.79204667  1.52295708  0.94421724  1.28230299]
 [ 0.78164946  0.98740568  0.40947369  1.16488851  0.41430157  0.63326181  0.08316499  1.72308082  1.15192237  0.0923725   0.60186625 -0.09195629  0.36697243  0.39283087  0.72249504  0.89143818  0.46964533  0.63428167  0.9324167   0.79204645  1.52295689  0.94421707  1.28230197]
 [ 0.7816493   0.98740557  0.40947353  1.16488846  0.41430149  0.63326167  0.08316492  1.72308022  1.15192209  0.09237225  0.60186611 -0.09195647  0.36697226  0.39283066  0.7224948   0.89143796  0.46964517  0.63428139  0.93241656  0.79204629  1.52295675  0.94421694  1.28230121]
 [ 0.78165054  0.98740646  0.40947481  1.16488884  0.41430215  0.63326274  0.08316545  1.72308483  1.15192422  0.09237416  0.60186725 -0.09195511  0.36697359  0.39283231  0.72249665  0.89143964  0.46964642  0.63428353  0.93241763  0.79204755  1.52295784  0.94421791  1.28229892]
 [ 0.78165167  0.98740728  0.40947598  1.1648892   0.41430276  0.63326373  0.08316594  1.72308906  1.15192618  0.09237592  0.60186829 -0.09195386  0.36697481  0.39283383  0.72249834  0.89144117  0.46964757  0.63428549  0.9324186   0.79204871  1.52295884  0.9442188   1.28230839]
 [ 0.78164965  0.98740582  0.4094739   1.16488857  0.41430168  0.63326198  0.08316507  1.72308155  1.15192271  0.0923728   0.60186644 -0.09195607  0.36697264  0.39283113  0.72249533  0.89143845  0.46964553  0.63428201  0.93241687  0.79204665  1.52295706  0.94421722  1.28229497]
 [ 0.78164988  0.98740598  0.40947413  1.16488864  0.4143018   0.63326217  0.08316517  1.72308239  1.1519231   0.09237315  0.60186664 -0.09195583  0.36697288  0.39283143  0.72249567  0.89143875  0.46964576  0.6342824   0.93241706  0.79204688  1.52295726  0.9442174   1.28229914]
 [ 0.78164641  0.98740349  0.40947055  1.16488756  0.41429995  0.63325916  0.08316369  1.72306949  1.15191714  0.0923678   0.60186345 -0.09195962  0.36696916  0.39282681  0.7224905   0.89143406  0.46964225  0.63427642  0.93241409  0.79204335  1.52295421  0.94421469  1.28228707]
 [ 0.78165015  0.98740618  0.40947441  1.16488872  0.41430194  0.6332624   0.08316528  1.72308339  1.15192356  0.09237356  0.60186689 -0.09195553  0.36697317  0.39283179  0.72249607  0.89143911  0.46964603  0.63428286  0.93241729  0.79204715  1.5229575   0.94421761  1.28229709]
 [ 0.78164804  0.98740467  0.40947223  1.16488807  0.41430082  0.63326058  0.08316439  1.72307556  1.15191994  0.09237032  0.60186495 -0.09195784  0.36697091  0.39282899  0.72249293  0.89143627  0.4696439   0.63427923  0.93241549  0.79204501  1.52295564  0.94421596  1.28230037]]
--------------------------------------------------------------------------------
nim_boks_data loaded: data/numpy/nim_boks_data.npy
nim_boks_data.shape: (39144, 9)
[[ 0.6632433   0.80555514  0.57184     0.69671882 -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28230299]
 [ 0.66324311  0.80555493  0.57183943  0.69671868 -1.59971296 -1.14715363 -0.99772466 -0.30659251  1.28230197]
 [ 0.66324297  0.80555477  0.57183901  0.69671858 -1.59971296 -1.14715363 -0.99772466 -0.30659251  1.28230121]
 [ 0.66324405  0.80555598  0.57184226  0.69671935 -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28229892]
 [ 0.66324503  0.80555709  0.57184525  0.69672005 -1.59971294 -1.14715363 -0.99772466 -0.30659251  1.28230839]
 [ 0.66324328  0.80555512  0.57183995  0.6967188  -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28229497]
 [ 0.66324348  0.80555534  0.57184054  0.69671894 -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28229914]
 [ 0.66324048  0.80555196  0.57183144  0.69671681 -1.59971298 -1.14715363 -0.99772466 -0.30659251  1.28228707]
 [ 0.66324371  0.8055556   0.57184124  0.69671911 -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28229709]
 [ 0.66324189  0.80555355  0.57183572  0.69671781 -1.59971297 -1.14715363 -0.99772466 -0.30659251  1.28230037]]
--------------------------------------------------------------------------------
nim_news_data loaded: data/numpy/nim_news_data.npy
nim_news_data.shape: (39144, 101)
[[ 3.43747353e-01  3.53728610e-01  2.83315115e-01 -7.40182967e-01  6.52484974e-01 -9.97946696e-01 -3.07057416e-01  1.62246428e-01  1.11230122e+00  3.43961594e-02 -7.11117569e-01  4.25360374e-01  1.38484339e-01 -7.79282049e-01  5.80383342e-01  4.42928167e-01 -5.76514806e-01  1.01804515e+00 -5.34480415e-01 -3.07959030e-01  1.48093078e-01  4.58001379e-02 -1.42222742e-01  2.85665402e-01  1.47970822e-01 -4.36327600e-01  9.67219107e-05  4.88025175e-01 -1.26753214e-02 -4.88107212e-01 -2.30063350e-01  2.28158760e-01 -3.49108498e-01 -3.35872935e-02  2.17037161e-01 -1.74796823e-01 -2.41502581e-01 -9.64590141e-02 -3.68248566e-02  2.27948938e-01 -5.24786185e-03 -1.34660458e-01 -2.52614519e-02  1.47027854e-01 -3.45950245e-02 -1.70637051e-01 -9.68576281e-02 -1.63202381e-01  1.65270605e-03  2.35564591e-01  1.79761251e-01  1.83566502e-02  1.66901150e-01 -1.26032459e-02 -3.50517690e-03  2.14481550e-01  1.49165256e-01  8.21782439e-02  6.41809875e-03  1.37768946e-01 -1.69451962e-01 -9.18528249e-02 -5.31392577e-02 -2.96709319e-02 -7.09818094e-02  4.73773377e-02 -1.13325171e-01  1.39269861e-01 -1.47326852e-01  9.79783232e-02 -2.25233873e-02 -2.44818515e-01  3.83193411e-02  1.67223737e-01  1.54388647e-01 -1.35179768e-02 -1.21827548e-01 -3.67657879e-02  4.01806574e-02 -6.67213992e-03  1.37959504e-01 -1.02401718e-01  4.36846505e-02  4.73984275e-02  5.99798559e-02  7.35004829e-02 -1.54793700e-02  9.15706252e-02 -1.89146751e-02  5.08867855e-02  9.29562188e-03  7.44557951e-02  8.42769686e-02  3.35829271e-02 -5.99019822e-02  1.00056678e-02  4.14729500e-02 -3.65172111e-02 -7.38372907e-03 -2.43910033e-02  1.28230299e+00]
 [ 1.29550279e+00 -5.56690554e-01 -1.64721362e-01 -1.45254764e+00 -3.42197621e-01 -2.35243629e-01 -1.02345444e+00  9.88601562e-01  2.84672943e-01 -5.45985548e-01  1.68699835e-01  1.98393384e-01 -9.23402355e-01  1.01250462e-01 -1.57548880e-01 -6.07124039e-01 -4.76187209e-02  7.92121297e-02 -3.03056744e-01  1.44345686e-02 -5.31963195e-01 -2.54828053e-02  1.46648429e-01 -5.89132017e-01  5.64095072e-02  1.19601838e-01  5.39935835e-03  3.45733462e-01  8.19194002e-02 -2.70667272e-01 -4.44221149e-01 -9.49139156e-02 -3.06553839e-01 -7.53591209e-02  4.25270264e-02  2.48740077e-01  1.22623623e-01 -2.47385186e-01 -1.49966350e-01 -1.11634199e-01 -2.80708223e-02  6.99870926e-03 -1.57888480e-01 -3.49092233e-02 -1.48377976e-01 -8.42623226e-02 -2.42583688e-02 -3.72982744e-02  2.14710023e-01 -1.02034356e-01 -6.08931997e-02 -1.28988690e-02 -8.97157738e-04  2.17193289e-01  9.43869344e-02  1.77074428e-02  2.16099002e-01 -4.44586316e-02  1.00565021e-01 -1.43900987e-01 -8.56689507e-02 -1.04728885e-01  6.32667479e-02 -2.26476521e-02 -5.57287268e-02 -1.62162727e-01  6.17624511e-02 -6.19339086e-02 -1.79491277e-03 -8.84669217e-02 -3.49035035e-02 -2.83565249e-02  4.81758266e-02  1.98440398e-02 -3.83613350e-02 -1.93668437e-01  1.57366824e-01 -4.46535275e-02 -3.99397392e-02 -5.75179624e-02  8.49825642e-02 -3.21393612e-02  4.92048999e-02  2.56847127e-02  5.72372766e-02 -3.50867452e-02  3.62415601e-02 -7.10356845e-02  2.21300996e-02  1.03585755e-01  1.96738040e-02  3.25206922e-02  5.37356149e-02  2.61693536e-02  2.78509225e-02 -2.14962746e-02 -3.96789304e-02  1.73839026e-02  8.85148263e-03  2.58284969e-03  1.28230197e+00]
 [-2.19010154e-01 -1.46939979e+00 -5.54516736e-01 -9.50604586e-01  8.61255063e-01  4.38908401e-01 -1.59145158e+00 -1.82826332e-01  3.23875629e-01 -9.32707647e-01 -4.04307288e-02  4.25907383e-03  1.09942054e-01  5.63137018e-01  3.96778123e-01 -5.30825780e-01 -1.98008128e-01 -1.31166036e-01 -2.62713718e-01 -4.66633980e-02 -5.36649804e-02 -2.08065590e-01  5.82432198e-01 -3.55380268e-01 -1.57250868e-01  3.56081541e-02 -1.00670839e-01 -1.38290258e-01 -6.76148640e-02 -1.39509277e-01 -3.05394218e-01 -1.45061859e-01 -1.64807691e-01 -3.28449852e-01 -7.85660491e-02 -1.14021850e-01  2.00069121e-02 -8.27677568e-02 -1.09720500e-01  3.84635345e-03  1.81180042e-01 -5.97809590e-02 -5.26430917e-02 -5.41112201e-02 -7.34542992e-02 -4.03736569e-02 -4.16675377e-02 -9.26512983e-02 -9.63753033e-03 -2.27266316e-01 -2.71702760e-02 -1.51756142e-02 -1.49746014e-01  1.14853241e-01  9.84697236e-02  7.60593325e-02  3.13915039e-02 -1.13786456e-01 -7.37756080e-03 -8.83369731e-02  1.02173112e-01 -9.93086236e-02 -3.38871887e-02  1.78278641e-01 -7.54526415e-02 -1.39930187e-01 -1.44011644e-01  5.85535454e-02 -5.97497932e-02  5.09113372e-02 -3.08461451e-02 -8.54472930e-02  1.12223117e-01 -3.14103219e-02  1.15410094e-04 -5.89612098e-02 -1.08089437e-01 -9.95173067e-03  4.71664504e-02 -5.91361498e-02 -5.22869246e-02 -5.19303769e-02  2.29510042e-02  4.39963045e-02  3.40098279e-02  9.12310740e-02  3.97281216e-02 -1.08681776e-01 -4.18819160e-02  1.95911525e-03 -6.79188404e-03  1.93396351e-02  7.17781003e-02  6.34715111e-03 -2.58710854e-02  3.58358297e-02 -2.41204773e-03  7.13212219e-03 -2.40968448e-02 -1.22609429e-02  1.28230121e+00]
 [ 9.95177800e-01 -9.18056827e-01 -3.82786393e-01 -3.63390379e-01  1.10186963e+00 -8.90645775e-01 -6.21422551e-01  7.30822218e-01 -4.51329545e-01 -8.85794072e-01 -2.37142262e-02 -2.64997220e-01  6.86535661e-02  2.87582684e-01  2.14780622e-02 -8.19085655e-01 -1.60694513e-02  1.81629749e-01 -5.44818025e-01 -2.78681561e-02  6.61584026e-01  1.51228901e-01  3.01913978e-01 -1.44531556e-02  1.69020878e-01  1.31092219e-01 -1.28024054e-01 -6.15808299e-01 -2.33581781e-01 -7.20987670e-02  5.16572189e-02  2.93848621e-01  2.12006493e-02 -4.76430641e-01  2.47075650e-01  2.81370538e-01  2.58709277e-01 -1.28524417e-01 -1.72780338e-01 -1.07185786e-02  3.26477967e-01  3.21800458e-02 -1.47323913e-01 -1.58726196e-02  2.91420546e-01  8.37790175e-02  1.14698540e-01 -1.42810399e-01  1.50620110e-01 -9.47113122e-02 -7.31053273e-02 -1.89944772e-01  5.49176774e-02  3.20306363e-02 -1.40008062e-01  8.48524308e-03  2.25465397e-01 -4.37872839e-02 -1.91985508e-01  1.18940590e-01 -1.18829382e-01  4.83929926e-03  7.72103672e-02  3.01465534e-02  3.60873018e-02  8.70730199e-02  1.14768063e-03 -3.55435075e-02 -1.30597671e-01  1.83796270e-02  4.31462603e-02 -1.03500495e-01 -4.31740316e-02 -7.53673840e-02  6.64167428e-02 -5.99894512e-02  5.96506638e-02 -3.45276026e-02 -5.68169220e-02 -1.74167054e-01 -1.51399239e-02 -1.36409654e-02  6.37547053e-02 -1.81222806e-03 -1.00838436e-02  3.84657328e-04  2.76862815e-02 -8.33087012e-02  9.65849093e-02  1.38150267e-01 -2.66506701e-03 -5.02331239e-02  8.42831671e-04  3.58771195e-02 -2.36132670e-02 -4.90482780e-02  4.88547747e-02  1.18352925e-02 -3.47382102e-02  2.28662357e-02  1.28229892e+00]
 [ 5.74842528e-01 -8.05390585e-01 -5.51157819e-01 -1.96592302e+00  6.53951974e-02 -5.15155318e-01 -1.22244420e+00  5.38845217e-01 -3.47484766e-01 -6.53249250e-01  3.04070729e-02  5.23831796e-02 -6.58146668e-01  5.91242174e-01  2.50750727e-03 -5.58819711e-01 -2.91080068e-01 -2.18768699e-01 -1.87935885e-01  1.65090817e-01  2.30438513e-01  1.38741792e-01  1.23707729e-01 -5.29181371e-01 -1.65438536e-01 -1.60269348e-02 -8.55789350e-02  2.21272919e-01  2.01225135e-01  7.49696030e-02 -2.61976317e-01  2.08872283e-01  2.68729596e-01 -3.00690792e-01 -1.00189638e-01  3.98678422e-01 -8.56856547e-02  5.92129562e-02 -1.11113592e-01 -6.54996291e-02 -6.83499567e-02  8.18163192e-03  1.42315047e-02 -9.19479811e-02  2.34334407e-02 -9.63842666e-02 -7.01629511e-02 -2.49070029e-01 -5.64059554e-02 -1.91820071e-01 -2.46014962e-01 -2.65747398e-01 -3.47920264e-02  3.95035129e-01  7.39430014e-03 -2.15089038e-02  3.67574849e-01 -1.42315132e-01  1.26256493e-01  2.19595389e-02 -3.15172014e-01  7.59251627e-02 -9.65053029e-03  1.59650447e-01 -4.37185171e-02 -1.34828940e-01 -5.09514086e-02 -1.56433391e-01 -1.08848677e-01  8.80994659e-02 -6.14677950e-02 -1.97084246e-02  1.90511933e-02 -1.86343797e-02  1.21394224e-02 -1.95227519e-01  5.48764409e-02 -7.40386656e-04  4.73169732e-02 -1.49992912e-01  1.06511586e-01 -4.87372879e-02  1.02087585e-01  1.23135682e-02 -3.21314110e-02  1.37631983e-02  3.88173252e-02  7.73606758e-03 -5.41040232e-04  1.09382109e-01 -2.10579035e-03  4.56351337e-02  5.04129052e-02  8.66989506e-02  1.59949902e-02 -3.05838987e-02  2.24416884e-02  6.38213944e-02  1.37546037e-03  1.66336744e-03  1.28230839e+00]
 [-1.94431324e+00 -8.69096582e-01 -3.52323478e-01 -1.00961847e+00 -6.27120763e-01  9.41702678e-01 -6.32942182e-01 -9.12959223e-01  2.30504247e-01  2.55644820e-01 -2.08514717e-02 -2.26600800e-01  2.98502783e-01  2.18271975e-01 -7.48401951e-02  3.60157756e-01 -5.69145250e-01 -2.50557778e-01  3.61923189e-01 -5.84314010e-02 -2.14146573e-01 -7.42272897e-02 -6.15637963e-02  6.73701597e-02 -1.88808689e-02  8.27301606e-02  3.98158325e-01 -3.91072230e-01 -4.73121579e-01 -4.94936234e-02 -4.52821738e-01  1.23404263e-01  4.53592106e-02  9.79554395e-02 -1.22018673e-01 -8.17037420e-02  1.10739045e-01  2.99172400e-01  2.36375063e-01 -6.17264537e-02  2.39640164e-01  3.35312290e-01  2.59595663e-02 -2.28823986e-01  8.65732864e-02 -1.71870565e-01  1.41624771e-01  9.10674703e-02 -1.00250205e-01  6.58767813e-02  1.25451850e-01 -1.08705577e-01  4.04464001e-02 -6.24117197e-02  4.64206014e-02 -3.79130573e-01 -6.92762120e-02  9.53785940e-02  9.02673786e-02  1.02894061e-03  1.59450951e-01 -3.86847552e-02  1.01731893e-02  9.72044499e-02 -4.84288340e-02  9.02267786e-02 -1.31505465e-02 -4.12378680e-02  8.26920089e-02  1.41321867e-01 -1.63935288e-01 -4.99700125e-02 -2.04507395e-03  3.95567265e-02  4.35888871e-03  3.26863573e-02  4.77395689e-02  5.17044062e-02 -9.56373172e-02  1.06633447e-02  5.97863858e-03  9.15923340e-02  2.04738669e-02 -2.49636066e-02  5.68457285e-02 -1.24556690e-01 -7.78905143e-02 -2.17466186e-02 -1.09107731e-01  1.88078641e-02  7.40969092e-03 -7.57283606e-03  1.14966232e-02 -7.04990473e-02  1.25620845e-02 -3.66504889e-02  4.36794099e-03 -3.22940061e-02 -1.56514849e-02  7.22194486e-02  1.28229497e+00]
 [ 4.36008506e-01  1.45156570e+00 -2.19372248e+00 -8.43736159e-01  5.37807712e-01 -1.47463539e-01 -4.90685774e-01 -1.67562216e-01 -6.54601833e-02  3.61379517e-01 -1.13576832e-01 -1.36336382e-01  2.42158882e-01  6.28956872e-01  3.28528305e-01 -3.54043871e-01 -2.40536881e-01  1.36928499e-01 -1.31779189e-01  2.49470629e-02 -2.96991300e-01  6.61977015e-02  1.85047059e-01  5.19281908e-02 -1.43314338e-01  4.26066920e-01 -2.91874174e-01 -9.30426212e-02  5.54189710e-02  3.05702569e-01  2.64881732e-01  6.62625654e-02 -2.32598745e-02 -2.80818436e-02 -1.21131500e-01  7.34282598e-02 -2.10062342e-01  1.82078766e-01  2.06074996e-01  1.66766098e-01  1.71748741e-01  1.36829329e-01 -1.50107495e-01  1.96028376e-01  3.88441070e-02  2.89195962e-01  7.92309749e-02  2.11265564e-02 -4.72716361e-02 -1.12678228e-01  8.93204957e-02  1.07659254e-01 -3.27236811e-02 -1.69610717e-01  5.81811962e-02  8.12560437e-02  5.40303817e-02  1.90601331e-02 -4.86709002e-02 -8.91367088e-02 -3.72502057e-02 -1.43115092e-01 -1.03911739e-01 -1.37051606e-01  1.23548387e-01 -5.86256250e-02 -1.25136440e-01 -1.33170415e-01  5.20329735e-03 -1.56657466e-02 -6.87570136e-02  1.29076303e-02 -1.24335875e-01 -2.81922902e-02 -4.63275626e-03 -6.92197881e-02 -5.51612056e-03 -4.88236988e-02 -2.44131436e-02 -8.28938887e-04  3.53969438e-02  1.06314711e-01  6.78170666e-02  6.18501948e-02  9.19450018e-03  7.47641232e-02  2.45462934e-02  5.01996501e-03 -4.48040852e-02 -3.78490568e-02  5.07404195e-02  2.08314779e-02 -8.30930669e-04  4.95273963e-03  1.55831136e-04 -9.27861299e-03 -3.57116903e-02  3.20337000e-02 -1.36150148e-02 -2.12768024e-04  1.28229914e+00]
 [ 1.29550279e+00 -5.56690554e-01 -1.64721362e-01 -1.45254764e+00 -3.42197621e-01 -2.35243629e-01 -1.02345444e+00  9.88601562e-01  2.84672943e-01 -5.45985548e-01  1.68699835e-01  1.98393384e-01 -9.23402355e-01  1.01250462e-01 -1.57548880e-01 -6.07124039e-01 -4.76187209e-02  7.92121297e-02 -3.03056744e-01  1.44345686e-02 -5.31963195e-01 -2.54828053e-02  1.46648429e-01 -5.89132017e-01  5.64095072e-02  1.19601838e-01  5.39935835e-03  3.45733462e-01  8.19194002e-02 -2.70667272e-01 -4.44221149e-01 -9.49139156e-02 -3.06553839e-01 -7.53591209e-02  4.25270264e-02  2.48740077e-01  1.22623623e-01 -2.47385186e-01 -1.49966350e-01 -1.11634199e-01 -2.80708223e-02  6.99870926e-03 -1.57888480e-01 -3.49092233e-02 -1.48377976e-01 -8.42623226e-02 -2.42583688e-02 -3.72982744e-02  2.14710023e-01 -1.02034356e-01 -6.08931997e-02 -1.28988690e-02 -8.97157738e-04  2.17193289e-01  9.43869344e-02  1.77074428e-02  2.16099002e-01 -4.44586316e-02  1.00565021e-01 -1.43900987e-01 -8.56689507e-02 -1.04728885e-01  6.32667479e-02 -2.26476521e-02 -5.57287268e-02 -1.62162727e-01  6.17624511e-02 -6.19339086e-02 -1.79491277e-03 -8.84669217e-02 -3.49035035e-02 -2.83565249e-02  4.81758266e-02  1.98440398e-02 -3.83613350e-02 -1.93668437e-01  1.57366824e-01 -4.46535275e-02 -3.99397392e-02 -5.75179624e-02  8.49825642e-02 -3.21393612e-02  4.92048999e-02  2.56847127e-02  5.72372766e-02 -3.50867452e-02  3.62415601e-02 -7.10356845e-02  2.21300996e-02  1.03585755e-01  1.96738040e-02  3.25206922e-02  5.37356149e-02  2.61693536e-02  2.78509225e-02 -2.14962746e-02 -3.96789304e-02  1.73839026e-02  8.85148263e-03  2.58284969e-03  1.28228707e+00]
 [ 5.21221146e-01  1.31611637e-01 -2.26053003e-01 -4.77245113e-01  3.16069830e-01 -1.13531067e+00 -3.44146723e-01  4.50299367e-01  1.29106679e+00 -1.36057460e-01 -7.60290907e-01  2.98759825e-01 -8.92882990e-03 -4.38926984e-01  2.45417023e-01  3.49935694e-01 -7.48284804e-01  8.56925844e-01 -4.56559176e-01 -5.06345637e-01  1.99519517e-01  2.22178644e-01 -9.59677421e-02  1.57474960e-01  1.83392158e-01 -1.76381579e-01 -4.06239469e-02  1.98375248e-01 -8.60563728e-02 -2.87824180e-01 -3.96196638e-02  1.40092070e-01 -4.35965487e-01 -1.16290448e-01  3.63944978e-01  4.27777747e-02 -1.85183622e-01 -5.47817989e-02  1.24938499e-04  3.44771402e-01 -5.13216934e-02 -2.35738583e-01  7.46979402e-02  2.39741775e-01 -5.86496459e-03 -1.88175659e-01 -6.07846438e-02 -7.98842763e-02  7.72596453e-02  3.48930217e-01  1.12522426e-01  9.65275783e-02  1.28787635e-01 -1.37813835e-01 -5.44830330e-02  7.93542274e-02  4.01292097e-02  1.50036440e-01 -3.61746245e-02  1.94326116e-01 -1.33182651e-01 -2.20088751e-02  7.56755896e-02 -1.87659420e-03 -9.28777389e-02 -5.51643148e-02 -1.42276858e-01  8.86603494e-02 -1.18676008e-01  1.18998604e-01  4.20717020e-02 -1.57167606e-01  2.01529286e-02  1.52323598e-01  7.45020183e-02 -6.33377621e-03 -4.99952172e-02 -7.12161179e-03  3.01795004e-02 -3.39594692e-02  6.04082438e-02 -9.55573444e-02  7.37276773e-02  4.86923737e-02  5.75383775e-02  6.05144088e-02  1.50705927e-02  1.05918212e-02  1.50545067e-02 -2.61598083e-02  1.26819827e-02  8.09805122e-02  1.06670517e-01 -4.16056165e-03 -4.49695749e-02 -3.79981948e-03  2.47375813e-02 -5.23343184e-02 -8.19765045e-03 -6.18064554e-02  1.28229709e+00]
 [ 2.40441206e+00  1.75753542e+00  1.44470162e+00  3.02703226e+00 -2.09312890e+00  1.39928065e-01 -7.89698682e-01 -9.71778796e-01 -4.26205896e-01 -6.94697672e-01  8.75300161e-02  2.01212171e-01  2.37958338e-01  1.83339463e-01  2.04851426e-01 -3.51633532e-02  1.29219457e-01  2.62384410e-01  3.38242485e-01  2.83813123e-01  3.81954486e-02 -2.43978649e-01  2.24650525e-01 -1.00152085e-01  9.66814696e-02 -1.85258169e-02  1.54197498e-02  8.69091189e-02 -6.73579684e-04 -3.90890841e-02 -8.41900816e-02  1.64816116e-01  3.52175936e-02 -5.70620493e-02  5.51736979e-02  6.41133632e-02  1.74807074e-01  8.06485759e-02  2.55027873e-03 -1.51352512e-02  8.42340443e-02 -6.85029335e-03 -3.49029905e-02  8.18906001e-02  1.50667241e-01  8.19266053e-02 -1.14356658e-02  3.87983687e-03 -5.33314826e-02  5.15352102e-02 -5.99326081e-03 -5.12402253e-02 -6.97023834e-02  1.47365912e-02 -2.28286100e-02  1.71452785e-02  6.41707188e-03  6.09602764e-03 -4.39331102e-03  4.99511696e-02  3.60799975e-03  3.78902823e-02 -3.40773808e-02  2.28335156e-02 -4.47385864e-02 -7.41651900e-03  3.10743632e-02  2.90953048e-02  5.53416765e-03  5.81132897e-02 -4.61842840e-02 -3.20614358e-02 -1.14778322e-02  3.82612323e-03  1.11901415e-03  2.55592260e-02  2.13788800e-02 -2.64670235e-02  5.73169870e-02  2.78707952e-02  8.17630705e-02 -1.05599741e-02  3.40008253e-02 -5.78558196e-02  1.69755594e-02 -5.14110806e-02  1.38387783e-02  2.55141270e-02 -1.23716151e-02  1.56948090e-02  9.37719729e-03 -2.97445065e-02  1.09232604e-03  2.55363416e-02 -7.97661494e-03 -2.86204268e-02 -3.89363078e-03  1.26442500e-02 -2.24473862e-02 -3.35063430e-03  1.28230037e+00]]
--------------------------------------------------------------------------------
  • 20일간의 데이터를 넣고 5일 후의 차분값을 구하는 시계열 데이터셋
In [7]:
sequence_length = 20        # 20일간의 데이터 사용
sampling_rate   = 24        # 1시간 단위
sequence_stride = 1         # 1시간씩 이동
predict_term    = 5         # 5일 후의 값을 예측
batch_size      = 128       # 배치 크기

#------------------------------------------------------------------------------#
# 훈련 데이터셋 생성 (IBK 입력)
#------------------------------------------------------------------------------#
ibks_train_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_ibks_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length, 
    sampling_rate=sampling_rate,     
    sequence_stride=sequence_stride,  
    shuffle=False,                 
    batch_size=batch_size,          
    start_index=0,
    end_index=train_size            
)

#------------------------------------------------------------------------------#
# 훈련 데이터셋 생성 (BOK 입력)
#------------------------------------------------------------------------------#
boks_train_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_boks_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],     
    sequence_length=sequence_length,  
    sampling_rate=sampling_rate,     
    sequence_stride=sequence_stride, 
    shuffle=False,                
    batch_size=batch_size,          
    start_index=0,
    end_index=train_size            
)

#------------------------------------------------------------------------------#
# 훈련 데이터셋 생성 (NEWS 입력)
#------------------------------------------------------------------------------#
news_train_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_news_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length,  
    sampling_rate=sampling_rate,   
    sequence_stride=sequence_stride,  
    shuffle=False,                   
    batch_size=batch_size,           
    start_index=0,
    end_index=train_size             
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 생성 (IBK 입력)
#------------------------------------------------------------------------------#
ibks_val_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_ibks_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size,            
    end_index=train_size + val_size 
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 생성 (BOK 입력)
#------------------------------------------------------------------------------#
boks_val_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_boks_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],     
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size,           
    end_index=train_size + val_size   
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 생성 (NEWS 입력)
#------------------------------------------------------------------------------#
news_val_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_news_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size,           
    end_index=train_size + val_size    
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 생성 (IBK 입력)
#------------------------------------------------------------------------------#
ibks_test_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_ibks_data,     
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],    
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size + val_size,
    end_index=len(nim_ibks_data) - (predict_term * sampling_rate)
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 생성 (BOK 입력)
#------------------------------------------------------------------------------#
boks_test_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_boks_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size + val_size,
    end_index=len(nim_boks_data) - (predict_term * sampling_rate)
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 생성 (NEWS 입력)
#------------------------------------------------------------------------------#
news_test_dataset = keras.utils.timeseries_dataset_from_array(
    data=nim_news_data,    
    targets=target_values[sequence_length * sampling_rate - sampling_rate:],   
    sequence_length=sequence_length,
    sampling_rate=sampling_rate,
    sequence_stride=sequence_stride,
    shuffle=False,
    batch_size=batch_size,
    start_index=train_size + val_size,
    end_index=len(nim_news_data) - (predict_term * sampling_rate)
)
2024-11-05 00:58:28.041984: E external/local_xla/xla/stream_executor/cuda/cuda_driver.cc:274] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected
In [8]:
import tensorflow as tf
import time

################################################################################
# IBK 시계열 데이터셋 입력변수
################################################################################
def ibks_timeseris_dataset(dataset):
    for batch in dataset:
        ibks_data, ibks_target = batch
        yield {'ibks_input': ibks_data}, ibks_target

#------------------------------------------------------------------------------#
# 훈련 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_train_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_train_dataset),
    output_signature=(
        {'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_val_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_val_dataset),
    output_signature=(
        {'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_test_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_test_dataset),
    output_signature=(
        {'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)


################################################################################
# BOK 시계열 데이터셋 입력변수
################################################################################
def boks_timeseris_dataset(dataset):
    for batch in dataset:
        boks_data, boks_target = batch
        yield {'boks_input': boks_data}, boks_target

#------------------------------------------------------------------------------#
# 훈련 데이터셋 변환
#------------------------------------------------------------------------------#
boks_train_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_train_dataset),
    output_signature=(
        {'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 변환
#------------------------------------------------------------------------------#
boks_val_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_val_dataset),
    output_signature=(
        {'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 변환
#------------------------------------------------------------------------------#
boks_test_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_test_dataset),
    output_signature=(
        {'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)


################################################################################
# NEWS 시계열 데이터셋 입력변수
################################################################################
def news_timeseris_dataset(dataset):
    for batch in dataset:
        news_data, news_target = batch
        yield {'news_input': news_data}, news_target

#------------------------------------------------------------------------------#
# 훈련 데이터셋 변환
#------------------------------------------------------------------------------#
news_train_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_train_dataset),
    output_signature=(
        {'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 변환
#------------------------------------------------------------------------------#
news_val_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_val_dataset),
    output_signature=(
        {'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 변환
#------------------------------------------------------------------------------#
news_test_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_test_dataset),
    output_signature=(
        {'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)},
        tf.TensorSpec(shape=(None,), dtype=tf.float32)  # 타겟값
    )
)


################################################################################
# IBK + BOK + NEWS 시계열 데이터셋 입력변수
################################################################################
def ibks_boks_news_timeseris_dataset(ibks_dataset, boks_dataset, news_dataset):
    for (ibks_dataset, ibks_target), (boks_dataset, boks_target), (news_dataset, news_target) in zip(ibks_dataset, boks_dataset, news_dataset):
        yield {'ibks_input': ibks_dataset, 'boks_input': boks_dataset, 'news_input': news_dataset}, ibks_target

#------------------------------------------------------------------------------#
# 훈련 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_boks_news_train_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_train_dataset, boks_train_dataset, news_train_dataset),
    output_signature=(
        {
            'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32),
            'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32),
            'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)
        },
        tf.TensorSpec(shape=(None,), dtype=tf.float32)
    )
)

#------------------------------------------------------------------------------#
# 검증 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_boks_news_val_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_val_dataset, boks_val_dataset, news_val_dataset),
    output_signature=(
        {
            'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32),         
            'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32),
            'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)
        },
        tf.TensorSpec(shape=(None,), dtype=tf.float32)
    )
)

#------------------------------------------------------------------------------#
# 테스트 데이터셋 변환
#------------------------------------------------------------------------------#
ibks_boks_news_test_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_test_dataset, boks_test_dataset, news_test_dataset),
    output_signature=(
        {
            'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32),
            'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32),
            'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)
        },
        tf.TensorSpec(shape=(None,), dtype=tf.float32)
    )
)

6-3. (1)IBKS¶


In [9]:
from tensorflow.keras.models import Model
from tensorflow.keras.layers import LSTM, BatchNormalization, Dropout, Dense, Input
from tensorflow.keras.optimizers import Adam
from keras_tuner import Hyperband, HyperParameters

# 모델 생성 함수 정의
def build_model(hp):
    # 입력 레이어 정의
    ibks_input = Input(shape=(sequence_length, nim_ibks_data.shape[-1]), name="ibks_input")

    # 첫 번째 LSTM 레이어: 64로 고정
    x = LSTM(64, recurrent_dropout=0.2, return_sequences=True)(ibks_input)
    x = BatchNormalization()(x)

    #------------------------------------------------------------------------------#
    # 두 번째 LSTM 레이어
    units_second = hp.Choice('units_second', values=[64, 32])
    x = LSTM(units_second, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 세 번째 LSTM 레이어
    values_third = [u for u in [64, 32, 16] if u <= units_second] 
    units_third = hp.Choice('units_third', values=values_third)
    x = LSTM(units_third, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 네 번째 LSTM 레이어
    values_fourth = [u for u in [32, 16, 8] if u <= units_third]
    units_fourth = hp.Choice('units_fourth', values=values_fourth)
    x = LSTM(units_fourth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 다섯 번째 LSTM 레이어
    values_fifth = [u for u in [32, 16, 8] if u <= units_fourth]
    units_fifth = hp.Choice('units_fifth', values=values_fifth)
    x = LSTM(units_fifth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 여섯 번째 LSTM 레이어
    values_sixth = [u for u in [16, 8] if u <= units_fifth]
    units_sixth = hp.Choice('units_sixth', values=values_sixth)
    x = LSTM(units_sixth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)
    #------------------------------------------------------------------------------#

    # 마지막 LSTM 레이어: 4로 고정
    x = LSTM(4, recurrent_dropout=0.2)(x)
    x = BatchNormalization()(x)

    # 드롭아웃 레이어
    x = Dropout(0.5)(x)

    # 출력 레이어
    outputs = Dense(1)(x)

    # 모델 정의
    model = Model(inputs=[ibks_input], outputs=outputs)
    
    # 컴파일 설정
    model.compile(
        optimizer=Adam(learning_rate=0.0001),
        loss="mse",
        metrics=["mae"]
    )
    
    return model

# Hyperband Tuner 설정
tuner = Hyperband(
    build_model,
    objective="val_mae",
    max_epochs=10,
    hyperband_iterations=2,
    directory='nim_tuner',
    project_name='nim_ibks_tuner'
)

# 모델 탐색
tuner.search(ibks_train_dataset, epochs=10, validation_data=ibks_val_dataset)

# 최적의 하이퍼파라미터로 모델 가져오기
best_model = tuner.get_best_models(num_models=1)[0]
best_hyperparameters = tuner.get_best_hyperparameters(num_trials=1)[0]

# 최적의 하이퍼파라미터 출력
print("Best Hyperparameters:")
print('-'*80)
for param, value in best_hyperparameters.values.items():
    print(f"{param}: {value}")
print('-'*80)
Trial 60 Complete [00h 02m 39s]
val_mae: 0.770330548286438

Best val_mae So Far: 0.6960318684577942
Total elapsed time: 01h 11m 50s
Best Hyperparameters:
--------------------------------------------------------------------------------
units_second: 32
units_third: 16
units_fourth: 8
units_fifth: 16
units_sixth: 8
tuner/epochs: 10
tuner/initial_epoch: 4
tuner/bracket: 2
tuner/round: 2
tuner/trial_id: 0043
--------------------------------------------------------------------------------
In [10]:
import keras_tuner as kt
import matplotlib.pyplot as plt
import numpy as np

# 1. 튜너 파일에서 결과 불러오기
tuner = kt.Hyperband(
    objective='val_mae',
    max_epochs=10,
    factor=3,
    directory='nim_tuner',
    project_name='nim_ibks_tuner'
)

# Trial ID, val_mae, 하이퍼파라미터 설정 리스트
trial_ids = []
val_maes = []
configs = []

# 2. 튜너에서 각 트라이얼의 ID와 val_mae, 하이퍼파라미터 설정 추출
for trial_id, trial in tuner.oracle.trials.items():
    val_mae = trial.metrics.get_best_value('val_mae')
    if val_mae is not None and val_mae < 0.9:
        trial_ids.append(int(trial_id))
        val_maes.append(val_mae)
        configs.append(trial.hyperparameters.values)

# trial_ids 순서대로 정렬
if val_maes:
    sorted_indices = np.argsort(trial_ids)
    trial_ids = np.array(trial_ids)[sorted_indices]
    val_maes = np.array(val_maes)[sorted_indices]
    configs = np.array(configs)[sorted_indices]

    # 최적의 트라이얼 및 하이퍼파라미터 구성 찾기
    min_val_loss_idx = np.argmin(val_maes)
    min_val_loss = val_maes[min_val_loss_idx]
    best_config = configs[min_val_loss_idx]
    config_str = '\n'.join([f'{param}: {value}' for param, value in best_config.items()])
    config_str_with_loss = f"Best val_mae: {min_val_loss:.6f}\n{config_str}"

    # 시각화 설정
    plt.figure(figsize=(25, 10))
    indices = range(len(trial_ids))

    # 각 트라이얼의 마커를 그리기
    for i, val_mae in enumerate(val_maes):
        if i == min_val_loss_idx:
            plt.plot(i, val_mae, 'o', markersize=10, label=f'Best Trial {trial_ids[i]}\n{config_str_with_loss}')
        else:
            plt.plot(i, val_mae, 'o', markersize=10)
        plt.text(i, val_mae + 0.02, f'{val_mae:.6f}', fontsize=10, ha='center')

    # x축에 trial_ids 값 라벨 추가
    plt.xticks(indices, trial_ids, rotation=45)
    plt.ylabel('Final Validation MAE')
    plt.title('Final Validation MAE for Trials (val_mae < 0.9)')
    plt.legend(fontsize=15, loc='upper right', bbox_to_anchor=(1.15, 1), frameon=False)
    plt.grid()
    plt.tight_layout()
    plt.show()
else:
    print("0.9보다 작은 val_mae 값이 없습니다.")
Reloading Tuner from nim_tuner/nim_ibks_tuner/tuner0.json
No description has been provided for this image

6-3. (2)BOK¶


In [11]:
from tensorflow.keras.models import Model
from tensorflow.keras.layers import LSTM, BatchNormalization, Dropout, Dense, Input
from tensorflow.keras.optimizers import Adam
from keras_tuner import Hyperband
from keras_tuner import HyperParameters

def build_model(hp):
    # 입력 레이어 정의
    boks_input = Input(shape=(sequence_length, nim_boks_data.shape[-1]), name="boks_input")

    # 첫 번째 LSTM 레이어: 32로 고정
    x = LSTM(32, recurrent_dropout=0.3, return_sequences=True)(boks_input)
    x = BatchNormalization()(x)

    # 두 번째 LSTM 레이어
    units_second = hp.Choice('units_second', values=[u for u in [32, 16] if u <= 32])
    x = LSTM(units_second, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 세 번째 LSTM 레이어
    units_third = hp.Choice('units_third', values=[u for u in [32, 16] if u <= units_second])
    x = LSTM(units_third, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 네 번째 LSTM 레이어
    units_fourth = hp.Choice('units_fourth', values=[u for u in [16, 8] if u <= units_third])
    x = LSTM(units_fourth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 다섯 번째 LSTM 레이어
    units_fifth = hp.Choice('units_fifth', values=[u for u in [16, 8, 4] if u <= units_fourth])
    x = LSTM(units_fifth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 마지막 LSTM 레이어: 4로 고정
    x = LSTM(4, recurrent_dropout=0.3)(x)
    x = BatchNormalization()(x)

    # 드롭아웃 레이어
    x = Dropout(0.5)(x)

    # 출력 레이어
    outputs = Dense(1)(x)

    # 모델 정의
    model = Model(inputs=[boks_input], outputs=outputs)
    
    # 컴파일 설정
    model.compile(
        optimizer=Adam(learning_rate=0.0001),
        loss="mse",
        metrics=["mae"]
    )
    
    return model

# Hyperband Tuner 설정
tuner = Hyperband(
    build_model,
    objective="val_mae",
    max_epochs=10,
    hyperband_iterations=2,
    directory='nim_tuner',
    project_name='nim_boks_tuner'
)

# 모델 탐색
tuner.search(boks_train_dataset, epochs=10, validation_data=boks_val_dataset)

# 최적의 하이퍼파라미터로 모델 가져오기
best_model = tuner.get_best_models(num_models=1)[0]
best_hyperparameters = tuner.get_best_hyperparameters(num_trials=1)[0]

# 최적의 하이퍼파라미터 출력
print("Best Hyperparameters:")
print('-'*80)
for param, value in best_hyperparameters.values.items():
    print(f"{param}: {value}")
print('-'*80)
Trial 28 Complete [00h 02m 08s]
val_mae: 0.7549406886100769

Best val_mae So Far: 0.7080110311508179
Total elapsed time: 00h 24m 52s
WARNING:tensorflow:Detecting that an object or model or tf.train.Checkpoint is being deleted with unrestored values. See the following logs for the specific values in question. To silence these warnings, use `status.expect_partial()`. See https://www.tensorflow.org/api_docs/python/tf/train/Checkpoint#restorefor details about the status object returned by the restore function.
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.1
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.2
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.3
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.4
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.5
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.6
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.7
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.8
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.9
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.10
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.11
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.12
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.13
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.14
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.15
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.16
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.17
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.18
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.19
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.20
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.21
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.22
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.23
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.24
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.25
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.26
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.27
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.28
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.29
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.30
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.31
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.32
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.33
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.34
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.35
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.36
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.37
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.38
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.39
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.40
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.41
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.42
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.43
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.44
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.45
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.46
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.47
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.48
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.49
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.50
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.51
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.52
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.53
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.54
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.55
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.56
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.57
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.58
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.59
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.60
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.61
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.62
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.63
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.64
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.65
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.66
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.67
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.68
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.69
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.70
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.71
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.72
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.73
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.74
WARNING:tensorflow:Detecting that an object or model or tf.train.Checkpoint is being deleted with unrestored values. See the following logs for the specific values in question. To silence these warnings, use `status.expect_partial()`. See https://www.tensorflow.org/api_docs/python/tf/train/Checkpoint#restorefor details about the status object returned by the restore function.
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.1
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.2
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.3
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.4
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.5
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.6
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.7
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.8
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.9
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.10
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.11
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.12
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.13
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.14
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.15
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.16
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.17
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.18
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.19
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.20
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.21
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.22
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.23
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.24
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.25
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.26
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.27
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.28
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.29
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.30
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.31
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.32
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.33
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.34
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.35
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.36
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.37
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.38
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.39
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.40
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.41
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.42
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.43
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.44
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.45
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.46
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.47
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.48
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.49
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.50
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.51
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.52
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.53
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.54
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.55
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.56
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.57
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.58
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.59
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.60
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.61
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.62
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.63
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.64
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.65
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.66
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.67
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.68
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.69
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.70
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.71
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.72
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.73
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.74
Best Hyperparameters:
--------------------------------------------------------------------------------
units_second: 32
units_third: 32
units_fourth: 8
units_fifth: 16
tuner/epochs: 10
tuner/initial_epoch: 0
tuner/bracket: 0
tuner/round: 0
--------------------------------------------------------------------------------
In [12]:
import keras_tuner as kt
import matplotlib.pyplot as plt
import numpy as np

# 1. 튜너 파일에서 결과 불러오기
tuner = kt.Hyperband(
    objective='val_mae',
    max_epochs=10,
    factor=3,
    directory='nim_tuner',
    project_name='nim_boks_tuner'
)

# Trial ID, val_mae, 하이퍼파라미터 설정 리스트
trial_ids = []
val_maes = []
configs = []

# 2. 튜너에서 각 트라이얼의 ID와 val_mae, 하이퍼파라미터 설정 추출
for trial_id, trial in tuner.oracle.trials.items():
    val_mae = trial.metrics.get_best_value('val_mae')
    if val_mae is not None and val_mae < 0.9:
        trial_ids.append(int(trial_id))
        val_maes.append(val_mae)
        configs.append(trial.hyperparameters.values)

# 3. trial_ids 순서대로 정렬
if val_maes:
    sorted_indices = np.argsort(trial_ids)
    trial_ids = np.array(trial_ids)[sorted_indices]
    val_maes = np.array(val_maes)[sorted_indices]
    configs = np.array(configs, dtype=object)[sorted_indices]

    # 최적의 트라이얼과 하이퍼파라미터 구성 찾기
    min_val_loss_idx = np.argmin(val_maes)
    min_val_loss = val_maes[min_val_loss_idx]
    best_config = configs[min_val_loss_idx]
    config_str = '\n'.join([f'{param}: {value}' for param, value in best_config.items()])
    config_str_with_loss = f"Best val_mae: {min_val_loss:.6f}\n{config_str}"

    # 4. 시각화 설정
    plt.figure(figsize=(25, 10))
    indices = range(len(trial_ids))

    # 각 트라이얼의 마커를 그리기
    for i, val_mae in enumerate(val_maes):
        if i == min_val_loss_idx:
            plt.plot(i, val_mae, 'o', markersize=10, label=f'Best Trial {trial_ids[i]}\n{config_str_with_loss}')
        else:
            plt.plot(i, val_mae, 'o', markersize=10)
        plt.text(i, val_mae + 0.009, f'{val_mae:.6f}', fontsize=10, ha='center')

    # x축에 trial_ids 값 라벨 추가
    plt.xticks(indices, trial_ids, rotation=45)
    plt.ylabel('Final Validation MAE')
    plt.title('Final Validation MAE for Trials (val_mae < 0.9)')
    plt.legend(fontsize=15, loc='upper right', bbox_to_anchor=(1.15, 1), frameon=False)
    plt.grid()
    plt.tight_layout()
    plt.show()
else:
    print("0.9보다 작은 val_mae 값이 없습니다.")
Reloading Tuner from nim_tuner/nim_boks_tuner/tuner0.json
No description has been provided for this image

6-3. (3)NEWS : LSTM¶


In [13]:
from tensorflow.keras.models import Model
from tensorflow.keras.layers import LSTM, BatchNormalization, Dropout, Dense, Input
from tensorflow.keras.optimizers import Adam
from keras_tuner import Hyperband
from keras_tuner import HyperParameters

def build_model(hp):
    # 입력 레이어 정의
    news_input = Input(shape=(sequence_length, nim_news_data.shape[-1]), name="news_input")

    # 첫 번째 LSTM 레이어 (유닛 고정)
    x = LSTM(256, recurrent_dropout=0.3, return_sequences=True)(news_input)
    x = BatchNormalization()(x)

    # 두 번째 LSTM 레이어
    units_second = hp.Choice('units_second', values=[u for u in [256, 128] if u <= 256])
    x = LSTM(units_second, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 세 번째 LSTM 레이어
    units_third = hp.Choice('units_third', values=[u for u in [128, 64] if u <= units_second])
    x = LSTM(units_third, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 네 번째 LSTM 레이어
    units_fourth = hp.Choice('units_fourth', values=[u for u in [128, 64, 32] if u <= units_third])
    x = LSTM(units_fourth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 다섯 번째 LSTM 레이어
    units_fifth = hp.Choice('units_fifth', values=[u for u in [64, 32, 16] if u <= units_fourth])
    x = LSTM(units_fifth, recurrent_dropout=0.3, return_sequences=True)(x)
    x = BatchNormalization()(x)

    # 마지막 LSTM 레이어 (유닛 고정)
    x = LSTM(4, recurrent_dropout=0.3)(x)
    x = BatchNormalization()(x)

    # 드롭아웃 레이어
    x = Dropout(0.5)(x)

    # 출력 레이어
    outputs = Dense(1)(x)

    # 모델 정의
    model = Model(inputs=[news_input], outputs=outputs)
    
    # 컴파일 설정
    model.compile(
        optimizer=Adam(learning_rate=0.0001),
        loss="mse",
        metrics=["mae"]
    )
    
    return model

# Hyperband Tuner 설정
tuner = Hyperband(
    build_model,
    objective="val_mae",
    max_epochs=10,
    hyperband_iterations=2,
    directory='nim_tuner',
    project_name='nim_news_tuner'
)

# 모델 탐색
tuner.search(news_train_dataset, epochs=10, validation_data=news_val_dataset)

# 최적의 하이퍼파라미터로 모델 가져오기
best_model = tuner.get_best_models(num_models=1)[0]
best_hyperparameters = tuner.get_best_hyperparameters(num_trials=1)[0]

# 최적의 하이퍼파라미터 출력
print("Best Hyperparameters:")
print('-'*80)
for param, value in best_hyperparameters.values.items():
    print(f"{param}: {value}")
print('-'*80)
Trial 39 Complete [00h 01m 09s]
val_mae: 0.733718991279602

Best val_mae So Far: 0.7137290835380554
Total elapsed time: 01h 09m 18s
WARNING:tensorflow:Detecting that an object or model or tf.train.Checkpoint is being deleted with unrestored values. See the following logs for the specific values in question. To silence these warnings, use `status.expect_partial()`. See https://www.tensorflow.org/api_docs/python/tf/train/Checkpoint#restorefor details about the status object returned by the restore function.
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.1
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.2
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.3
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.4
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.5
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.6
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.7
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.8
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.9
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.10
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.11
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.12
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.13
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.14
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.15
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.16
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.17
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.18
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.19
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.20
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.21
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.22
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.23
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.24
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.25
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.26
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.27
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.28
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.29
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.30
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.31
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.32
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.33
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.34
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.35
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.36
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.37
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.38
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.39
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.40
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.41
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.42
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.43
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.44
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.45
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.46
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.47
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.48
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.49
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.50
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.51
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.52
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.53
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.54
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.55
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.56
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.57
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.58
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.59
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.60
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.61
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.62
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.63
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.64
Best Hyperparameters:
--------------------------------------------------------------------------------
units_second: 256
units_third: 64
units_fourth: 32
units_fifth: 64
tuner/epochs: 10
tuner/initial_epoch: 0
tuner/bracket: 0
tuner/round: 0
--------------------------------------------------------------------------------
In [14]:
import keras_tuner as kt
import matplotlib.pyplot as plt
import numpy as np

# 1. 튜너 파일에서 결과 불러오기
tuner = kt.Hyperband(
    objective='val_mae',
    max_epochs=10,
    factor=3,
    directory='nim_tuner',
    project_name='nim_news_tuner'
)

# Trial ID, val_mae, 하이퍼파라미터 설정 리스트
trial_ids = []
val_maes = []
configs = []

# 2. 튜너에서 각 트라이얼의 ID와 val_mae, 하이퍼파라미터 설정 추출
for trial_id, trial in tuner.oracle.trials.items():
    val_mae = trial.metrics.get_best_value('val_mae')
    if val_mae is not None and val_mae < 0.9:
        trial_ids.append(int(trial_id))
        val_maes.append(val_mae)
        configs.append(trial.hyperparameters.values)

# 3. trial_ids 기준으로 정렬
if val_maes:
    sorted_indices = np.argsort(trial_ids)
    trial_ids = np.array(trial_ids)[sorted_indices]
    val_maes = np.array(val_maes)[sorted_indices]
    configs = np.array(configs, dtype=object)[sorted_indices]

    # 최적의 트라이얼과 하이퍼파라미터 구성 찾기
    min_val_loss_idx = np.argmin(val_maes)
    min_val_loss = val_maes[min_val_loss_idx]
    best_config = configs[min_val_loss_idx]
    config_str = '\n'.join([f'{param}: {value}' for param, value in best_config.items()])
    config_str_with_loss = f"Best val_mae: {min_val_loss:.6f}\n{config_str}"

    # 4. 시각화 설정
    plt.figure(figsize=(25, 10))
    indices = range(len(trial_ids))

    # 각 트라이얼의 마커를 그리기
    for i, val_mae in enumerate(val_maes):
        if i == min_val_loss_idx:
            plt.plot(i, val_mae, 'o', markersize=10, label=f'Best Trial {trial_ids[i]}\n{config_str_with_loss}')
        else:
            plt.plot(i, val_mae, 'o', markersize=10)
        plt.text(i, val_mae + 0.009, f'{val_mae:.6f}', fontsize=10, ha='center')

    # x축에 trial_ids 값 라벨 추가
    plt.xticks(indices, trial_ids, rotation=45)
    plt.ylabel('Final Validation MAE')
    plt.title('Final Validation MAE for Trials (val_mae < 0.9)')
    plt.legend(fontsize=15, loc='upper right', bbox_to_anchor=(1.15, 1), frameon=False)
    plt.grid()
    plt.tight_layout()
    plt.show()
else:
    print("0.9보다 작은 val_mae 값이 없습니다.")
Reloading Tuner from nim_tuner/nim_news_tuner/tuner0.json
No description has been provided for this image

6-3. (4)NEWS : Bidirectional(LSTM)¶


In [15]:
from tensorflow.keras.models import Model
from tensorflow.keras import layers
from tensorflow.keras.layers import LSTM, BatchNormalization, Dropout, Dense, Input
from tensorflow.keras.optimizers import Adam
from keras_tuner import Hyperband
from keras_tuner import HyperParameters

def build_model(hp):
    # 입력 레이어 정의
    news_input = Input(shape=(sequence_length, nim_news_data.shape[-1]), name="news_input")

    # 첫 번째 LSTM 레이어 (유닛 고정)
    x = layers.Bidirectional(LSTM(256, recurrent_dropout=0.3, return_sequences=True))(news_input)
    x = BatchNormalization()(x)

    # 두 번째 LSTM 레이어
    units_second = hp.Choice('units_second', values=[u for u in [256, 128] if u <= 256])
    x = layers.Bidirectional(LSTM(units_second, recurrent_dropout=0.3, return_sequences=True))(x)
    x = BatchNormalization()(x)

    # 세 번째 LSTM 레이어
    units_third = hp.Choice('units_third', values=[u for u in [128, 64, 32] if u <= units_second])
    x = layers.Bidirectional(LSTM(units_third, recurrent_dropout=0.3, return_sequences=True))(x)
    x = BatchNormalization()(x)

    # 네 번째 LSTM 레이어
    units_fourth = hp.Choice('units_fourth', values=[u for u in [64, 32, 16] if u <= units_third])
    x = layers.Bidirectional(LSTM(units_fourth, recurrent_dropout=0.3, return_sequences=True))(x)
    x = BatchNormalization()(x)

    # 마지막 LSTM 레이어 (유닛 고정)
    x = layers.Bidirectional(LSTM(4, recurrent_dropout=0.3))(x)
    x = BatchNormalization()(x)

    # 드롭아웃 레이어
    x = Dropout(0.5)(x)

    # 출력 레이어
    outputs = Dense(1)(x)

    # 모델 정의
    model = Model(inputs=[news_input], outputs=outputs)
    
    # 컴파일 설정
    model.compile(
        optimizer=Adam(learning_rate=0.0001),
        loss="mse",
        metrics=["mae"]
    )
    
    return model

# Hyperband Tuner 설정
tuner = Hyperband(
    build_model,
    objective="val_mae",
    max_epochs=10,
    hyperband_iterations=2,
    directory='nim_tuner',
    project_name='nim_news_bi_tuner'
)

# 모델 탐색
tuner.search(news_train_dataset, epochs=10, validation_data=news_val_dataset)

# 최적의 하이퍼파라미터로 모델 가져오기
best_model = tuner.get_best_models(num_models=1)[0]
best_hyperparameters = tuner.get_best_hyperparameters(num_trials=1)[0]

# 최적의 하이퍼파라미터 출력
print("Best Hyperparameters:")
print('-'*80)
for param, value in best_hyperparameters.values.items():
    print(f"{param}: {value}")
print('-'*80)
Trial 26 Complete [00h 03m 39s]
val_mae: 0.7243140935897827

Best val_mae So Far: 0.7157797813415527
Total elapsed time: 00h 59m 12s
WARNING:tensorflow:Detecting that an object or model or tf.train.Checkpoint is being deleted with unrestored values. See the following logs for the specific values in question. To silence these warnings, use `status.expect_partial()`. See https://www.tensorflow.org/api_docs/python/tf/train/Checkpoint#restorefor details about the status object returned by the restore function.
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.1
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.2
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.3
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.4
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.5
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.6
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.7
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.8
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.9
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.10
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.11
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.12
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.13
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.14
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.15
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.16
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.17
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.18
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.19
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.20
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.21
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.22
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.23
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.24
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.25
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.26
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.27
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.28
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.29
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.30
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.31
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.32
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.33
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.34
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.35
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.36
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.37
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.38
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.39
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.40
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.41
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.42
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.43
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.44
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.45
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.46
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.47
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.48
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.49
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.50
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.51
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.52
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.53
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.54
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.55
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.56
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.57
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.58
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.59
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.60
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.61
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.62
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.63
WARNING:tensorflow:Value in checkpoint could not be found in the restored object: (root).optimizer._variables.64
Best Hyperparameters:
--------------------------------------------------------------------------------
units_second: 256
units_third: 64
units_fourth: 64
tuner/epochs: 10
tuner/initial_epoch: 4
tuner/bracket: 1
tuner/round: 1
tuner/trial_id: 0021
--------------------------------------------------------------------------------
In [16]:
import keras_tuner as kt
import matplotlib.pyplot as plt
import numpy as np

# 1. 튜너 파일에서 결과 불러오기
tuner = kt.Hyperband(
    objective='val_mae',
    max_epochs=10,
    factor=3,
    directory='nim_tuner',
    project_name='nim_news_bi_tuner'
)

# Trial ID, val_mae, 하이퍼파라미터 설정 리스트
trial_ids = []
val_maes = []
configs = []

# 2. 튜너에서 각 트라이얼의 ID와 val_mae, 하이퍼파라미터 설정 추출
for trial_id, trial in tuner.oracle.trials.items():
    val_mae = trial.metrics.get_best_value('val_mae')
    if val_mae is not None and val_mae < 0.9:
        trial_ids.append(int(trial_id))
        val_maes.append(val_mae)
        configs.append(trial.hyperparameters.values)

# 3. trial_ids 기준으로 정렬
if val_maes:
    sorted_indices = np.argsort(trial_ids)
    trial_ids = np.array(trial_ids)[sorted_indices]
    val_maes = np.array(val_maes)[sorted_indices]
    configs = np.array(configs, dtype=object)[sorted_indices]

    # 최적의 트라이얼과 하이퍼파라미터 구성 찾기
    min_val_loss_idx = np.argmin(val_maes)
    min_val_loss = val_maes[min_val_loss_idx]
    best_config = configs[min_val_loss_idx]
    config_str = '\n'.join([f'{param}: {value}' for param, value in best_config.items()])
    config_str_with_loss = f"Best val_mae: {min_val_loss:.6f}\n{config_str}"

    # 4. 시각화 설정
    plt.figure(figsize=(25, 10))
    indices = range(len(trial_ids))

    # 각 트라이얼의 마커를 그리기
    for i, val_mae in enumerate(val_maes):
        if i == min_val_loss_idx:
            plt.plot(i, val_mae, 'o', markersize=10, label=f'Best Trial {trial_ids[i]}\n{config_str_with_loss}')
        else:
            plt.plot(i, val_mae, 'o', markersize=10)
        plt.text(i, val_mae + 0.003, f'{val_mae:.6f}', fontsize=10, ha='center')

    # x축에 trial_ids 값 라벨 추가
    plt.xticks(indices, trial_ids, rotation=45)
    plt.ylabel('Final Validation MAE')
    plt.title('Final Validation MAE for Trials (val_mae < 0.9)')
    plt.legend(fontsize=15, loc='upper right', bbox_to_anchor=(1.15, 1), frameon=False)
    plt.grid()
    plt.tight_layout()
    plt.show()
else:
    print("0.9보다 작은 val_mae 값이 없습니다.")
Reloading Tuner from nim_tuner/nim_news_bi_tuner/tuner0.json
No description has been provided for this image