■ 금융지표와 뉴스를 활용한 멀티모달학습 기반의 순이자마진(NIM) 예측 모델¶


  • 2018.1.1 ~ 2024.8.31 : 은행의 NIM 일별 수치데이터

  • 2018.1.1 ~ 2024.8.31 : 한국은행의 통계지표 일별 수치데이터

  • 2018.1.1 ~ 2024.10.21 : 금융권관련 뉴스 일별 텍스트데이터

  • Multi-Modal 및 LSTM을 이용한 시계열 예측 모델


1. 환경구성¶

  • 1-1. 개발환경 버전확인

In [1]:
################################################################################
# Random-seed configuration (currently disabled — kept for reference)
################################################################################
import os

# Hide every CUDA device so all frameworks fall back to CPU-only execution.
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# os.environ['PYTHONHASHSEED'] = '0'

# import numpy as np
# import tensorflow as tf
# import random
# seed_value = 42
# np.random.seed(seed_value)
# tf.random.set_seed(seed_value)
# random.seed(seed_value)
################################################################################
In [2]:
################################################################################
# Detect the CUDA driver version, degrading gracefully on CPU-only machines.
#
# Leaves `cuda_version` holding one of:
#   'CUDA Version: <major>.<minor>'   - pycuda present and a device was found
#   'No CUDA-capable device found'    - pycuda present, no usable device
#   'pycuda not installed'            - pycuda import failed
#   '<error message>'                 - any other initialization failure
################################################################################
cuda_version = 'Cuda not installed'

try:
    import pycuda.driver as cuda

    # BUGFIX: do NOT `import pycuda.autoinit` here. autoinit calls cuInit and
    # creates a context at import time, which raises immediately on machines
    # without a GPU — that made the Device.count() guard below unreachable
    # (the notebook previously printed "cuInit failed: no CUDA-capable device
    # is detected" instead of the intended message). Initialize explicitly.
    cuda.init()
    if cuda.Device.count() > 0:
        version = cuda.get_version()
        cuda_version = f'CUDA Version: {version[0]}.{version[1]}'
    else:
        cuda_version = 'No CUDA-capable device found'
except ImportError:
    cuda_version = 'pycuda not installed'
except Exception as e:
    # Best-effort: surface the raw driver error text instead of crashing.
    cuda_version = f'{str(e)}'
    
In [3]:
import sys
import keras
import tensorflow as tf
import numpy as np
import matplotlib

# Print a banner-delimited summary of every framework version in use.
divider = "-" * 80
vi = sys.version_info

print(divider)
print(f"Python version : {vi.major}.{vi.minor}.{vi.micro}")
print(f"Keras version : {keras.__version__}")
print(f"Tensorflow version : {tf.__version__}")
print(f"CUDA version : {cuda_version}")
print(f"Numpy version : {np.__version__}")
print(f"Matplotlib version: {matplotlib.__version__}")
print(divider)
2024-11-19 19:41:59.001930: I tensorflow/core/util/port.cc:113] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.
2024-11-19 19:41:59.183666: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered
2024-11-19 19:41:59.183701: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered
2024-11-19 19:41:59.211624: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered
2024-11-19 19:41:59.271577: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.
To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.
2024-11-19 19:41:59.993677: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT
--------------------------------------------------------------------------------
Python version : 3.11.10
Keras version : 2.15.0
Tensorflow version : 2.15.0
CUDA version : cuInit failed: no CUDA-capable device is detected
Numpy version : 1.26.4
Matplotlib version: 3.9.2
--------------------------------------------------------------------------------
  • 1-2. 패키지 의존성확인

pip freeze > requirements.txt
pip install -r requirements.txt

# Copy requirements.txt to requirements_clean.txt, dropping every line that
# points at a local conda build artifact (file:// path) — those entries are
# not installable on other machines.
with open('requirements.txt', 'r') as src, open('requirements_clean.txt', 'w') as dst:
    dst.writelines(line for line in src if 'file://' not in line)
In [4]:
pip freeze
absl-py==2.1.0
accelerate==1.0.0
aggdraw==1.3.19
aiohappyeyeballs==2.4.3
aiohttp==3.10.9
aiosignal==1.3.1
ann_visualizer==2.5
anyio==4.6.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens @ file:///home/conda/feedstock_root/build_artifacts/asttokens_1698341106958/work
astunparse==1.6.3
async-lru==2.0.4
attrs==24.2.0
babel==2.16.0
beautifulsoup4==4.12.3
bleach==6.1.0
cachetools==5.5.0
certifi==2024.8.30
cffi==1.17.1
chardet==3.0.4
charset-normalizer==3.3.2
click==8.1.7
comm @ file:///home/conda/feedstock_root/build_artifacts/comm_1710320294760/work
contourpy==1.3.0
cycler==0.12.1
datasets==3.0.1
debugpy @ file:///home/conda/feedstock_root/build_artifacts/debugpy_1725269156501/work
decorator @ file:///home/conda/feedstock_root/build_artifacts/decorator_1641555617451/work
defusedxml==0.7.1
dill==0.3.8
entrypoints==0.4
exceptiongroup @ file:///home/conda/feedstock_root/build_artifacts/exceptiongroup_1720869315914/work
executing @ file:///home/conda/feedstock_root/build_artifacts/executing_1725214404607/work
fastjsonschema==2.20.0
fasttext==0.9.3
filelock==3.16.1
flatbuffers==24.3.25
fonttools==4.53.1
fqdn==1.5.1
frozenlist==1.4.1
fsspec==2024.6.1
gast==0.6.0
gensim==4.3.3
google-auth==2.34.0
google-auth-oauthlib==1.2.1
google-pasta==0.2.0
googletrans==4.0.0rc1
graphviz==0.20.3
grpcio==1.66.1
h11==0.14.0
h2==3.2.0
h5py==3.11.0
hpack==3.0.0
hstspreload==2024.9.1
httpcore==1.0.6
httpx==0.27.2
huggingface-hub==0.25.1
hyperframe==5.2.0
idna==2.10
imageio==2.36.0
imageio-ffmpeg==0.5.1
importlib_metadata @ file:///home/conda/feedstock_root/build_artifacts/importlib-metadata_1726082825846/work
ipykernel @ file:///home/conda/feedstock_root/build_artifacts/ipykernel_1719845459717/work
ipython @ file:///home/conda/feedstock_root/build_artifacts/ipython_1725050136642/work
ipywidgets==8.1.5
isoduration==20.11.0
jedi @ file:///home/conda/feedstock_root/build_artifacts/jedi_1696326070614/work
Jinja2==3.1.4
joblib==1.4.2
JPype1==1.5.0
json5==0.9.25
jsonpointer==3.0.0
jsonschema==4.23.0
jsonschema-specifications==2024.10.1
jupyter==1.1.1
jupyter-console==6.6.3
jupyter-events==0.10.0
jupyter-lsp==2.2.5
jupyter_client @ file:///home/conda/feedstock_root/build_artifacts/jupyter_client_1716472197302/work
jupyter_core @ file:///home/conda/feedstock_root/build_artifacts/jupyter_core_1710257359434/work
jupyter_server==2.14.2
jupyter_server_terminals==0.5.3
jupyterlab==4.2.5
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.3
jupyterlab_widgets==3.0.13
kagglehub==0.3.0
keras==2.15.0
keras-nlp==0.15.0
keras-tuner==1.4.7
kiwisolver==1.4.7
kobert-transformers==0.6.0
konlpy==0.6.0
kt-legacy==1.0.5
libclang==18.1.1
lxml==5.3.0
Mako==1.3.5
Markdown==3.7
markdown-it-py==3.0.0
MarkupSafe==2.1.5
matplotlib==3.9.2
matplotlib-inline @ file:///home/conda/feedstock_root/build_artifacts/matplotlib-inline_1713250518406/work
mdurl==0.1.2
mecab-python3==1.0.9
mistune==0.8.4
ml-dtypes==0.2.0
mpmath==1.3.0
multidict==6.1.0
multiprocess==0.70.16
namex==0.0.8
nbclient==0.10.0
nbconvert==5.6.1
nbformat==5.10.4
nest_asyncio @ file:///home/conda/feedstock_root/build_artifacts/nest-asyncio_1705850609492/work
networkx==3.3
nltk==3.9.1
notebook==7.2.2
notebook_shim==0.2.4
numpy==1.26.4
nvidia-cublas-cu12==12.1.3.1
nvidia-cuda-cupti-cu12==12.1.105
nvidia-cuda-nvrtc-cu12==12.1.105
nvidia-cuda-runtime-cu12==12.1.105
nvidia-cudnn-cu12==9.1.0.70
nvidia-cufft-cu12==11.0.2.54
nvidia-curand-cu12==10.3.2.106
nvidia-cusolver-cu12==11.4.5.107
nvidia-cusparse-cu12==12.1.0.106
nvidia-nccl-cu12==2.20.5
nvidia-nvjitlink-cu12==12.6.77
nvidia-nvtx-cu12==12.1.105
oauthlib==3.2.2
opt-einsum==3.3.0
optree==0.12.1
overrides==7.7.0
packaging @ file:///home/conda/feedstock_root/build_artifacts/packaging_1718189413536/work
pandas==2.2.2
pandocfilters==1.5.1
parso @ file:///home/conda/feedstock_root/build_artifacts/parso_1712320355065/work
patsy==0.5.6
pexpect @ file:///home/conda/feedstock_root/build_artifacts/pexpect_1706113125309/work
pickleshare @ file:///home/conda/feedstock_root/build_artifacts/pickleshare_1602536217715/work
pillow==10.4.0
platformdirs @ file:///home/conda/feedstock_root/build_artifacts/platformdirs_1726315398971/work
prometheus_client==0.21.0
prompt_toolkit @ file:///home/conda/feedstock_root/build_artifacts/prompt-toolkit_1718047967974/work
propcache==0.2.0
protobuf==4.25.4
psutil @ file:///home/conda/feedstock_root/build_artifacts/psutil_1725737916418/work
ptyprocess @ file:///home/conda/feedstock_root/build_artifacts/ptyprocess_1609419310487/work/dist/ptyprocess-0.7.0-py2.py3-none-any.whl
pure_eval @ file:///home/conda/feedstock_root/build_artifacts/pure_eval_1721585709575/work
pyarrow==17.0.0
pyasn1==0.6.1
pyasn1_modules==0.4.1
pybind11==2.13.6
pycparser==2.22
pycuda==2024.1.2
pydot @ file:///home/conda/feedstock_root/build_artifacts/pydot_1726737228028/work
Pygments @ file:///home/conda/feedstock_root/build_artifacts/pygments_1714846767233/work
pyparsing @ file:///home/conda/feedstock_root/build_artifacts/pyparsing_1724616129934/work
python-dateutil @ file:///home/conda/feedstock_root/build_artifacts/python-dateutil_1709299778482/work
python-json-logger==2.0.7
pytools==2024.1.14
pytz==2024.2
PyYAML==6.0.2
pyzmq @ file:///home/conda/feedstock_root/build_artifacts/pyzmq_1725448927736/work
referencing==0.35.1
regex==2024.9.11
requests==2.32.3
requests-oauthlib==2.0.0
rfc3339-validator==0.1.4
rfc3986==1.5.0
rfc3986-validator==0.1.1
rich==13.8.1
rouge_score==0.1.2
rpds-py==0.20.0
rsa==4.9
safetensors==0.4.5
scikit-learn==1.5.2
scipy==1.13.1
seaborn==0.13.2
Send2Trash==1.8.3
sentence-transformers==3.1.1
sentencepiece==0.2.0
six @ file:///home/conda/feedstock_root/build_artifacts/six_1620240208055/work
smart-open==7.0.5
sniffio==1.3.1
soupsieve==2.6
stack-data @ file:///home/conda/feedstock_root/build_artifacts/stack_data_1669632077133/work
statsmodels==0.14.4
sympy==1.13.3
tensorboard==2.15.2
tensorboard-data-server==0.7.2
tensorflow==2.15.0
tensorflow-estimator==2.15.0
tensorflow-hub==0.16.1
tensorflow-io-gcs-filesystem==0.37.1
tensorflow-text==2.15.0
termcolor==2.4.0
terminado==0.18.1
testpath==0.6.0
textblob==0.18.0.post0
tf_keras==2.15.1
threadpoolctl==3.5.0
tinycss2==1.3.0
tokenizers==0.20.0
torch==2.4.1
tornado @ file:///home/conda/feedstock_root/build_artifacts/tornado_1724956126282/work
tqdm==4.66.5
traitlets @ file:///home/conda/feedstock_root/build_artifacts/traitlets_1713535121073/work
transformers==4.45.2
triton==3.0.0
types-python-dateutil==2.9.0.20241003
typing_extensions @ file:///home/conda/feedstock_root/build_artifacts/typing_extensions_1717802530399/work
tzdata==2024.1
uri-template==1.3.0
urllib3==2.2.3
visualkeras==0.1.3
wcwidth @ file:///home/conda/feedstock_root/build_artifacts/wcwidth_1704731205417/work
webcolors==24.8.0
webencodings==0.5.1
websocket-client==1.8.0
Werkzeug==3.0.4
widgetsnbextension==4.0.13
wordcloud==1.9.3
wrapt==1.14.1
xxhash==3.5.0
yarl==1.14.0
zipp @ file:///home/conda/feedstock_root/build_artifacts/zipp_1726248574750/work
Note: you may need to restart the kernel to use updated packages.
  • 1-3. Matplot & Numpy 환경설정

sudo apt-get install fonts-nanum* # 폰트 설치
sudo fc-cache -fv # 폰트 캐시 강제 재생성
sudo fc-list | grep nanum # 폰트 설치 확인
rm -rf ~/.cache/matplotlib/* # matplotlib 캐시 제거
In [5]:
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm

import numpy as np

# NumPy display: print every element (no "..." truncation) and never wrap
# a row across lines. set_printoptions only updates the options it is given,
# so one combined call is equivalent to two separate ones.
np.set_printoptions(threshold=np.inf, linewidth=np.inf)

# Pandas display: full cell contents, up to 1000 rows, all columns,
# floats rendered with 8 decimal places.
pd.set_option('display.max_colwidth', None)
pd.set_option('display.max_rows', 1000)
pd.set_option('display.max_columns', None)
pd.options.display.float_format = '{:.8f}'.format

# Use the NanumGothic font so Hangul plot labels render, and draw the minus
# sign as plain ASCII (the Unicode minus is missing from many Korean fonts).
plt.rc('font', family='NanumGothic')
mpl.rcParams['axes.unicode_minus'] = False

# Sanity check: list the Nanum fonts matplotlib actually registered.
print([f.name for f in fm.fontManager.ttflist if 'Nanum' in f.name])
['NanumMyeongjo', 'NanumGothic', 'NanumMyeongjo', 'NanumSquareRound', 'NanumBarunpen', 'NanumBarunpen', 'NanumMyeongjo YetHangul', 'Nanum Brush Script', 'NanumSquare_ac', 'NanumGothicCoding', 'NanumGothic', 'NanumGothic', 'NanumBarunGothic YetHangul', 'NanumSquare_ac', 'NanumBarunGothic', 'NanumSquare', 'NanumSquareRound', 'NanumGothic Eco', 'NanumBarunGothic', 'NanumMyeongjo Eco', 'NanumMyeongjo Eco', 'NanumMyeongjo', 'NanumGothicCoding', 'NanumBarunGothic', 'NanumGothic Eco', 'NanumSquareRound', 'NanumSquare', 'NanumSquare', 'NanumSquare_ac', 'NanumSquareRound', 'NanumGothic Eco', 'NanumSquare_ac', 'NanumMyeongjo Eco', 'NanumSquare', 'Nanum Pen Script', 'NanumGothic', 'NanumMyeongjo Eco', 'NanumGothic Eco', 'NanumBarunGothic']

6. Network 설계¶

  • 6-1. 예측모델 시계열데이터셋 정의(Keras timeseries_dataset_from_array)

In [6]:
import numpy as np

# Load every preprocessed array for the time-series model from data/numpy/,
# echoing a preview of each so the notebook output documents what was read.

# nim_date: date label for each sample row ('YYYY-MM-DD' strings; allow_pickle
# because the array holds Python objects).
nim_date = np.load('data/numpy/nim_date.npy', allow_pickle=True)
print(f"nim_date loaded: data/numpy/nim_date.npy")
print(f"nim_date[:10]: {nim_date[:10]}")
print('-'*80)

# target_values: training target — standardized, differenced NIM values.
target_values = np.load('data/numpy/target_values.npy')
print(f"target_values loaded: data/numpy/target_values.npy")
print(f"target_values[:10]: {target_values[:10]}")
print('-'*80)

# nim_variables: scalar metadata packed into one array —
# [0..3] split sizes, [4..7] standardization statistics, [8] threshold.
nim_variables = np.load('data/numpy/nim_variables.npy')
print(f"nim_variables loaded: data/numpy/nim_variables.npy")

total_size = int(nim_variables[0])   # total number of sample rows
train_size = int(nim_variables[1])   # rows in the training split
val_size   = int(nim_variables[2])   # rows in the validation split
test_size  = int(nim_variables[3])   # rows in the test split
nim_train_mean = nim_variables[4]    # mean of NIM over the training split
nim_train_std  = nim_variables[5]    # std of NIM over the training split
target_train_mean = nim_variables[6] # mean of the target over the training split
target_train_std  = nim_variables[7] # std of the target over the training split
correct_threshold = nim_variables[8] # tolerance used to count a prediction "correct"

# Sizes are also shown in days; the /24 implies 24 sample rows per day
# (consistent with the day counts printed in the cell output).
print(f"total_size: {total_size}, {total_size/24}일")
print(f"train_size: {train_size}, {train_size/24}일")
print(f"val_size  : {val_size}, {val_size/24}일")
print(f"test_size : {test_size}, {test_size/24}일")
print(f"nim_train_mean: {nim_train_mean:+.8f}")
print(f"nim_train_std : {nim_train_std:+.8f}")
print(f"target_train_mean: {target_train_mean:+.8f}")
print(f"target_train_std : {target_train_std:+.8f}")
print(f"correct_threshold : {correct_threshold:+.8f}")
print('-'*80)

# nim_ibks_data: standardized numeric features (shape (rows, 23) per output).
nim_ibks_data = np.load('data/numpy/nim_ibks_data.npy')
print(f"nim_ibks_data loaded: data/numpy/nim_ibks_data.npy")
print(f"nim_ibks_data.shape: {nim_ibks_data.shape}")  
print(nim_ibks_data[:10, :]) 
print('-'*80)

# nim_boks_data: standardized Bank-of-Korea statistics features
# (the original comment here was a copy-paste mislabel saying nim_news_data).
nim_boks_data = np.load('data/numpy/nim_boks_data.npy')
print(f"nim_boks_data loaded: data/numpy/nim_boks_data.npy")
print(f"nim_boks_data.shape: {nim_boks_data.shape}")  
print(nim_boks_data[:10, :]) 
print('-'*80)

# nim_news_data: standardized news-derived features (shape (rows, 101) per output).
nim_news_data = np.load('data/numpy/nim_news_data.npy')
print(f"nim_news_data loaded: data/numpy/nim_news_data.npy")
print(f"nim_news_data.shape: {nim_news_data.shape}")  
print(nim_news_data[:10, :]) 
print('-'*80)
nim_date loaded: data/numpy/nim_date.npy
nim_date[:10]: ['2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02' '2018-01-02']
--------------------------------------------------------------------------------
target_values loaded: data/numpy/target_values.npy
target_values[:10]: [2.57046096 2.57046355 2.57043122 2.57040007 2.57052615 2.57042324 2.57044529 2.57043156 2.57044085 2.5704819 ]
--------------------------------------------------------------------------------
nim_variables loaded: data/numpy/nim_variables.npy
total_size: 39144, 1631.0일
train_size: 27384, 1141.0일
val_size  : 7824, 326.0일
test_size : 3936, 164.0일
nim_train_mean: +1.69604716
nim_train_std : +0.18829625
target_train_mean: +0.00075180
target_train_std : +0.03153061
correct_threshold : +0.02288361
--------------------------------------------------------------------------------
nim_ibks_data loaded: data/numpy/nim_ibks_data.npy
nim_ibks_data.shape: (39144, 23)
[[ 0.78164967  0.98740583  0.40947391  1.16488859  0.41430169  0.63326199  0.08316508  1.7230817   1.15192276  0.09237283  0.60186645 -0.09195606  0.36697266  0.39283116  0.72249536  0.89143848  0.46964555  0.63428204  0.93241689  0.79204667  1.5229571   0.94421722  1.28230297]
 [ 0.78164753  0.98740429  0.4094717   1.16488792  0.41430054  0.63326013  0.08316416  1.72307371  1.15191908  0.09236951  0.60186448 -0.09195841  0.36697036  0.39282829  0.72249216  0.89143558  0.46964338  0.63427834  0.93241505  0.79204449  1.52295521  0.94421554  1.28230479]
 [ 0.78165156  0.98740719  0.40947586  1.16488917  0.41430269  0.63326363  0.08316588  1.7230887   1.151926    0.09237573  0.60186818 -0.091954    0.36697468  0.39283367  0.72249817  0.89144102  0.46964745  0.63428528  0.9324185   0.79204859  1.52295876  0.94421869  1.28230134]
 [ 0.78165268  0.98740799  0.40947701  1.16488952  0.41430329  0.6332646   0.08316636  1.72309287  1.15192792  0.09237746  0.60186921 -0.09195277  0.36697588  0.39283516  0.72249984  0.89144254  0.46964859  0.63428721  0.93241946  0.79204973  1.52295974  0.94421957  1.28230459]
 [ 0.78165424  0.98740912  0.40947863  1.16489     0.41430412  0.63326595  0.08316703  1.72309868  1.15193061  0.09237987  0.60187065 -0.09195106  0.36697756  0.39283725  0.72250217  0.89144465  0.46965017  0.63428991  0.9324208   0.79205132  1.52296112  0.9442208   1.282306  ]
 [ 0.78164679  0.98740376  0.40947094  1.1648877   0.41430015  0.6332595   0.08316385  1.72307098  1.15191781  0.09236838  0.6018638  -0.09195921  0.36696957  0.39282732  0.72249107  0.89143459  0.46964263  0.63427707  0.93241442  0.79204374  1.52295456  0.94421497  1.28230608]
 [ 0.78165268  0.987408    0.40947701  1.16488952  0.41430329  0.6332646   0.08316636  1.72309287  1.15192793  0.09237746  0.60186921 -0.09195277  0.36697589  0.39283516  0.72249984  0.89144254  0.46964859  0.63428722  0.93241947  0.79204973  1.52295974  0.94421957  1.28231306]
 [ 0.78164535  0.98740273  0.40946946  1.16488725  0.41429938  0.63325825  0.08316324  1.72306563  1.15191534  0.09236616  0.60186248 -0.09196079  0.36696802  0.3928254   0.72248893  0.89143264  0.46964118  0.63427459  0.93241318  0.79204228  1.52295329  0.94421384  1.28230333]
 [ 0.78164877  0.98740518  0.40947298  1.16488831  0.41430121  0.63326121  0.0831647   1.72307834  1.15192121  0.09237143  0.60186562 -0.09195705  0.36697169  0.39282995  0.72249402  0.89143726  0.46964463  0.63428048  0.93241611  0.79204575  1.5229563   0.94421652  1.28230544]
 [ 0.78165156  0.98740719  0.40947586  1.16488917  0.41430269  0.63326363  0.08316588  1.7230887   1.151926    0.09237573  0.60186818 -0.091954    0.36697468  0.39283367  0.72249817  0.89144102  0.46964745  0.63428528  0.9324185   0.79204859  1.52295876  0.9442187   1.28230834]]
--------------------------------------------------------------------------------
nim_boks_data loaded: data/numpy/nim_boks_data.npy
nim_boks_data.shape: (39144, 9)
[[ 0.6632433   0.80555514  0.57183998  0.69671882 -1.59971295 -1.14715363 -0.99772466 -0.30659251  1.28230297]
 [ 0.66324144  0.80555305  0.57183435  0.69671749 -1.59971297 -1.14715363 -0.99772466 -0.30659251  1.28230479]
 [ 0.66324493  0.80555697  0.57184492  0.69671997 -1.59971294 -1.14715363 -0.99772466 -0.30659251  1.28230134]
 [ 0.6632459   0.80555806  0.57184786  0.69672066 -1.59971293 -1.14715363 -0.99772466 -0.30659251  1.28230459]
 [ 0.66324726  0.80555959  0.57185196  0.69672163 -1.59971292 -1.14715363 -0.99772466 -0.30659251  1.282306  ]
 [ 0.6632408   0.80555233  0.57183242  0.69671704 -1.59971298 -1.14715363 -0.99772466 -0.30659251  1.28230608]
 [ 0.6632459   0.80555807  0.57184787  0.69672067 -1.59971293 -1.14715363 -0.99772466 -0.30659251  1.28231306]
 [ 0.66323956  0.80555093  0.57182865  0.69671615 -1.59971299 -1.14715363 -0.99772466 -0.30659251  1.28230333]
 [ 0.66324252  0.80555426  0.57183761  0.69671826 -1.59971296 -1.14715363 -0.99772466 -0.30659251  1.28230544]
 [ 0.66324493  0.80555697  0.57184493  0.69671998 -1.59971294 -1.14715363 -0.99772466 -0.30659251  1.28230834]]
--------------------------------------------------------------------------------
nim_news_data loaded: data/numpy/nim_news_data.npy
nim_news_data.shape: (39144, 101)
[[-3.21269972e-01  4.25721923e-01 -3.52086240e-01  6.39228666e-01  5.28767333e-01 -9.67022364e-01 -3.44325945e-01 -2.30847090e-01  1.22352447e+00  2.45458207e-01 -7.55765683e-01 -3.65414217e-01  1.20298128e-01 -4.86304074e-01 -2.95807418e-01  5.46162719e-01  5.55639058e-01 -1.15033933e+00 -5.63106876e-01 -3.44310166e-01 -3.09211696e-02 -4.48014318e-02 -2.31431534e-01 -3.07388497e-01 -1.06730066e-02 -1.57194347e-01  6.59067386e-01  2.59235765e-01 -7.43907628e-02  3.27744041e-01 -4.37445740e-01 -2.24102351e-01 -2.29624634e-01 -1.06893989e-01 -3.70324180e-02  2.62310231e-02  7.27176592e-02 -5.69934075e-02  1.20357218e-01  5.21648238e-03  3.22776185e-01  1.07155038e-02  1.37041960e-01  1.47687856e-01  5.95450324e-02  2.87496824e-01  1.21037368e-02  2.24911523e-02  1.38345759e-01  3.70268930e-03 -2.10831639e-01  9.20462558e-02  2.26445057e-01  1.59386647e-01  2.15273854e-01  1.28179696e-01 -1.36940843e-01 -4.44203661e-02  8.21991116e-02 -2.49547563e-01 -7.04727233e-02 -5.29950160e-02  1.44789239e-01  1.84884282e-01  6.81178079e-02  2.30137795e-01  1.68263845e-02 -1.33812564e-01  2.83593880e-02  1.28650181e-02 -4.27196596e-02 -2.48349046e-02 -8.77755292e-02  7.46599920e-02 -8.91660051e-02  6.46458549e-02 -2.61607201e-02  2.51290398e-02  6.72080709e-02 -6.20256872e-04  1.02442439e-01 -4.16550801e-02  1.01890516e-01 -1.22049135e-02  9.18895848e-02 -2.88116433e-02 -5.97582966e-03  4.45473853e-02  2.51564855e-02  1.14570520e-01 -1.25357642e-02  1.55878033e-04 -3.28692335e-02  4.83670431e-02 -3.78450848e-02  4.66957552e-02 -2.89561512e-02  5.12417630e-02 -9.58678188e-03 -5.69626638e-02  1.28230297e+00]
 [-1.30576890e+00 -5.38964188e-01  1.31110424e-01  1.45180632e+00 -3.50945618e-01 -2.74151436e-01 -1.01303931e+00 -1.07883352e+00  2.51289204e-01 -6.07429407e-01  1.94365721e-01 -2.88463916e-01 -8.28129204e-01  1.55055151e-01  1.28618407e-01 -7.51208327e-01 -5.88566340e-02 -8.80124549e-02 -2.70386184e-01  6.73099222e-02 -3.30599765e-01  2.34785253e-01  2.04356632e-01  3.42687136e-01  3.75157001e-01 -2.81372935e-01  6.27055802e-02  3.85684253e-01  2.70104788e-01 -2.66381014e-01 -4.90956648e-01 -2.45607248e-01  1.20412043e-01 -1.55631362e-01  1.23070295e-01  1.93881787e-02  1.75469100e-01 -7.36672636e-02 -3.31112154e-02  3.88896906e-02 -6.98907877e-02 -9.07315719e-02  1.62365813e-02  4.32917245e-02 -1.29793506e-01 -1.51174043e-01 -5.37238444e-02  3.27988985e-01  1.61934107e-03 -6.32230354e-02 -9.03977904e-02 -1.41007790e-01 -1.20890848e-01  2.41315429e-01  9.37474585e-02  1.02785815e-01  1.48455023e-01 -1.26487465e-01  6.39170641e-02  3.19237640e-03  4.26666343e-02 -2.10277484e-01  8.99496377e-03 -2.64442825e-02 -5.57621335e-02  1.17964761e-02 -1.34700889e-02  5.02932129e-02 -2.52842658e-02 -7.72335679e-02  9.85545439e-02  1.51742079e-01  6.57555495e-02 -4.30089367e-02 -3.94387233e-02  8.37002993e-02  2.22470142e-02 -3.66661715e-02 -9.85167828e-02 -5.96211594e-02  1.56065808e-01 -1.08879299e-01 -1.17414990e-01 -5.16350032e-02  1.82690756e-02  1.29885237e-02 -4.58040806e-02  2.50447800e-02 -2.34057540e-02 -2.35293953e-02  4.93221952e-03 -5.01275375e-02 -3.36120790e-02 -8.09844933e-02  5.30705948e-02  5.73111157e-03  1.18357744e-02  9.12190249e-03  4.27269170e-02 -2.15149480e-02  1.28230479e+00]
 [ 1.80298902e-01 -1.50027174e+00  4.19122029e-01  1.11391361e+00  8.16388702e-01  4.02108680e-01 -1.47265521e+00  1.45233601e-01  5.19004059e-01 -9.75264671e-01 -1.29708753e-01  4.58754338e-02  1.74247953e-01  4.87775677e-01 -5.95867222e-01 -4.51018321e-01 -3.66926761e-01 -1.16012678e-01 -1.21957339e-01 -6.07197219e-02  2.10701895e-01  2.09249735e-01 -7.41814223e-02  6.13948841e-01  1.61169857e-01  1.41012670e-01 -2.29891577e-01  9.33399558e-02 -5.54969561e-03 -2.38702110e-01 -3.05838617e-01  1.12142846e-01  5.89067298e-02 -6.38989408e-02  8.76372412e-02  1.52084789e-01  1.17410953e-01  9.05251310e-02  3.21258967e-01  3.01861847e-02  1.21952521e-01 -2.60760024e-03  1.64084688e-02  9.29529088e-02 -1.33525291e-02 -8.96722159e-02 -8.47692794e-02  9.69188358e-02  1.30939460e-01 -2.02076322e-01 -1.23681977e-01 -1.84996882e-01 -2.67827099e-02 -7.40974267e-02  9.08866944e-02 -7.23709911e-02  1.90325980e-02 -1.15503171e-01 -1.87678821e-01 -3.42230217e-02  3.73324913e-02  2.03735932e-01  4.59584819e-03  3.70995013e-02 -2.60227592e-02  2.08684828e-01  1.81270364e-02  3.06477801e-02 -6.58411347e-02 -9.90582196e-02 -4.98292628e-02  5.75425819e-02  1.45598850e-02  3.35949196e-02 -7.17613088e-02 -3.38339653e-02  4.82667258e-02  7.52679219e-02  5.13305417e-02  3.12103033e-02  4.08338929e-02 -1.02357536e-01 -3.38210418e-02 -3.60488699e-02  6.37269726e-02  3.47748348e-02  7.08931623e-02  4.20765901e-02 -1.23684608e-02  5.04750183e-02  2.12338336e-02  6.92934230e-03 -1.32178219e-02  9.81234196e-03  2.37166171e-02 -4.62458089e-02 -2.99038983e-03 -2.35328812e-02 -1.10941417e-02 -7.16783241e-03  1.28230134e+00]
 [-1.01733204e+00 -8.68363504e-01  2.90595525e-01  4.50939520e-01  1.18121353e+00 -8.65090482e-01 -4.26211932e-01 -6.94817199e-01 -3.83429046e-01 -1.01022834e+00 -5.42709651e-02  2.62695296e-01  1.64884290e-01  2.67129378e-01 -3.36263722e-01 -7.92369731e-01  4.17727201e-02 -2.51804588e-01 -3.92099105e-01 -2.79003619e-01  6.56881584e-01 -3.06061458e-01 -1.25123554e-01  1.51121318e-01  1.46915832e-01  1.73057539e-01 -4.35388224e-01 -2.89454450e-01 -2.80835787e-01  1.23248126e-01 -3.03880296e-02 -2.78149054e-01  5.55632895e-01 -1.49925580e-01 -1.37513449e-01 -1.15598653e-01  4.30702975e-01  2.53120207e-01  1.30816841e-01 -7.21264260e-02  1.54528953e-01  1.73303827e-01  1.10298203e-01 -2.41805773e-01  1.63539839e-01  1.78097882e-01 -1.65327160e-01  8.02551630e-02 -3.00919046e-01 -3.90024796e-02 -1.78243387e-01  1.63740958e-03 -5.07516456e-02 -1.36444395e-02  1.95769064e-01 -1.92471721e-02  1.43736945e-02  5.20614633e-02 -1.31644746e-01 -1.44126031e-02  4.50768910e-02 -6.90899467e-02  1.38580050e-02  1.06710988e-01 -1.64584807e-01  1.11200167e-02 -1.03878294e-01  5.60211584e-02  2.83419964e-02 -5.65137618e-03  1.02834066e-01  4.22094307e-02 -1.75670401e-02 -1.13406305e-01 -1.53004062e-01  1.54011283e-02  9.62681453e-02  8.17058042e-02 -1.08774204e-02 -1.54644592e-01  3.78621826e-02  5.17711364e-02  6.47454952e-02  6.29523366e-02 -2.48572573e-03  6.14989460e-03 -5.12300189e-02 -4.10786235e-02  4.45220802e-03  5.85235781e-02  3.36176696e-02 -3.90017189e-02 -3.00415182e-02 -7.83515236e-02 -2.08036982e-02  6.93567619e-02  3.11544505e-02 -5.29258692e-03 -5.46181450e-03  3.81291670e-02  1.28230459e+00]
 [-6.35537258e-01 -7.88652918e-01  5.05977356e-01  1.99744840e+00  8.18018075e-02 -5.56292332e-01 -1.17200825e+00 -5.32438121e-01 -3.08672342e-01 -8.22804742e-01  9.23069998e-02 -3.55605482e-02 -5.15167863e-01  5.71313189e-01 -2.15937287e-01 -5.85257657e-01 -3.89727004e-01 -1.06987464e-01 -1.94299153e-01  1.99842306e-01  3.87441355e-01 -2.07694079e-01  2.04352154e-01  3.81120903e-01 -4.10722681e-03 -2.22701536e-02  3.60237793e-02  4.11293030e-01  1.93407539e-01 -3.09108281e-01  1.26664195e-01 -2.40280981e-01  5.16283911e-01 -1.47313118e-01  3.28316991e-01 -7.36525442e-02 -9.08486962e-02  1.42864138e-01 -9.60493534e-02  5.35346494e-02 -1.37765798e-02 -8.05569230e-02 -1.13682198e-02  6.06700931e-02  8.56152466e-02  7.14075711e-02 -4.26396785e-03  3.28382291e-01 -2.02444832e-01 -2.92036830e-01 -1.10017276e-01 -1.15564082e-01 -7.41276093e-02  2.43583415e-01  1.14271964e-01 -3.93560872e-02  4.48107054e-01 -8.97812811e-02 -2.13215846e-02  5.53847538e-02  1.76700368e-01 -9.29932965e-02  2.66930908e-01  1.36567478e-01 -1.11225845e-01  9.20075408e-02  6.30135281e-02  8.17109443e-02 -1.53729100e-01 -9.72230682e-02  5.89535165e-02  1.16877865e-01  7.03110364e-03 -1.25809083e-01 -8.74271519e-02 -3.37076583e-02  3.01637957e-02  7.42931308e-02 -2.03077846e-01 -7.98662192e-02  8.00031495e-02 -1.01923219e-01 -6.45972756e-02  4.90417047e-02  1.76806224e-01  6.94059355e-02 -6.85188849e-02 -1.80968096e-02  7.23166649e-03  1.08799323e-01  3.57952524e-02 -8.48567716e-02 -7.09268651e-03 -2.46810485e-02  6.40714487e-02  3.85997989e-02  1.47874763e-02  2.53550457e-02  5.01834835e-02 -1.03064419e-02  1.28230600e+00]
 [ 1.92403173e+00 -9.06651954e-01  3.45925612e-01  1.02024598e+00 -7.23989979e-01  8.79381622e-01 -6.90258264e-01  9.14184773e-01  2.68664563e-01  2.21018542e-01 -2.22348418e-02  3.04567699e-01  2.15455483e-01  8.49679806e-02  1.93949184e-01  2.86668224e-01 -5.11778817e-01 -1.53468583e-01  4.01886815e-01  6.93800394e-02 -2.02198996e-01  1.33213014e-01  1.74342644e-01  8.78588908e-02 -2.19816329e-03 -1.38317134e-01 -1.66678592e-01 -4.72567337e-01 -3.91933387e-01 -1.90025167e-01 -3.89011238e-01 -1.50954716e-01 -1.98773591e-01  4.12574775e-01 -3.14889975e-02 -6.08784998e-02  3.04334578e-02  1.69543764e-01 -1.65583741e-01 -1.58850073e-01  1.44643582e-01  3.69910163e-01 -2.86390486e-02 -3.61015234e-02  3.94083253e-02 -2.87614401e-01 -6.03650913e-02 -7.67296265e-02  1.97179294e-02  6.37607818e-02  3.57080021e-02  5.52461326e-02  1.40215766e-01 -1.81047532e-02 -5.35740670e-02 -1.32385272e-01  2.73121921e-01  7.53945708e-02  6.91357362e-02  1.80133027e-01 -1.12755837e-01  1.18390879e-01  9.71417990e-02 -9.79793596e-02  8.62659661e-02 -5.78292059e-02  1.02916006e-01 -7.64502304e-03 -1.20308884e-01  2.31508734e-02 -9.49047741e-02 -2.45140924e-02 -5.56500378e-02 -9.18271550e-02 -4.67029686e-02  1.19434295e-01  3.26407925e-02 -5.99616006e-03 -3.54270839e-02 -2.60247337e-03  6.12898848e-02  2.46778580e-02 -1.64067261e-02 -3.10123476e-04 -3.68817695e-02 -1.83276792e-01 -8.66330974e-02 -9.13201850e-03 -2.52741829e-02 -1.56894140e-02 -1.13471505e-02 -1.91800351e-03  2.17314305e-02  1.31481795e-02 -1.22045525e-02  2.61854171e-03 -1.85250753e-03  7.55013648e-03 -7.23273140e-02  9.30820281e-03  1.28230608e+00]
 [-4.43503705e-01  1.21190978e+00  2.27526756e+00  8.61053998e-01  4.96450951e-01 -1.10576027e-01 -4.88841805e-01  2.27681388e-01 -8.75512433e-02  2.96258700e-01 -5.11201084e-02  3.25762165e-01  2.41716377e-01  6.71144474e-01 -4.15789550e-01 -3.65616793e-01 -1.31112153e-01 -3.18736234e-01  1.27273149e-01 -7.41295590e-02 -2.54937328e-01 -1.91410179e-02 -3.21161591e-02  2.60261739e-01 -3.66179068e-02 -6.40712651e-02 -4.09942676e-01  1.87608917e-01  4.08066046e-02  8.80045247e-02  3.26679896e-01 -5.59056156e-02 -2.58782561e-01 -2.54115126e-02  9.46058370e-02 -9.14124296e-02 -1.58784609e-01  2.16818257e-03  9.06225213e-02 -3.48202163e-03  3.28290890e-01  2.10196894e-01 -1.78280214e-01 -8.73986838e-02 -6.87487000e-02  1.01218364e-01 -8.56778849e-02 -1.26383070e-01 -1.67912601e-01 -7.50062441e-02 -1.89698322e-02 -6.13106280e-02 -1.37099691e-01 -4.22772696e-02  6.83539452e-02  1.39371812e-01  8.77523271e-02  8.62325576e-03  8.60724203e-02 -1.43550589e-01 -3.87391855e-02 -1.44570484e-01 -1.33894022e-01 -2.41202228e-02  8.29372055e-02 -4.63896888e-02  4.88663076e-02  8.08844506e-02 -2.65122265e-02 -5.76159016e-02  4.16713861e-02  9.11568539e-02  4.46821497e-03  3.61224543e-02  1.07917562e-01  2.09562091e-02  8.63427809e-03  5.62428191e-02  5.75368009e-02  2.79757565e-02  3.56714601e-02 -5.51023519e-02 -2.62609334e-02  3.17442564e-02 -1.07712363e-01  4.63453120e-02  4.20320170e-02 -2.67719703e-02  4.44424674e-02  3.24943222e-02 -5.25302904e-02  6.53444441e-02  1.28916871e-02  4.96754944e-02 -4.93975719e-02  2.83177797e-02  6.61680495e-02  1.36363423e-02 -2.39008054e-02 -1.38093373e-02  1.28231306e+00]
 [-1.30576890e+00 -5.38964188e-01  1.31110424e-01  1.45180632e+00 -3.50945618e-01 -2.74151436e-01 -1.01303931e+00 -1.07883352e+00  2.51289204e-01 -6.07429407e-01  1.94365721e-01 -2.88463916e-01 -8.28129204e-01  1.55055151e-01  1.28618407e-01 -7.51208327e-01 -5.88566340e-02 -8.80124549e-02 -2.70386184e-01  6.73099222e-02 -3.30599765e-01  2.34785253e-01  2.04356632e-01  3.42687136e-01  3.75157001e-01 -2.81372935e-01  6.27055802e-02  3.85684253e-01  2.70104788e-01 -2.66381014e-01 -4.90956648e-01 -2.45607248e-01  1.20412043e-01 -1.55631362e-01  1.23070295e-01  1.93881787e-02  1.75469100e-01 -7.36672636e-02 -3.31112154e-02  3.88896906e-02 -6.98907877e-02 -9.07315719e-02  1.62365813e-02  4.32917245e-02 -1.29793506e-01 -1.51174043e-01 -5.37238444e-02  3.27988985e-01  1.61934107e-03 -6.32230354e-02 -9.03977904e-02 -1.41007790e-01 -1.20890848e-01  2.41315429e-01  9.37474585e-02  1.02785815e-01  1.48455023e-01 -1.26487465e-01  6.39170641e-02  3.19237640e-03  4.26666343e-02 -2.10277484e-01  8.99496377e-03 -2.64442825e-02 -5.57621335e-02  1.17964761e-02 -1.34700889e-02  5.02932129e-02 -2.52842658e-02 -7.72335679e-02  9.85545439e-02  1.51742079e-01  6.57555495e-02 -4.30089367e-02 -3.94387233e-02  8.37002993e-02  2.22470142e-02 -3.66661715e-02 -9.85167828e-02 -5.96211594e-02  1.56065808e-01 -1.08879299e-01 -1.17414990e-01 -5.16350032e-02  1.82690756e-02  1.29885237e-02 -4.58040806e-02  2.50447800e-02 -2.34057540e-02 -2.35293953e-02  4.93221952e-03 -5.01275375e-02 -3.36120790e-02 -8.09844933e-02  5.30705948e-02  5.73111157e-03  1.18357744e-02  9.12190249e-03  4.27269170e-02 -2.15149480e-02  1.28230333e+00]
 [-5.40261841e-01  1.02158335e-01  1.14313931e-01  2.91786960e-01  1.61415152e-01 -1.11324012e+00 -3.61694382e-01 -5.35031385e-01  1.36007295e+00  3.12656478e-02 -7.32548914e-01 -2.76274283e-01 -4.75872186e-02 -1.58142039e-01 -6.84763898e-03  4.62738639e-01  2.53757353e-01 -1.19479907e+00 -4.12559484e-01 -4.81440674e-01  3.93500283e-02 -2.03724357e-01 -1.26718461e-01 -2.16742290e-01  9.22770595e-02 -1.78484864e-01  3.88398225e-01  6.75439352e-02 -1.64348114e-01  3.39249351e-01 -1.89293164e-01 -1.93290141e-01 -3.12516256e-01 -4.11354756e-01  9.53983160e-02 -6.64026347e-02  1.72487653e-01  1.22584132e-02  5.69359529e-02 -9.08385481e-02  3.17709786e-01 -7.05092112e-02  2.62805606e-01  2.08699363e-01  7.66345276e-02  2.37388943e-01  2.36151524e-02 -1.42559818e-01  2.24378303e-01  2.27239039e-01 -1.47474977e-01  9.99877672e-02  1.77765047e-01  1.31089300e-01  1.81667988e-01  7.85458737e-02 -2.12741173e-01  1.06174591e-01  8.13766108e-02 -2.62302596e-01 -5.87985571e-02 -7.97311490e-02  1.64571323e-01  1.99721792e-01  1.53670099e-03  8.12070686e-02  6.86185585e-03 -9.75782304e-03 -4.35908454e-02  9.55609254e-03 -1.14349201e-01 -3.70303665e-02  5.40343891e-03  8.10039819e-02 -5.84688006e-02  6.86399442e-02 -1.24282585e-02  5.37200383e-02  1.10360663e-01 -2.35159285e-02  1.18523142e-01 -4.08719608e-02  8.60593956e-02  1.05485371e-02  8.70744893e-03 -1.13329344e-02  4.58502434e-02  3.02755456e-02  1.33386236e-02  6.04737123e-02 -2.94469544e-02 -3.60832944e-02 -1.14017563e-02  2.84535172e-02 -4.04422207e-02 -2.17376464e-02 -1.63598679e-02  7.31290652e-02  2.20734734e-02 -7.39749607e-02  1.28230544e+00]
 [-2.45478897e+00  1.76361302e+00 -1.36876451e+00 -3.15396667e+00 -1.93692302e+00  1.11209150e-01 -7.75010445e-01  1.03725248e+00 -2.68733286e-01 -7.42989137e-01  7.07420605e-02 -2.56040658e-01  2.89492172e-01  4.08595997e-02 -2.42686048e-01 -4.79918138e-02  3.31679445e-01 -1.47536990e-01  2.45470960e-01  3.21688987e-01  1.63768604e-01  1.74099455e-01 -8.32625406e-02  1.92451209e-01  1.19960755e-01 -3.33324072e-02  8.33694766e-02  3.58751885e-04  2.32443798e-02 -1.39414646e-02 -8.33333989e-02 -1.16792517e-01  1.22190609e-01  8.49390256e-02  3.64956478e-02 -2.16144923e-02  9.20784843e-02  1.22994123e-01 -6.30669008e-03  5.45434983e-03  5.41984827e-02  5.17779115e-03  6.91894164e-02 -7.02473580e-02 -4.60076237e-02  4.24343784e-02  1.20368441e-01 -4.23528588e-02 -1.21308789e-01  4.73142852e-02 -7.78274770e-02  2.41255679e-02 -4.09991172e-02 -8.60180674e-03 -3.39995386e-03 -3.09527340e-02 -1.81260030e-02 -1.98917292e-02  3.68429905e-02  3.28582837e-02  8.37604126e-02  1.12860391e-02  3.34422564e-02  4.63965641e-02  1.91137570e-02  1.72354935e-02  3.22273302e-02 -6.34201541e-02 -7.64316928e-02  3.19031694e-02 -2.85152621e-02 -6.82473374e-03 -4.61152032e-02 -2.51429387e-02 -2.59513773e-02  9.74393728e-03  3.92724073e-02  5.03542937e-02 -1.73151603e-02  6.38348560e-02 -1.55187032e-02  2.12355222e-02  1.62737324e-02 -1.39280678e-02  8.72181240e-03  6.44022092e-03  2.13289159e-03 -2.66198597e-02 -4.89825482e-02  1.17613642e-02  1.12883021e-02 -9.01426158e-03 -1.32374059e-02  7.54849796e-03  6.13205950e-03 -6.22374060e-03 -3.28824179e-02  1.35436734e-02  3.95233866e-03  1.04698335e-02  1.28230834e+00]]
--------------------------------------------------------------------------------
  • 30일간의 데이터를 넣고 5일 후의 차분값을 구하는 시계열 데이터셋
In [7]:
sequence_length = 30        # input window length: 30 samples per window (NOTE(review): the markdown above says 20 days — comment and prose had drifted from the code)
sampling_rate   = 24        # keep 1 sample out of every 24 (NOTE(review): looks carried over from an hourly-data example; with daily rows this spaces samples 24 days apart — confirm intended)
sequence_stride = 1         # successive windows start 1 sample apart
predict_term    = 5         # predict the value 5 days ahead
batch_size      = 128       # batch size

#------------------------------------------------------------------------------#
# Build the train/val/test window datasets for all three modalities through a
# single helper, so the windowing parameters stay consistent across inputs.
#------------------------------------------------------------------------------#
def _windowed_dataset(source, start, end):
    """Return a Keras timeseries dataset over source[start:end).

    The target slice is shifted by ``sequence_length * sampling_rate -
    sampling_rate`` so element 0 of the targets lines up with the last
    timestep of the first input window (standard Keras windowing offset).
    """
    shifted_targets = target_values[sequence_length * sampling_rate - sampling_rate:]
    return keras.utils.timeseries_dataset_from_array(
        data=source,
        targets=shifted_targets,
        sequence_length=sequence_length,
        sampling_rate=sampling_rate,
        sequence_stride=sequence_stride,
        shuffle=False,
        batch_size=batch_size,
        start_index=start,
        end_index=end,
    )

# Training split: [0, train_size)
ibks_train_dataset = _windowed_dataset(nim_ibks_data, 0, train_size)
boks_train_dataset = _windowed_dataset(nim_boks_data, 0, train_size)
news_train_dataset = _windowed_dataset(nim_news_data, 0, train_size)

# Validation split: [train_size, train_size + val_size)
ibks_val_dataset = _windowed_dataset(nim_ibks_data, train_size, train_size + val_size)
boks_val_dataset = _windowed_dataset(nim_boks_data, train_size, train_size + val_size)
news_val_dataset = _windowed_dataset(nim_news_data, train_size, train_size + val_size)

# Test split: ends predict_term * sampling_rate before the series end so each
# window still has a future target available.
ibks_test_dataset = _windowed_dataset(
    nim_ibks_data, train_size + val_size,
    len(nim_ibks_data) - (predict_term * sampling_rate))
boks_test_dataset = _windowed_dataset(
    nim_boks_data, train_size + val_size,
    len(nim_boks_data) - (predict_term * sampling_rate))
news_test_dataset = _windowed_dataset(
    nim_news_data, train_size + val_size,
    len(nim_news_data) - (predict_term * sampling_rate))
2024-11-19 19:42:01.798874: E external/local_xla/xla/stream_executor/cuda/cuda_driver.cc:274] failed call to cuInit: CUDA_ERROR_NO_DEVICE: no CUDA-capable device is detected
In [8]:
import tensorflow as tf
import time

################################################################################
# IBK 시계열 데이터셋 입력변수
################################################################################
def ibks_timeseris_dataset(dataset):
    """Re-key each (features, target) batch as ({'ibks_input': features}, target)."""
    for features, target in dataset:
        yield {'ibks_input': features}, target

#------------------------------------------------------------------------------#
# Wrap the IBK train/val/test batch streams as tf.data pipelines.  All three
# pipelines share one output signature, so it is declared once.
#------------------------------------------------------------------------------#
_ibks_signature = (
    {'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32)},
    tf.TensorSpec(shape=(None,), dtype=tf.float32),  # targets
)

ibks_train_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_train_dataset),
    output_signature=_ibks_signature,
)

ibks_val_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_val_dataset),
    output_signature=_ibks_signature,
)

ibks_test_input = tf.data.Dataset.from_generator(
    lambda: ibks_timeseris_dataset(ibks_test_dataset),
    output_signature=_ibks_signature,
)


################################################################################
# BOK 시계열 데이터셋 입력변수
################################################################################
def boks_timeseris_dataset(dataset):
    """Re-key each (features, target) batch as ({'boks_input': features}, target)."""
    for features, target in dataset:
        yield {'boks_input': features}, target

#------------------------------------------------------------------------------#
# Wrap the BOK train/val/test batch streams as tf.data pipelines, sharing a
# single output signature.
#------------------------------------------------------------------------------#
_boks_signature = (
    {'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32)},
    tf.TensorSpec(shape=(None,), dtype=tf.float32),  # targets
)

boks_train_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_train_dataset),
    output_signature=_boks_signature,
)

boks_val_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_val_dataset),
    output_signature=_boks_signature,
)

boks_test_input = tf.data.Dataset.from_generator(
    lambda: boks_timeseris_dataset(boks_test_dataset),
    output_signature=_boks_signature,
)


################################################################################
# NEWS 시계열 데이터셋 입력변수
################################################################################
def news_timeseris_dataset(dataset):
    """Re-key each (features, target) batch as ({'news_input': features}, target)."""
    for features, target in dataset:
        yield {'news_input': features}, target

#------------------------------------------------------------------------------#
# Wrap the NEWS train/val/test batch streams as tf.data pipelines, sharing a
# single output signature.
#------------------------------------------------------------------------------#
_news_signature = (
    {'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32)},
    tf.TensorSpec(shape=(None,), dtype=tf.float32),  # targets
)

news_train_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_train_dataset),
    output_signature=_news_signature,
)

news_val_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_val_dataset),
    output_signature=_news_signature,
)

news_test_input = tf.data.Dataset.from_generator(
    lambda: news_timeseris_dataset(news_test_dataset),
    output_signature=_news_signature,
)


################################################################################
# IBK + BOK + NEWS 시계열 데이터셋 입력변수
################################################################################
def ibks_boks_news_timeseris_dataset(ibks_dataset, boks_dataset, news_dataset):
    """Zip the three per-modality batch streams into multi-input batches.

    Yields ({'ibks_input': ..., 'boks_input': ..., 'news_input': ...}, target)
    where the target is taken from the IBK stream; the BOK/NEWS targets are
    discarded (presumably all three streams carry the same aligned targets —
    TODO(review): confirm the three datasets are built over identical ranges).

    Fix: the original loop variables shadowed the function's own ``*_dataset``
    parameters, which made the code hard to read; batch-local names are now
    distinct from the arguments.
    """
    for (ibks_batch, target), (boks_batch, _), (news_batch, _) in zip(
            ibks_dataset, boks_dataset, news_dataset):
        yield {'ibks_input': ibks_batch,
               'boks_input': boks_batch,
               'news_input': news_batch}, target

#------------------------------------------------------------------------------#
# Wrap the fused (IBK + BOK + NEWS) train/val/test streams as tf.data
# pipelines; the three pipelines share one multi-input output signature.
#------------------------------------------------------------------------------#
_multi_signature = (
    {
        'ibks_input': tf.TensorSpec(shape=(None, sequence_length, nim_ibks_data.shape[-1]), dtype=tf.float32),
        'boks_input': tf.TensorSpec(shape=(None, sequence_length, nim_boks_data.shape[-1]), dtype=tf.float32),
        'news_input': tf.TensorSpec(shape=(None, sequence_length, nim_news_data.shape[-1]), dtype=tf.float32),
    },
    tf.TensorSpec(shape=(None,), dtype=tf.float32),  # targets
)

ibks_boks_news_train_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_train_dataset, boks_train_dataset, news_train_dataset),
    output_signature=_multi_signature,
)

ibks_boks_news_val_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_val_dataset, boks_val_dataset, news_val_dataset),
    output_signature=_multi_signature,
)

ibks_boks_news_test_input = tf.data.Dataset.from_generator(
    lambda: ibks_boks_news_timeseris_dataset(ibks_test_dataset, boks_test_dataset, news_test_dataset),
    output_signature=_multi_signature,
)
  • History Visualization
In [9]:
import numpy as np
import matplotlib.pyplot as plt

def plot_training_history(history):
    """Plot train vs. validation MAE (left) and loss (right) over epochs.

    history: dict-like with 'mae', 'val_mae', 'loss' and 'val_loss' lists
    (e.g. a Keras ``History.history`` mapping).
    """
    fig, (mae_ax, loss_ax) = plt.subplots(1, 2, figsize=(25, 10))

    # Left panel: MAE curves.
    mae_ax.plot(np.array(history['mae']),     label='Train MAE',      marker='', color='dodgerblue')
    mae_ax.plot(np.array(history['val_mae']), label='Validation MAE', marker='', color='darkorange')
    mae_ax.set_title('Training and Validation MAE Over Epochs')
    mae_ax.set_xlabel('Epochs')
    mae_ax.set_ylabel('MAE')
    mae_ax.legend(loc='upper right')
    mae_ax.grid()

    # Right panel: loss curves, dashed to visually distinguish from MAE.
    loss_ax.plot(np.array(history['loss']),     label='Train Loss',      linestyle='--', color='dodgerblue')
    loss_ax.plot(np.array(history['val_loss']), label='Validation Loss', linestyle='--', color='darkorange')
    loss_ax.set_title('Training and Validation Loss Over Epochs')
    loss_ax.set_xlabel('Epochs')
    loss_ax.set_ylabel('Loss')
    loss_ax.legend(loc='upper right')
    loss_ax.grid()

    # Tighten spacing and render.
    fig.tight_layout()
    plt.show()
  • 6-5. Keras Functional API를 이용한 다중입력 Network 설계
In [10]:
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.utils import plot_model
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
from tensorflow.keras import regularizers
from PIL import Image
import io
import keras.backend as K

#------------------------------------------------------------------------------#
# Destination of the best-model checkpoint written during training.
model_path = 'model'
model_name = 'nim_ibks_boks_news_prediction'
model_ext  = 'keras'
model_save_path = '{}/{}.{}'.format(model_path, model_name, model_ext)
#------------------------------------------------------------------------------#

################################################################################
# IBKS branch: stacked LSTMs (64 -> 32 -> 8) with BatchNorm after each layer.
################################################################################
ibks_input = keras.Input(shape=(sequence_length, nim_ibks_data.shape[-1]), name="ibks_input")

ibks_x = ibks_input
for ibks_units in (64, 32):
    ibks_x = layers.LSTM(ibks_units, recurrent_dropout=0.3, return_sequences=True)(ibks_x)
    ibks_x = layers.BatchNormalization()(ibks_x)

# Final recurrent layer drops return_sequences to collapse to one vector.
ibks_x = layers.LSTM(8, recurrent_dropout=0.3)(ibks_x)
ibks_x = layers.BatchNormalization()(ibks_x)

################################################################################
# BOKS branch: stacked GRUs (32 -> 16 -> 4) with BatchNorm after each layer.
################################################################################
boks_input = keras.Input(shape=(sequence_length, nim_boks_data.shape[-1]), name="boks_input")

boks_x = boks_input
for boks_units in (32, 16):
    boks_x = layers.GRU(boks_units, recurrent_dropout=0.3, return_sequences=True)(boks_x)
    boks_x = layers.BatchNormalization()(boks_x)

# Final recurrent layer drops return_sequences to collapse to one vector.
boks_x = layers.GRU(4, recurrent_dropout=0.3)(boks_x)
boks_x = layers.BatchNormalization()(boks_x)

################################################################################
# NEWS branch: deeper LSTM stack (128 -> 64 -> 16 -> 4) for the text features,
# BatchNorm after each layer.
################################################################################
news_input = keras.Input(shape=(sequence_length, nim_news_data.shape[-1]), name="news_input")

news_x = news_input
for news_units in (128, 64, 16):
    news_x = layers.LSTM(news_units, recurrent_dropout=0.3, return_sequences=True)(news_x)
    news_x = layers.BatchNormalization()(news_x)

# Final recurrent layer drops return_sequences to collapse to one vector.
news_x = layers.LSTM(4, recurrent_dropout=0.3)(news_x)
news_x = layers.BatchNormalization()(news_x)
################################################################################

# @keras.saving.register_keras_serializable()
# class TransformerBlock(layers.Layer):
#     def __init__(self, embed_dim, num_heads, ff_dim, rate=0.1):
#         super(TransformerBlock, self).__init__()
#         self.att = layers.MultiHeadAttention(num_heads=num_heads, key_dim=embed_dim)
#         self.ffn = keras.Sequential([
#             layers.Dense(ff_dim, activation="relu"),
#             layers.Dense(embed_dim),
#         ])
#         self.layernorm1 = layers.LayerNormalization(epsilon=1e-6)
#         self.layernorm2 = layers.LayerNormalization(epsilon=1e-6)
#         self.dropout1 = layers.Dropout(rate)
#         self.dropout2 = layers.Dropout(rate)

#     def call(self, inputs, training):
#         attn_output = self.att(inputs, inputs)
#         attn_output = self.dropout1(attn_output, training=training)
#         out1 = self.layernorm1(inputs + attn_output)
#         ffn_output = self.ffn(out1)
#         ffn_output = self.dropout2(ffn_output, training=training)
#         return self.layernorm2(out1 + ffn_output)


# news_input = keras.Input(shape=(sequence_length, nim_news_data.shape[-1]), name="news_input")

# # Transformer 레이어 스택
# embed_dim = nim_news_data.shape[-1]  # 입력의 임베딩 차원
# num_heads = 4  # 다중 헤드 수
# ff_dim = 128  # Feed Forward 네트워크 차원
# num_transformer_blocks = 6  # Transformer 블록 개수

# tx_x = news_input
# for _ in range(num_transformer_blocks):
#     tx_x = TransformerBlock(embed_dim, num_heads, ff_dim, rate=0.3)(tx_x)

# tx_x = layers.GlobalAveragePooling1D()(tx_x)
# news_x = layers.Dense(8, activation='relu')(tx_x)
# news_x = layers.LayerNormalization()(news_x)

################################################################################
# Fuse the three modality embeddings and regress the target with one unit.
################################################################################
merge_x = layers.concatenate([ibks_x, boks_x, news_x])
merge_x = layers.BatchNormalization()(merge_x)

# Dropout on the fused representation to curb overfitting.
input_x = layers.Dropout(0.5)(merge_x)

# Linear regression head with light L2 weight decay.
output_y = layers.Dense(1, kernel_regularizer=regularizers.l2(0.0001))(input_x)

# Multi-input model: one input per modality, single scalar output.
model = keras.Model(
    inputs=[ibks_input, boks_input, news_input],
    outputs=output_y,
)

################################################################################
# Callbacks: checkpoint the best model each epoch, and stop training early
# when validation MAE stalls.
################################################################################
callbacks = [
    ModelCheckpoint(
        filepath=model_save_path, 
        save_best_only=True,
        save_freq="epoch"
    )
    , # EarlyStopping configuration
    EarlyStopping(
        monitor="val_mae",
        patience=30,  # stop after 30 consecutive epochs without val_mae improvement (old comment said 5 — stale)
        restore_best_weights=True
    )
]

################################################################################
# Compile: MSE loss for the regression objective, MAE tracked as the metric.
################################################################################
_optimizer = keras.optimizers.Adam(learning_rate=0.0001)
model.compile(optimizer=_optimizer, loss="mse", metrics=["mae"])
In [11]:
from tensorflow.keras.utils import plot_model
import matplotlib.pyplot as plt
import matplotlib.image as mpimg

#------------------------------------------------------------------------------#
# Output locations for the architecture diagram and the Netron screenshot.
plot_path = 'image'
plot_name = f'{model_name}_plot'
plot_netron_name = f'{model_name}_netron_final'
plot_ext   = 'png'
plot_save_path = f'{plot_path}/{plot_name}.{plot_ext}'
plot_netron_save_path = f'{plot_path}/{plot_netron_name}.{plot_ext}'
#------------------------------------------------------------------------------#
# Render the model graph to disk (vertical, top-to-bottom layout).
plot_model(model, to_file=plot_save_path, show_shapes=True, show_layer_names=True, rankdir='TB')

# Load the generated diagram plus the pre-existing Netron screenshot.
model_img = mpimg.imread(plot_save_path)
netron_img = mpimg.imread(plot_netron_save_path)

# Side-by-side panels; the diagram gets 3x the width of the Netron image.
fig, axes = plt.subplots(1, 2, figsize=(25, 10), gridspec_kw={'width_ratios': [3, 1]}, dpi=300)

for panel_ax, (panel_img, panel_title) in zip(
        axes,
        [(model_img, 'Model Diagram'), (netron_img, 'Netron Visualization')]):
    panel_ax.imshow(panel_img)
    panel_ax.axis('off')  # hide axes around the images
    panel_ax.set_title(panel_title)

# Tighten the gap between the panels and render.
plt.subplots_adjust(wspace=0.05)
plt.tight_layout()
plt.show()

# Text summary of the architecture.
model.summary()
No description has been provided for this image
Model: "model"
__________________________________________________________________________________________________
 Layer (type)                Output Shape                 Param #   Connected to                  
==================================================================================================
 news_input (InputLayer)     [(None, 30, 101)]            0         []                            
                                                                                                  
 lstm_3 (LSTM)               (None, 30, 128)              117760    ['news_input[0][0]']          
                                                                                                  
 ibks_input (InputLayer)     [(None, 30, 23)]             0         []                            
                                                                                                  
 boks_input (InputLayer)     [(None, 30, 9)]              0         []                            
                                                                                                  
 batch_normalization_6 (Bat  (None, 30, 128)              512       ['lstm_3[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 lstm (LSTM)                 (None, 30, 64)               22528     ['ibks_input[0][0]']          
                                                                                                  
 gru (GRU)                   (None, 30, 32)               4128      ['boks_input[0][0]']          
                                                                                                  
 lstm_4 (LSTM)               (None, 30, 64)               49408     ['batch_normalization_6[0][0]'
                                                                    ]                             
                                                                                                  
 batch_normalization (Batch  (None, 30, 64)               256       ['lstm[0][0]']                
 Normalization)                                                                                   
                                                                                                  
 batch_normalization_3 (Bat  (None, 30, 32)               128       ['gru[0][0]']                 
 chNormalization)                                                                                 
                                                                                                  
 batch_normalization_7 (Bat  (None, 30, 64)               256       ['lstm_4[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 lstm_1 (LSTM)               (None, 30, 32)               12416     ['batch_normalization[0][0]'] 
                                                                                                  
 gru_1 (GRU)                 (None, 30, 16)               2400      ['batch_normalization_3[0][0]'
                                                                    ]                             
                                                                                                  
 lstm_5 (LSTM)               (None, 30, 16)               5184      ['batch_normalization_7[0][0]'
                                                                    ]                             
                                                                                                  
 batch_normalization_1 (Bat  (None, 30, 32)               128       ['lstm_1[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 batch_normalization_4 (Bat  (None, 30, 16)               64        ['gru_1[0][0]']               
 chNormalization)                                                                                 
                                                                                                  
 batch_normalization_8 (Bat  (None, 30, 16)               64        ['lstm_5[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 lstm_2 (LSTM)               (None, 8)                    1312      ['batch_normalization_1[0][0]'
                                                                    ]                             
                                                                                                  
 gru_2 (GRU)                 (None, 4)                    264       ['batch_normalization_4[0][0]'
                                                                    ]                             
                                                                                                  
 lstm_6 (LSTM)               (None, 4)                    336       ['batch_normalization_8[0][0]'
                                                                    ]                             
                                                                                                  
 batch_normalization_2 (Bat  (None, 8)                    32        ['lstm_2[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 batch_normalization_5 (Bat  (None, 4)                    16        ['gru_2[0][0]']               
 chNormalization)                                                                                 
                                                                                                  
 batch_normalization_9 (Bat  (None, 4)                    16        ['lstm_6[0][0]']              
 chNormalization)                                                                                 
                                                                                                  
 concatenate (Concatenate)   (None, 16)                   0         ['batch_normalization_2[0][0]'
                                                                    , 'batch_normalization_5[0][0]
                                                                    ',                            
                                                                     'batch_normalization_9[0][0]'
                                                                    ]                             
                                                                                                  
 batch_normalization_10 (Ba  (None, 16)                   64        ['concatenate[0][0]']         
 tchNormalization)                                                                                
                                                                                                  
 dropout (Dropout)           (None, 16)                   0         ['batch_normalization_10[0][0]
                                                                    ']                            
                                                                                                  
 dense (Dense)               (None, 1)                    17        ['dropout[0][0]']             
                                                                                                  
==================================================================================================
Total params: 217289 (848.79 KB)
Trainable params: 216521 (845.79 KB)
Non-trainable params: 768 (3.00 KB)
__________________________________________________________________________________________________
In [174]:
import tensorflow as tf
import time
import pickle
import os

#------------------------------------------------------------------------------#
# Training configuration and the path used to persist the Keras history dict.
# NOTE(review): `model_name` comes from an earlier cell -- this cell assumes the
# model-definition cell has already been executed.
epoch_cnt = 100

history_path = 'history'
history_name = f'{model_name}_history'
history_ext  = 'dict'
history_save_path = f'{history_path}/{history_name}.{history_ext}'
#------------------------------------------------------------------------------#

start_time = time.time()
print("="*80)

################################################################################
# Train the model (Model fitting)
################################################################################
# `model`, `callbacks` and the train/validation tf.data inputs are defined in
# earlier cells; `fit` returns a History object whose `.history` dict is
# pickled below so the learning curves can be re-plotted without retraining.
history = model.fit(
    ibks_boks_news_train_input,
    validation_data=ibks_boks_news_val_input,
    epochs=epoch_cnt,
    callbacks=callbacks
)

################################################################################
# Evaluate the model on validation data
################################################################################
# Unpacking into (loss, mae) assumes the model was compiled with MAE as its
# sole metric -- consistent with the `loss`/`mae` columns in the training log.
loss, mae = model.evaluate(ibks_boks_news_val_input)
print("-"*80)
print(f"Evaluate Validation Loss: {loss:.6f}")
print(f"Evaluate Validation MAE: {mae:.6f}")
print("-"*80)

################################################################################
# Save the training history
################################################################################
with open(history_save_path, 'wb') as history_file:
    pickle.dump(history.history, history_file)

end_time = time.time()
# Wall-clock duration formatted as HH:MM:SS (breaks past 24h; fine here).
elapsed_time = time.strftime("%H:%M:%S", time.gmtime(end_time - start_time))
print(f"Training complete. Total time: {elapsed_time}")
print("="*80)
================================================================================
Epoch 1/100
209/209 [==============================] - 34s 111ms/step - loss: 3.3035 - mae: 1.4192 - val_loss: 2.0362 - val_mae: 1.2137
Epoch 2/100
209/209 [==============================] - 19s 90ms/step - loss: 2.7176 - mae: 1.2862 - val_loss: 3.7241 - val_mae: 1.6832
Epoch 3/100
209/209 [==============================] - 19s 91ms/step - loss: 2.3455 - mae: 1.1946 - val_loss: 1.9435 - val_mae: 1.1444
Epoch 4/100
209/209 [==============================] - 19s 90ms/step - loss: 2.1329 - mae: 1.1387 - val_loss: 1.7661 - val_mae: 1.1078
Epoch 5/100
209/209 [==============================] - 19s 90ms/step - loss: 1.9319 - mae: 1.0870 - val_loss: 1.6070 - val_mae: 1.0487
Epoch 6/100
209/209 [==============================] - 19s 89ms/step - loss: 1.7434 - mae: 1.0314 - val_loss: 1.6088 - val_mae: 1.0540
Epoch 7/100
209/209 [==============================] - 19s 89ms/step - loss: 1.6077 - mae: 0.9896 - val_loss: 1.6825 - val_mae: 1.0796
Epoch 8/100
209/209 [==============================] - 19s 90ms/step - loss: 1.4960 - mae: 0.9517 - val_loss: 1.9868 - val_mae: 1.1826
Epoch 9/100
209/209 [==============================] - 19s 90ms/step - loss: 1.3986 - mae: 0.9204 - val_loss: 2.1415 - val_mae: 1.2435
Epoch 10/100
209/209 [==============================] - 19s 90ms/step - loss: 1.3140 - mae: 0.8894 - val_loss: 2.0003 - val_mae: 1.2017
Epoch 11/100
209/209 [==============================] - 19s 90ms/step - loss: 1.2435 - mae: 0.8676 - val_loss: 1.8351 - val_mae: 1.1480
Epoch 12/100
209/209 [==============================] - 19s 90ms/step - loss: 1.1972 - mae: 0.8494 - val_loss: 1.9994 - val_mae: 1.2111
Epoch 13/100
209/209 [==============================] - 19s 90ms/step - loss: 1.1219 - mae: 0.8200 - val_loss: 1.8114 - val_mae: 1.1363
Epoch 14/100
209/209 [==============================] - 19s 89ms/step - loss: 1.0775 - mae: 0.8035 - val_loss: 1.7403 - val_mae: 1.1046
Epoch 15/100
209/209 [==============================] - 19s 90ms/step - loss: 1.0304 - mae: 0.7859 - val_loss: 1.6589 - val_mae: 1.0768
Epoch 16/100
209/209 [==============================] - 19s 90ms/step - loss: 1.0031 - mae: 0.7709 - val_loss: 1.4744 - val_mae: 1.0074
Epoch 17/100
209/209 [==============================] - 19s 91ms/step - loss: 0.9657 - mae: 0.7570 - val_loss: 1.3240 - val_mae: 0.9512
Epoch 18/100
209/209 [==============================] - 19s 90ms/step - loss: 0.9244 - mae: 0.7423 - val_loss: 1.2641 - val_mae: 0.9382
Epoch 19/100
209/209 [==============================] - 19s 90ms/step - loss: 0.9045 - mae: 0.7343 - val_loss: 1.2650 - val_mae: 0.9436
Epoch 20/100
209/209 [==============================] - 19s 91ms/step - loss: 0.8819 - mae: 0.7218 - val_loss: 1.2665 - val_mae: 0.9469
Epoch 21/100
209/209 [==============================] - 19s 89ms/step - loss: 0.8577 - mae: 0.7141 - val_loss: 1.2558 - val_mae: 0.9378
Epoch 22/100
209/209 [==============================] - 19s 90ms/step - loss: 0.8350 - mae: 0.7039 - val_loss: 1.1926 - val_mae: 0.9131
Epoch 23/100
209/209 [==============================] - 19s 90ms/step - loss: 0.8244 - mae: 0.6991 - val_loss: 1.1656 - val_mae: 0.9050
Epoch 24/100
209/209 [==============================] - 19s 92ms/step - loss: 0.8043 - mae: 0.6898 - val_loss: 1.1610 - val_mae: 0.8992
Epoch 25/100
209/209 [==============================] - 21s 98ms/step - loss: 0.7884 - mae: 0.6814 - val_loss: 1.1934 - val_mae: 0.9102
Epoch 26/100
209/209 [==============================] - 20s 94ms/step - loss: 0.7795 - mae: 0.6778 - val_loss: 1.1773 - val_mae: 0.9074
Epoch 27/100
209/209 [==============================] - 20s 94ms/step - loss: 0.7633 - mae: 0.6694 - val_loss: 1.1397 - val_mae: 0.8966
Epoch 28/100
209/209 [==============================] - 19s 89ms/step - loss: 0.7517 - mae: 0.6650 - val_loss: 1.2276 - val_mae: 0.9242
Epoch 29/100
209/209 [==============================] - 19s 91ms/step - loss: 0.7409 - mae: 0.6585 - val_loss: 1.2952 - val_mae: 0.9450
Epoch 30/100
209/209 [==============================] - 19s 93ms/step - loss: 0.7324 - mae: 0.6567 - val_loss: 1.3882 - val_mae: 0.9843
Epoch 31/100
209/209 [==============================] - 19s 90ms/step - loss: 0.7209 - mae: 0.6519 - val_loss: 1.4402 - val_mae: 1.0039
Epoch 32/100
209/209 [==============================] - 19s 91ms/step - loss: 0.7058 - mae: 0.6436 - val_loss: 1.4981 - val_mae: 1.0285
Epoch 33/100
209/209 [==============================] - 20s 93ms/step - loss: 0.7005 - mae: 0.6417 - val_loss: 1.5437 - val_mae: 1.0482
Epoch 34/100
209/209 [==============================] - 19s 90ms/step - loss: 0.6943 - mae: 0.6375 - val_loss: 1.6397 - val_mae: 1.0880
Epoch 35/100
209/209 [==============================] - 19s 91ms/step - loss: 0.6821 - mae: 0.6326 - val_loss: 1.6131 - val_mae: 1.0751
Epoch 36/100
209/209 [==============================] - 20s 95ms/step - loss: 0.6696 - mae: 0.6274 - val_loss: 1.5798 - val_mae: 1.0647
Epoch 37/100
209/209 [==============================] - 19s 92ms/step - loss: 0.6576 - mae: 0.6221 - val_loss: 1.6279 - val_mae: 1.0808
Epoch 38/100
209/209 [==============================] - 19s 90ms/step - loss: 0.6501 - mae: 0.6170 - val_loss: 1.5425 - val_mae: 1.0481
Epoch 39/100
209/209 [==============================] - 21s 98ms/step - loss: 0.6358 - mae: 0.6112 - val_loss: 1.4241 - val_mae: 0.9926
Epoch 40/100
209/209 [==============================] - 19s 91ms/step - loss: 0.6333 - mae: 0.6114 - val_loss: 1.5032 - val_mae: 1.0247
Epoch 41/100
209/209 [==============================] - 20s 95ms/step - loss: 0.6254 - mae: 0.6073 - val_loss: 1.4053 - val_mae: 0.9876
Epoch 42/100
209/209 [==============================] - 20s 96ms/step - loss: 0.6200 - mae: 0.6053 - val_loss: 1.3968 - val_mae: 0.9958
Epoch 43/100
209/209 [==============================] - 20s 94ms/step - loss: 0.6127 - mae: 0.5994 - val_loss: 1.2462 - val_mae: 0.9290
Epoch 44/100
209/209 [==============================] - 19s 89ms/step - loss: 0.6110 - mae: 0.6004 - val_loss: 1.2932 - val_mae: 0.9368
Epoch 45/100
209/209 [==============================] - 19s 90ms/step - loss: 0.6059 - mae: 0.5969 - val_loss: 1.2020 - val_mae: 0.9054
Epoch 46/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5996 - mae: 0.5946 - val_loss: 1.2826 - val_mae: 0.9286
Epoch 47/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5995 - mae: 0.5930 - val_loss: 1.2093 - val_mae: 0.9081
Epoch 48/100
209/209 [==============================] - 20s 93ms/step - loss: 0.5967 - mae: 0.5921 - val_loss: 1.1953 - val_mae: 0.9054
Epoch 49/100
209/209 [==============================] - 21s 101ms/step - loss: 0.5926 - mae: 0.5912 - val_loss: 1.1527 - val_mae: 0.8901
Epoch 50/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5822 - mae: 0.5860 - val_loss: 1.1647 - val_mae: 0.8929
Epoch 51/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5811 - mae: 0.5862 - val_loss: 1.1313 - val_mae: 0.8837
Epoch 52/100
209/209 [==============================] - 19s 93ms/step - loss: 0.5830 - mae: 0.5863 - val_loss: 1.1284 - val_mae: 0.8826
Epoch 53/100
209/209 [==============================] - 20s 93ms/step - loss: 0.5752 - mae: 0.5829 - val_loss: 1.1913 - val_mae: 0.9046
Epoch 54/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5738 - mae: 0.5808 - val_loss: 1.1132 - val_mae: 0.8721
Epoch 55/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5753 - mae: 0.5833 - val_loss: 1.1496 - val_mae: 0.8815
Epoch 56/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5684 - mae: 0.5799 - val_loss: 1.0465 - val_mae: 0.8493
Epoch 57/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5633 - mae: 0.5782 - val_loss: 1.0496 - val_mae: 0.8527
Epoch 58/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5657 - mae: 0.5781 - val_loss: 1.0621 - val_mae: 0.8558
Epoch 59/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5657 - mae: 0.5770 - val_loss: 1.0562 - val_mae: 0.8442
Epoch 60/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5623 - mae: 0.5750 - val_loss: 1.0249 - val_mae: 0.8298
Epoch 61/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5593 - mae: 0.5753 - val_loss: 1.0133 - val_mae: 0.8276
Epoch 62/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5571 - mae: 0.5744 - val_loss: 0.9991 - val_mae: 0.8084
Epoch 63/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5539 - mae: 0.5719 - val_loss: 1.0269 - val_mae: 0.8253
Epoch 64/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5541 - mae: 0.5725 - val_loss: 0.9767 - val_mae: 0.7967
Epoch 65/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5550 - mae: 0.5726 - val_loss: 0.9758 - val_mae: 0.7916
Epoch 66/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5521 - mae: 0.5703 - val_loss: 0.9394 - val_mae: 0.7713
Epoch 67/100
209/209 [==============================] - 19s 88ms/step - loss: 0.5473 - mae: 0.5695 - val_loss: 0.9459 - val_mae: 0.7795
Epoch 68/100
209/209 [==============================] - 19s 88ms/step - loss: 0.5460 - mae: 0.5670 - val_loss: 1.0039 - val_mae: 0.8013
Epoch 69/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5452 - mae: 0.5664 - val_loss: 1.0211 - val_mae: 0.8083
Epoch 70/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5463 - mae: 0.5680 - val_loss: 0.9502 - val_mae: 0.7745
Epoch 71/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5421 - mae: 0.5657 - val_loss: 0.8590 - val_mae: 0.7386
Epoch 72/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5441 - mae: 0.5681 - val_loss: 0.9411 - val_mae: 0.7587
Epoch 73/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5415 - mae: 0.5667 - val_loss: 0.8647 - val_mae: 0.7341
Epoch 74/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5414 - mae: 0.5663 - val_loss: 0.8605 - val_mae: 0.7277
Epoch 75/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5379 - mae: 0.5632 - val_loss: 0.8311 - val_mae: 0.7162
Epoch 76/100
209/209 [==============================] - 20s 96ms/step - loss: 0.5371 - mae: 0.5650 - val_loss: 0.9055 - val_mae: 0.7477
Epoch 77/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5330 - mae: 0.5623 - val_loss: 0.8863 - val_mae: 0.7382
Epoch 78/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5346 - mae: 0.5612 - val_loss: 0.8765 - val_mae: 0.7387
Epoch 79/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5348 - mae: 0.5633 - val_loss: 0.9103 - val_mae: 0.7525
Epoch 80/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5273 - mae: 0.5589 - val_loss: 0.8585 - val_mae: 0.7242
Epoch 81/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5293 - mae: 0.5601 - val_loss: 0.8592 - val_mae: 0.7204
Epoch 82/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5306 - mae: 0.5605 - val_loss: 0.8148 - val_mae: 0.7042
Epoch 83/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5258 - mae: 0.5578 - val_loss: 0.8021 - val_mae: 0.7029
Epoch 84/100
209/209 [==============================] - 20s 97ms/step - loss: 0.5270 - mae: 0.5582 - val_loss: 0.8168 - val_mae: 0.7056
Epoch 85/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5202 - mae: 0.5549 - val_loss: 0.8337 - val_mae: 0.7159
Epoch 86/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5202 - mae: 0.5560 - val_loss: 0.8628 - val_mae: 0.7267
Epoch 87/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5222 - mae: 0.5560 - val_loss: 0.8366 - val_mae: 0.7212
Epoch 88/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5219 - mae: 0.5554 - val_loss: 0.8858 - val_mae: 0.7384
Epoch 89/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5197 - mae: 0.5556 - val_loss: 0.8392 - val_mae: 0.7262
Epoch 90/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5213 - mae: 0.5549 - val_loss: 0.9018 - val_mae: 0.7458
Epoch 91/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5180 - mae: 0.5552 - val_loss: 0.8480 - val_mae: 0.7137
Epoch 92/100
209/209 [==============================] - 19s 88ms/step - loss: 0.5148 - mae: 0.5531 - val_loss: 0.8379 - val_mae: 0.7099
Epoch 93/100
209/209 [==============================] - 19s 89ms/step - loss: 0.5156 - mae: 0.5516 - val_loss: 0.8572 - val_mae: 0.7227
Epoch 94/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5174 - mae: 0.5533 - val_loss: 0.9105 - val_mae: 0.7453
Epoch 95/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5177 - mae: 0.5551 - val_loss: 0.8550 - val_mae: 0.7202
Epoch 96/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5136 - mae: 0.5516 - val_loss: 0.8820 - val_mae: 0.7346
Epoch 97/100
209/209 [==============================] - 19s 92ms/step - loss: 0.5138 - mae: 0.5522 - val_loss: 0.8975 - val_mae: 0.7263
Epoch 98/100
209/209 [==============================] - 19s 91ms/step - loss: 0.5107 - mae: 0.5495 - val_loss: 0.9145 - val_mae: 0.7490
Epoch 99/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5096 - mae: 0.5512 - val_loss: 0.8362 - val_mae: 0.7155
Epoch 100/100
209/209 [==============================] - 19s 90ms/step - loss: 0.5102 - mae: 0.5506 - val_loss: 0.8884 - val_mae: 0.7302
56/56 [==============================] - 1s 23ms/step - loss: 0.8884 - mae: 0.7302
--------------------------------------------------------------------------------
Evaluate Validation Loss: 0.888365
Evaluate Validation MAE: 0.730227
--------------------------------------------------------------------------------
Training complete. Total time: 00:32:37
================================================================================
In [12]:
import pickle
#------------------------------------------------------------------------------#
history_path = 'history'
history_name = 'nim_ibks_boks_news_prediction_history'
history_ext  = 'dict'
history_save_path = f'{history_path}/{history_name}.{history_ext}'
#------------------------------------------------------------------------------#
# Load the training-history dict saved after model.fit(). A context manager
# guarantees the file handle is closed; the original
# `pickle.load(open(history_save_path, "rb"))` leaked the handle.
with open(history_save_path, 'rb') as history_file:
    history = pickle.load(history_file)

# `plot_training_history` is defined in an earlier (not shown) cell.
plot_training_history(history)
No description has been provided for this image
In [13]:
import numpy as np

# Path to the saved NIM series. The `[::24]` stride below implies the raw
# array holds 24 rows per day -- TODO confirm against the data-prep cell.
file_path = 'data/numpy/nim_values.npy'

nim_values = np.load(file_path)

# Keep only the first value (row 0) of each day.
daily_first_values = nim_values[::24]

# Keep roughly the last trading year: ~22 business days * 12 months = 264 days.
last_days = daily_first_values[-264:]

# Absolute NIM change over a 5-business-day horizon.
five_day_differences = last_days[5:] - last_days[:-5]
five_day_absolute_differences = np.abs(five_day_differences)

# Summary statistics of the 5-day absolute change.
mean_value = np.mean(five_day_absolute_differences)
median_value = np.median(five_day_absolute_differences)

print(f"mean_value: {mean_value:.8f}")
print(f"median_value: {median_value:.8f}")

# Assign each "top X percent" percentile to a module-level variable and print
# it. The original ran two identical loops (assign-all, then print-all guarded
# by a dead `key in globals()` check -- every key had just been assigned); a
# single loop produces the same globals and the same output in the same order.
for p in range(100, 0, -5):
    key = f"Top_{100-p}_percent"
    value = np.percentile(five_day_absolute_differences, p)
    globals()[key] = value  # kept as a global for compatibility with later cells
    print(f"{key}: {value:.8f}")
mean_value: 0.02438263
median_value: 0.02180000
Top_0_percent: 0.09840000
Top_5_percent: 0.05822000
Top_10_percent: 0.04852000
Top_15_percent: 0.04349000
Top_20_percent: 0.03714000
Top_25_percent: 0.03460000
Top_30_percent: 0.03190000
Top_35_percent: 0.02907000
Top_40_percent: 0.02610000
Top_45_percent: 0.02349000
Top_50_percent: 0.02180000
Top_55_percent: 0.01980000
Top_60_percent: 0.01734000
Top_65_percent: 0.01423000
Top_70_percent: 0.01072000
Top_75_percent: 0.00795000
Top_80_percent: 0.00566000
Top_85_percent: 0.00430000
Top_90_percent: 0.00270000
Top_95_percent: 0.00120000
In [14]:
import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.models import load_model

# 결과 저장용 리스트
prediction_dates = []
sequence_nim_values = []    # 시퀀스의 마지막 NIM값
target_nim_values = []      # 시퀀스 마지막 일자에서 5일 후 NIM값(정답)
prediction_nim_values = []  # 시퀀스 마지막 일자에서 5일 후 NIM값(예측)

# predict_threshold 설정
predict_threshold = mean_value

def visualize_nim_predictions(model, input_dataset, input_key):
    """Print, per sampled sequence, the actual vs. predicted NIM value five
    business days after the sequence end, and accumulate the results into the
    module-level lists ``prediction_dates``, ``sequence_nim_values``,
    ``target_nim_values`` and ``prediction_nim_values`` (side effect).

    Relies on globals defined in earlier cells: ``train_size``, ``val_size``,
    ``sequence_stride``, ``batch_size``, ``sequence_length``,
    ``sampling_rate``, ``predict_term``, ``nim_date``,
    ``nim_train_mean``/``nim_train_std``,
    ``target_train_mean``/``target_train_std`` and ``predict_threshold``.

    Parameters:
        model: trained Keras model; used via ``predict_on_batch``.
        input_dataset: iterable of ``(inputs, targets)`` batches.
        input_key: key selecting the input tensor that carries the NIM
            sequence (its last feature column is assumed to be the
            standardized NIM -- TODO confirm against the windowing cell).
    """
    print('-'*80)
    print(f"predict_threshold(실제 365일중 5일전후의 NIM값 평균 차이): {predict_threshold:.8f}")
    print('-'*80)

    data_index = 0  # NOTE(review): assigned but never used
    # For each batch: compute the sequence's date range, target date,
    # target value and predicted value.
    for i, (inputs, targets) in enumerate(input_dataset):
        # Run inference on the whole batch at once.
        predictions = model.predict_on_batch(inputs)

        # Compare target vs. prediction for each sequence in the batch.
        for j, (input_sequence, target, prediction) in enumerate(zip(inputs[input_key], targets, predictions)):
            # Absolute row indices into the full (pre-split) series; the test
            # split starts right after train + validation.
            sequence_start_index = (train_size + val_size) + i * sequence_stride * batch_size + j * sequence_stride
            sequence_ended_index = sequence_start_index + (sequence_length * sampling_rate * sequence_stride) - (sampling_rate * sequence_stride)
            target_index = sequence_start_index + ((sequence_length + predict_term - 1) * sampling_rate * sequence_stride)

            # Last NIM value of the sequence (de-standardized).
            sequence_nim_value = input_sequence[-1, -1] * nim_train_std + nim_train_mean 

            # Target diff value (de-standardized).
            target_std_value = target
            target_diff_value = target * target_train_std + target_train_mean
            # Target NIM = last observed NIM + de-standardized target diff.
            target_nim_value = sequence_nim_value + target_diff_value

            # Predicted diff value (de-standardized).
            prediction_std_value = prediction[0]
            prediction_diff_value = prediction[0] * target_train_std + target_train_mean
            # Predicted NIM = last observed NIM + de-standardized predicted diff.
            prediction_nim_value = sequence_nim_value + prediction_diff_value

            # Keep one sequence per day -- the raw series presumably has 24
            # rows per day (cf. the `[::24]` stride in the stats cell); TODO confirm.
            if (sequence_start_index % 24) == 0:
                prediction_dates.append(nim_date[target_index])
                sequence_nim_values.append(sequence_nim_value)
                target_nim_values.append(target_nim_value)
                prediction_nim_values.append(prediction_nim_value)
                    
                ############################################################################################
                # debug (kept for reference)
                ############################################################################################
                # print(f"Batch {i:02d}/{j:03d} {sequence_start_index}[{len(prediction_dates)}]: ({nim_date[sequence_start_index]} ~ {nim_date[sequence_ended_index]}) >> Target Date: {nim_date[target_index]}")
                # print(f"[정답]sequence의 마지막 NIM값: {sequence_nim_value:.8f} + target  차분({target_std_value:+.8f}): {target_diff_value:+.8f} = 3일후 NIM값: {target_nim_value:.8f}")
                # print(f"[예측]sequence의 마지막 NIM값: {sequence_nim_value:.8f} + predict 차분({prediction_std_value:+.8f}): {prediction_diff_value:+.8f} = 3일후 NIM값: {prediction_nim_value:.8f} >> 오차: ±{abs(target_nim_value - prediction_nim_value):.8f}")
                # print('-'*80)
                
                # Print baseline band, ground truth and prediction for this day.
                print(f"Batch {i:02d}/{j:03d} {sequence_start_index}[{len(prediction_dates)}]: Sequence Date: ({nim_date[sequence_start_index]} ~ {nim_date[sequence_ended_index]}) >> Target Date: {nim_date[target_index]}")
                print(f"[평균]sequence NIM값: {sequence_nim_value:.8f}, 하한: {sequence_nim_value-predict_threshold:+.8f}, 상한: {sequence_nim_value+predict_threshold:+.8f}    = 5일후 NIM값: {sequence_nim_value:.8f}(±{abs(target_nim_value-sequence_nim_value):.8f})")
                print(f"[정답]sequence NIM값: {sequence_nim_value:.8f} + target 차분({target_std_value:+.8f}): {target_diff_value:+.8f}  = 5일후 NIM값: {target_nim_value:.8f}")
                print(f"[예측]sequence NIM값: {sequence_nim_value:.8f} + predict 차분({prediction_std_value:+.8f}): {prediction_diff_value:+.8f} = 5일후 NIM값: {prediction_nim_value:.8f}(±{abs(target_nim_value-prediction_nim_value):.8f})")
                print('-'*80)
In [15]:
from tensorflow.keras.models import load_model
#------------------------------------------------------------------------------#
# Location of the model saved during training (same name the training cell used).
model_path = 'model'
model_name = 'nim_ibks_boks_news_prediction'
model_ext  = 'keras'
model_save_path = f'{model_path}/{model_name}.{model_ext}'
#------------------------------------------------------------------------------#
# Reload the trained model from disk, then print per-day test-set predictions
# (also fills the module-level result lists used by the MPAE metrics below).
model = load_model(model_save_path)

visualize_nim_predictions(model, ibks_boks_news_test_input, 'ibks_input')


# MPAE (Mean Percentage Absolute Error) of the model's predictions.
def calculate_predict_mpae(targets=None, predictions=None):
    """Return the mean absolute percentage error (in %) of predictions.

    Backward compatible: with no arguments it uses the module-level
    ``target_nim_values`` / ``prediction_nim_values`` lists, exactly like the
    original zero-argument version.

    Parameters:
        targets: iterable of true values; zero entries are skipped to avoid
            division by zero.
        predictions: iterable of predicted values, same length as ``targets``.

    Returns:
        float: mean of ``|t - p| / |t| * 100`` over all nonzero targets.
    """
    if targets is None:
        targets = target_nim_values
    if predictions is None:
        predictions = prediction_nim_values
    # Absolute percentage error per pair, then the mean.
    percentage_errors = [
        abs((t - p) / t) * 100 for t, p in zip(targets, predictions) if t != 0
    ]
    mpae = np.mean(percentage_errors)
    return mpae

# MPAE (Mean Percentage Absolute Error) of the naive "last value +/- threshold" baseline.
def calculate_threshold_mpae(targets=None, sequences=None, threshold=None):
    """Return the mean absolute percentage error (in %) of a naive baseline
    that forecasts the last observed NIM value plus/minus a fixed threshold.

    Backward compatible: with no arguments it uses the module-level
    ``target_nim_values``, ``sequence_nim_values`` and ``predict_threshold``,
    exactly like the original zero-argument version.

    Parameters:
        targets: iterable of true values; zero entries are skipped.
        sequences: iterable of last observed values (baseline forecasts).
        threshold: half-width of the +/- band around each baseline forecast.

    Returns:
        float: mean percentage error over both the lower and upper band edges.
    """
    if targets is None:
        targets = target_nim_values
    if sequences is None:
        sequences = sequence_nim_values
    if threshold is None:
        threshold = predict_threshold
    # Percentage error against the lower band edge (p - threshold) ...
    percentage_errors_min = [
        abs((t - (p - threshold)) / t) * 100 for t, p in zip(targets, sequences) if t != 0
    ]
    # ... and against the upper band edge (p + threshold).
    percentage_errors_max = [
        abs((t - (p + threshold)) / t) * 100 for t, p in zip(targets, sequences) if t != 0
    ]
    # Mean over both edges.
    average_mpae = np.mean(percentage_errors_min + percentage_errors_max)
    return average_mpae

# Report the model's MPAE, the naive-baseline MPAE, and the mean absolute error
# between targets and predictions (all filled by visualize_nim_predictions).
print('='*80)
predict_mpae = calculate_predict_mpae()
print(f"Predict Mean Percentage Absolute Error (MPAE): {predict_mpae:.8f}%")

threshold_mpae = calculate_threshold_mpae()
print(f"Threshold Mean Percentage Absolute Error (MPAE): {threshold_mpae:.8f}%")
print('='*80)

# Mean absolute difference between actual and predicted NIM values.
absolute_differences = [abs(t - p) for t, p in zip(target_nim_values, prediction_nim_values)]
mean_absolute_difference = sum(absolute_differences) / len(absolute_differences)

# (The original had a stray no-op expression statement
# `absolute_differences, mean_absolute_difference` here -- a leftover notebook
# display line; removed.)
print(f"mean_absolute_difference: {mean_absolute_difference:.8f}")
print('='*80)
--------------------------------------------------------------------------------
predict_threshold(실제 365일중 5일전후의 NIM값 평균 차이): 0.02438263
--------------------------------------------------------------------------------
Batch 00/000 35208[1]: Sequence Date: (2023-12-19 ~ 2024-02-01) >> Target Date: 2024-02-08
[평균]sequence NIM값: 1.75779998, 하한: +1.73341739, 상한: +1.78218257    = 5일후 NIM값: 1.75779998(±0.01329994)
[정답]sequence NIM값: 1.75779998 + target 차분(+0.39796895): +0.01330000  = 5일후 NIM값: 1.77109993
[예측]sequence NIM값: 1.75779998 + predict 차분(-0.47821590): -0.01432664 = 5일후 NIM값: 1.74347329(±0.02762663)
--------------------------------------------------------------------------------
Batch 00/024 35232[2]: Sequence Date: (2023-12-20 ~ 2024-02-02) >> Target Date: 2024-02-13
[평균]sequence NIM값: 1.74500000, 하한: +1.72061741, 상한: +1.76938260    = 5일후 NIM값: 1.74500000(±0.01730001)
[정답]sequence NIM값: 1.74500000 + target 차분(-0.57251656): -0.01730000  = 5일후 NIM값: 1.72770000
[예측]sequence NIM값: 1.74500000 + predict 차분(-0.40333235): -0.01196552 = 5일후 NIM값: 1.73303449(±0.00533450)
--------------------------------------------------------------------------------
Batch 00/048 35256[3]: Sequence Date: (2023-12-21 ~ 2024-02-05) >> Target Date: 2024-02-14
[평균]sequence NIM값: 1.70980000, 하한: +1.68541741, 상한: +1.73418260    = 5일후 NIM값: 1.70980000(±0.04369998)
[정답]sequence NIM값: 1.70980000 + target 차분(-1.40979815): -0.04370000  = 5일후 NIM값: 1.66610003
[예측]sequence NIM값: 1.70980000 + predict 차분(-0.54123771): -0.01631376 = 5일후 NIM값: 1.69348621(±0.02738619)
--------------------------------------------------------------------------------
Batch 00/072 35280[4]: Sequence Date: (2023-12-22 ~ 2024-02-06) >> Target Date: 2024-02-15
[평균]sequence NIM값: 1.74699998, 하한: +1.72261739, 상한: +1.77138257    = 5일후 NIM값: 1.74699998(±0.00090003)
[정답]sequence NIM값: 1.74699998 + target 차분(-0.05238707): -0.00090000  = 5일후 NIM값: 1.74609995
[예측]sequence NIM값: 1.74699998 + predict 차분(-0.36858419): -0.01086989 = 5일후 NIM값: 1.73613012(±0.00996983)
--------------------------------------------------------------------------------
Batch 00/096 35304[5]: Sequence Date: (2023-12-26 ~ 2024-02-07) >> Target Date: 2024-02-16
[평균]sequence NIM값: 1.73160005, 하한: +1.70721745, 상한: +1.75598264    = 5일후 NIM값: 1.73160005(±0.00070000)
[정답]sequence NIM값: 1.73160005 + target 차분(-0.00164273): +0.00070000  = 5일후 NIM값: 1.73230004
[예측]sequence NIM값: 1.73160005 + predict 차분(-0.53584641): -0.01614377 = 5일후 NIM값: 1.71545625(±0.01684380)
--------------------------------------------------------------------------------
Batch 00/120 35328[6]: Sequence Date: (2023-12-27 ~ 2024-02-08) >> Target Date: 2024-02-19
[평균]sequence NIM값: 1.74450004, 하한: +1.72011745, 상한: +1.76888263    = 5일후 NIM값: 1.74450004(±0.01269996)
[정답]sequence NIM값: 1.74450004 + target 차분(-0.42662656): -0.01270000  = 5일후 NIM값: 1.73180008
[예측]sequence NIM값: 1.74450004 + predict 차분(-0.30733278): -0.00893859 = 5일후 NIM값: 1.73556149(±0.00376141)
--------------------------------------------------------------------------------
Batch 01/016 35352[7]: Sequence Date: (2023-12-28 ~ 2024-02-13) >> Target Date: 2024-02-20
[평균]sequence NIM값: 1.76230001, 하한: +1.73791742, 상한: +1.78668261    = 5일후 NIM값: 1.76230001(±0.03659999)
[정답]sequence NIM값: 1.76230001 + target 차분(+1.13693333): +0.03660000  = 5일후 NIM값: 1.79890001
[예측]sequence NIM값: 1.76230001 + predict 차분(-0.27375364): -0.00787982 = 5일후 NIM값: 1.75442016(±0.04447985)
--------------------------------------------------------------------------------
Batch 01/040 35376[8]: Sequence Date: (2024-01-02 ~ 2024-02-14) >> Target Date: 2024-02-21
[평균]sequence NIM값: 1.75349998, 하한: +1.72911739, 상한: +1.77788258    = 5일후 NIM값: 1.75349998(±0.01450002)
[정답]sequence NIM값: 1.75349998 + target 차분(+0.43602720): +0.01450000  = 5일후 NIM값: 1.76800001
[예측]sequence NIM값: 1.75349998 + predict 차분(-0.30674452): -0.00892005 = 5일후 NIM값: 1.74457991(±0.02342010)
--------------------------------------------------------------------------------
Batch 01/064 35400[9]: Sequence Date: (2024-01-03 ~ 2024-02-15) >> Target Date: 2024-02-22
[평균]sequence NIM값: 1.74790001, 하한: +1.72351742, 상한: +1.77228260    = 5일후 NIM값: 1.74790001(±0.00119996)
[정답]sequence NIM값: 1.74790001 + target 차분(+0.01421488): +0.00120000  = 5일후 NIM값: 1.74909997
[예측]sequence NIM값: 1.74790001 + predict 차분(-0.37925556): -0.01120636 = 5일후 NIM값: 1.73669362(±0.01240635)
--------------------------------------------------------------------------------
Batch 01/088 35424[10]: Sequence Date: (2024-01-04 ~ 2024-02-16) >> Target Date: 2024-02-23
[평균]sequence NIM값: 1.73090005, 하한: +1.70651746, 상한: +1.75528264    = 5일후 NIM값: 1.73090005(±0.02390003)
[정답]sequence NIM값: 1.73090005 + target 차분(+0.73415017): +0.02390000  = 5일후 NIM값: 1.75480008
[예측]sequence NIM값: 1.73090005 + predict 차분(-0.45505401): -0.01359633 = 5일후 NIM값: 1.71730375(±0.03749633)
--------------------------------------------------------------------------------
Batch 01/112 35448[11]: Sequence Date: (2024-01-05 ~ 2024-02-19) >> Target Date: 2024-02-26
[평균]sequence NIM값: 1.75720000, 하한: +1.73281741, 상한: +1.78158259    = 5일후 NIM값: 1.75720000(±0.03789997)
[정답]sequence NIM값: 1.75720000 + target 차분(+1.17816317): +0.03790000  = 5일후 NIM값: 1.79509997
[예측]sequence NIM값: 1.75720000 + predict 차분(-0.34968480): -0.01027398 = 5일후 NIM값: 1.74692607(±0.04817390)
--------------------------------------------------------------------------------
Batch 02/008 35472[12]: Sequence Date: (2024-01-08 ~ 2024-02-20) >> Target Date: 2024-02-27
[평균]sequence NIM값: 1.72570002, 하한: +1.70131743, 상한: +1.75008261    = 5일후 NIM값: 1.72570002(±0.01619995)
[정답]sequence NIM값: 1.72570002 + target 차분(+0.48994306): +0.01620000  = 5일후 NIM값: 1.74189997
[예측]sequence NIM값: 1.72570002 + predict 차분(-0.52862495): -0.01591607 = 5일후 NIM값: 1.70978391(±0.03211606)
--------------------------------------------------------------------------------
Batch 02/032 35496[13]: Sequence Date: (2024-01-09 ~ 2024-02-21) >> Target Date: 2024-02-28
[평균]sequence NIM값: 1.73900008, 하한: +1.71461749, 상한: +1.76338267    = 5일후 NIM값: 1.73900008(±0.04610002)
[정답]sequence NIM값: 1.73900008 + target 차분(-1.48591459): -0.04610000  = 5일후 NIM값: 1.69290006
[예측]sequence NIM값: 1.73900008 + predict 차분(-0.47702271): -0.01428902 = 5일후 NIM값: 1.72471106(±0.03181100)
--------------------------------------------------------------------------------
Batch 02/056 35520[14]: Sequence Date: (2024-01-10 ~ 2024-02-22) >> Target Date: 2024-02-29
[평균]sequence NIM값: 1.74670005, 하한: +1.72231746, 상한: +1.77108264    = 5일후 NIM값: 1.74670005(±0.02300000)
[정답]sequence NIM값: 1.74670005 + target 차분(+0.70560652): +0.02300000  = 5일후 NIM값: 1.76970005
[예측]sequence NIM값: 1.74670005 + predict 차분(-0.46039271): -0.01376467 = 5일후 NIM값: 1.73293543(±0.03676462)
--------------------------------------------------------------------------------
Batch 02/080 35544[15]: Sequence Date: (2024-01-11 ~ 2024-02-23) >> Target Date: 2024-03-04
[평균]sequence NIM값: 1.70700002, 하한: +1.68261743, 상한: +1.73138261    = 5일후 NIM값: 1.70700002(±0.00559998)
[정답]sequence NIM값: 1.70700002 + target 차분(+0.15376180): +0.00560000  = 5일후 NIM값: 1.71259999
[예측]sequence NIM값: 1.70700002 + predict 차분(-0.52120322): -0.01568206 = 5일후 NIM값: 1.69131792(±0.02128208)
--------------------------------------------------------------------------------
Batch 02/104 35568[16]: Sequence Date: (2024-01-12 ~ 2024-02-26) >> Target Date: 2024-03-05
[평균]sequence NIM값: 1.71930003, 하한: +1.69491744, 상한: +1.74368262    = 5일후 NIM값: 1.71930003(±0.01569998)
[정답]sequence NIM값: 1.71930003 + target 차분(+0.47408545): +0.01570000  = 5일후 NIM값: 1.73500001
[예측]sequence NIM값: 1.71930003 + predict 차분(-0.27835795): -0.00802500 = 5일후 NIM값: 1.71127498(±0.02372503)
--------------------------------------------------------------------------------
Batch 03/000 35592[17]: Sequence Date: (2024-01-15 ~ 2024-02-27) >> Target Date: 2024-03-06
[평균]sequence NIM값: 1.70950007, 하한: +1.68511748, 상한: +1.73388267    = 5일후 NIM값: 1.70950007(±0.02530003)
[정답]sequence NIM값: 1.70950007 + target 차분(+0.77855146): +0.02530000  = 5일후 NIM값: 1.73480010
[예측]sequence NIM값: 1.70950007 + predict 차분(-0.22374475): -0.00630301 = 5일후 NIM값: 1.70319700(±0.03160310)
--------------------------------------------------------------------------------
Batch 03/024 35616[18]: Sequence Date: (2024-01-16 ~ 2024-02-28) >> Target Date: 2024-03-07
[평균]sequence NIM값: 1.78509998, 하한: +1.76071739, 상한: +1.80948257    = 5일후 NIM값: 1.78509998(±0.09840000)
[정답]sequence NIM값: 1.78509998 + target 차분(+3.09693336): +0.09840000  = 5일후 NIM값: 1.88349998
[예측]sequence NIM값: 1.78509998 + predict 차분(+0.09361485): +0.00370353 = 5일후 NIM값: 1.78880346(±0.09469652)
--------------------------------------------------------------------------------
Batch 03/048 35640[19]: Sequence Date: (2024-01-17 ~ 2024-02-29) >> Target Date: 2024-03-08
[평균]sequence NIM값: 1.72370005, 하한: +1.69931746, 상한: +1.74808264    = 5일후 NIM값: 1.72370005(±0.01810002)
[정답]sequence NIM값: 1.72370005 + target 차분(+0.55020195): +0.01810000  = 5일후 NIM값: 1.74180007
[예측]sequence NIM값: 1.72370005 + predict 차분(-0.09866766): -0.00235926 = 5일후 NIM값: 1.72134078(±0.02045929)
--------------------------------------------------------------------------------
Batch 03/072 35664[20]: Sequence Date: (2024-01-18 ~ 2024-03-04) >> Target Date: 2024-03-11
[평균]sequence NIM값: 1.70140004, 하한: +1.67701745, 상한: +1.72578263    = 5일후 NIM값: 1.70140004(±0.01750004)
[정답]sequence NIM값: 1.70140004 + target 차분(-0.57885957): -0.01750000  = 5일후 NIM값: 1.68390000
[예측]sequence NIM값: 1.70140004 + predict 차분(+0.00355001): +0.00086373 = 5일후 NIM값: 1.70226371(±0.01836371)
--------------------------------------------------------------------------------
Batch 03/096 35688[21]: Sequence Date: (2024-01-19 ~ 2024-03-05) >> Target Date: 2024-03-12
[평균]sequence NIM값: 1.70360005, 하한: +1.67921746, 상한: +1.72798264    = 5일후 NIM값: 1.70360005(±0.00080001)
[정답]sequence NIM값: 1.70360005 + target 차분(+0.00152879): +0.00080000  = 5일후 NIM값: 1.70440006
[예측]sequence NIM값: 1.70360005 + predict 차분(+0.07059100): +0.00297757 = 5일후 NIM값: 1.70657766(±0.00217760)
--------------------------------------------------------------------------------
Batch 03/120 35712[22]: Sequence Date: (2024-01-22 ~ 2024-03-06) >> Target Date: 2024-03-13
[평균]sequence NIM값: 1.68420005, 하한: +1.65981746, 상한: +1.70858264    = 5일후 NIM값: 1.68420005(±0.02499998)
[정답]sequence NIM값: 1.68420005 + target 차분(-0.81672364): -0.02500000  = 5일후 NIM값: 1.65920007
[예측]sequence NIM값: 1.68420005 + predict 차분(+0.11441918): +0.00435950 = 5일후 NIM값: 1.68855953(±0.02935946)
--------------------------------------------------------------------------------
Batch 04/016 35736[23]: Sequence Date: (2024-01-23 ~ 2024-03-07) >> Target Date: 2024-03-14
[평균]sequence NIM값: 1.68669999, 하한: +1.66231740, 상한: +1.71108258    = 5일후 NIM값: 1.68669999(±0.00440001)
[정답]sequence NIM값: 1.68669999 + target 차분(-0.16339031): -0.00440000  = 5일후 NIM값: 1.68229997
[예측]sequence NIM값: 1.68669999 + predict 차분(+0.15614973): +0.00567529 = 5일후 NIM값: 1.69237530(±0.01007533)
--------------------------------------------------------------------------------
Batch 04/040 35760[24]: Sequence Date: (2024-01-24 ~ 2024-03-08) >> Target Date: 2024-03-15
[평균]sequence NIM값: 1.70560002, 하한: +1.68121743, 상한: +1.72998261    = 5일후 NIM값: 1.70560002(±0.01320004)
[정답]sequence NIM값: 1.70560002 + target 차분(+0.39479741): +0.01320000  = 5일후 NIM값: 1.71880007
[예측]sequence NIM값: 1.70560002 + predict 차분(+0.21483672): +0.00752573 = 5일후 NIM값: 1.71312571(±0.00567436)
--------------------------------------------------------------------------------
Batch 04/064 35784[25]: Sequence Date: (2024-01-25 ~ 2024-03-11) >> Target Date: 2024-03-18
[평균]sequence NIM값: 1.71890008, 하한: +1.69451749, 상한: +1.74328268    = 5일후 NIM값: 1.71890008(±0.01540005)
[정답]sequence NIM값: 1.71890008 + target 차분(+0.46457088): +0.01540000  = 5일후 NIM값: 1.73430014
[예측]sequence NIM값: 1.71890008 + predict 차분(+0.28776661): +0.00982525 = 5일후 NIM값: 1.72872531(±0.00557482)
--------------------------------------------------------------------------------
Batch 04/088 35808[26]: Sequence Date: (2024-01-26 ~ 2024-03-12) >> Target Date: 2024-03-19
[평균]sequence NIM값: 1.70280004, 하한: +1.67841744, 상한: +1.72718263    = 5일후 NIM값: 1.70280004(±0.01349998)
[정답]sequence NIM값: 1.70280004 + target 차분(+0.40431198): +0.01350000  = 5일후 NIM값: 1.71630001
[예측]sequence NIM값: 1.70280004 + predict 차분(+0.24026753): +0.00832758 = 5일후 NIM값: 1.71112764(±0.00517237)
--------------------------------------------------------------------------------
Batch 04/112 35832[27]: Sequence Date: (2024-01-29 ~ 2024-03-13) >> Target Date: 2024-03-20
[평균]sequence NIM값: 1.70920002, 하한: +1.68481743, 상한: +1.73358262    = 5일후 NIM값: 1.70920002(±0.04820001)
[정답]sequence NIM값: 1.70920002 + target 차분(+1.50482988): +0.04820000  = 5일후 NIM값: 1.75740004
[예측]sequence NIM값: 1.70920002 + predict 차분(+0.25780836): +0.00888065 = 5일후 NIM값: 1.71808064(±0.03931940)
--------------------------------------------------------------------------------
Batch 05/008 35856[28]: Sequence Date: (2024-01-30 ~ 2024-03-14) >> Target Date: 2024-03-21
[평균]sequence NIM값: 1.69110000, 하한: +1.66671741, 상한: +1.71548259    = 5일후 NIM값: 1.69110000(±0.00489998)
[정답]sequence NIM값: 1.69110000 + target 차분(-0.17924792): -0.00490000  = 5일후 NIM값: 1.68620002
[예측]sequence NIM값: 1.69110000 + predict 차분(+0.20987010): +0.00736913 = 5일후 NIM값: 1.69846916(±0.01226914)
--------------------------------------------------------------------------------
Batch 05/032 35880[29]: Sequence Date: (2024-01-31 ~ 2024-03-15) >> Target Date: 2024-03-22
[평균]sequence NIM값: 1.69239998, 하한: +1.66801739, 상한: +1.71678257    = 5일후 NIM값: 1.69239998(±0.00269997)
[정답]sequence NIM값: 1.69239998 + target 차분(-0.10947445): -0.00270000  = 5일후 NIM값: 1.68970001
[예측]sequence NIM값: 1.69239998 + predict 차분(+0.18735461): +0.00665920 = 5일후 NIM값: 1.69905913(±0.00935912)
--------------------------------------------------------------------------------
Batch 05/056 35904[30]: Sequence Date: (2024-02-01 ~ 2024-03-18) >> Target Date: 2024-03-25
[평균]sequence NIM값: 1.70350003, 하한: +1.67911744, 상한: +1.72788262    = 5일후 NIM값: 1.70350003(±0.01510000)
[정답]sequence NIM값: 1.70350003 + target 차분(+0.45505631): +0.01510000  = 5일후 NIM값: 1.71860003
[예측]sequence NIM값: 1.70350003 + predict 차분(+0.11965326): +0.00452454 = 5일후 NIM값: 1.70802462(±0.01057541)
--------------------------------------------------------------------------------
Batch 05/080 35928[31]: Sequence Date: (2024-02-02 ~ 2024-03-19) >> Target Date: 2024-03-26
[평균]sequence NIM값: 1.68930006, 하한: +1.66491747, 상한: +1.71368265    = 5일후 NIM값: 1.68930006(±0.03670001)
[정답]sequence NIM값: 1.68930006 + target 차분(+1.14010489): +0.03670000  = 5일후 NIM값: 1.72600007
[예측]sequence NIM값: 1.68930006 + predict 차분(+0.10224278): +0.00397557 = 5일후 NIM값: 1.69327569(±0.03272438)
--------------------------------------------------------------------------------
Batch 05/104 35952[32]: Sequence Date: (2024-02-05 ~ 2024-03-20) >> Target Date: 2024-03-27
[평균]sequence NIM값: 1.66100001, 하한: +1.63661742, 상한: +1.68538260    = 5일후 NIM값: 1.66100001(±0.02409995)
[정답]sequence NIM값: 1.66100001 + target 차분(-0.78817999): -0.02410000  = 5일후 NIM값: 1.63690007
[예측]sequence NIM값: 1.66100001 + predict 차분(+0.09981588): +0.00389905 = 5일후 NIM값: 1.66489911(±0.02799904)
--------------------------------------------------------------------------------
Batch 06/000 35976[33]: Sequence Date: (2024-02-06 ~ 2024-03-21) >> Target Date: 2024-03-28
[평균]sequence NIM값: 1.69599998, 하한: +1.67161739, 상한: +1.72038257    = 5일후 NIM값: 1.69599998(±0.01419997)
[정답]sequence NIM값: 1.69599998 + target 차분(+0.42651263): +0.01420000  = 5일후 NIM값: 1.71019995
[예측]sequence NIM값: 1.69599998 + predict 차분(+0.12821323): +0.00479444 = 5일후 NIM값: 1.70079446(±0.00940549)
--------------------------------------------------------------------------------
Batch 06/024 36000[34]: Sequence Date: (2024-02-07 ~ 2024-03-22) >> Target Date: 2024-03-29
[평균]sequence NIM값: 1.69510007, 하한: +1.67071748, 상한: +1.71948266    = 5일후 NIM값: 1.69510007(±0.01870000)
[정답]sequence NIM값: 1.69510007 + target 차분(-0.61691785): -0.01870000  = 5일후 NIM값: 1.67640007
[예측]sequence NIM값: 1.69510007 + predict 차분(+0.17144150): +0.00615745 = 5일후 NIM값: 1.70125747(±0.02485740)
--------------------------------------------------------------------------------
Batch 06/048 36024[35]: Sequence Date: (2024-02-08 ~ 2024-03-25) >> Target Date: 2024-04-01
[평균]sequence NIM값: 1.68840003, 하한: +1.66401744, 상한: +1.71278262    = 5일후 NIM값: 1.68840003(±0.03729999)
[정답]sequence NIM값: 1.68840003 + target 차분(-1.20682073): -0.03730000  = 5일후 NIM값: 1.65110004
[예측]sequence NIM값: 1.68840003 + predict 차분(+0.19870165): +0.00701698 = 5일후 NIM값: 1.69541705(±0.04431701)
--------------------------------------------------------------------------------
Batch 06/072 36048[36]: Sequence Date: (2024-02-13 ~ 2024-03-26) >> Target Date: 2024-04-02
[평균]sequence NIM값: 1.65260005, 하한: +1.62821746, 상한: +1.67698264    = 5일후 NIM값: 1.65260005(±0.08099997)
[정답]sequence NIM값: 1.65260005 + target 차분(-2.59277558): -0.08100001  = 5일후 NIM값: 1.57160008
[예측]sequence NIM값: 1.65260005 + predict 차분(+0.28442749): +0.00971997 = 5일후 NIM값: 1.66232002(±0.09071994)
--------------------------------------------------------------------------------
Batch 06/096 36072[37]: Sequence Date: (2024-02-14 ~ 2024-03-27) >> Target Date: 2024-04-03
[평균]sequence NIM값: 1.68510008, 하한: +1.66071749, 상한: +1.70948267    = 5일후 NIM값: 1.68510008(±0.05729997)
[정답]sequence NIM값: 1.68510008 + target 차분(-1.84112501): -0.05730000  = 5일후 NIM값: 1.62780011
[예측]sequence NIM값: 1.68510008 + predict 차분(+0.30603227): +0.01040118 = 5일후 NIM값: 1.69550121(±0.06770110)
--------------------------------------------------------------------------------
Batch 06/120 36096[38]: Sequence Date: (2024-02-15 ~ 2024-03-28) >> Target Date: 2024-04-04
[평균]sequence NIM값: 1.68180001, 하한: +1.65741742, 상한: +1.70618260    = 5일후 NIM값: 1.68180001(±0.06650007)
[정답]sequence NIM값: 1.68180001 + target 차분(-2.13290501): -0.06650001  = 5일후 NIM값: 1.61529994
[예측]sequence NIM값: 1.68180001 + predict 차분(+0.29975498): +0.01020325 = 5일후 NIM값: 1.69200325(±0.07670331)
--------------------------------------------------------------------------------
Batch 07/016 36120[39]: Sequence Date: (2024-02-16 ~ 2024-03-29) >> Target Date: 2024-04-05
[평균]sequence NIM값: 1.71380007, 하한: +1.68941748, 상한: +1.73818266    = 5일후 NIM값: 1.71380007(±0.02339995)
[정답]sequence NIM값: 1.71380007 + target 차분(-0.76597935): -0.02340000  = 5일후 NIM값: 1.69040012
[예측]sequence NIM값: 1.71380007 + predict 차분(+0.31680089): +0.01074072 = 5일후 NIM값: 1.72454083(±0.03414071)
--------------------------------------------------------------------------------
Batch 07/040 36144[40]: Sequence Date: (2024-02-19 ~ 2024-04-01) >> Target Date: 2024-04-08
[평균]sequence NIM값: 1.72570002, 하한: +1.70131743, 상한: +1.75008261    = 5일후 NIM값: 1.72570002(±0.03830004)
[정답]sequence NIM값: 1.72570002 + target 차분(-1.23853600): -0.03830000  = 5일후 NIM값: 1.68739998
[예측]sequence NIM값: 1.72570002 + predict 차분(+0.35352874): +0.01189877 = 5일후 NIM값: 1.73759878(±0.05019879)
--------------------------------------------------------------------------------
Batch 07/064 36168[41]: Sequence Date: (2024-02-20 ~ 2024-04-02) >> Target Date: 2024-04-09
[평균]sequence NIM값: 1.73360002, 하한: +1.70921743, 상한: +1.75798261    = 5일후 NIM값: 1.73360002(±0.02139997)
[정답]sequence NIM값: 1.73360002 + target 차분(-0.70254892): -0.02140000  = 5일후 NIM값: 1.71220005
[예측]sequence NIM값: 1.73360002 + predict 차분(+0.31363162): +0.01064079 = 5일후 NIM값: 1.74424076(±0.03204072)
--------------------------------------------------------------------------------
Batch 07/088 36192[42]: Sequence Date: (2024-02-21 ~ 2024-04-03) >> Target Date: 2024-04-11
[평균]sequence NIM값: 1.74240005, 하한: +1.71801746, 상한: +1.76678264    = 5일후 NIM값: 1.74240005(±0.00430000)
[정답]sequence NIM값: 1.74240005 + target 차분(-0.16021879): -0.00430000  = 5일후 NIM값: 1.73810005
[예측]sequence NIM값: 1.74240005 + predict 차분(+0.30789456): +0.01045990 = 5일후 NIM값: 1.75285995(±0.01475990)
--------------------------------------------------------------------------------
Batch 07/112 36216[43]: Sequence Date: (2024-02-22 ~ 2024-04-04) >> Target Date: 2024-04-12
[평균]sequence NIM값: 1.74830008, 하한: +1.72391748, 상한: +1.77268267    = 5일후 NIM값: 1.74830008(±0.00790000)
[정답]sequence NIM값: 1.74830008 + target 차분(-0.27439356): -0.00790000  = 5일후 NIM값: 1.74040008
[예측]sequence NIM값: 1.74830008 + predict 차분(+0.31414631): +0.01065702 = 5일후 NIM값: 1.75895715(±0.01855707)
--------------------------------------------------------------------------------
Batch 08/008 36240[44]: Sequence Date: (2024-02-23 ~ 2024-04-05) >> Target Date: 2024-04-15
[평균]sequence NIM값: 1.73720002, 하한: +1.71281743, 상한: +1.76158261    = 5일후 NIM값: 1.73720002(±0.02890003)
[정답]sequence NIM값: 1.73720002 + target 차분(-0.94041300): -0.02890000  = 5일후 NIM값: 1.70829999
[예측]sequence NIM값: 1.73720002 + predict 차분(+0.22249541): +0.00776721 = 5일후 NIM값: 1.74496722(±0.03666723)
--------------------------------------------------------------------------------
Batch 08/032 36264[45]: Sequence Date: (2024-02-26 ~ 2024-04-08) >> Target Date: 2024-04-16
[평균]sequence NIM값: 1.76400006, 하한: +1.73961747, 상한: +1.78838265    = 5일후 NIM값: 1.76400006(±0.02300000)
[정답]sequence NIM값: 1.76400006 + target 차분(+0.70560652): +0.02300000  = 5일후 NIM값: 1.78700006
[예측]sequence NIM값: 1.76400006 + predict 차분(+0.30260885): +0.01029324 = 5일후 NIM값: 1.77429330(±0.01270676)
--------------------------------------------------------------------------------
Batch 08/056 36288[46]: Sequence Date: (2024-02-27 ~ 2024-04-09) >> Target Date: 2024-04-17
[평균]sequence NIM값: 1.75500000, 하한: +1.73061740, 상한: +1.77938259    = 5일후 NIM값: 1.75500000(±0.02820003)
[정답]sequence NIM값: 1.75500000 + target 차분(+0.87052560): +0.02820000  = 5일후 NIM값: 1.78320003
[예측]sequence NIM값: 1.75500000 + predict 차분(+0.23892358): +0.00828520 = 5일후 NIM값: 1.76328516(±0.01991487)
--------------------------------------------------------------------------------
Batch 08/080 36312[47]: Sequence Date: (2024-02-28 ~ 2024-04-11) >> Target Date: 2024-04-18
[평균]sequence NIM값: 1.74670005, 하한: +1.72231746, 상한: +1.77108264    = 5일후 NIM값: 1.74670005(±0.03279996)
[정답]sequence NIM값: 1.74670005 + target 차분(+1.01641560): +0.03280000  = 5일후 NIM값: 1.77950001
[예측]sequence NIM값: 1.74670005 + predict 차분(+0.15897888): +0.00576450 = 5일후 NIM값: 1.75246453(±0.02703547)
--------------------------------------------------------------------------------
Batch 08/104 36336[48]: Sequence Date: (2024-02-29 ~ 2024-04-12) >> Target Date: 2024-04-19
[평균]sequence NIM값: 1.75620008, 하한: +1.73181748, 상한: +1.78058267    = 5일후 NIM값: 1.75620008(±0.04519999)
[정답]sequence NIM값: 1.75620008 + target 차분(+1.40968418): +0.04520000  = 5일후 NIM값: 1.80140007
[예측]sequence NIM값: 1.75620008 + predict 차분(+0.12397451): +0.00466079 = 5일후 NIM값: 1.76086092(±0.04053915)
--------------------------------------------------------------------------------
Batch 09/000 36360[49]: Sequence Date: (2024-03-04 ~ 2024-04-15) >> Target Date: 2024-04-22
[평균]sequence NIM값: 1.76610005, 하한: +1.74171746, 상한: +1.79048264    = 5일후 NIM값: 1.76610005(±0.03349996)
[정답]sequence NIM값: 1.76610005 + target 차분(+1.03861618): +0.03350000  = 5일후 NIM값: 1.79960001
[예측]sequence NIM값: 1.76610005 + predict 차분(+0.02911637): +0.00166985 = 5일후 NIM값: 1.76776993(±0.03183007)
--------------------------------------------------------------------------------
Batch 09/024 36384[50]: Sequence Date: (2024-03-05 ~ 2024-04-16) >> Target Date: 2024-04-23
[평균]sequence NIM값: 1.74100006, 하한: +1.71661747, 상한: +1.76538265    = 5일후 NIM값: 1.74100006(±0.05260003)
[정답]sequence NIM값: 1.74100006 + target 차분(+1.64437675): +0.05260000  = 5일후 NIM값: 1.79360008
[예측]sequence NIM값: 1.74100006 + predict 차분(-0.16361445): -0.00440707 = 5일후 NIM값: 1.73659301(±0.05700707)
--------------------------------------------------------------------------------
Batch 09/048 36408[51]: Sequence Date: (2024-03-06 ~ 2024-04-17) >> Target Date: 2024-04-24
[평균]sequence NIM값: 1.72680008, 하한: +1.70241749, 상한: +1.75118268    = 5일후 NIM값: 1.72680008(±0.02190006)
[정답]sequence NIM값: 1.72680008 + target 차분(+0.67071974): +0.02190000  = 5일후 NIM값: 1.74870014
[예측]sequence NIM값: 1.72680008 + predict 차분(-0.22392020): -0.00630854 = 5일후 NIM값: 1.72049153(±0.02820861)
--------------------------------------------------------------------------------
Batch 09/072 36432[52]: Sequence Date: (2024-03-07 ~ 2024-04-18) >> Target Date: 2024-04-25
[평균]sequence NIM값: 1.71390009, 하한: +1.68951750, 상한: +1.73828268    = 5일후 NIM값: 1.71390009(±0.02380002)
[정답]sequence NIM값: 1.71390009 + target 차분(-0.77866542): -0.02380000  = 5일후 NIM값: 1.69010007
[예측]sequence NIM값: 1.71390009 + predict 차분(-0.25787583): -0.00737919 = 5일후 NIM값: 1.70652092(±0.01642084)
--------------------------------------------------------------------------------
Batch 09/096 36456[53]: Sequence Date: (2024-03-08 ~ 2024-04-19) >> Target Date: 2024-04-26
[평균]sequence NIM값: 1.71100008, 하한: +1.68661749, 상한: +1.73538268    = 5일후 NIM값: 1.71100008(±0.02069998)
[정답]sequence NIM값: 1.71100008 + target 차분(-0.68034828): -0.02070000  = 5일후 NIM값: 1.69030011
[예측]sequence NIM값: 1.71100008 + predict 차분(-0.26491290): -0.00760107 = 5일후 NIM값: 1.70339906(±0.01309896)
--------------------------------------------------------------------------------
Batch 09/120 36480[54]: Sequence Date: (2024-03-11 ~ 2024-04-22) >> Target Date: 2024-04-29
[평균]sequence NIM값: 1.73259997, 하한: +1.70821738, 상한: +1.75698256    = 5일후 NIM값: 1.73259997(±0.03670001)
[정답]sequence NIM값: 1.73259997 + target 차분(+1.14010489): +0.03670000  = 5일후 NIM값: 1.76929998
[예측]sequence NIM값: 1.73259997 + predict 차분(-0.21955174): -0.00617080 = 5일후 NIM값: 1.72642922(±0.04287076)
--------------------------------------------------------------------------------
Batch 10/016 36504[55]: Sequence Date: (2024-03-12 ~ 2024-04-23) >> Target Date: 2024-04-30
[평균]sequence NIM값: 1.68840003, 하한: +1.66401744, 상한: +1.71278262    = 5일후 NIM값: 1.68840003(±0.04439998)
[정답]sequence NIM값: 1.68840003 + target 차분(-1.43199873): -0.04440000  = 5일후 NIM값: 1.64400005
[예측]sequence NIM값: 1.68840003 + predict 차분(-0.23268121): -0.00658478 = 5일후 NIM값: 1.68181527(±0.03781521)
--------------------------------------------------------------------------------
Batch 10/040 36528[56]: Sequence Date: (2024-03-13 ~ 2024-04-24) >> Target Date: 2024-05-02
[평균]sequence NIM값: 1.70490003, 하한: +1.68051744, 상한: +1.72928262    = 5일후 NIM값: 1.70490003(±0.00279999)
[정답]sequence NIM값: 1.70490003 + target 차분(+0.06495921): +0.00280000  = 5일후 NIM값: 1.70770001
[예측]sequence NIM값: 1.70490003 + predict 차분(-0.08065011): -0.00179115 = 5일후 NIM값: 1.70310891(±0.00459111)
--------------------------------------------------------------------------------
Batch 10/064 36552[57]: Sequence Date: (2024-03-14 ~ 2024-04-25) >> Target Date: 2024-05-03
[평균]sequence NIM값: 1.73769999, 하한: +1.71331739, 상한: +1.76208258    = 5일후 NIM값: 1.73769999(±0.04380000)
[정답]sequence NIM값: 1.73769999 + target 차분(+1.36528289): +0.04380000  = 5일후 NIM값: 1.78149998
[예측]sequence NIM값: 1.73769999 + predict 차분(+0.12471873): +0.00468425 = 5일후 NIM값: 1.74238420(±0.03911579)
--------------------------------------------------------------------------------
Batch 10/088 36576[58]: Sequence Date: (2024-03-15 ~ 2024-04-26) >> Target Date: 2024-05-07
[평균]sequence NIM값: 1.73170006, 하한: +1.70731747, 상한: +1.75608265    = 5일후 NIM값: 1.73170006(±0.03760004)
[정답]sequence NIM값: 1.73170006 + target 차분(+1.16864860): +0.03760000  = 5일후 NIM값: 1.76930010
[예측]sequence NIM값: 1.73170006 + predict 차분(+0.15913916): +0.00576955 = 5일후 NIM값: 1.73746967(±0.03183043)
--------------------------------------------------------------------------------
Batch 10/112 36600[59]: Sequence Date: (2024-03-18 ~ 2024-04-29) >> Target Date: 2024-05-08
[평균]sequence NIM값: 1.69590008, 하한: +1.67151749, 상한: +1.72028267    = 5일후 NIM값: 1.69590008(±0.03299999)
[정답]sequence NIM값: 1.69590008 + target 차분(-1.07044542): -0.03300000  = 5일후 NIM값: 1.66290009
[예측]sequence NIM값: 1.69590008 + predict 차분(+0.10464726): +0.00405139 = 5일후 NIM값: 1.69995153(±0.03705144)
--------------------------------------------------------------------------------
Batch 11/008 36624[60]: Sequence Date: (2024-03-19 ~ 2024-04-30) >> Target Date: 2024-05-09
[평균]sequence NIM값: 1.73280001, 하한: +1.70841742, 상한: +1.75718260    = 5일후 NIM값: 1.73280001(±0.00030005)
[정답]sequence NIM값: 1.73280001 + target 차분(-0.03335794): -0.00030000  = 5일후 NIM값: 1.73249996
[예측]sequence NIM값: 1.73280001 + predict 차분(+0.18346655): +0.00653661 = 5일후 NIM값: 1.73933661(±0.00683665)
--------------------------------------------------------------------------------
Batch 11/032 36648[61]: Sequence Date: (2024-03-20 ~ 2024-05-02) >> Target Date: 2024-05-10
[평균]sequence NIM값: 1.70210004, 하한: +1.67771745, 상한: +1.72648263    = 5일후 NIM값: 1.70210004(±0.04139996)
[정답]sequence NIM값: 1.70210004 + target 차분(-1.33685315): -0.04140000  = 5일후 NIM값: 1.66070008
[예측]sequence NIM값: 1.70210004 + predict 차분(+0.13242996): +0.00492739 = 5일후 NIM값: 1.70702744(±0.04632735)
--------------------------------------------------------------------------------
Batch 11/056 36672[62]: Sequence Date: (2024-03-21 ~ 2024-05-03) >> Target Date: 2024-05-13
[평균]sequence NIM값: 1.69389999, 하한: +1.66951740, 상한: +1.71828258    = 5일후 NIM값: 1.69389999(±0.05400002)
[정답]sequence NIM값: 1.69389999 + target 차분(-1.73646486): -0.05400000  = 5일후 NIM값: 1.63989997
[예측]sequence NIM값: 1.69389999 + predict 차분(+0.16264629): +0.00588013 = 5일후 NIM값: 1.69978011(±0.05988014)
--------------------------------------------------------------------------------
Batch 11/080 36696[63]: Sequence Date: (2024-03-22 ~ 2024-05-07) >> Target Date: 2024-05-14
[평균]sequence NIM값: 1.69410002, 하한: +1.66971743, 상한: +1.71848261    = 5일후 NIM값: 1.69410002(±0.03540003)
[정답]sequence NIM값: 1.69410002 + target 차분(-1.14656186): -0.03540000  = 5일후 NIM값: 1.65869999
[예측]sequence NIM값: 1.69410002 + predict 차분(+0.16585609): +0.00598134 = 5일후 NIM값: 1.70008135(±0.04138136)
--------------------------------------------------------------------------------
Batch 11/104 36720[64]: Sequence Date: (2024-03-25 ~ 2024-05-08) >> Target Date: 2024-05-16
[평균]sequence NIM값: 1.72890007, 하한: +1.70451748, 상한: +1.75328267    = 5일후 NIM값: 1.72890007(±0.03520000)
[정답]sequence NIM값: 1.72890007 + target 차분(+1.09253204): +0.03520000  = 5일후 NIM값: 1.76410007
[예측]sequence NIM값: 1.72890007 + predict 차분(+0.20585006): +0.00724237 = 5일후 NIM값: 1.73614240(±0.02795768)
--------------------------------------------------------------------------------
Batch 12/000 36744[65]: Sequence Date: (2024-03-26 ~ 2024-05-09) >> Target Date: 2024-05-17
[평균]sequence NIM값: 1.73310006, 하한: +1.70871747, 상한: +1.75748265    = 5일후 NIM값: 1.73310006(±0.03170002)
[정답]sequence NIM값: 1.73310006 + target 차분(+0.98152882): +0.03170000  = 5일후 NIM값: 1.76480007
[예측]sequence NIM값: 1.73310006 + predict 차분(+0.14635938): +0.00536660 = 5일후 NIM값: 1.73846662(±0.02633345)
--------------------------------------------------------------------------------
Batch 12/024 36768[66]: Sequence Date: (2024-03-27 ~ 2024-05-10) >> Target Date: 2024-05-20
[평균]sequence NIM값: 1.74349999, 하한: +1.71911740, 상한: +1.76788259    = 5일후 NIM값: 1.74349999(±0.03190005)
[정답]sequence NIM값: 1.74349999 + target 차분(+0.98787189): +0.03190000  = 5일후 NIM값: 1.77540004
[예측]sequence NIM값: 1.74349999 + predict 차분(-0.07022100): -0.00146231 = 5일후 NIM값: 1.74203765(±0.03336239)
--------------------------------------------------------------------------------
Batch 12/048 36792[67]: Sequence Date: (2024-03-28 ~ 2024-05-13) >> Target Date: 2024-05-21
[평균]sequence NIM값: 1.74790001, 하한: +1.72351742, 상한: +1.77228260    = 5일후 NIM값: 1.74790001(±0.05200005)
[정답]sequence NIM값: 1.74790001 + target 차분(+1.62534761): +0.05200000  = 5일후 NIM값: 1.79990005
[예측]sequence NIM값: 1.74790001 + predict 차분(-0.14575982): -0.00384410 = 5일후 NIM값: 1.74405587(±0.05584419)
--------------------------------------------------------------------------------
Batch 12/072 36816[68]: Sequence Date: (2024-03-29 ~ 2024-05-14) >> Target Date: 2024-05-22
[평균]sequence NIM값: 1.72950006, 하한: +1.70511746, 상한: +1.75388265    = 5일후 NIM값: 1.72950006(±0.07570004)
[정답]sequence NIM값: 1.72950006 + target 차분(+2.37699819): +0.07570000  = 5일후 NIM값: 1.80520010
[예측]sequence NIM값: 1.72950006 + predict 차분(-0.24467385): -0.00696292 = 5일후 NIM값: 1.72253716(±0.08266294)
--------------------------------------------------------------------------------
Batch 12/096 36840[69]: Sequence Date: (2024-04-01 ~ 2024-05-16) >> Target Date: 2024-05-23
[평균]sequence NIM값: 1.69370008, 하한: +1.66931748, 상한: +1.71808267    = 5일후 NIM값: 1.69370008(±0.00500000)
[정답]sequence NIM값: 1.69370008 + target 차분(+0.13473268): +0.00500000  = 5일후 NIM값: 1.69870007
[예측]sequence NIM값: 1.69370008 + predict 차분(-0.44079524): -0.01314675 = 5일후 NIM값: 1.68055332(±0.01814675)
--------------------------------------------------------------------------------
Batch 12/120 36864[70]: Sequence Date: (2024-04-02 ~ 2024-05-17) >> Target Date: 2024-05-24
[평균]sequence NIM값: 1.70140004, 하한: +1.67701745, 상한: +1.72578263    = 5일후 NIM값: 1.70140004(±0.03190005)
[정답]sequence NIM값: 1.70140004 + target 차분(-1.03555858): -0.03190000  = 5일후 NIM값: 1.66949999
[예측]sequence NIM값: 1.70140004 + predict 차분(-0.46898711): -0.01403565 = 5일후 NIM값: 1.68736434(±0.01786435)
--------------------------------------------------------------------------------
Batch 13/016 36888[71]: Sequence Date: (2024-04-03 ~ 2024-05-20) >> Target Date: 2024-05-27
[평균]sequence NIM값: 1.71160007, 하한: +1.68721747, 상한: +1.73598266    = 5일후 NIM값: 1.71160007(±0.01979995)
[정답]sequence NIM값: 1.71160007 + target 차분(+0.60411781): +0.01980000  = 5일후 NIM값: 1.73140001
[예측]sequence NIM값: 1.71160007 + predict 차분(-0.29763353): -0.00863277 = 5일후 NIM값: 1.70296729(±0.02843273)
--------------------------------------------------------------------------------
Batch 13/040 36912[72]: Sequence Date: (2024-04-04 ~ 2024-05-21) >> Target Date: 2024-05-28
[평균]sequence NIM값: 1.69590008, 하한: +1.67151749, 상한: +1.72028267    = 5일후 NIM값: 1.69590008(±0.00769997)
[정답]sequence NIM값: 1.69590008 + target 차분(+0.22036375): +0.00770000  = 5일후 NIM값: 1.70360005
[예측]sequence NIM값: 1.69590008 + predict 차분(-0.26468262): -0.00759381 = 5일후 NIM값: 1.68830633(±0.01529372)
--------------------------------------------------------------------------------
Batch 13/064 36936[73]: Sequence Date: (2024-04-05 ~ 2024-05-22) >> Target Date: 2024-05-29
[평균]sequence NIM값: 1.65380001, 하한: +1.62941742, 상한: +1.67818260    = 5일후 NIM값: 1.65380001(±0.04340005)
[정답]sequence NIM값: 1.65380001 + target 차분(-1.40028358): -0.04340000  = 5일후 NIM값: 1.61039996
[예측]sequence NIM값: 1.65380001 + predict 차분(-0.24600704): -0.00700496 = 5일후 NIM값: 1.64679503(±0.03639507)
--------------------------------------------------------------------------------
Batch 13/088 36960[74]: Sequence Date: (2024-04-08 ~ 2024-05-23) >> Target Date: 2024-05-30
[평균]sequence NIM값: 1.68870008, 하한: +1.66431749, 상한: +1.71308267    = 5일후 NIM값: 1.68870008(±0.00650001)
[정답]sequence NIM값: 1.68870008 + target 차분(-0.22999226): -0.00650000  = 5일후 NIM값: 1.68220007
[예측]sequence NIM값: 1.68870008 + predict 차분(-0.32952872): -0.00963845 = 5일후 NIM값: 1.67906165(±0.00313842)
--------------------------------------------------------------------------------
Batch 13/112 36984[75]: Sequence Date: (2024-04-09 ~ 2024-05-24) >> Target Date: 2024-05-31
[평균]sequence NIM값: 1.73329997, 하한: +1.70891738, 상한: +1.75768256    = 5일후 NIM값: 1.73329997(±0.02999997)
[정답]sequence NIM값: 1.73329997 + target 차분(+0.92761296): +0.03000000  = 5일후 NIM값: 1.76329994
[예측]sequence NIM값: 1.73329997 + predict 차분(-0.24207813): -0.00688107 = 5일후 NIM값: 1.72641885(±0.03688109)
--------------------------------------------------------------------------------
Batch 14/008 37008[76]: Sequence Date: (2024-04-11 ~ 2024-05-27) >> Target Date: 2024-06-03
[평균]sequence NIM값: 1.69180000, 하한: +1.66741741, 상한: +1.71618259    = 5일후 NIM값: 1.69180000(±0.00810003)
[정답]sequence NIM값: 1.69180000 + target 차분(-0.28073660): -0.00810000  = 5일후 NIM값: 1.68369997
[예측]sequence NIM값: 1.69180000 + predict 차분(-0.27060294): -0.00778048 = 5일후 NIM값: 1.68401957(±0.00031960)
--------------------------------------------------------------------------------
Batch 14/032 37032[77]: Sequence Date: (2024-04-12 ~ 2024-05-28) >> Target Date: 2024-06-04
[평균]sequence NIM값: 1.68820000, 하한: +1.66381741, 상한: +1.71258259    = 5일후 NIM값: 1.68820000(±0.00119996)
[정답]sequence NIM값: 1.68820000 + target 차분(-0.06190163): -0.00120000  = 5일후 NIM값: 1.68700004
[예측]sequence NIM값: 1.68820000 + predict 차분(-0.35809734): -0.01053923 = 5일후 NIM값: 1.67766082(±0.00933921)
--------------------------------------------------------------------------------
Batch 14/056 37056[78]: Sequence Date: (2024-04-15 ~ 2024-05-29) >> Target Date: 2024-06-05
[평균]sequence NIM값: 1.69720006, 하한: +1.67281747, 상한: +1.72158265    = 5일후 NIM값: 1.69720006(±0.03419995)
[정답]sequence NIM값: 1.69720006 + target 차분(+1.06081688): +0.03420000  = 5일후 NIM값: 1.73140001
[예측]sequence NIM값: 1.69720006 + predict 차분(-0.45039046): -0.01344929 = 5일후 NIM값: 1.68375075(±0.04764926)
--------------------------------------------------------------------------------
Batch 14/080 37080[79]: Sequence Date: (2024-04-16 ~ 2024-05-30) >> Target Date: 2024-06-07
[평균]sequence NIM값: 1.69520009, 하한: +1.67081749, 상한: +1.71958268    = 5일후 NIM값: 1.69520009(±0.00209999)
[정답]sequence NIM값: 1.69520009 + target 차분(-0.09044532): -0.00210000  = 5일후 NIM값: 1.69310009
[예측]sequence NIM값: 1.69520009 + predict 차분(-0.57323903): -0.01732278 = 5일후 NIM값: 1.67787731(±0.01522279)
--------------------------------------------------------------------------------
Batch 14/104 37104[80]: Sequence Date: (2024-04-17 ~ 2024-05-31) >> Target Date: 2024-06-10
[평균]sequence NIM값: 1.70330000, 하한: +1.67891741, 상한: +1.72768259    = 5일후 NIM값: 1.70330000(±0.01499999)
[정답]sequence NIM값: 1.70330000 + target 차분(+0.45188481): +0.01500000  = 5일후 NIM값: 1.71829998
[예측]sequence NIM값: 1.70330000 + predict 차분(-0.56648344): -0.01710977 = 5일후 NIM값: 1.68619025(±0.03210974)
--------------------------------------------------------------------------------
Batch 15/000 37128[81]: Sequence Date: (2024-04-18 ~ 2024-06-03) >> Target Date: 2024-06-11
[평균]sequence NIM값: 1.69990003, 하한: +1.67551744, 상한: +1.72428262    = 5일후 NIM값: 1.69990003(±0.02049994)
[정답]sequence NIM값: 1.69990003 + target 차분(+0.62631845): +0.02050000  = 5일후 NIM값: 1.72039998
[예측]sequence NIM값: 1.69990003 + predict 차분(-0.53733879): -0.01619082 = 5일후 NIM값: 1.68370926(±0.03669071)
--------------------------------------------------------------------------------
Batch 15/024 37152[82]: Sequence Date: (2024-04-19 ~ 2024-06-04) >> Target Date: 2024-06-12
[평균]sequence NIM값: 1.68940008, 하한: +1.66501749, 상한: +1.71378267    = 5일후 NIM값: 1.68940008(±0.01329994)
[정답]sequence NIM값: 1.68940008 + target 차분(-0.44565570): -0.01330000  = 5일후 NIM값: 1.67610013
[예측]sequence NIM값: 1.68940008 + predict 차분(-0.47752652): -0.01430491 = 5일후 NIM값: 1.67509520(±0.00100493)
--------------------------------------------------------------------------------
Batch 15/048 37176[83]: Sequence Date: (2024-04-22 ~ 2024-06-05) >> Target Date: 2024-06-13
[평균]sequence NIM값: 1.66299999, 하한: +1.63861740, 상한: +1.68738258    = 5일후 NIM값: 1.66299999(±0.02939999)
[정답]sequence NIM값: 1.66299999 + target 차분(-0.95627064): -0.02940000  = 5일후 NIM값: 1.63360000
[예측]sequence NIM값: 1.66299999 + predict 차분(-0.40174252): -0.01191539 = 5일후 NIM값: 1.65108454(±0.01748455)
--------------------------------------------------------------------------------
Batch 15/072 37200[84]: Sequence Date: (2024-04-23 ~ 2024-06-07) >> Target Date: 2024-06-14
[평균]sequence NIM값: 1.69730008, 하한: +1.67291749, 상한: +1.72168267    = 5일후 NIM값: 1.69730008(±0.00039995)
[정답]sequence NIM값: 1.69730008 + target 차분(-0.01115729): +0.00040000  = 5일후 NIM값: 1.69770002
[예측]sequence NIM값: 1.69730008 + predict 차분(-0.35740146): -0.01051729 = 5일후 NIM값: 1.68678284(±0.01091719)
--------------------------------------------------------------------------------
Batch 15/096 37224[85]: Sequence Date: (2024-04-24 ~ 2024-06-10) >> Target Date: 2024-06-17
[평균]sequence NIM값: 1.68830001, 하한: +1.66391742, 상한: +1.71268260    = 5일후 NIM값: 1.68830001(±0.02100003)
[정답]sequence NIM값: 1.68830001 + target 차분(-0.68986285): -0.02100000  = 5일후 NIM값: 1.66729999
[예측]sequence NIM값: 1.68830001 + predict 차분(-0.27347991): -0.00787119 = 5일후 NIM값: 1.68042886(±0.01312888)
--------------------------------------------------------------------------------
Batch 15/120 37248[86]: Sequence Date: (2024-04-25 ~ 2024-06-11) >> Target Date: 2024-06-18
[평균]sequence NIM값: 1.67940009, 하한: +1.65501750, 상한: +1.70378268    = 5일후 NIM값: 1.67940009(±0.00339997)
[정답]sequence NIM값: 1.67940009 + target 차분(-0.13167509): -0.00340000  = 5일후 NIM값: 1.67600012
[예측]sequence NIM값: 1.67940009 + predict 차분(-0.19946325): -0.00553740 = 5일후 NIM값: 1.67386270(±0.00213742)
--------------------------------------------------------------------------------
Batch 16/016 37272[87]: Sequence Date: (2024-04-26 ~ 2024-06-12) >> Target Date: 2024-06-19
[평균]sequence NIM값: 1.70270002, 하한: +1.67831743, 상한: +1.72708261    = 5일후 NIM값: 1.70270002(±0.01429999)
[정답]sequence NIM값: 1.70270002 + target 차분(+0.42968416): +0.01430000  = 5일후 NIM값: 1.71700001
[예측]sequence NIM값: 1.70270002 + predict 차분(-0.11575442): -0.00289801 = 5일후 NIM값: 1.69980204(±0.01719797)
--------------------------------------------------------------------------------
Batch 16/040 37296[88]: Sequence Date: (2024-04-29 ~ 2024-06-13) >> Target Date: 2024-06-20
[평균]sequence NIM값: 1.69239998, 하한: +1.66801739, 상한: +1.71678257    = 5일후 NIM값: 1.69239998(±0.00619996)
[정답]sequence NIM값: 1.69239998 + target 차분(+0.17279093): +0.00620000  = 5일후 NIM값: 1.69859993
[예측]sequence NIM값: 1.69239998 + predict 차분(-0.04308014): -0.00060655 = 5일후 NIM값: 1.69179344(±0.00680649)
--------------------------------------------------------------------------------
Batch 16/064 37320[89]: Sequence Date: (2024-04-30 ~ 2024-06-14) >> Target Date: 2024-06-21
[평균]sequence NIM값: 1.69690001, 하한: +1.67251742, 상한: +1.72128260    = 5일후 NIM값: 1.69690001(±0.00409997)
[정답]sequence NIM값: 1.69690001 + target 차분(+0.10618899): +0.00410000  = 5일후 NIM값: 1.70099998
[예측]sequence NIM값: 1.69690001 + predict 차분(+0.01931952): +0.00136095 = 5일후 NIM값: 1.69826090(±0.00273907)
--------------------------------------------------------------------------------
Batch 16/088 37344[90]: Sequence Date: (2024-05-02 ~ 2024-06-17) >> Target Date: 2024-06-24
[평균]sequence NIM값: 1.70930004, 하한: +1.68491745, 상한: +1.73368263    = 5일후 NIM값: 1.70930004(±0.01919997)
[정답]sequence NIM값: 1.70930004 + target 차분(+0.58508867): +0.01920000  = 5일후 NIM값: 1.72850001
[예측]sequence NIM값: 1.70930004 + predict 차분(+0.04053967): +0.00203004 = 5일후 NIM값: 1.71133006(±0.01716995)
--------------------------------------------------------------------------------
Batch 16/112 37368[91]: Sequence Date: (2024-05-03 ~ 2024-06-18) >> Target Date: 2024-06-25
[평균]sequence NIM값: 1.68280005, 하한: +1.65841746, 상한: +1.70718265    = 5일후 NIM값: 1.68280005(±0.00800002)
[정답]sequence NIM값: 1.68280005 + target 차분(+0.22987832): +0.00800000  = 5일후 NIM값: 1.69080007
[예측]sequence NIM값: 1.68280005 + predict 차분(+0.06630274): +0.00284236 = 5일후 NIM값: 1.68564236(±0.00515771)
--------------------------------------------------------------------------------
Batch 17/008 37392[92]: Sequence Date: (2024-05-07 ~ 2024-06-19) >> Target Date: 2024-06-26
[평균]sequence NIM값: 1.68840003, 하한: +1.66401744, 상한: +1.71278262    = 5일후 NIM값: 1.68840003(±0.04639995)
[정답]sequence NIM값: 1.68840003 + target 차분(+1.44774246): +0.04640000  = 5일후 NIM값: 1.73479998
[예측]sequence NIM값: 1.68840003 + predict 차분(+0.08828489): +0.00353547 = 5일후 NIM값: 1.69193554(±0.04286444)
--------------------------------------------------------------------------------
Batch 17/032 37416[93]: Sequence Date: (2024-05-08 ~ 2024-06-20) >> Target Date: 2024-06-27
[평균]sequence NIM값: 1.68620002, 하한: +1.66181743, 상한: +1.71058261    = 5일후 NIM값: 1.68620002(±0.02779996)
[정답]sequence NIM값: 1.68620002 + target 차분(+0.85783952): +0.02780000  = 5일후 NIM값: 1.71399999
[예측]sequence NIM값: 1.68620002 + predict 차분(+0.14890310): +0.00544680 = 5일후 NIM값: 1.69164681(±0.02235317)
--------------------------------------------------------------------------------
Batch 17/056 37440[94]: Sequence Date: (2024-05-09 ~ 2024-06-21) >> Target Date: 2024-06-28
[평균]sequence NIM값: 1.69280005, 하한: +1.66841745, 상한: +1.71718264    = 5일후 NIM값: 1.69280005(±0.02100003)
[정답]sequence NIM값: 1.69280005 + target 차분(+0.64217609): +0.02100000  = 5일후 NIM값: 1.71380007
[예측]sequence NIM값: 1.69280005 + predict 차분(+0.13417090): +0.00498229 = 5일후 NIM값: 1.69778228(±0.01601779)
--------------------------------------------------------------------------------
Batch 17/080 37464[95]: Sequence Date: (2024-05-10 ~ 2024-06-24) >> Target Date: 2024-07-01
[평균]sequence NIM값: 1.69010007, 하한: +1.66571748, 상한: +1.71448267    = 5일후 NIM값: 1.69010007(±0.02339995)
[정답]sequence NIM값: 1.69010007 + target 차분(+0.71829259): +0.02340000  = 5일후 NIM값: 1.71350002
[예측]sequence NIM값: 1.69010007 + predict 차분(+0.14844811): +0.00543246 = 5일후 NIM값: 1.69553256(±0.01796746)
--------------------------------------------------------------------------------
Batch 17/104 37488[96]: Sequence Date: (2024-05-13 ~ 2024-06-25) >> Target Date: 2024-07-02
[평균]sequence NIM값: 1.67480004, 하한: +1.65041745, 상한: +1.69918263    = 5일후 NIM값: 1.67480004(±0.03310001)
[정답]sequence NIM값: 1.67480004 + target 차분(-1.07361686): -0.03310000  = 5일후 NIM값: 1.64170003
[예측]sequence NIM값: 1.67480004 + predict 차분(+0.16307367): +0.00589361 = 5일후 NIM값: 1.68069363(±0.03899360)
--------------------------------------------------------------------------------
Batch 18/000 37512[97]: Sequence Date: (2024-05-14 ~ 2024-06-26) >> Target Date: 2024-07-03
[평균]sequence NIM값: 1.64200008, 하한: +1.61761749, 상한: +1.66638267    = 5일후 NIM값: 1.64200008(±0.03719997)
[정답]sequence NIM값: 1.64200008 + target 차분(-1.20364928): -0.03720000  = 5일후 NIM값: 1.60480011
[예측]sequence NIM값: 1.64200008 + predict 차분(+0.14343345): +0.00527434 = 5일후 NIM값: 1.64727437(±0.04247427)
--------------------------------------------------------------------------------
Batch 18/024 37536[98]: Sequence Date: (2024-05-16 ~ 2024-06-27) >> Target Date: 2024-07-04
[평균]sequence NIM값: 1.65840006, 하한: +1.63401747, 상한: +1.68278265    = 5일후 NIM값: 1.65840006(±0.00479996)
[정답]sequence NIM값: 1.65840006 + target 차분(-0.17607640): -0.00480000  = 5일후 NIM값: 1.65360010
[예측]sequence NIM값: 1.65840006 + predict 차분(+0.14343250): +0.00527431 = 5일후 NIM값: 1.66367435(±0.01007426)
--------------------------------------------------------------------------------
Batch 18/048 37560[99]: Sequence Date: (2024-05-17 ~ 2024-06-28) >> Target Date: 2024-07-05
[평균]sequence NIM값: 1.67180002, 하한: +1.64741743, 상한: +1.69618261    = 5일후 NIM값: 1.67180002(±0.01769996)
[정답]sequence NIM값: 1.67180002 + target 차분(-0.58520263): -0.01770000  = 5일후 NIM값: 1.65410006
[예측]sequence NIM값: 1.67180002 + predict 차분(+0.10881708): +0.00418287 = 5일후 NIM값: 1.67598283(±0.02188277)
--------------------------------------------------------------------------------
Batch 18/072 37584[100]: Sequence Date: (2024-05-20 ~ 2024-07-01) >> Target Date: 2024-07-08
[평균]sequence NIM값: 1.66670001, 하한: +1.64231741, 상한: +1.69108260    = 5일후 NIM값: 1.66670001(±0.01600003)
[정답]sequence NIM값: 1.66670001 + target 차분(-0.53128678): -0.01600000  = 5일후 NIM값: 1.65069997
[예측]sequence NIM값: 1.66670001 + predict 차분(+0.10495410): +0.00406106 = 5일후 NIM값: 1.67076111(±0.02006114)
--------------------------------------------------------------------------------
Batch 18/096 37608[101]: Sequence Date: (2024-05-21 ~ 2024-07-02) >> Target Date: 2024-07-09
[평균]sequence NIM값: 1.70790005, 하한: +1.68351746, 상한: +1.73228264    = 5일후 NIM값: 1.70790005(±0.04079998)
[정답]sequence NIM값: 1.70790005 + target 차분(+1.27013731): +0.04080000  = 5일후 NIM값: 1.74870002
[예측]sequence NIM값: 1.70790005 + predict 차분(+0.12601465): +0.00472512 = 5일후 NIM값: 1.71262515(±0.03607488)
--------------------------------------------------------------------------------
Batch 18/120 37632[102]: Sequence Date: (2024-05-22 ~ 2024-07-03) >> Target Date: 2024-07-10
[평균]sequence NIM값: 1.67920005, 하한: +1.65481746, 상한: +1.70358264    = 5일후 NIM값: 1.67920005(±0.01269996)
[정답]sequence NIM값: 1.67920005 + target 차분(+0.37893981): +0.01270000  = 5일후 NIM값: 1.69190001
[예측]sequence NIM값: 1.67920005 + predict 차분(+0.12189335): +0.00459517 = 5일후 NIM값: 1.68379521(±0.00810480)
--------------------------------------------------------------------------------
Batch 19/016 37656[103]: Sequence Date: (2024-05-23 ~ 2024-07-04) >> Target Date: 2024-07-11
[평균]sequence NIM값: 1.66320002, 하한: +1.63881743, 상한: +1.68758261    = 5일후 NIM값: 1.66320002(±0.01660001)
[정답]sequence NIM값: 1.66320002 + target 차분(-0.55031592): -0.01660000  = 5일후 NIM값: 1.64660001
[예측]sequence NIM값: 1.66320002 + predict 차분(+0.07306594): +0.00305561 = 5일후 NIM값: 1.66625559(±0.01965559)
--------------------------------------------------------------------------------
Batch 19/040 37680[104]: Sequence Date: (2024-05-24 ~ 2024-07-05) >> Target Date: 2024-07-12
[평균]sequence NIM값: 1.68949997, 하한: +1.66511738, 상한: +1.71388257    = 5일후 NIM값: 1.68949997(±0.00349998)
[정답]sequence NIM값: 1.68949997 + target 차분(-0.13484661): -0.00350000  = 5일후 NIM값: 1.68599999
[예측]sequence NIM값: 1.68949997 + predict 차분(+0.06366504): +0.00275919 = 5일후 NIM값: 1.69225919(±0.00625920)
--------------------------------------------------------------------------------
Batch 19/064 37704[105]: Sequence Date: (2024-05-27 ~ 2024-07-08) >> Target Date: 2024-07-15
[평균]sequence NIM값: 1.68270004, 하한: +1.65831745, 상한: +1.70708263    = 5일후 NIM값: 1.68270004(±0.02079999)
[정답]sequence NIM값: 1.68270004 + target 차분(-0.68351978): -0.02080000  = 5일후 NIM값: 1.66190004
[예측]sequence NIM값: 1.68270004 + predict 차분(+0.03073422): +0.00172086 = 5일후 NIM값: 1.68442094(±0.02252090)
--------------------------------------------------------------------------------
Batch 19/088 37728[106]: Sequence Date: (2024-05-28 ~ 2024-07-09) >> Target Date: 2024-07-16
[평균]sequence NIM값: 1.66710007, 하한: +1.64271748, 상한: +1.69148266    = 5일후 NIM값: 1.66710007(±0.05009997)
[정답]sequence NIM값: 1.66710007 + target 차분(-1.61277544): -0.05010000  = 5일후 NIM값: 1.61700010
[예측]sequence NIM값: 1.66710007 + predict 차분(+0.00754360): +0.00098965 = 5일후 NIM값: 1.66808975(±0.05108964)
--------------------------------------------------------------------------------
Batch 19/112 37752[107]: Sequence Date: (2024-05-29 ~ 2024-07-10) >> Target Date: 2024-07-17
[평균]sequence NIM값: 1.66649997, 하한: +1.64211738, 상한: +1.69088256    = 5일후 NIM값: 1.66649997(±0.03600001)
[정답]sequence NIM값: 1.66649997 + target 차분(-1.16559100): -0.03600000  = 5일후 NIM값: 1.63049996
[예측]sequence NIM값: 1.66649997 + predict 차분(+0.10296274): +0.00399827 = 5일후 NIM값: 1.67049825(±0.03999829)
--------------------------------------------------------------------------------
Batch 20/008 37776[108]: Sequence Date: (2024-05-30 ~ 2024-07-11) >> Target Date: 2024-07-18
[평균]sequence NIM값: 1.67980003, 하한: +1.65541744, 상한: +1.70418262    = 5일후 NIM값: 1.67980003(±0.01090002)
[정답]sequence NIM값: 1.67980003 + target 차분(-0.36953920): -0.01090000  = 5일후 NIM값: 1.66890001
[예측]sequence NIM값: 1.67980003 + predict 차분(+0.09399579): +0.00371554 = 5일후 NIM값: 1.68351555(±0.01461554)
--------------------------------------------------------------------------------
Batch 20/032 37800[109]: Sequence Date: (2024-05-31 ~ 2024-07-12) >> Target Date: 2024-07-19
[평균]sequence NIM값: 1.69300008, 하한: +1.66861749, 상한: +1.71738267    = 5일후 NIM값: 1.69300008(±0.01979995)
[정답]sequence NIM값: 1.69300008 + target 차분(-0.65180457): -0.01980000  = 5일후 NIM값: 1.67320013
[예측]sequence NIM값: 1.69300008 + predict 차분(+0.11071057): +0.00424257 = 5일후 NIM값: 1.69724262(±0.02404249)
--------------------------------------------------------------------------------
Batch 20/056 37824[110]: Sequence Date: (2024-06-03 ~ 2024-07-15) >> Target Date: 2024-07-22
[평균]sequence NIM값: 1.70350003, 하한: +1.67911744, 상한: +1.72788262    = 5일후 NIM값: 1.70350003(±0.00059998)
[정답]sequence NIM값: 1.70350003 + target 차분(-0.04287250): -0.00060000  = 5일후 NIM값: 1.70290005
[예측]sequence NIM값: 1.70350003 + predict 차분(+0.10892756): +0.00418635 = 5일후 NIM값: 1.70768642(±0.00478637)
--------------------------------------------------------------------------------
Batch 20/080 37848[111]: Sequence Date: (2024-06-04 ~ 2024-07-16) >> Target Date: 2024-07-23
[평균]sequence NIM값: 1.71720004, 하한: +1.69281745, 상한: +1.74158263    = 5일후 NIM값: 1.71720004(±0.03340006)
[정답]sequence NIM값: 1.71720004 + target 차분(+1.03544474): +0.03340000  = 5일후 NIM값: 1.75060010
[예측]sequence NIM값: 1.71720004 + predict 차분(+0.09907071): +0.00387556 = 5일후 NIM값: 1.72107565(±0.02952445)
--------------------------------------------------------------------------------
Batch 20/104 37872[112]: Sequence Date: (2024-06-05 ~ 2024-07-17) >> Target Date: 2024-07-24
[평균]sequence NIM값: 1.70249999, 하한: +1.67811739, 상한: +1.72688258    = 5일후 NIM값: 1.70249999(±0.00969994)
[정답]sequence NIM값: 1.70249999 + target 차분(+0.28379416): +0.00970000  = 5일후 NIM값: 1.71219993
[예측]sequence NIM값: 1.70249999 + predict 차분(+0.08489922): +0.00342872 = 5일후 NIM값: 1.70592868(±0.00627124)
--------------------------------------------------------------------------------
Batch 21/000 37896[113]: Sequence Date: (2024-06-07 ~ 2024-07-18) >> Target Date: 2024-07-25
[평균]sequence NIM값: 1.69070005, 하한: +1.66631746, 상한: +1.71508265    = 5일후 NIM값: 1.69070005(±0.00090003)
[정답]sequence NIM값: 1.69070005 + target 차분(-0.05238707): -0.00090000  = 5일후 NIM값: 1.68980002
[예측]sequence NIM값: 1.69070005 + predict 차분(+0.29838848): +0.01016017 = 5일후 NIM값: 1.70086026(±0.01106024)
--------------------------------------------------------------------------------
Batch 21/024 37920[114]: Sequence Date: (2024-06-10 ~ 2024-07-19) >> Target Date: 2024-07-26
[평균]sequence NIM값: 1.71280003, 하한: +1.68841743, 상한: +1.73718262    = 5일후 NIM값: 1.71280003(±0.02769995)
[정답]sequence NIM값: 1.71280003 + target 차분(+0.85466802): +0.02770000  = 5일후 NIM값: 1.74049997
[예측]sequence NIM값: 1.71280003 + predict 차분(+0.51629835): +0.01703100 = 5일후 NIM값: 1.72983098(±0.01066899)
--------------------------------------------------------------------------------
Batch 21/048 37944[115]: Sequence Date: (2024-06-11 ~ 2024-07-22) >> Target Date: 2024-07-29
[평균]sequence NIM값: 1.70410001, 하한: +1.67971742, 상한: +1.72848260    = 5일후 NIM값: 1.70410001(±0.03680003)
[정답]sequence NIM값: 1.70410001 + target 차분(+1.14327645): +0.03680000  = 5일후 NIM값: 1.74090004
[예측]sequence NIM값: 1.70410001 + predict 차분(+0.60206664): +0.01973532 = 5일후 NIM값: 1.72383535(±0.01706469)
--------------------------------------------------------------------------------
Batch 21/072 37968[116]: Sequence Date: (2024-06-12 ~ 2024-07-23) >> Target Date: 2024-07-30
[평균]sequence NIM값: 1.68379998, 하한: +1.65941739, 상한: +1.70818257    = 5일후 NIM값: 1.68379998(±0.01110005)
[정답]sequence NIM값: 1.68379998 + target 차분(-0.37588224): -0.01110000  = 5일후 NIM값: 1.67269993
[예측]sequence NIM값: 1.68379998 + predict 차분(+0.62556833): +0.02047635 = 5일후 NIM값: 1.70427632(±0.03157640)
--------------------------------------------------------------------------------
Batch 21/096 37992[117]: Sequence Date: (2024-06-13 ~ 2024-07-24) >> Target Date: 2024-07-31
[평균]sequence NIM값: 1.69280005, 하한: +1.66841745, 상한: +1.71718264    = 5일후 NIM값: 1.69280005(±0.01979995)
[정답]sequence NIM값: 1.69280005 + target 차분(+0.60411781): +0.01980000  = 5일후 NIM값: 1.71259999
[예측]sequence NIM값: 1.69280005 + predict 차분(+0.60593325): +0.01985724 = 5일후 NIM값: 1.71265733(±0.00005734)
--------------------------------------------------------------------------------
Batch 21/120 38016[118]: Sequence Date: (2024-06-14 ~ 2024-07-25) >> Target Date: 2024-08-01
[평균]sequence NIM값: 1.69160008, 하한: +1.66721749, 상한: +1.71598268    = 5일후 NIM값: 1.69160008(±0.00680006)
[정답]sequence NIM값: 1.69160008 + target 차분(+0.19182007): +0.00680000  = 5일후 NIM값: 1.69840014
[예측]sequence NIM값: 1.69160008 + predict 차분(+0.56091481): +0.01843778 = 5일후 NIM값: 1.71003783(±0.01163769)
--------------------------------------------------------------------------------
Batch 22/016 38040[119]: Sequence Date: (2024-06-17 ~ 2024-07-26) >> Target Date: 2024-08-02
[평균]sequence NIM값: 1.68510008, 하한: +1.66071749, 상한: +1.70948267    = 5일후 NIM값: 1.68510008(±0.02190006)
[정답]sequence NIM값: 1.68510008 + target 차분(+0.67071974): +0.02190000  = 5일후 NIM값: 1.70700014
[예측]sequence NIM값: 1.68510008 + predict 차분(+0.49831834): +0.01646408 = 5일후 NIM값: 1.70156419(±0.00543594)
--------------------------------------------------------------------------------
Batch 22/040 38064[120]: Sequence Date: (2024-06-18 ~ 2024-07-29) >> Target Date: 2024-08-05
[평균]sequence NIM값: 1.66729999, 하한: +1.64291739, 상한: +1.69168258    = 5일후 NIM값: 1.66729999(±0.00010002)
[정답]sequence NIM값: 1.66729999 + target 차분(-0.02701490): -0.00010000  = 5일후 NIM값: 1.66719997
[예측]sequence NIM값: 1.66729999 + predict 차분(+0.27073857): +0.00928835 = 5일후 NIM값: 1.67658830(±0.00938833)
--------------------------------------------------------------------------------
Batch 22/064 38088[121]: Sequence Date: (2024-06-19 ~ 2024-07-30) >> Target Date: 2024-08-06
[평균]sequence NIM값: 1.69490004, 하한: +1.67051744, 상한: +1.71928263    = 5일후 NIM값: 1.69490004(±0.00610006)
[정답]sequence NIM값: 1.69490004 + target 차분(-0.21730617): -0.00610000  = 5일후 NIM값: 1.68879998
[예측]sequence NIM값: 1.69490004 + predict 차분(+0.09543283): +0.00376085 = 5일후 NIM값: 1.69866085(±0.00986087)
--------------------------------------------------------------------------------
Batch 22/088 38112[122]: Sequence Date: (2024-06-20 ~ 2024-07-31) >> Target Date: 2024-08-07
[평균]sequence NIM값: 1.67299998, 하한: +1.64861739, 상한: +1.69738257    = 5일후 NIM값: 1.67299998(±0.02489996)
[정답]sequence NIM값: 1.67299998 + target 차분(-0.81355214): -0.02490000  = 5일후 NIM값: 1.64810002
[예측]sequence NIM값: 1.67299998 + predict 차분(+0.04570577): +0.00219293 = 5일후 NIM값: 1.67519295(±0.02709293)
--------------------------------------------------------------------------------
Batch 22/112 38136[123]: Sequence Date: (2024-06-21 ~ 2024-08-01) >> Target Date: 2024-08-08
[평균]sequence NIM값: 1.68480003, 하한: +1.66041744, 상한: +1.70918262    = 5일후 NIM값: 1.68480003(±0.00399995)
[정답]sequence NIM값: 1.68480003 + target 차분(-0.15070422): -0.00400000  = 5일후 NIM값: 1.68080008
[예측]sequence NIM값: 1.68480003 + predict 차분(+0.36894748): +0.01238494 = 5일후 NIM값: 1.69718492(±0.01638484)
--------------------------------------------------------------------------------
Batch 23/008 38160[124]: Sequence Date: (2024-06-24 ~ 2024-08-02) >> Target Date: 2024-08-09
[평균]sequence NIM값: 1.66320002, 하한: +1.63881743, 상한: +1.68758261    = 5일후 NIM값: 1.66320002(±0.01919997)
[정답]sequence NIM값: 1.66320002 + target 차분(-0.63277543): -0.01920000  = 5일후 NIM값: 1.64400005
[예측]sequence NIM값: 1.66320002 + predict 차분(+0.22616884): +0.00788304 = 5일후 NIM값: 1.67108309(±0.02708304)
--------------------------------------------------------------------------------
Batch 23/032 38184[125]: Sequence Date: (2024-06-25 ~ 2024-08-05) >> Target Date: 2024-08-12
[평균]sequence NIM값: 1.66740000, 하한: +1.64301741, 상한: +1.69178259    = 5일후 NIM값: 1.66740000(±0.00450003)
[정답]sequence NIM값: 1.66740000 + target 차분(-0.16656183): -0.00450000  = 5일후 NIM값: 1.66289997
[예측]sequence NIM값: 1.66740000 + predict 차분(+0.13856736): +0.00512091 = 5일후 NIM값: 1.67252088(±0.00962090)
--------------------------------------------------------------------------------
Batch 23/056 38208[126]: Sequence Date: (2024-06-26 ~ 2024-08-06) >> Target Date: 2024-08-13
[평균]sequence NIM값: 1.70099998, 하한: +1.67661738, 상한: +1.72538257    = 5일후 NIM값: 1.70099998(±0.02020001)
[정답]sequence NIM값: 1.70099998 + target 차분(+0.61680388): +0.02020000  = 5일후 NIM값: 1.72119999
[예측]sequence NIM값: 1.70099998 + predict 차분(+0.16418669): +0.00592870 = 5일후 NIM값: 1.70692873(±0.01427126)
--------------------------------------------------------------------------------
Batch 23/080 38232[127]: Sequence Date: (2024-06-27 ~ 2024-08-07) >> Target Date: 2024-08-14
[평균]sequence NIM값: 1.69790006, 하한: +1.67351747, 상한: +1.72228265    = 5일후 NIM값: 1.69790006(±0.01030004)
[정답]sequence NIM값: 1.69790006 + target 차분(+0.30282331): +0.01030000  = 5일후 NIM값: 1.70820010
[예측]sequence NIM값: 1.69790006 + predict 차분(+0.18381360): +0.00654755 = 5일후 NIM값: 1.70444763(±0.00375247)
--------------------------------------------------------------------------------
Batch 23/104 38256[128]: Sequence Date: (2024-06-28 ~ 2024-08-08) >> Target Date: 2024-08-16
[평균]sequence NIM값: 1.68879998, 하한: +1.66441739, 상한: +1.71318257    = 5일후 NIM값: 1.68879998(±0.01400006)
[정답]sequence NIM값: 1.68879998 + target 차분(+0.42016959): +0.01400000  = 5일후 NIM값: 1.70280004
[예측]sequence NIM값: 1.68879998 + predict 차분(+0.21721488): +0.00760071 = 5일후 NIM값: 1.69640064(±0.00639939)
--------------------------------------------------------------------------------
Batch 24/000 38280[129]: Sequence Date: (2024-07-01 ~ 2024-08-09) >> Target Date: 2024-08-19
[평균]sequence NIM값: 1.68239999, 하한: +1.65801740, 상한: +1.70678258    = 5일후 NIM값: 1.68239999(±0.01859999)
[정답]sequence NIM값: 1.68239999 + target 차분(+0.56605959): +0.01860000  = 5일후 NIM값: 1.70099998
[예측]sequence NIM값: 1.68239999 + predict 차분(+0.25664291): +0.00884390 = 5일후 NIM값: 1.69124389(±0.00975609)
--------------------------------------------------------------------------------
Batch 24/024 38304[130]: Sequence Date: (2024-07-02 ~ 2024-08-12) >> Target Date: 2024-08-20
[평균]sequence NIM값: 1.67190003, 하한: +1.64751744, 상한: +1.69628263    = 5일후 NIM값: 1.67190003(±0.01530004)
[정답]sequence NIM값: 1.67190003 + target 차분(-0.50908613): -0.01530000  = 5일후 NIM값: 1.65660000
[예측]sequence NIM값: 1.67190003 + predict 차분(+0.29715300): +0.01012121 = 5일후 NIM값: 1.68202126(±0.02542126)
--------------------------------------------------------------------------------
================================================================================
Predict Mean Percentage Absolute Error (MPAE): 1.44641733%
Threshold Mean Percentage Absolute Error (MPAE): 1.79745948%
================================================================================
mean_absolute_difference: 0.02470835
================================================================================
In [70]:
import matplotlib.pyplot as plt

cap_width = 0.5  # half-width of each error-bar cap (in x-axis units)

# Canvas for the target-vs-prediction scatter chart.
plt.figure(figsize=(25, 10))

# Label every 43rd prediction date on the x-axis (last 8 chars of the date string).
plt.xticks(ticks=range(0, len(prediction_dates), 43),
           labels=[d[-8:] for d in prediction_dates[::43]],
           rotation=45, fontsize=9)

# Band centers: sequence NIM values drawn as short horizontal dashes.
plt.scatter(range(len(prediction_dates)), sequence_nim_values,
            label=f"Delta Boundary(±{predict_threshold:.4f})",
            s=20, color='gray', marker='_')

# Hand-drawn error bars: a thick translucent stem plus thin horizontal caps.
for idx, center in enumerate(sequence_nim_values):
    lo = center - predict_threshold
    hi = center + predict_threshold
    plt.vlines(idx, lo, hi, colors='gray', linestyles='-', linewidth=5, alpha=0.3)
    plt.hlines(hi, idx - cap_width, idx + cap_width, colors='gray', linewidth=0.5)
    plt.hlines(lo, idx - cap_width, idx + cap_width, colors='gray', linewidth=0.5)

# Ground-truth NIM values (orange dots, drawn above predictions).
plt.scatter(range(len(target_nim_values)), target_nim_values,
            label="Target NIM Values", s=20, zorder=2,
            color='darkorange', alpha=0.8, marker='o')

# Predicted NIM values (blue dots).
plt.scatter(range(len(prediction_nim_values)), prediction_nim_values,
            label="Prediction NIM Values", s=20, zorder=1,
            color='dodgerblue', alpha=0.9, marker='o')

# Dashed vertical connector between each target/prediction pair.
for idx in range(len(target_nim_values)):
    plt.plot([idx, idx],
             [target_nim_values[idx], prediction_nim_values[idx]],
             color='black', linestyle='--', linewidth=0.7, alpha=0.5)

# Rounded box holding the accuracy summary.
bbox_props = dict(boxstyle="round,pad=0.5", edgecolor="orange", facecolor="#FFDAB9", alpha=0.5)

plt.text(0.5, 0.95,  # position in axes-relative coordinates
         f"Prediction MPAE: {predict_mpae:.3f}%\nAverage Delta MPAE: {threshold_mpae:.4f}%",
         fontsize=14,
         bbox=bbox_props,
         transform=plt.gca().transAxes,
         verticalalignment='top',
         horizontalalignment='center')

# Final styling.
plt.title("Target, Predicted NIM, and Average Boundaries", fontsize=14)
plt.ylabel("NIM Values", fontsize=10)
plt.legend(fontsize=12)
plt.grid(True)
plt.show()
No description has been provided for this image
In [69]:
import matplotlib.pyplot as plt

cap_width = 0.5  # half-width of each error-bar cap (in x-axis units)

# Canvas for the target-vs-prediction line chart.
plt.figure(figsize=(25, 10))

# Label every 43rd prediction date on the x-axis (last 8 chars of the date string).
plt.xticks(ticks=range(0, len(prediction_dates), 43),
           labels=[d[-8:] for d in prediction_dates[::43]],
           rotation=45, fontsize=9)

# Band centers: sequence NIM values drawn as short horizontal dashes.
plt.scatter(range(len(prediction_dates)), sequence_nim_values,
            label=f"Delta Boundary(±{predict_threshold:.4f})",
            s=20, color='gray', marker='_')

# Hand-drawn error bars: a thick translucent stem plus thin horizontal caps.
for idx, center in enumerate(sequence_nim_values):
    lo = center - predict_threshold
    hi = center + predict_threshold
    plt.vlines(idx, lo, hi, colors='gray', linestyles='-', linewidth=5, alpha=0.3)
    plt.hlines(hi, idx - cap_width, idx + cap_width, colors='gray', linewidth=0.5)
    plt.hlines(lo, idx - cap_width, idx + cap_width, colors='gray', linewidth=0.5)

# Ground-truth NIM curve (orange, drawn above predictions).
plt.plot(range(len(target_nim_values)), target_nim_values,
         label="Target NIM Values", color='darkorange', alpha=0.6,
         linewidth=2, marker='o', markersize=4, zorder=2)

# Predicted NIM curve (blue).
plt.plot(range(len(prediction_nim_values)), prediction_nim_values,
         label="Prediction NIM Values", color='dodgerblue', alpha=0.7,
         linewidth=2, marker='o', markersize=4, zorder=1)

# Dashed vertical connector between each target/prediction pair.
for idx in range(len(target_nim_values)):
    plt.plot([idx, idx],
             [target_nim_values[idx], prediction_nim_values[idx]],
             color='black', linestyle='--', linewidth=0.7, alpha=0.5)

# Rounded box holding the accuracy summary.
bbox_props = dict(boxstyle="round,pad=0.5", edgecolor="orange", facecolor="#FFDAB9", alpha=0.5)

plt.text(0.5, 0.95,  # position in axes-relative coordinates
         f"Prediction MPAE: {predict_mpae:.3f}%\nAverage Delta MPAE: {threshold_mpae:.4f}%",
         fontsize=14,
         bbox=bbox_props,
         transform=plt.gca().transAxes,
         verticalalignment='top',
         horizontalalignment='center')

# Final styling.
plt.title("Target, Predicted NIM, and Average Boundaries", fontsize=14)
plt.ylabel("NIM Values", fontsize=10)
plt.legend(fontsize=12)
plt.grid(True)
plt.show()
No description has been provided for this image
In [76]:
import matplotlib.pyplot as plt

cap_width = 0.5  # kept for parity with the error-bar variants of this cell (unused here)

# Canvas for the banded target-vs-prediction chart.
plt.figure(figsize=(25, 10))

# Label every 43rd prediction date on the x-axis (last 8 chars of the date string).
plt.xticks(ticks=range(0, len(prediction_dates), 43),
           labels=[d[-8:] for d in prediction_dates[::43]],
           rotation=45, fontsize=9)

# Band centers: sequence NIM values drawn as short horizontal dashes.
plt.scatter(range(len(prediction_dates)), sequence_nim_values,
            label=f"Delta Boundary(±{predict_threshold:.4f})",
            s=20, color='gray', marker='_')

# Shaded ±threshold band around the sequence NIM values
# (replaces the per-point error bars of the earlier variants).
x_range = range(len(prediction_dates))
lower_bound = [v - predict_threshold for v in sequence_nim_values]
upper_bound = [v + predict_threshold for v in sequence_nim_values]

plt.fill_between(x_range,
                 lower_bound,
                 upper_bound,
                 color='gray',
                 alpha=0.3)

# Ground-truth NIM curve (orange, drawn above predictions).
plt.plot(range(len(target_nim_values)), target_nim_values,
         label="Target NIM Values", color='darkorange', alpha=0.6,
         linewidth=2, marker='o', markersize=2, zorder=2)

# Predicted NIM curve (blue).
plt.plot(range(len(prediction_nim_values)), prediction_nim_values,
         label="Prediction NIM Values", color='dodgerblue', alpha=0.7,
         linewidth=2, marker='o', markersize=2, zorder=1)

# Rounded box holding the accuracy summary.
bbox_props = dict(boxstyle="round,pad=0.5", edgecolor="orange", facecolor="#FFDAB9", alpha=0.5)

plt.text(0.5, 0.95,  # position in axes-relative coordinates
         f"Prediction MPAE: {predict_mpae:.3f}%\nAverage Delta MPAE: {threshold_mpae:.4f}%",
         fontsize=14,
         bbox=bbox_props,
         transform=plt.gca().transAxes,
         verticalalignment='top',
         horizontalalignment='center')

# Final styling.
plt.title("Target, Predicted NIM, and Average Boundaries", fontsize=14)
plt.ylabel("NIM Values", fontsize=10)
plt.legend(fontsize=12)
plt.grid(True)
plt.show()
No description has been provided for this image
  • Animation
In [84]:
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation, PillowWriter
from PIL import Image, ImageSequence

# Animated actual-vs-predicted NIM chart on a 25x10-inch canvas.
fig, ax = plt.subplots(figsize=(25, 10))
xdata, ydata = [], []
xdata_orange, ydata_orange = [], []
ln, = plt.plot([], [], 'lightblue', animated=True, linewidth=7, zorder=1, label='Actual NIM Value')  # first line (light blue)
ln_orange, = plt.plot([], [], 'darkorange', alpha=0.6, animated=True, linewidth=7, zorder=2, label='Predicted NIM Value')  # second line (dark orange)
# NOTE(review): color convention is inverted relative to the static plots above
# (there, orange = target and blue = prediction) — confirm this is intended.

# Use the actual date/NIM series; dates are replaced by sequential integers for the x-axis.
dates = np.arange(len(prediction_dates))  # sequential index standing in for dates
original_nim_values = np.array(target_nim_values)
prediction_nim_values = np.array(prediction_nim_values)

# Put an x-axis tick on every 10th date.
ax.set_xticks(np.arange(0, len(dates), 10))
ax.set_xticklabels(prediction_dates[::10], rotation=45, ha='right')

# Tighten figure margins (minimize whitespace on both sides).
plt.subplots_adjust(left=0.05, right=0.95, top=0.9, bottom=0.1)

# Initial animation state: fix axis limits and clear both lines.
def init():
    ax.set_xlim(0, len(dates))
    min_y = min(min(original_nim_values), min(prediction_nim_values))
    max_y = max(max(original_nim_values), max(prediction_nim_values))
    y_range = max_y - min_y
    ax.set_ylim(min_y - 0.15 * y_range, max_y + 0.15 * y_range)  # pad the y-range so both curves stay fully visible
    ln.set_data([], [])
    ln_orange.set_data([], [])
    return ln, ln_orange

# Per-frame update: append one more point to each curve (lists accumulate across frames).
def update(frame):
    # Extend the first curve (actual NIM values).
    xdata.append(dates[frame])
    ydata.append(original_nim_values[frame])
    ln.set_data(xdata, ydata)

    # Extend the second curve (predicted NIM values).
    xdata_orange.append(dates[frame])
    ydata_orange.append(prediction_nim_values[frame])
    ln_orange.set_data(xdata_orange, ydata_orange)
    
    return ln, ln_orange

# One frame per data point; interval=1000 ms between frames (i.e. 1 frame/sec on screen).
ani = FuncAnimation(fig, update, frames=len(dates), init_func=init, blit=True, interval=1000, repeat=False)

# Render the animation on screen.
plt.ylabel('NIM Value')
plt.title('Actual vs Predicted NIM Values - Animated', fontsize=19)
plt.legend(loc='upper right', fontsize=11)
plt.grid()
plt.show()

# Save the animation as a GIF.
gif_writer = PillowWriter(fps=24)  # GIF written at 24 fps (file playback speed is independent of the on-screen interval)
output_path = "image/nim_ibks_boks_news_prediction_ani.gif"
ani.save(output_path, writer=gif_writer)

# Post-process the GIF so it does not loop forever.
# NOTE(review): Pillow's loop=1 encodes "repeat 1 extra time" (many viewers play the GIF
# twice); to play exactly once, the loop parameter should be omitted entirely — TODO confirm.
with Image.open(output_path) as img:
    frames = [frame.copy() for frame in ImageSequence.Iterator(img)]
    frames[0].save(output_path, save_all=True, append_images=frames[1:], loop=1, duration=img.info['duration'])
No description has been provided for this image