



실제값에 못 미치는 낮은 예측 성능이 관찰되며, 특히 스파이크(급변 구간) 대응이 필요하다.
피처 확장
# Lag and rolling-window features (both shifted by 1 to avoid target leakage)
target = df['target']
df['lag_1'] = target.shift(1)
df['roll_3'] = target.rolling(window=3).mean().shift(1)
# Cyclic calendar encodings so Dec/Jan and Sun/Mon sit next to each other
# NOTE(review): assumes df has a DatetimeIndex — confirm upstream.
two_pi = 2 * np.pi
df['month_sin'] = np.sin(two_pi * df.index.month / 12)
df['dow_cos'] = np.cos(two_pi * df.index.dayofweek / 7)
# Exogenous regressor: rainfall, aligned onto df by index on assignment
df['rain_mm'] = weather_df['rain_mm']
타깃 스케일 관리
from sklearn.preprocessing import FunctionTransformer
from tensorflow.keras.losses import Huber
# Log transform: log1p compresses spikes; expm1 inverts back to the original scale.
# Fix: pass the numpy ufuncs directly instead of wrapping them in lambdas —
# identical behavior, and the transformer stays picklable (lambdas are not).
log_tf = FunctionTransformer(np.log1p, inverse_func=np.expm1)
# Huber loss: quadratic near zero, linear past delta — robust to spike residuals.
# delta is tied to the log-scale target spread so the cutoff adapts to the data.
loss_fn = Huber(delta=y_train_log.std() * 0.5)
model.compile(loss=loss_fn, optimizer='adam')
모델 구조 전략
# FNN architecture
def build_fnn(dim):
    """Build a small feed-forward regressor.

    Args:
        dim: number of input features.

    Returns:
        An uncompiled Keras Sequential model: 64-unit ReLU hidden layer,
        30% dropout, and a single linear output.
    """
    net = Sequential()
    net.add(Dense(64, activation='relu', input_shape=(dim,)))
    net.add(Dropout(0.3))
    net.add(Dense(1))
    return net
# Seq2Seq architecture
# Encoder: compress the input window into a single 64-dim context vector.
encoder = LSTM(64)(encoder_inputs)
# Repeat the context once per forecast step so the decoder sees it at every t.
decoder = RepeatVector(horizon)(encoder)
# Decoder: unroll over the horizon, then map each step's state to one value.
decoder_seq = LSTM(64, return_sequences=True)(decoder)
outputs = TimeDistributed(Dense(1))(decoder_seq)
앙상블의 시너지
# Weighted Ensemble: weight each base model by its inverse validation MAE,
# so the more accurate model contributes more to the blended forecast.
weights = {'fnn': 1/mae_fnn, 'rnn': 1/mae_rnn}
ensemble_pred = (weights['fnn']*pred_fnn + weights['rnn']*pred_rnn) / sum(weights.values())
# Stacking Meta-Learner: a linear model over the base models' predictions.
# One column per base model; predictions should be out-of-fold to avoid leakage.
meta_X = np.column_stack([pred_fnn, pred_rnn, pred_lstm, pred_cnn])
from sklearn.linear_model import LinearRegression
# Fix: fit on the matrix built above (the original referenced an undefined
# `meta_X_train`, which would raise NameError).
meta = LinearRegression().fit(meta_X, y_train)
실험 반복 & 검증
from sklearn.model_selection import TimeSeriesSplit

# Walk-forward CV: each split trains only on the past and validates on the
# immediately following window, respecting temporal order.
tscv = TimeSeriesSplit(n_splits=5)
for tr, vl in tscv.split(X):
    # Fix: rebuild the model every fold. Reusing one instance lets weights
    # learned on earlier folds carry into later ones, inflating fold scores.
    model = build_fnn(X.shape[1])
    model.compile(loss=loss_fn, optimizer='adam')
    model.fit(X[tr], y[tr], validation_data=(X[vl], y[vl]), epochs=10)
    print(mean_absolute_error(y[vl], model.predict(X[vl])))
