import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import KFold
from sklearn.metrics import mean_squared_log_error
from keras import losses
from keras.models import Sequential
from keras.layers import Conv1D, MaxPooling1D, Dense, Dropout, Flatten, BatchNormalization
from keras.callbacks import LearningRateScheduler
from keras.optimizers import Adam
# Learning-rate schedule: 0.01 for the first 120 epochs, then 0.001
def step_decay(epoch):
    x = 0.01
    if epoch >= 120:
        x = 0.001
    return x

lr_decay = LearningRateScheduler(step_decay)
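# Quick sanity check of the schedule:
#   step_decay(0)   -> 0.01
#   step_decay(119) -> 0.01
#   step_decay(120) -> 0.001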
# The hyperparameters below are the values found by the Optuna search
def create_mlp(shape):
    '''
    Returns a Keras model: a 1D CNN with three conv blocks and a single
    regression output (the name create_mlp is kept for consistency with
    the rest of the notebook).
    '''
    print(f"shape: {shape}")
    model = Sequential()
    model.add(Conv1D(32, 3, activation='relu', kernel_initializer='he_uniform', padding='same', input_shape=shape))
    model.add(BatchNormalization())
    model.add(Conv1D(32, 3, activation='relu', kernel_initializer='he_uniform', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling1D(2))
    model.add(Dropout(0.2))
    model.add(Conv1D(64, 3, activation='relu', kernel_initializer='he_uniform', padding='same'))
    model.add(BatchNormalization())
    model.add(Conv1D(64, 3, activation='relu', kernel_initializer='he_uniform', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling1D(2))
    model.add(Dropout(0.3))
    model.add(Conv1D(128, 3, activation='relu', kernel_initializer='he_uniform', padding='same'))
    model.add(BatchNormalization())
    model.add(Conv1D(128, 3, activation='relu', kernel_initializer='he_uniform', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling1D(2))
    model.add(Dropout(0.4))
    model.add(Flatten())
    model.add(Dense(128, activation='relu', kernel_initializer='he_uniform'))
    model.add(BatchNormalization())
    model.add(Dropout(0.5))
    model.add(Dense(1))
    return model
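# A minimal sketch of how the Optuna study referenced above might be set up;
# the search space (just the learning rate here) and the trial budget are
# illustrative assumptions, since the original study is not shown in this
# notebook. Defining the objective has no side effects; the commented lines
# at the bottom would actually launch the search.
def optuna_objective(trial):
    lr = trial.suggest_float('learning_rate', 1e-4, 1e-2, log=True)
    model = create_mlp((X.shape[1], 1))
    model.compile(optimizer=Adam(learning_rate=lr), loss=losses.mean_squared_error)
    history = model.fit(np.reshape(X.values, (-1, X.shape[1], 1)), y.values,
                        epochs=30, batch_size=493, validation_split=0.2, verbose=0)
    return min(history.history['val_loss'])

# import optuna
# study = optuna.create_study(direction='minimize')
# study.optimize(optuna_objective, n_trials=20)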
FOLD_NUM = 4
kf = KFold(n_splits=FOLD_NUM,
           shuffle=True,
           random_state=42)
scores = []
pred_cv = np.zeros(len(test.index))

for i, (tdx, vdx) in enumerate(kf.split(X, y)):
    print(f'Fold : {i}')
    X_train, X_valid, y_train, y_valid = X.iloc[tdx], X.iloc[vdx], y.values[tdx], y.values[vdx]
    mlp = create_mlp((X_train.values.shape[1], 1))
    optimizer = Adam(learning_rate=0.001)
    mlp.compile(optimizer=optimizer, loss=losses.mean_squared_error)
    history = mlp.fit(x=np.reshape(X_train.values, (-1, X_train.shape[1], 1)),
                      y=y_train.reshape(len(y_train), 1),
                      epochs=150, batch_size=493,
                      validation_data=(np.reshape(X_valid.values, (-1, X_valid.shape[1], 1)), y_valid),
                      callbacks=[lr_decay])  # pass verbose=0 to silence per-epoch logs
    mlp_pred = mlp.predict(np.reshape(X_valid.values, (-1, X_valid.shape[1], 1)))
    # Skip the first few epochs so the plot is not dominated by the initial loss
    plt.plot(history.history['loss'][3:], 'r', label='loss', alpha=0.7)
    plt.plot(history.history['val_loss'][3:], label='val_loss', alpha=0.7)
    plt.legend()
    plt.show()
    # y is log-transformed, so map predictions back to the original scale before scoring
    rmsle_score = np.sqrt(mean_squared_log_error(np.exp(y_valid), np.exp(mlp_pred)))
    print(rmsle_score)
    break  # only the first fold is run here
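# Sketch: if the break above is removed, test-set predictions could be
# accumulated across the folds into pred_cv, which is initialized above but
# otherwise unused (assumes `test` holds the same feature columns as X):
#     fold_pred = mlp.predict(np.reshape(test.values, (-1, test.shape[1], 1)))
#     pred_cv += fold_pred.ravel() / FOLD_NUM
#     scores.append(rmsle_score)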