import numpy as np

from keras.models import Sequential
from keras.layers import LSTM, Dense
from keras.regularizers import l2
from keras.optimizers import Adam
from keras.callbacks import EarlyStopping
def create_mixed_window_data(data, window_sizes, overlaps):
    """Slice a time series into overlapping windows of several sizes."""
    mixed_data = []
    for window_size, overlap in zip(window_sizes, overlaps):
        step = window_size - overlap  # stride between consecutive window starts
        for start in range(0, len(data) - window_size + 1, step):
            end = start + window_size
            mixed_data.append(data[start:end])
    # Windows of different sizes have different lengths, so the result is a
    # ragged object array; pad it before batching (see the sketch below).
    return np.array(mixed_data, dtype=object)
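
# The windows above have mixed lengths, so they must be padded to a common
# length before they can feed a single LSTM input shape. A minimal sketch,
# assuming zero pre-padding to the longest window is acceptable for your data
# (pad_sequences is a standard Keras utility; in newer Keras it lives at
# keras.utils.pad_sequences). The helper name is ours, not from the original.
from keras.preprocessing.sequence import pad_sequences

def pad_mixed_windows(mixed_data, maxlen=None):
    # Zero-pads every window at the front to `maxlen` (default: longest window)
    # and returns a dense float32 array of shape (samples, maxlen, features).
    return pad_sequences(mixed_data, maxlen=maxlen, dtype='float32', padding='pre')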
def build_model(input_shape, num_classes, nNeurons, loss):
    model = Sequential()
    # The first LSTM returns full sequences so the second LSTM can stack on it.
    model.add(LSTM(nNeurons, return_sequences=True, input_shape=input_shape,
                   kernel_regularizer=l2(0.01), recurrent_regularizer=l2(0.01)))
    model.add(LSTM(nNeurons, kernel_regularizer=l2(0.01), recurrent_regularizer=l2(0.01)))
    model.add(Dense(num_classes, activation='softmax'))
    # Adam's 'lr' argument is deprecated; 'learning_rate' is the current name.
    model.compile(optimizer=Adam(learning_rate=0.001), loss=loss, metrics=['accuracy'])
    return model
def train_model(X_train, y_train, X_val, y_val, input_shape, num_classes, loss, nNeurons, patience=5, epochs=100, batch_size=32):
    model = build_model(input_shape, num_classes, nNeurons, loss)
    # Stop once validation loss stalls for `patience` epochs; restore_best_weights
    # rolls the model back to its best validation epoch instead of the last one.
    early_stopping = EarlyStopping(monitor='val_loss', patience=patience, restore_best_weights=True)
    model.fit(X_train, y_train, validation_data=(X_val, y_val), epochs=epochs, batch_size=batch_size, callbacks=[early_stopping])
    return model
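
# Note: with loss='categorical_crossentropy' (as in the usage below), labels
# must be one-hot encoded. A minimal sketch using Keras's standard utility,
# assuming labels start out as integer class ids:
from keras.utils import to_categorical
# y_onehot = to_categorical(y_int, num_classes)  # e.g. 2 -> [0., 0., 1., 0., 0.]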
# Parameter reference:
# input_shape = (timesteps, features)
# num_classes = number of output classes
# nNeurons = number of neurons in each LSTM layer
# window_sizes = list of different window sizes
# overlaps = list of corresponding overlaps, one per window size

# Mixed window data preparation:
# data = your time series data
# window_sizes = [10, 20, 30]
# overlaps = [5, 10, 15]  # illustrative values, one per window size
# mixed_data = create_mixed_window_data(data, window_sizes, overlaps)
# padded = pad_mixed_windows(mixed_data)  # equal-length windows for the LSTM
# Assuming padded and one-hot labels are split into X_train, y_train, X_val, y_val:
# model_instance = train_model(X_train, y_train, X_val, y_val,
#                              input_shape=(padded.shape[1], padded.shape[2]),
#                              num_classes=5, loss='categorical_crossentropy', nNeurons=50)
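
# End-to-end smoke test. A minimal sketch on synthetic data: the series
# length, class count, neuron count, and split below are illustrative
# assumptions, not values from the original snippet.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    data = rng.normal(size=(500, 1))  # toy series: 500 timesteps, 1 feature
    X = pad_mixed_windows(create_mixed_window_data(data, [10, 20, 30], [5, 10, 15]))
    y = to_categorical(rng.integers(0, 5, size=len(X)), num_classes=5)  # random labels
    split = int(0.8 * len(X))  # simple 80/20 train/validation split
    model = train_model(X[:split], y[:split], X[split:], y[split:],
                        input_shape=(X.shape[1], X.shape[2]), num_classes=5,
                        loss='categorical_crossentropy', nNeurons=50, epochs=2)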