Keras RNN: batch_size in the fit method seems to be ignored
I created an RNN model for text classification with an LSTM layer, but when I pass batch_size to the fit method, my model appears to train on the whole dataset instead of just mini-batches of the given size. This also happened when I used a GRU or a Bidirectional layer instead of LSTM. What could be wrong?
def create_rnn_lstm():
    """Build and compile an LSTM text-classification model.

    Expects the module-level globals ``word_index`` (token -> index dict)
    and ``embedding_matrix`` (pretrained 300-d embedding weights) to be
    defined before this is called.

    Returns:
        A compiled ``keras.models.Model`` that maps integer sequences of
        length 70 to a 2-class softmax distribution.
    """
    # Input: fixed-length sequence of 70 token indices per sample.
    input_layer = layers.Input((70, ))
    # Frozen pretrained embeddings (trainable=False keeps them fixed).
    embedding_layer = layers.Embedding(len(word_index) + 1, 300, weights=[embedding_matrix], trainable=False)(input_layer)
    # SpatialDropout1D drops entire embedding channels, which regularizes
    # better than element-wise dropout for sequence inputs.
    embedding_layer = layers.SpatialDropout1D(0.3)(embedding_layer)
    lstm_layer = layers.LSTM(100)(embedding_layer)
    # FIX: activations must be passed as string identifiers ('relu',
    # 'softmax'); the bare names `relu`/`softmax` were undefined here.
    output_layer1 = layers.Dense(70, activation='relu')(lstm_layer)
    output_layer1 = layers.Dropout(0.25)(output_layer1)
    output_layer2 = layers.Dense(2, activation='softmax')(output_layer1)
    model = models.Model(inputs=input_layer, outputs=output_layer2)
    # FIX: a 2-unit softmax output with one-hot targets pairs with
    # categorical_crossentropy, not binary_crossentropy (which expects a
    # single sigmoid unit). Mixing them silently miscomputes the loss.
    model.compile(optimizer=optimizers.Adam(), loss='categorical_crossentropy')
    return model
# Build the model and train on the prepared integer sequences.
LSTM_classifier = create_rnn_lstm()
# batch_size=128 trains on mini-batches of 128 samples per gradient step.
# NOTE(review): the Keras progress bar counts *batch steps* (ceil(n_samples /
# batch_size)) per epoch, not individual samples — presumably that is why it
# looks like the whole dataset is one "batch"; verify against the fit logs.
LSTM_classifier.fit(X_train_seq, y_train, batch_size=128, epochs = 10, shuffle=True)
Category: Data Science