
I am trying to fit a model with an LSTM layer, but I receive this error:

ValueError: Input 0 of layer sequential_54 is incompatible with the layer: expected ndim=3, found ndim=2. Full shape received: (None, 96)

I have searched a lot on Google and Stack Overflow, but I still do not know how to solve the problem.

This is the shape of X_train and y_train:

print(X_train.shape, y_train.shape)

(56, 96) (56, 3)

This is the shape of X_test and y_test:

print(X_test.shape, y_test.shape)

(14, 96) (14, 3)
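These arrays are 2-D, shaped (samples, features), while a Keras LSTM layer consumes 3-D batches shaped (batch_size, timesteps, features). A minimal sketch of the mismatch, using placeholder arrays with the same shapes (the _demo names are just stand-ins, not variables from the code below):

import numpy as np

# Stand-in arrays with the same shapes as the prints above
X_train_demo = np.zeros((56, 96), dtype=np.float32)
y_train_demo = np.zeros((56, 3), dtype=np.float32)

print(X_train_demo.ndim)  # 2 -> this is the "found ndim=2" in the error message
# An LSTM layer instead expects ndim=3: (batch_size, timesteps, features)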

This is my code:

import numpy as np
import tensorflow as tf
from tensorflow import keras
import pandas as pd

from google.colab import drive
drive.mount('/content/drive')

df = pd.read_csv('/content/drive/MyDrive/lastUpdate.csv')

df.head()

df.shape

df.tail()

#df.dropna(inplace=True )

from sklearn.model_selection import train_test_split

y = df['Activity']

x = df.drop(['Trajectory', 'Activity', 'id' ,'video' ] , axis=1)

x.head()

y.head()

y.dropna(inplace=True)

y.isnull().sum()

y.head()

y.unique()

vals_to_replace = {'drifting':'0', 'normal':'1', 'normal ':'1','static':'2'}
y = y.map(vals_to_replace)

y.unique()



y.isnull().sum()



from tensorflow.keras.utils import to_categorical

y = to_categorical(y, num_classes = 3)

X_train, X_test, y_train, y_test = train_test_split( x, y, test_size=0.20, random_state=42)

from keras.layers import Input

print(X_train.shape, y_train.shape)

#X_train.dropna(inplace=True , axis=1)

print(X_test.shape, y_test.shape)

#X_train= np.asarray(X_train).astype(np.float32)
#X_test= np.asarray(X_test).astype(np.float32)
#y_train= np.asarray(y_train).astype(np.float32)
#y_test= np.asarray(y_test).astype(np.float32)

import tensorflow as tf

X_train = tf.constant(X_train, dtype=tf.float32)
X_test = tf.constant(X_test, dtype=tf.float32)
y_train = tf.constant(y_train, dtype=tf.float32)
y_test = tf.constant(y_test, dtype=tf.float32)

X_train


from tensorflow.keras import datasets, layers, models

model = keras.Sequential()
model.add(layers.LSTM(units=128 , input_shape=(None,96)))
#model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
#model.add(layers.BatchNormalization())
model.add(layers.Dropout(0.5))
model.add(layers.Dense(32, activation='relu'))
model.add(layers.Dense(3, activation = 'softmax'))

opt=tf.keras.optimizers.SGD(learning_rate=1e-4, momentum=0.9)
model.compile(loss='categorical_crossentropy', 
              optimizer=opt, 
              metrics=['accuracy'])

model.summary()

BATCH_SIZE=64
EPOCHS=100

history=model.fit(X_train, y_train, 
          batch_size=BATCH_SIZE, 
          epochs=EPOCHS, 
          verbose=1,
          validation_data=(X_test, y_test)
         )
  • The LSTM layer expects inputs of shape `(batch_size, timesteps, input_dim)`, but here you have passed only `input_dim`. Including one more dimension, the `timesteps`, will solve your issue. For more information you can refer to this [answer](https://stackoverflow.com/a/54416792/14290681). Thanks! –  May 17 '21 at 15:16
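Following that comment, a minimal sketch of one way to add the missing timesteps dimension; it treats each row of 96 features as a single timestep (timesteps=1), which is an assumption about how the 96 columns are organised:

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

# Turn the 2-D tensors (samples, features) into 3-D (samples, timesteps, features)
X_train_3d = tf.expand_dims(X_train, axis=1)  # (56, 1, 96)
X_test_3d = tf.expand_dims(X_test, axis=1)    # (14, 1, 96)

model = keras.Sequential([
    layers.LSTM(units=128, input_shape=(1, 96)),  # (timesteps, features); batch size is left out
    layers.Dense(64, activation='relu'),
    layers.Dropout(0.5),
    layers.Dense(32, activation='relu'),
    layers.Dense(3, activation='softmax'),
])

model.compile(loss='categorical_crossentropy',
              optimizer=keras.optimizers.SGD(learning_rate=1e-4, momentum=0.9),
              metrics=['accuracy'])

history = model.fit(X_train_3d, y_train,
                    batch_size=64,
                    epochs=100,
                    verbose=1,
                    validation_data=(X_test_3d, y_test))

With timesteps=1 the LSTM cannot model any temporal structure, so if the 96 columns actually encode a sequence (for example 32 timesteps of 3 values each), reshaping to that layout instead would make better use of the layer.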
