I am trying to create a model for recognizing sign language, using 3D convolution layers followed by the LTC cell from the NCP library. However, the model accuracy does not improve beyond 0.1.
The input dimension for the model is
(N, 30, 128, 128, 1) = (n_videos, frames, h, w, channels),
but I am not sure whether the input shape is correct when feeding the model.
Before feeding the data to the model, I reshaped it into (n_videos, 1, 30, 128, 128, 1);
it only works if I reshape it this way.
Why doesn't the model accuracy increase?
# NCP wiring topology for the LTC cell: sensory -> inter -> command -> motor.
ncp_wiring = kncp.wirings.NCP(
    inter_neurons=16,                      # size of the inter-neuron layer
    inter_fanout=3,                        # outgoing synapses per inter neuron
    command_neurons=8,                     # size of the command layer
    recurrent_command_synapses=8,          # recurrent synapses inside the command layer
    motor_neurons=len(exist_labels),       # one motor neuron per output class
    motor_fanin=4,                         # incoming synapses per motor neuron
    sensory_fanout=4,                      # outgoing synapses per sensory neuron
)
# Liquid Time-Constant cell built on the NCP wiring above.
# Only the synaptic weight range "w" is overridden; other
# initialization ranges keep their library defaults.
_ltc_init_ranges = {"w": (0.2, 2.0)}
ncp_cell = kncp.LTCCell(ncp_wiring, initialization_ranges=_ltc_init_ranges)
class Model:
    """3D-CNN + LTC (NCP) classifier for sign-language video clips.

    The network expects input shaped (batch, time, frames, h, w, channels).
    With sample_shape = (1, 30, 128, 128, 1) the outer time axis has
    length 1, so each TimeDistributed(Conv3D) processes the whole
    30-frame clip as a single timestep and the RNN(ncp_cell) only ever
    sees ONE step of its sequence.

    NOTE(review): this is most likely why accuracy is stuck near chance —
    the LTC cell gets no temporal sequence to model. Consider feeding
    (batch, 30, 128, 128, 1) and using TimeDistributed(Conv2D) so the
    recurrent cell receives 30 timesteps. TODO: confirm against the data
    pipeline before changing the architecture.
    """

    def __init__(self, load_name=None):
        """Build a fresh model, or restore a saved one.

        Args:
            load_name: optional path to a previously saved model. When
                given, the model is loaded from disk instead of built.
        """
        if load_name is not None:
            # BUG FIX: keras.models.load does not exist; load_model is
            # the correct API for restoring a saved model.
            self.model = keras.models.load_model(load_name)
        else:
            self.model = Sequential()
            # (time, frames, h, w, channels) per sample; batch is implicit.
            sample_shape = (1, 30, 128, 128, 1)
            self.model.add(InputLayer(input_shape=sample_shape))
            # Two 3D conv + pool stages, each followed by batch norm
            # and dropout for regularization.
            self.model.add(TimeDistributed(Conv3D(
                32, kernel_size=(3, 3, 3), activation='relu',
                kernel_initializer='he_uniform')))
            self.model.add(TimeDistributed(MaxPooling3D(pool_size=(2, 2, 2))))
            self.model.add(BatchNormalization(center=True, scale=True))
            self.model.add(Dropout(0.3))
            self.model.add(TimeDistributed(Conv3D(
                64, kernel_size=(3, 3, 3), activation='relu',
                kernel_initializer='he_uniform')))
            self.model.add(TimeDistributed(MaxPooling3D(pool_size=(2, 2, 2))))
            self.model.add(BatchNormalization(center=True, scale=True))
            self.model.add(Dropout(0.3))
            # Flatten each timestep to a feature vector, project to 16
            # features, then run the LTC cell over the time axis.
            self.model.add(TimeDistributed(Flatten()))
            self.model.add(TimeDistributed(Dense(
                16, activation='relu', kernel_initializer='he_uniform')))
            self.model.add(RNN(ncp_cell))
            # Softmax over the motor-neuron outputs (one per class);
            # matches the sparse_categorical_crossentropy loss below.
            self.model.add(keras.layers.Activation("softmax"))
            # `lr` is deprecated in modern Keras; use `learning_rate`.
            self.model.compile(
                loss='sparse_categorical_crossentropy',
                optimizer=keras.optimizers.Adam(learning_rate=0.01),
                metrics=['accuracy'])
            self.model.build()
            self.model.summary()

    def train(self, X_train, targets_train, batch, epochs, validation_split):
        """Fit the model and return its training curves.

        Args:
            X_train: input videos, shaped to match the model input.
            targets_train: integer class labels (sparse targets).
            batch: batch size.
            epochs: number of training epochs.
            validation_split: fraction of data held out for validation.

        Returns:
            [train_accuracy, train_loss, val_accuracy, val_loss], each a
            list with one entry per epoch.
        """
        history = self.model.fit(X_train, targets_train,
                                 batch_size=batch,
                                 epochs=epochs,
                                 verbose=1,
                                 validation_split=validation_split)
        print(history.history.keys())
        return [history.history['accuracy'], history.history['loss'],
                history.history['val_accuracy'], history.history['val_loss']]

    def save(self, name):
        """Persist the model to disk under `name` (see keras.Model.save)."""
        self.model.save(name)