I’m trying to train the model using the PReLU activation function, but I get the following error:
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/ops/array_ops.py in zeros(shape, dtype, name)
   2965       shape = constant_op._tensor_shape_tensor_conversion_function(
-> 2966           tensor_shape.TensorShape(shape))
   2967     except (TypeError, ValueError):

31 frames

ValueError: Cannot convert a partially known TensorShape to a Tensor: (None, None, 64)

During handling of the above exception, another exception occurred:

ValueError                                Traceback (most recent call last)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/constant_op.py in convert_to_eager_tensor(value, ctx, dtype)
     96     dtype = dtypes.as_dtype(dtype).as_datatype_enum
     97     ctx.ensure_initialized()
---> 98     return ops.EagerTensor(value, ctx.device_name, dtype)
     99
    100

ValueError: Attempt to convert a value (None) with an unsupported type (<class 'NoneType'>) to a Tensor.
I’m using the code below; kindly let me know how I can correct it.
from tensorflow.keras.applications import MobileNet
from tensorflow.keras.layers import (Conv2D, MaxPooling2D,
                                     GlobalAveragePooling2D, Dropout, Dense)
from tensorflow.keras import Model
from tensorflow import keras
import tensorflow as tf  # needed for tf.keras.activations.sigmoid below

CLASSES = 2
#model.compile()

# setup model
base_model = MobileNet(weights='imagenet', include_top=False)
input = (224, 224, 3)

x = base_model.output
x = Conv2D(64, (3, 3), padding='same',
           activation=keras.layers.PReLU(alpha_initializer='zeros',
                                         alpha_regularizer=None,
                                         alpha_constraint=None,
                                         shared_axes=None),
           strides=(2, 2), name='layer1')(x)
x = MaxPooling2D(pool_size=(2, 2))(x)
x = Conv2D(128, (3, 3), padding='same',
           activation=keras.layers.PReLU(alpha_initializer='zeros',
                                         alpha_regularizer=None,
                                         alpha_constraint=None,
                                         shared_axes=None),
           name='layer2')(x)
x = GlobalAveragePooling2D(name='avg_pool')(x)
x = Dropout(0.4)(x)
predictions = Dense(CLASSES, activation=tf.keras.activations.sigmoid)(x)

model = Model(inputs=base_model.input, outputs=predictions)

# transfer learning
for layer in base_model.layers:
    layer.trainable = False

model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
Answer
Your input tensor is the problem. Because the base model is created without a fixed input shape, its output shape is only partially known (None, None, ...), and PReLU cannot build its alpha weights from undefined spatial dimensions. Set it up with a concrete input tensor, like this:
input_s = layers.Input((224, 224, 3))
base_model = keras.applications.MobileNet(weights='imagenet', include_top=False,
                                          input_tensor=input_s)
...
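As a side note, if you prefer to keep the base model without a fixed input shape, a possible alternative (a sketch, not tested here, using the same imports as below) is to share the PReLU alphas across the spatial axes, so the layer only needs to know the channel dimension:

# Sketch: apply PReLU as its own layer with shared_axes=[1, 2],
# i.e. one alpha per channel instead of one per spatial position,
# so the undefined height/width no longer matter.
x = layers.Conv2D(64, (3, 3), padding='same', strides=(2, 2), name='layer1')(base_model.output)
x = layers.PReLU(alpha_initializer='zeros', shared_axes=[1, 2])(x)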
Full working code
from tensorflow.keras.applications import MobileNet
from tensorflow.keras.layers import (Conv2D, MaxPooling2D,
                                     GlobalAveragePooling2D, Dropout, Dense)
from tensorflow.keras import Model
from tensorflow import keras
from tensorflow.keras import layers
import tensorflow as tf
CLASSES = 2

# setup model
input_s = layers.Input((224, 224, 3))
base_model = keras.applications.MobileNet(weights='imagenet', include_top=False,
                                          input_tensor=input_s)

x = layers.Conv2D(64, (3, 3), padding='same',
                  activation=keras.layers.PReLU(alpha_initializer='zeros',
                                                alpha_regularizer=None,
                                                alpha_constraint=None,
                                                shared_axes=None),
                  strides=(2, 2), name='layer1')(base_model.output)
x = layers.MaxPooling2D(pool_size=(2, 2))(x)
x = layers.Conv2D(128, (3, 3), padding='same',
                  activation=keras.layers.PReLU(alpha_initializer='zeros',
                                                alpha_regularizer=None,
                                                alpha_constraint=None,
                                                shared_axes=None),
                  name='layer2')(x)
x = layers.GlobalAveragePooling2D(name='avg_pool')(x)
x = layers.Dropout(0.4)(x)
predictions = layers.Dense(CLASSES, activation=tf.keras.activations.sigmoid)(x)

model = tf.keras.Model(inputs=base_model.input, outputs=predictions)

# transfer learning
for layer in base_model.layers:
    layer.trainable = False

model.compile(optimizer='rmsprop',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
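For a quick sanity check that the graph now builds with fully defined shapes, you can run the model on a dummy batch (purely illustrative, assuming the code above has been executed):

import numpy as np

# Fake image batch matching the (224, 224, 3) input
dummy = np.random.rand(1, 224, 224, 3).astype("float32")
print(model(dummy).shape)  # expected: (1, 2), one sigmoid score per class
model.summary()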