Why is there an error in my CNN model for the CIFAR10 dataset?

I created a model that learns the CIFAR10 dataset using a CNN. Why is there an error, and how should I fix it? I ran it in the Google Colab environment.

import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Dropout, Flatten, Dense
from keras.datasets import cifar10

LOSS = 'categorical_crossentropy'
OPTIMIZER = 'adam'

def model_build():
  model = Sequential()

  # 1
  model.add(Conv2D(
    filters=32,
    kernel_size=(5,5),
    padding='same',
    activation='relu',
    input_shape=(32,32,3),
    kernel_regularizer='l2',
  ))
  model.add(MaxPooling2D(
    pool_size=(2,2),
    padding='same'
  ))

  # 2
  model.add(Conv2D(
    filters=64,
    kernel_size=(5,5),
    padding='same',
    activation='relu',
    kernel_regularizer='l2',
  ))
  model.add(MaxPooling2D(
    pool_size=(2,2),
    padding='same'
  ))

  # 3
  model.add(Flatten())
  model.add(Dense(
    units=512,
    activation='relu',
    kernel_regularizer='l2',
  ))

  # 4
  model.add(Dense(
    units=10,
    activation='softmax'
  ))

  model.compile(
    loss=LOSS,
    optimizer=OPTIMIZER,
    metrics=['accuracy']
  )

  return model

def load_dataset():
  (X_train, Y_train), (X_test, Y_test) = cifar10.load_data()

  X_train = X_train.astype('float32')
  X_test = X_test.astype('float32')
  X_train = X_train / 255.0
  X_test = X_test / 255.0

  return (X_train, Y_train), (X_test, Y_test)

model = model_build()
(X_train, Y_train), (X_test, Y_test) = load_dataset()

model.fit(
  x=X_train, y=Y_train,
  epochs=10,
  batch_size=32,
  verbose=1,
)

model.evaluate(
  x=X_test, y=Y_test,
  verbose=1,
)

This is the error I get:

ValueError                                Traceback (most recent call last)
in ()
     77   epochs=10,
     78   batch_size=32,
     79   verbose=1,       <------ Error
     80 )
     81
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/func_graph.py in wrapper(*args, **kwargs)

975           except Exception as e:  # pylint:disable=broad-except
976             if hasattr(e, "ag_error_metadata"):
977               raise e.ag_error_metadata.to_exception(e)       <---Error
978             else:
979               raise 

ValueError: in user code:

/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:805 train_function  * return step_function(self, iterator)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:795 step_function  ** outputs = model.distribute_strategy.run(run_step, args=(data,))
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:1259 run          return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:2730 call_for_each_replica         return self._call_for_each_replica(fn, args, kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/distribute/distribute_lib.py:3417 _call_for_each_replica          return fn(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:788  run_step  **         outputs = model.train_step(data)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/training.py:756 train_step          y, y_pred, sample_weight, regularization_losses=self.losses)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/compile_utils.py:203 __call__         loss_value = loss_obj(y_t, y_p, sample_weight=sw)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/losses.py:152  __call__         losses = call_fn(y_true, y_pred)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/losses.py:256 call  **         return ag_fn(y_true, y_pred, **self._fn_kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper         return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/losses.py:1537 categorical_crossentropy         return K.categorical_crossentropy(y_true, y_pred, from_logits=from_logits)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/util/dispatch.py:201 wrapper         return target(*args, **kwargs)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/backend.py:4833 categorical_crossentropy         target.shape.assert_is_compatible_with(output.shape)
/usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/tensor_shape.py:1134 assert_is_compatible_with         raise ValueError("Shapes %s and %s are incompatible" % (self, other))
ValueError: Shapes (None, 1) and (None, 10) are incompatible 

Thank you for your answers.


Answer

I think your labels are integers, not one-hot vectors, so their shape is (None, 1) while the model's output has shape (None, 10), which is exactly the mismatch the error reports.
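You can confirm this by checking the label array returned by the standard Keras CIFAR-10 loader (a quick check; the exact dtype may vary by version):

(X_train, Y_train), (X_test, Y_test) = cifar10.load_data()
print(Y_train.shape)   # (50000, 1) -- one integer class id per sample, not a one-hot row of length 10
print(Y_train.dtype)   # integer labels with values in [0, 9]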

Try:

LOSS = 'sparse_categorical_crossentropy'
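Alternatively, if you want to keep LOSS = 'categorical_crossentropy', you can one-hot encode the labels instead. A minimal sketch of load_dataset() using keras.utils.to_categorical (adjust the import to tensorflow.keras.utils if your Keras version requires it):

from keras.utils import to_categorical

def load_dataset():
  (X_train, Y_train), (X_test, Y_test) = cifar10.load_data()

  X_train = X_train.astype('float32') / 255.0
  X_test = X_test.astype('float32') / 255.0

  # Turn integer labels of shape (N, 1) into one-hot vectors of shape (N, 10)
  Y_train = to_categorical(Y_train, num_classes=10)
  Y_test = to_categorical(Y_test, num_classes=10)

  return (X_train, Y_train), (X_test, Y_test)

With one-hot labels of shape (None, 10), the existing 'categorical_crossentropy' loss matches the softmax output and the fit() call should run without the shape error.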