ValueError: Data cardinality is ambiguous: x sizes: 150000, y sizes: 50000. Make sure all arrays contain the same number of samples

Hi, I am using the code below and getting this error:

ValueError: Data cardinality is ambiguous:
    x sizes: 150000
    y sizes: 50000
Make sure all arrays contain the same number of samples.

I tried changing the reshape arguments and even numpy.transpose, but with no luck. Can anyone help?
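For reference, the two sample counts in the error can be reproduced directly from the array shapes (a minimal sketch, assuming the standard CIFAR-10 shapes noted in the comments in the code below):

import numpy as np
from tensorflow.keras import datasets

(x_train, y_train), (x_test, y_test) = datasets.cifar10.load_data()
print(x_train.shape, y_train.shape)            # (50000, 32, 32, 3) (50000, 1)
print(x_train.reshape(-1, 32, 32, 1).shape)    # (150000, 32, 32, 1) - first dims no longer match

The full code: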

import numpy as np
import matplotlib.pyplot as plt   # needed for the plotting code at the end
import tensorflow as tf
from tensorflow.keras import datasets, layers, models
from tensorflow.keras.layers import Dense, Dropout, Activation, Flatten, Conv2D, MaxPooling2D

(x_train, y_train) , (x_test, y_test) = datasets.cifar10.load_data()

#x_train.shape #(50000, 32, 32, 3) 
#x_test.shape  #(10000, 32, 32, 3)


x_train = x_train.reshape(-1, 32, 32, 1)   # result: (150000, 32, 32, 1)
x_test = x_test.reshape(-1, 32, 32, 1)     # result: (30000, 32, 32, 1)


x_train = x_train.astype('float32')         # change integers to 32-bit floating point numbers 
x_test = x_test.astype('float32')
x_train /= 255.0              
x_test /= 255.0
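
# Sanity check (added note, not in the original code): Keras compares the first
# dimension of x and y when fitting, so printing both here makes the mismatch visible.
print(x_train.shape, y_train.shape)   # (150000, 32, 32, 1) (50000, 1)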


model = tf.keras.models.Sequential() 
model.add(tf.keras.layers.Conv2D(32, (3, 3), padding='same', activation='relu')) 
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2,2))) 
model.add(tf.keras.layers.Conv2D(64, (3, 3), padding='same', activation='relu')) 
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2,2))) 
model.add(tf.keras.layers.Conv2D(128, (3, 3), padding='same', activation='relu')) 
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2,2)))
model.add(tf.keras.layers.Conv2D(256, (3, 3), padding='same', activation='relu')) 
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2,2)))
model.add(tf.keras.layers.Conv2D(512, (3, 3), padding='same', activation='relu')) 
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=(2,2)))

model.add(tf.keras.layers.Flatten()) 
model.add(tf.keras.layers.Dense(512, activation=tf.nn.relu)) 
model.add(tf.keras.layers.Dense(512, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(10, activation=tf.nn.softmax)) 
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy']) 
model.build(input_shape=(512, 32, 32, 1))   # first entry is the batch dimension; None is the usual convention
model.summary() 

model.fit(x_train, y_train, batch_size=1000, epochs=1)   # the ValueError is raised here




score = model.evaluate(x_test, y_test) 
print('Test loss:', score[0]) 
print('Test accuracy:', score[1])


predictions = model.predict(x_test)
#print(predictions)

print(np.argmax(predictions[0]))
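
# Illustrative addition (class_names is not in the original code): map the
# predicted index to the standard CIFAR-10 label order for readability.
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
print(class_names[np.argmax(predictions[0])])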

img_path = x_test[0]
print(img_path.shape)
if len(img_path.shape) == 3:
    plt.imshow(np.squeeze(img_path))
elif len(img_path.shape) == 2:
    plt.imshow(img_path)
else:
    print("Higher dimensional data")
plt.show()   # needed when running as a plain script


Read more here: https://stackoverflow.com/questions/68488269/valueerror-data-cardinality-is-ambiguous-x-sizes-150000-y-sizes-50000-make-s

Content Attribution

This content was originally published by Yashwanth at Recent Questions - Stack Overflow, and is syndicated here via their RSS feed. You can read the original post over there.
