# This is my source code, thanks. (original note: 这是我的源码,谢谢)
import tensorflow as tf
import os
import numpy as np
# Training hyperparameters.
num_epochs = 10
batch_size = 32
learning_rate = 0.001

# Dataset layout: <data_dir>/{train,valid}/{cats,dogs}/<image>.jpg
# NOTE: the original file used typographic quotes (‘ ’), which is a
# SyntaxError in Python — replaced with ASCII single quotes.
data_dir = './fastai-datasets-cats-vs-dogs-2'
train_cats_dir = data_dir + '/train/cats/'
train_dogs_dir = data_dir + '/train/dogs/'
test_cats_dir = data_dir + '/valid/cats/'
test_dogs_dir = data_dir + '/valid/dogs/'
def my_map(filename, label):
    """Load one JPEG, resize it to 256x256 and scale pixels to [0, 1].

    Args:
        filename: scalar string tensor — path to a JPEG file.
        label: integer class id (0 = cat, 1 = dog); passed through unchanged.

    Returns:
        (image, label) where image is a float32 tensor of shape (256, 256, 3).
    """
    image_string = tf.io.read_file(filename)
    # channels=3 forces RGB output; without it a grayscale JPEG decodes to
    # a single channel and breaks the model's (256, 256, 3) input shape.
    image_decoded = tf.image.decode_jpeg(image_string, channels=3)
    my_image = tf.image.resize(image_decoded, [256, 256]) / 255.0
    return my_image, label
if __name__ == '__main__':

    def build_dataset(cats_dir, dogs_dir):
        """Build a (filename, label) dataset from two class folders.

        Cats are labeled 0, dogs 1 (int32), matching the 2-way softmax head.
        """
        cat_files = tf.constant(
            [cats_dir + name for name in os.listdir(cats_dir)])
        dog_files = tf.constant(
            [dogs_dir + name for name in os.listdir(dogs_dir)])
        filenames = tf.concat([cat_files, dog_files], axis=-1)
        labels = tf.concat(
            [tf.zeros(cat_files.shape, dtype=tf.int32),
             tf.ones(dog_files.shape, dtype=tf.int32)],
            axis=-1)
        return tf.data.Dataset.from_tensor_slices((filenames, labels))

    # Training pipeline: decode/resize in parallel, shuffle the whole
    # training set (~23000 files), batch, and prefetch to overlap I/O
    # with training.
    train_datas = (
        build_dataset(train_cats_dir, train_dogs_dir)
        .map(map_func=my_map,
             num_parallel_calls=tf.data.experimental.AUTOTUNE)
        .shuffle(buffer_size=23000)
        .batch(batch_size)
        .prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
    )

    # Small CNN: two conv/pool stages, then a dense classifier with a
    # 2-way softmax (used with sparse integer labels).
    model = tf.keras.Sequential([
        tf.keras.layers.Conv2D(32, 3, activation='relu',
                               input_shape=(256, 256, 3)),
        tf.keras.layers.MaxPooling2D(),
        tf.keras.layers.Conv2D(32, 5, activation='relu'),
        tf.keras.layers.MaxPooling2D(),
        tf.keras.layers.Flatten(),
        tf.keras.layers.Dense(64, activation='relu'),
        tf.keras.layers.Dense(2, activation='softmax')
    ])
    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
        loss=tf.keras.losses.sparse_categorical_crossentropy,
        metrics=[tf.keras.metrics.sparse_categorical_accuracy]
    )
    model.fit(train_datas, epochs=num_epochs)

    # Evaluate on the held-out validation set.
    test_dataset = (
        build_dataset(test_cats_dir, test_dogs_dir)
        .map(my_map)
        .batch(batch_size)
    )
    print(model.metrics_names)
    print(model.evaluate(test_dataset))
    print('-' * 78)

    # Save, reload, and re-evaluate to confirm the round trip.
    # BUG FIX: the original reloaded with tf.saved_model.load(), which
    # returns a bare SavedModel object that has neither .metrics_names
    # nor .evaluate — tf.keras.models.load_model() restores the full
    # compiled Keras model.
    tf.saved_model.save(model, 'save/1')
    mymodel = tf.keras.models.load_model('save/1')
    reload_test_dataset = (
        build_dataset(test_cats_dir, test_dogs_dir)
        .map(my_map)
        .batch(batch_size)
    )
    print(mymodel.metrics_names)
    print(mymodel.evaluate(reload_test_dataset))