Week 3 Assignment: Weather Recognition

import os, PIL.Image, pathlib
import matplotlib.pyplot as plt
import numpy             as np
from tensorflow          import keras
from tensorflow.keras    import layers,models
data_dir = "D:/jupyter notebook/weather_photos/"
data_dir = pathlib.Path(data_dir)
image_count = len(list(data_dir.glob('*/*.jpg')))
print("图片总数为:",image_count)
图片总数为: 1125
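As a quick sanity check (a small sketch, not part of the original notebook; it assumes each weather class sits in its own subdirectory under data_dir, which is what image_dataset_from_directory expects), the images can also be counted per class:

# Count the .jpg files in each class subdirectory
for class_dir in sorted(d for d in data_dir.iterdir() if d.is_dir()):
    print(class_dir.name, len(list(class_dir.glob('*.jpg'))))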
sunrise = list(data_dir.glob('sunrise/*.jpg'))
PIL.Image.open(str(sunrise[0]))

[Figure: a sample sunrise image from the dataset]

batch_size = 32
img_height = 180
img_width = 180
import tensorflow as tf
train_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)
Found 1125 files belonging to 4 classes.
Using 900 files for training.
val_ds = tf.keras.preprocessing.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="validation",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size)
Found 1125 files belonging to 4 classes.
Using 225 files for validation.
class_names = train_ds.class_names
print(class_names)
['cloudy', 'rain', 'shine', 'sunrise']
plt.figure(figsize=(20, 10))

for images, labels in train_ds.take(1):
    for i in range(20):
        ax = plt.subplot(5, 10, i + 1)

        plt.imshow(images[i].numpy().astype("uint8"))
        plt.title(class_names[labels[i]])
        
        plt.axis("off")

[Figure: 20 sample training images with their class labels]

for image_batch, labels_batch in train_ds:
    print(image_batch.shape)
    print(labels_batch.shape)
    break
(32, 180, 180, 3)
(32,)
AUTOTUNE = tf.data.AUTOTUNE
# Cache decoded images, shuffle the training set, and prefetch the next batch
# while the current one is training, so the input pipeline does not stall the GPU.
train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)
num_classes = 4
model = models.Sequential([
    layers.experimental.preprocessing.Rescaling(1./255, input_shape=(img_height, img_width, 3)),  # scale pixels to [0, 1]

    layers.Conv2D(16, (3, 3), activation='relu'),  # conv layer 1, 3x3 kernels (input shape already set by Rescaling)
    layers.AveragePooling2D((2, 2)),               # pooling layer 1, 2x2 downsampling
    layers.Conv2D(32, (3, 3), activation='relu'),  # conv layer 2, 3x3 kernels
    layers.AveragePooling2D((2, 2)),               # pooling layer 2, 2x2 downsampling
    layers.Conv2D(64, (3, 3), activation='relu'),  # conv layer 3, 3x3 kernels
    layers.Dropout(0.3),                           # randomly drop 30% of activations to reduce overfitting

    layers.Flatten(),                       # flatten the feature maps to feed the dense layers
    layers.Dense(128, activation='relu'),   # fully connected layer for further feature extraction
    layers.Dense(num_classes)               # output layer: one raw logit per class (no softmax)
])

model.summary() 
Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 rescaling (Rescaling)       (None, 180, 180, 3)       0         
                                                                 
 conv2d (Conv2D)             (None, 178, 178, 16)      448       
                                                                 
 average_pooling2d (AverageP  (None, 89, 89, 16)       0         
 ooling2D)                                                       
                                                                 
 conv2d_1 (Conv2D)           (None, 87, 87, 32)        4640      
                                                                 
 average_pooling2d_1 (Averag  (None, 43, 43, 32)       0         
 ePooling2D)                                                     
                                                                 
 conv2d_2 (Conv2D)           (None, 41, 41, 64)        18496     
                                                                 
 dropout (Dropout)           (None, 41, 41, 64)        0         
                                                                 
 flatten (Flatten)           (None, 107584)            0         
                                                                 
 dense (Dense)               (None, 128)               13770880  
                                                                 
 dense_1 (Dense)             (None, 4)                 516       
                                                                 
=================================================================
Total params: 13,794,980
Trainable params: 13,794,980
Non-trainable params: 0
_________________________________________________________________
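The parameter counts in the summary can be verified by hand: a Conv2D layer has (kernel_height * kernel_width * input_channels + 1) * filters parameters, and a Dense layer has (inputs + 1) * units. A minimal sketch of the arithmetic:

# Conv2D params = (kh*kw*in_channels + 1) * filters; Dense params = (inputs + 1) * units
print((3*3*3 + 1) * 16)      # conv2d:   448
print((3*3*16 + 1) * 32)     # conv2d_1: 4640
print((3*3*32 + 1) * 64)     # conv2d_2: 18496
print((41*41*64 + 1) * 128)  # dense:    13770880 (Flatten output = 41*41*64 = 107584)
print((128 + 1) * 4)         # dense_1:  516
# 448 + 4640 + 18496 + 13770880 + 516 = 13794980 trainable parameters in total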
opt = tf.keras.optimizers.Adam(learning_rate=0.001)

model.compile(optimizer=opt,
              # from_logits=True because the output layer produces raw logits (no softmax)
              loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
epochs = 10

history = model.fit(
  train_ds,
  validation_data=val_ds,
  epochs=epochs
)
Epoch 1/10
29/29 [==============================] - 21s 600ms/step - loss: 1.2310 - accuracy: 0.6322 - val_loss: 0.7908 - val_accuracy: 0.6267
Epoch 2/10
29/29 [==============================] - 15s 531ms/step - loss: 0.6317 - accuracy: 0.7678 - val_loss: 0.5912 - val_accuracy: 0.7556
Epoch 3/10
29/29 [==============================] - 15s 518ms/step - loss: 0.4485 - accuracy: 0.8289 - val_loss: 0.5071 - val_accuracy: 0.7956
Epoch 4/10
29/29 [==============================] - 15s 523ms/step - loss: 0.3533 - accuracy: 0.8678 - val_loss: 0.5217 - val_accuracy: 0.7867
Epoch 5/10
29/29 [==============================] - 16s 547ms/step - loss: 0.3040 - accuracy: 0.8856 - val_loss: 0.5508 - val_accuracy: 0.7867
Epoch 6/10
29/29 [==============================] - 16s 543ms/step - loss: 0.2513 - accuracy: 0.9100 - val_loss: 0.5671 - val_accuracy: 0.7822
Epoch 7/10
29/29 [==============================] - 15s 532ms/step - loss: 0.2365 - accuracy: 0.9122 - val_loss: 0.5528 - val_accuracy: 0.7911
Epoch 8/10
29/29 [==============================] - 16s 538ms/step - loss: 0.2178 - accuracy: 0.9189 - val_loss: 0.5059 - val_accuracy: 0.8489
Epoch 9/10
29/29 [==============================] - 15s 531ms/step - loss: 0.1491 - accuracy: 0.9400 - val_loss: 1.2152 - val_accuracy: 0.6978
Epoch 10/10
29/29 [==============================] - 15s 531ms/step - loss: 0.1907 - accuracy: 0.9200 - val_loss: 0.5436 - val_accuracy: 0.8533
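To read off the final validation metrics outside of fit(), a minimal sketch using model.evaluate on the same val_ds (the weights have not changed since the last epoch, so the numbers should match the Epoch 10 log above):

final_loss, final_acc = model.evaluate(val_ds, verbose=0)
print(f"val_loss: {final_loss:.4f}, val_accuracy: {final_acc:.4f}")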
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(12, 4))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()

[Figure: training/validation accuracy (left) and loss (right) over the 10 epochs]
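As a usage example (a sketch, not part of the original assignment), the trained model can be run on one validation batch; tf.nn.softmax turns the raw logits from the last Dense layer into class probabilities:

for images, labels in val_ds.take(1):
    logits = model.predict(images)              # shape (batch_size, 4), raw logits
    probs = tf.nn.softmax(logits).numpy()       # convert logits to probabilities
    preds = np.argmax(probs, axis=-1)
    for i in range(5):                          # show the first 5 samples of the batch
        print(f"true: {class_names[labels[i]]:8s} "
              f"pred: {class_names[preds[i]]:8s} "
              f"confidence: {probs[i][preds[i]]:.2f}")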
