2021-06-01
# LeNet-5-style network (sigmoid activations, max pooling), built with the Keras Sequential API.
import tensorflow as tf

model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(filters=6, kernel_size=5, activation='sigmoid', input_shape=(224, 224, 3)),
    tf.keras.layers.MaxPool2D(pool_size=2, strides=2),
    tf.keras.layers.Conv2D(filters=16, kernel_size=5, activation='sigmoid'),
    tf.keras.layers.MaxPool2D(pool_size=2, strides=2),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(120, activation='sigmoid'),
    tf.keras.layers.Dense(84, activation='sigmoid'),
    tf.keras.layers.Dense(10, activation='sigmoid')
])
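
A minimal sanity check for the LeNet-style model above; the dummy input and summary call are illustrative and not part of the original post.

# Assumed usage sketch: inspect the architecture and run a dummy forward pass.
import numpy as np

model.summary()  # prints layer output shapes and parameter counts
dummy = np.random.rand(1, 224, 224, 3).astype('float32')  # one fake 224x224 RGB image
print(model(dummy).shape)  # expected: (1, 10)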
# VGG-style network: three convolutional blocks (64, 128, 256 filters) with batch
# normalization, followed by two fully connected layers with dropout.
from tensorflow.keras.layers import (Conv2D, MaxPooling2D, BatchNormalization,
                                     Flatten, Dense, Dropout)

model = tf.keras.models.Sequential()

# Block 1: two 3x3 convolutions with 64 filters.
model.add(Conv2D(64, (3, 3), strides=(1, 1), input_shape=(224, 224, 3), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(BatchNormalization())

# Block 2: two 3x3 convolutions with 128 filters.
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(BatchNormalization())

# Block 3: three 3x3 convolutions with 256 filters.
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(BatchNormalization())
model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu'))
model.add(MaxPooling2D((2, 2), strides=(2, 2)))
model.add(BatchNormalization())
model.add(Flatten())

# Classifier head: two dense layers with dropout, then a 10-way softmax output.
model.add(Dense(520, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(rate=0.5))
model.add(Dense(520, activation='relu'))
model.add(BatchNormalization())
model.add(Dropout(rate=0.5))
model.add(Dense(10, activation='softmax'))
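
A minimal sketch of compiling and inspecting the VGG-style model, assuming integer class labels; the optimizer and loss choices are illustrative and not from the original post.

# Assumed training setup (illustrative): Adam + sparse categorical cross-entropy
# matches the 10-way softmax output when labels are integer class indices.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.summary()  # prints layer output shapes and parameter counts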