Sequence classification
Sequence classification with an LSTM
Sequential model
'''
import numpy as np
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Embedding, LSTM, Input

max_features = 10000  # vocabulary size: token ids are integers in [0, max_features)

model = Sequential()
# Map each of the 100 token ids in a sequence to a 256-dimensional vector
model.add(Embedding(max_features, output_dim=256, input_length=100))
model.add(LSTM(128))
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
print(model.summary())
'''
Functional model
'''
# The same architecture expressed with the functional (Model) API
input_1 = Input(shape=(100,))
x = Embedding(10000, 256)(input_1)
x = LSTM(128)(x)
x = Dropout(0.5)(x)
output_1 = Dense(1, activation='sigmoid')(x)
model = Model(input_1, output_1)
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
'''
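Neither LSTM classifier above is actually trained in these snippets. The block below is a minimal sketch of how the model could be fit and evaluated, assuming randomly generated integer token sequences and binary labels as stand-in data (these arrays are not part of the original example).
'''
# Hypothetical stand-in data: 1000 sequences of 100 token ids drawn from a
# vocabulary of max_features words, with random 0/1 labels.
x_train = np.random.randint(0, max_features, size=(1000, 100))
y_train = np.random.randint(0, 2, size=(1000, 1))

model.fit(x_train, y_train, batch_size=32, epochs=3, validation_split=0.1)
loss, acc = model.evaluate(x_train, y_train, batch_size=32)
'''
In practice the token sequences would come from a tokenizer and be padded or truncated to length 100, e.g. with keras.preprocessing.sequence.pad_sequences.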
Sequence classification with 1D convolutions
Sequential model
'''
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Input
from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D

seq_length = 100  # number of timesteps per sequence; each timestep has 100 features

model = Sequential()
model.add(Conv1D(64, 3, activation='relu', input_shape=(seq_length, 100)))
model.add(Conv1D(64, 3, activation='relu'))
model.add(MaxPooling1D(3))
model.add(Conv1D(128, 3, activation='relu'))
model.add(Conv1D(128, 3, activation='relu'))
model.add(GlobalAveragePooling1D())  # collapse the time dimension into a single 128-d vector
model.add(Dropout(0.5))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
'''
Functional model
'''
# The same architecture expressed with the functional API
input_1 = Input(shape=(seq_length, 100))
x = Conv1D(64, 3, activation='relu')(input_1)
x = Conv1D(64, 3, activation='relu')(x)
x = MaxPooling1D(3)(x)
x = Conv1D(128, 3, activation='relu')(x)
x = Conv1D(128, 3, activation='relu')(x)
x = GlobalAveragePooling1D()(x)
x = Dropout(0.5)(x)
output_1 = Dense(1, activation='sigmoid')(x)
model = Model(input_1, output_1)
model.compile(loss='binary_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
'''
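The 1D-convolution models above are likewise defined but never trained. Below is a minimal sketch of fitting the functional variant, assuming random sequences of shape (seq_length, 100) and binary labels as placeholder data (not part of the original example).
'''
# Hypothetical stand-in data: 500 sequences of seq_length timesteps,
# each timestep described by 100 features, with random 0/1 labels.
x_train = np.random.random((500, seq_length, 100))
y_train = np.random.randint(0, 2, size=(500, 1))

model.fit(x_train, y_train, batch_size=16, epochs=3, validation_split=0.1)
'''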
Sequence classification with stacked LSTMs
Sequential model
'''
from keras.models import Sequential, Model
from keras.layers import LSTM, Dense, Input
import numpy as np

data_dim = 16     # number of features per timestep
timesteps = 8     # sequence length
num_classes = 10

model = Sequential()
model.add(LSTM(32, return_sequences=True,
               input_shape=(timesteps, data_dim)))  # returns a sequence of vectors of dimension 32
model.add(LSTM(32, return_sequences=True))          # returns a sequence of vectors of dimension 32
model.add(LSTM(32))                                 # returns a single vector of dimension 32
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])

# Dummy training and validation data (random values standing in for real features and labels)
x_train = np.random.random((1000, timesteps, data_dim))
y_train = np.random.random((1000, num_classes))
x_val = np.random.random((100, timesteps, data_dim))
y_val = np.random.random((100, num_classes))

model.fit(x_train, y_train,
          batch_size=64, epochs=5,
          validation_data=(x_val, y_val))
'''
Functional model
'''
# The same stacked-LSTM classifier expressed with the functional API
input_1 = Input(shape=(timesteps, data_dim))
x = LSTM(32, return_sequences=True)(input_1)
x = LSTM(32, return_sequences=True)(x)
x = LSTM(32)(x)
output_1 = Dense(num_classes, activation='softmax')(x)
model = Model(input_1, output_1)
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
model.fit(x_train, y_train,
          batch_size=64, epochs=5,
          validation_data=(x_val, y_val))
print(model.summary())
'''
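Note that y_train and y_val above are random floats that merely stand in for one-hot targets. With real integer class labels, keras.utils.to_categorical converts them into the one-hot form expected by categorical_crossentropy; the snippet below is a small sketch of that, assuming hypothetical integer labels in the range 0..num_classes-1.
'''
from keras.utils import to_categorical

# Hypothetical integer class labels, one per sample
labels = np.random.randint(0, num_classes, size=(1000,))
y_train_onehot = to_categorical(labels, num_classes=num_classes)  # shape (1000, 10)
'''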