model.py
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import (
    Input, Dense, Dropout, Embedding, LSTM,
    BatchNormalization, Bidirectional, concatenate,
)


class SiameseModel:
    def __call__(self, shape, vocab_size, max_len, embedding_dim, embedding_matrix):
        # Two inputs: one padded token-id sequence per side of the pair.
        input_1 = Input(shape=shape)
        input_2 = Input(shape=shape)

        # Frozen pretrained embeddings initialized from embedding_matrix.
        # Note: each branch builds its own Embedding layer, so the two
        # branches do not share weights (a design choice of this model,
        # unlike a strictly weight-tied Siamese network).
        word_embedding_1 = Embedding(input_dim=vocab_size, output_dim=embedding_dim,
                                     weights=[embedding_matrix], input_length=max_len,
                                     trainable=False)(input_1)
        word_embedding_2 = Embedding(input_dim=vocab_size, output_dim=embedding_dim,
                                     weights=[embedding_matrix], input_length=max_len,
                                     trainable=False)(input_2)

        # Two stacked BiLSTMs per branch. The second layer must return only
        # its final state (return_sequences=False, the default) so each
        # branch yields one fixed-size vector; with return_sequences=True
        # the final Dense would emit a score per timestep instead of one
        # score per pair, breaking the binary_crossentropy loss.
        lstm_1 = Bidirectional(LSTM(128, return_sequences=True))(word_embedding_1)
        lstm_1 = Dropout(0.25)(lstm_1)
        lstm_1 = Bidirectional(LSTM(128))(lstm_1)
        lstm_1 = Dropout(0.25)(lstm_1)

        lstm_2 = Bidirectional(LSTM(128, return_sequences=True))(word_embedding_2)
        lstm_2 = Dropout(0.25)(lstm_2)
        lstm_2 = Bidirectional(LSTM(128))(lstm_2)
        lstm_2 = Dropout(0.25)(lstm_2)

        # Merge the two sentence vectors and classify the pair.
        concat = concatenate([lstm_1, lstm_2])
        merged = BatchNormalization()(concat)
        merged = Dropout(0.25)(merged)
        merged = Dense(64, activation="relu")(merged)
        merged = BatchNormalization()(merged)
        merged = Dropout(0.25)(merged)
        output = Dense(1, activation="sigmoid")(merged)

        model = Model(inputs=[input_1, input_2], outputs=output)
        model.compile(loss="binary_crossentropy",
                      metrics=["accuracy"],
                      optimizer=Adam(learning_rate=1e-4))
        model.summary()  # summary() prints itself and returns None
        return model
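A minimal usage sketch follows. The data and hyperparameter names (q1_train, q2_train, y_train, and the vocabulary/sequence sizes) are illustrative assumptions, not part of the repository; random arrays stand in for real tokenized question pairs and a real pretrained embedding matrix.

import numpy as np

# Assumed shapes only: 10k-word vocabulary, 40-token sequences, 300-d embeddings.
vocab_size, max_len, embedding_dim = 10_000, 40, 300
embedding_matrix = np.random.rand(vocab_size, embedding_dim)  # stand-in for GloVe/word2vec

model = SiameseModel()(shape=(max_len,), vocab_size=vocab_size, max_len=max_len,
                       embedding_dim=embedding_dim, embedding_matrix=embedding_matrix)

# Stand-in data: integer token ids per side, binary similarity labels.
q1_train = np.random.randint(0, vocab_size, size=(256, max_len))
q2_train = np.random.randint(0, vocab_size, size=(256, max_len))
y_train = np.random.randint(0, 2, size=(256, 1))

model.fit([q1_train, q2_train], y_train, batch_size=64, epochs=5, validation_split=0.1)

Because the model takes two inputs, fit() receives the pair as a two-element list whose arrays are aligned row by row with the labels.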