Note: click here to download the full example code.

Main example script.

NOTE: the original author marked this example as "Not working!" — see the
bug-fix comments in the code below.
9 """
10 import numpy as np
11
12 from keras.callbacks import TensorBoard, ModelCheckpoint
13 from keras.preprocessing.sequence import pad_sequences
14 from keras.utils.np_utils import to_categorical
15 from keras.models import Model
16 from keras.layers import Dense, Input, concatenate
17 from keras.layers import Embedding, Dropout, Bidirectional, TimeDistributed
18
19 #from keras.layers import CuDNNGRU, CuDNNLSTM, Conv1D
20 #from keras.layers import BatchNormalization, GlobalMaxPooling1D
21 #from keras.callbacks import EarlyStopping, ReduceLROnPlateau
22 from keras import backend as K
23 import tensorflow as tf
24 #from tensorflow import set_random_seed
25
26
27 #import tensorflow
28 #tensorflow.random.set_seed(x)
29
30 from tensorflow.compat.v1.keras.layers import CuDNNLSTM
31 from tensorflow.compat.v1.keras.layers import CuDNNGRU
32 from tensorflow.compat.v1.keras.layers import Conv1D
33
34 # In[16]:
35
36
# --- Toy inputs -------------------------------------------------------------
# x: one entry per "patient"; each patient has MAX_TIMESERIES_TYPE time
# series of varying length (values look like small integer codes).
x = [
    [
        [1, 1, 3, 5],
        [9, 5, 3, 7],
        [6, 2, 3, 8],
    ],
    [
        [1, 1, 3, 5, 5],
        [9, 5, 3, 7, 3],
        [6, 2, 3, 8, 8],
    ],
    [
        [1, 1, 3],
        [9, 5, 3],
        [6, 2, 3],
    ],
    [
        [4, 1, 1, 3],
        [6, 9, 5, 3],
        [5, 6, 2, 3],
    ],
]

# Per-patient categorical features (one feature vector per patient).
x_cate = [[1, 2], [3, 1], [2, 2], [1, 2]]

# Binary class label per patient.
y = [0, 1, 1, 0]

SEED = 1

MAX_TIME_LENGTH = 10      # every series is padded/truncated to this length
MAX_TIMESERIES_TYPE = 3   # number of series per patient


# Change input format so every patient has the same fixed shape.
data = np.zeros((len(x), MAX_TIMESERIES_TYPE, MAX_TIME_LENGTH), dtype='int32')
i_data = np.zeros((len(x), MAX_TIMESERIES_TYPE, len(x_cate[0])), dtype='int32')

for i, patient in enumerate(x):
    for j, series in enumerate(patient):
        # BUG FIX: the original called pad_sequences(series, ...) on a flat
        # list of ints, which raises a TypeError (pad_sequences expects a list
        # of sequences). Pad manually instead, mirroring pad_sequences'
        # defaults: keep the LAST MAX_TIME_LENGTH values and left-pad with 0.
        tail = series[-MAX_TIME_LENGTH:]
        data[i, j, MAX_TIME_LENGTH - len(tail):] = tail
    # BUG FIX: the original referenced an undefined name `i_inputs`; the
    # categorical features live in `x_cate`. Broadcasting fills the first
    # len(x_cate[i]) rows with the patient's feature vector — presumably the
    # intended replication; TODO confirm against the model's expectations.
    i_data[i, :len(x_cate[i])] = x_cate[i]

print(i_data)
91
def create_model():
    """Build and compile the hierarchical two-input classifier.

    A sentence-level encoder (embedding -> BiGRU -> attention) is applied
    per post via TimeDistributed, concatenated with per-type categorical
    features, and pooled by a second BiGRU + attention into a 2-way softmax.

    Relies on module-level globals defined elsewhere in this file/project:
    num_words, EMBEDDING_DIM, MAX_POST_LENGTH, embedding_matrix, MAX_POSTS,
    i_data, and a custom AttentionWithContext layer — TODO confirm all are
    in scope before calling.

    Returns:
        A compiled keras Model taking [review_input, ana_input].
    """
    # Local imports keep this function self-contained. BUG FIX: the original
    # `from keras.optimizers import Adam, AdaMod` fails — AdaMod is not part
    # of keras.optimizers (it ships in the separate `keras-adamod` package).
    # BatchNormalization was used below but its import was commented out.
    from keras.layers import BatchNormalization
    from keras.optimizers import Adam

    # Frozen pretrained word-embedding lookup shared by every post.
    embedding_layer = Embedding(num_words,
                                EMBEDDING_DIM,
                                input_length=MAX_POST_LENGTH,
                                weights=[embedding_matrix],
                                trainable=False)

    # --- sentence-level encoder -------------------------------------------
    sequence_input = Input(shape=(MAX_POST_LENGTH,))
    embedded_sequences = embedding_layer(sequence_input)
    h = Bidirectional(CuDNNGRU(50, return_sequences=True))(embedded_sequences)
    h = Dropout(0.2)(h)
    h = AttentionWithContext()(h)  # custom layer — must be defined elsewhere
    h = Dropout(0.2)(h)
    sent_preds = Dense(units=2, activation='softmax')(h)
    sentEncoder = Model(sequence_input, sent_preds)
    print(sentEncoder.summary())

    # --- document-level model ---------------------------------------------
    # Categorical side input, one feature vector per post/series type.
    ana_input = Input(shape=(MAX_POSTS, len(i_data[0][0])))
    review_input = Input(shape=(MAX_POSTS, MAX_POST_LENGTH))

    g = TimeDistributed(sentEncoder)(review_input)
    g = concatenate([g, ana_input])  # combine time series and categories
    g = BatchNormalization()(g)
    g = Dropout(0.2)(g)
    g = Bidirectional(CuDNNGRU(16, return_sequences=True))(g)
    g = Dropout(0.2)(g)
    g = AttentionWithContext()(g)
    g = Dropout(0.2)(g)
    preds = Dense(2, activation='softmax')(g)

    model = Model([review_input, ana_input], preds)
    print(model.summary())

    # Stock Adam replaces the unavailable AdaMod optimizer.
    model.compile(loss='binary_crossentropy', optimizer=Adam(), metrics=['acc'])
    return model
130
131
132
133
model = create_model()

# BUG FIXES vs. the original call:
#  - `data_i` was undefined; the categorical array is named `i_data`.
#  - `nb_epoch` was removed from Keras long ago; the argument is `epochs`.
#  - the model ends in a 2-unit softmax, so scalar labels are one-hot
#    encoded with the already-imported to_categorical — TODO confirm this
#    matches the intended label format.
model.fit([data, i_data],
          to_categorical(np.asarray(y, 'int32'), num_classes=2),
          shuffle=False,
          epochs=200, batch_size=32, verbose=0)
139 """
Total running time of the script: 0 minutes 0.000 seconds.