embed_dim = 128

model = Sequential()
model.add(Embedding(max_fatures, embed_dim, input_length=X_train.shape[1]))
model.add(SpatialDropout1D(0.2))
model.add(LSTM(196, dropout=0.5, recurrent_dropout=0.2))
model.add(Dense(1, activation='sigmoid'))  # sigmoid pairs with binary_crossentropy; softmax over a single unit always outputs 1
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
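For completeness, a minimal sketch of how this model might be trained and evaluated; the batch size, epoch count and the X_test/Y_test split are illustrative assumptions, not values from the original run:

batch_size = 32   # assumed for illustration
history = model.fit(X_train, Y_train,
                    epochs=5,
                    batch_size=batch_size,
                    validation_split=0.1,   # hold out 10% of the training data for validation
                    verbose=1)
score, acc = model.evaluate(X_test, Y_test, batch_size=batch_size)
print('Test accuracy: %.3f' % acc)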
pad_sequences(x_train, maxlen=max_len, padding='pre')
The padding mode gave a significant difference in the results; in my case, the best result came with padding='post'.
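To make the difference concrete, a small illustration with toy token-id sequences (maxlen=5 is arbitrary here): 'pre' places the zeros before the tokens, 'post' places them after.

from keras.preprocessing.sequence import pad_sequences

seqs = [[5, 8, 2], [7, 1]]  # toy sequences for illustration
print(pad_sequences(seqs, maxlen=5, padding='pre'))
# [[0 0 5 8 2]
#  [0 0 0 7 1]]
print(pad_sequences(seqs, maxlen=5, padding='post'))
# [[5 8 2 0 0]
#  [7 1 0 0 0]]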
def model_conv_core(model_input, embd_size=128):
    # Shared convolutional "core": frozen Word2Vec embedding followed by two Conv1D blocks
    num_filters = 128
    X = Embedding(total_unique_words, DIM, input_length=max_words,
                  weights=[embedding_matrix], trainable=False, name='Word2Vec')(model_input)
    X = Conv1D(num_filters, 3, activation='relu', padding='same')(X)
    X = Dropout(0.3)(X)
    X = MaxPooling1D(2)(X)
    X = Conv1D(num_filters, 5, activation='relu', padding='same')(X)
    return X


def model_conv1d(model_input, embd_size=128, num_filters=64, kernel_size=3):
    # Conv1D + LSTM on top of the frozen Word2Vec embedding
    X = Embedding(total_unique_words, DIM, input_length=max_words,
                  weights=[embedding_matrix], trainable=False, name='Word2Vec')(model_input)
    X = Conv1D(num_filters, kernel_size, padding='same', activation='relu', strides=1)(X)
    # X = Dropout(0.1)(X)
    X = MaxPooling1D(pool_size=2)(X)
    X = LSTM(256, kernel_regularizer=regularizers.l2(0.004))(X)
    X = Dropout(0.3)(X)
    X = Dense(128, kernel_regularizer=regularizers.l2(0.0004))(X)
    X = LeakyReLU()(X)
    X = BatchNormalization()(X)
    X = Dense(1, activation="sigmoid")(X)
    model = Model(model_input, X, name='w2v_conv1d')
    return model


def model_gru(model_input, embd_size=128):
    # Two stacked GRU layers on top of the shared convolutional core
    X = model_conv_core(model_input, embd_size)
    X = MaxPooling1D(2)(X)
    X = Dropout(0.2)(X)
    X = GRU(256, activation='relu', return_sequences=True, kernel_regularizer=regularizers.l2(0.004))(X)
    X = Dropout(0.5)(X)
    X = GRU(128, activation='relu', kernel_regularizer=regularizers.l2(0.0004))(X)
    X = Dropout(0.5)(X)
    X = BatchNormalization()(X)
    X = Dense(1, activation="sigmoid")(X)
    model = Model(model_input, X, name='w2v_gru')
    return model


def model_conv2d(model_input, embd_size=128):
    # Kim-style CNN: parallel Conv2D branches with different filter heights over the embedding "image"
    from keras.layers import MaxPool2D, Conv2D, Reshape
    num_filters = 256
    filter_sizes = [3, 5, 7]
    X = Embedding(total_unique_words, DIM, input_length=max_words,
                  weights=[embedding_matrix], trainable=False, name='Word2Vec')(model_input)
    # maxSequenceLength must equal max_words and embd_size must equal DIM for this Reshape to be valid
    reshape = Reshape((maxSequenceLength, embd_size, 1))(X)
    conv_0 = Conv2D(num_filters, kernel_size=(filter_sizes[0], embd_size), padding='valid',
                    kernel_initializer='normal', activation='relu')(reshape)
    conv_1 = Conv2D(num_filters, kernel_size=(filter_sizes[1], embd_size), padding='valid',
                    kernel_initializer='normal', activation='relu')(reshape)
    conv_2 = Conv2D(num_filters, kernel_size=(filter_sizes[2], embd_size), padding='valid',
                    kernel_initializer='normal', activation='relu')(reshape)
    # Max-pool each branch over the full sequence length, then concatenate the three branches
    maxpool_0 = MaxPool2D(pool_size=(maxSequenceLength - filter_sizes[0] + 1, 1), strides=(1, 1), padding='valid')(conv_0)
    maxpool_1 = MaxPool2D(pool_size=(maxSequenceLength - filter_sizes[1] + 1, 1), strides=(1, 1), padding='valid')(conv_1)
    maxpool_2 = MaxPool2D(pool_size=(maxSequenceLength - filter_sizes[2] + 1, 1), strides=(1, 1), padding='valid')(conv_2)
    X = concatenate([maxpool_0, maxpool_1, maxpool_2], axis=1)
    X = Dropout(0.2)(X)
    X = Flatten()(X)
    X = Dense(int(embd_size / 2.0), activation='relu', kernel_regularizer=regularizers.l2(0.004))(X)
    X = Dropout(0.5)(X)
    X = BatchNormalization()(X)
    X = Dense(1, activation="sigmoid")(X)
    model = Model(model_input, X, name='w2v_conv2d')
    return model
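As a rough sketch of how these builders could be wired together (the Input shape and compile settings here are illustrative assumptions; max_words and the other globals come from the preprocessing above):

from keras.layers import Input

model_input = Input(shape=(max_words,), dtype='int32')   # padded token-id sequences
model = model_conv1d(model_input)                         # or model_gru(model_input), model_conv2d(model_input)
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
model.summary()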
def gray_code(n):
    # Build the n-bit reflected Gray code as a list of bit strings
    def gray_code_recurse(g, n):
        k = len(g)
        if n <= 0:
            return
        else:
            # Reflect the current list and prefix the new half with '1' ...
            for i in range(k - 1, -1, -1):
                char = '1' + g[i]
                g.append(char)
            # ... and the original half with '0'
            for i in range(k - 1, -1, -1):
                g[i] = '0' + g[i]
            gray_code_recurse(g, n - 1)

    g = ['0', '1']
    gray_code_recurse(g, n - 1)
    return g


def gen_list(m):
    # Enumerate all subsets of m with more than one element, using Gray-code bit masks
    out = []
    g = gray_code(len(m))
    for i in range(len(g)):
        mask_str = g[i]
        idx = 0
        v = []
        for c in list(mask_str):
            if c == '1':
                v.append(m[idx])
            idx += 1
        if len(v) > 1:
            out.append(v)
    return out
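For a quick check, calling these helpers on a three-element list enumerates the Gray-code masks and keeps every subset with more than one element:

print(gray_code(3))
# ['000', '001', '011', '010', '110', '111', '101', '100']
print(gen_list(['a', 'b', 'c']))
# [['b', 'c'], ['a', 'b'], ['a', 'b', 'c'], ['a', 'c']]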
Source: https://habr.com/ru/post/441850/