import keras
from keras.layers import Input, Dense, BatchNormalization, Activation, Dropout
from keras.models import Model
from keras.optimizers import Adam


def DenseNet(X_train):
    """Densely connected MLP: every block receives the raw input concatenated
    with the outputs of all preceding blocks (DenseNet-style skip connections)."""
    ip = Input(shape=(X_train.shape[1],))
    x_list = [ip]
    # Eight blocks of Dense -> BatchNorm -> ReLU -> Dropout, with shrinking width.
    for units in (128, 128, 64, 64, 32, 32, 16, 16):
        # Feed each block the concatenation of all earlier outputs
        # (just the raw input on the first pass, where there is nothing to concatenate).
        x = x_list[0] if len(x_list) == 1 else keras.layers.concatenate(x_list)
        x = Dense(units, use_bias=False)(x)  # no bias: BatchNorm's beta supplies the shift
        x = BatchNormalization()(x)
        x = Activation('relu')(x)
        x = Dropout(0.5)(x)
        x_list.append(x)
    op = Dense(1, activation='sigmoid')(x)  # single sigmoid unit for binary classification
    model = Model(inputs=ip, outputs=op)
    adam = Adam(learning_rate=0.05)
    model.compile(loss='binary_crossentropy', optimizer=adam, metrics=['accuracy'])
    return model
model = DenseNet(X)
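# Minimal training sketch (an assumption, not part of the original): `y` is a
# hypothetical array of binary labels aligned row-for-row with X; the epoch,
# batch-size, and validation-split values below are illustrative only.
model.fit(X, y, epochs=20, batch_size=256, validation_split=0.2)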