Commit 2096e00e authored by Florian RICHOUX's avatar Florian RICHOUX

Add conv model without lstm

parent 2ca445de
......@@ -25,6 +25,7 @@ from models.fc2_2dense import FC2_2Dense
from models.fc2_100_2dense import FC2_100_2Dense
from models.fc2_20_2dense import FC2_20_2Dense
from models.fc2_2_2dense import FC2_2_2Dense
# NOTE(review): the original line was a SyntaxError — Python identifiers
# (including module names in an import statement) cannot start with a digit.
# The module file must be renamed to three_conv3_2dense_shared.py and the
# class to ThreeConv3_2Dense_S for this import to be legal.
from models.three_conv3_2dense_shared import ThreeConv3_2Dense_S
import tensorflow as tf
from keras import callbacks
......@@ -105,6 +106,8 @@ def factory_model( model_name ):
return FC2_20_2Dense(), 'fc2_20_2dense'
elif model_name == 'fc2_2_2dense':
return FC2_2_2Dense(), 'fc2_2_2dense'
elif model_name == '3conv3_2dense_shared':
return 3Conv3_2Dense_S(), '3conv3_2dense_shared'
else:
print("Model unknown. Terminating.")
sys.exit(1)
......@@ -136,7 +139,7 @@ def make_parser():
parser.add_argument('-train', type=str, help='File containing the training set')
parser.add_argument('-val', type=str, help='File containing the validation set')
parser.add_argument('-test', type=str, help='File containing the test set')
parser.add_argument('-model', type=str, help='choose among: lstm32_3conv3_2dense, lstm32_2conv3_2dense_shared, lstm32_3conv3_2dense_shared, lstm32_2conv3_4dense_shared, lstm32_3conv4_2dense_shared, lstm64_3conv3_2dense_shared, lstm64drop_3conv3_3dense_shared, lstm64x2_3conv3_10dense_shared, lstm64x2_embed2_10dense_shared, lstm64x2_embed4_10dense_shared, fc6_embed3_2dense, fc2_2dense, fc2_100_2dense, fc2_20_2dense, fc2_2_2dense')
parser.add_argument('-model', type=str, help='choose among: lstm32_3conv3_2dense, lstm32_2conv3_2dense_shared, lstm32_3conv3_2dense_shared, lstm32_2conv3_4dense_shared, lstm32_3conv4_2dense_shared, lstm64_3conv3_2dense_shared, lstm64drop_3conv3_3dense_shared, lstm64x2_3conv3_10dense_shared, lstm64x2_embed2_10dense_shared, lstm64x2_embed4_10dense_shared, fc6_embed3_2dense, fc2_2dense, fc2_100_2dense, fc2_20_2dense, fc2_2_2dense', '3conv3_2dense_shared')
parser.add_argument('-epochs', type=int, default=50, help='Number of epochs [default: 50]')
parser.add_argument('-batch', type=int, default=64, help='Batch size [default: 64]')
parser.add_argument('-patience', type=int, default=0, help='Number of epochs before triggering the early stopping criterion [default: infinite patience]')
......
from keras.models import Model
from keras import layers
from keras import Input
import numpy as np
import tensorflow as tf
from models.abstract_model import AbstractModel
class ThreeConv3_2Dense_S(AbstractModel):
    """Siamese protein-pair classifier: three shared Conv1D+MaxPooling+
    BatchNorm blocks per branch, concatenation, two Dense layers, sigmoid.

    NOTE(review): renamed from "3Conv3_2Dense_S" — Python class names
    cannot start with a digit, so the original `class` statement was a
    SyntaxError and the module could never be imported. Call sites (the
    model factory and the import in the training script) must use this
    name as well.
    """

    def __init__(self):
        super().__init__()

    def get_model(self):
        """Build and return the uncompiled Keras Model.

        Returns:
            A keras Model with two inputs, 'protein1' and 'protein2',
            each of shape (None, 24) — a variable-length sequence of
            24-dim vectors (presumably per-residue encodings; confirm
            against the data pipeline) — and a single sigmoid unit
            as output.
        """
        # Every layer is instantiated once and applied to both inputs,
        # so the two branches share weights (siamese architecture).
        # The BatchNormalization layers are shared too, pooling their
        # statistics across both branches.
        conv1 = layers.Conv1D(5, 20, activation='relu')
        pool1 = layers.MaxPooling1D(3)
        batchnorm1 = layers.BatchNormalization()
        conv2 = layers.Conv1D(5, 20, activation='relu')
        pool2 = layers.MaxPooling1D(3)
        batchnorm2 = layers.BatchNormalization()
        conv3 = layers.Conv1D(5, 20, activation='relu')
        pool3 = layers.MaxPooling1D(3)
        batchnorm3 = layers.BatchNormalization()

        # First protein branch.
        input1 = Input(shape=(None, 24,), dtype=np.float32, name='protein1')
        protein1 = conv1(input1)
        protein1 = pool1(protein1)
        protein1 = batchnorm1(protein1)
        protein1 = conv2(protein1)
        protein1 = pool2(protein1)
        protein1 = batchnorm2(protein1)
        protein1 = conv3(protein1)
        protein1 = pool3(protein1)
        protein1 = batchnorm3(protein1)

        # Second protein branch — same layer objects, hence shared weights.
        input2 = Input(shape=(None, 24,), dtype=np.float32, name='protein2')
        protein2 = conv1(input2)
        protein2 = pool1(protein2)
        protein2 = batchnorm1(protein2)
        protein2 = conv2(protein2)
        protein2 = pool2(protein2)
        protein2 = batchnorm2(protein2)
        protein2 = conv3(protein2)
        protein2 = pool3(protein2)
        protein2 = batchnorm3(protein2)

        # Classification head on the concatenated branch features.
        # NOTE(review): Flatten() over a variable-length (None) time axis
        # typically fails at build time in Keras ("input shape not fully
        # defined"); a GlobalMaxPooling1D/GlobalAveragePooling1D is the
        # usual fix — confirm against the training run.
        head = layers.concatenate([protein1, protein2], axis=-1)
        head = layers.Flatten()(head)
        head = layers.Dense(25, activation='relu')(head)
        head = layers.BatchNormalization()(head)
        head = layers.Dense(1)(head)
        output = layers.Activation(tf.nn.sigmoid)(head)

        model = Model(inputs=[input1, input2], outputs=output)
        return model
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment