from keras.layers import Activation, Reshape, Dropout
from keras.layers import Conv2D, MaxPooling2D, Conv2DTranspose
from keras.models import Sequential

def add_softmax(model: Sequential) -> Sequential:
    """Flatten the spatial grid and apply a per-position softmax over channels."""
    _, curr_width, curr_height, curr_channels = model.layers[-1].output_shape
    # Collapse (width, height) into a single axis so the softmax normalizes
    # each spatial position independently across the channel dimension.
    model.add(Reshape((curr_width * curr_height, curr_channels)))
    model.add(Activation('softmax', name='softmax'))
    return model
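
# Shape sketch for add_softmax (illustrative; the 512x512 input size is an
# assumption, not something this file fixes): dilated_frontend(512, 512) below
# ends at (None, 28, 28, 21), the Reshape flattens that to (None, 784, 21),
# and the softmax then normalizes each of the 784 positions over 21 channels.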

def dilated_frontend(input_width, input_height) -> Sequential:
    """VGG-16-style front-end with dilated convolutions replacing the last pools."""
    model = Sequential()
    model.add(Conv2D(64, (3, 3), activation='relu', padding='same',
                     input_shape=(input_width, input_height, 3), name='conv1.1'))
    model.add(Conv2D(64, (3, 3), activation='relu', padding='same', name='conv1.2'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(128, (3, 3), activation='relu', padding='same', name='conv2.1'))
    model.add(Conv2D(128, (3, 3), activation='relu', padding='same', name='conv2.2'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='conv3.1'))
    model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='conv3.2'))
    model.add(Conv2D(256, (3, 3), activation='relu', padding='same', name='conv3.3'))
    model.add(MaxPooling2D((2, 2)))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='conv4.1'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='conv4.2'))
    model.add(Conv2D(512, (3, 3), activation='relu', padding='same', name='conv4.3'))
    # conv5 drops pooling and uses rate-2 dilation instead, growing the
    # receptive field without further downsampling (valid padding here).
    model.add(Conv2D(512, (3, 3), dilation_rate=(2, 2), activation='relu', name='conv5.1'))
    model.add(Conv2D(512, (3, 3), dilation_rate=(2, 2), activation='relu', name='conv5.2'))
    model.add(Conv2D(512, (3, 3), dilation_rate=(2, 2), activation='relu', name='conv5.3'))
    # Fully convolutional classifier head: a dilated 7x7 conv stands in for fc6.
    model.add(Conv2D(4096, (7, 7), dilation_rate=(4, 4), activation='relu', name='conv6.1'))
    model.add(Dropout(0.5))
    model.add(Conv2D(4096, (1, 1), activation='relu', name='conv6.2'))
    model.add(Dropout(0.5))
    # 21 output channels, one per class (e.g. Pascal VOC: 20 classes + background).
    model.add(Conv2D(21, (1, 1), activation='linear', name='conv6.3'))
    return model
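
# Size bookkeeping for the dilated head (a sketch, assuming square inputs):
# three 2x2 pools give floor(W/8); the three valid rate-2 3x3 convs in conv5
# each trim 4 px (-12 total) and the rate-4 7x7 conv in conv6.1 trims 24 px
# more, so the head is floor(W/8) - 36 wide and the input needs W >= 296.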

def deconv_frontend(input_width, input_height) -> Sequential:
    """FCN-32s-style front-end with a transposed-convolution upsampler.

    Only the first conv layer is currently active; the remaining layers are
    left commented out below.
    """
    model = Sequential()
    model.add(Conv2D(64, (3, 3), activation='relu', input_shape=(input_width, input_height, 3)))
    # model.add(Conv2D(64, (3, 3), activation='relu'))
    # model.add(MaxPooling2D((2, 2)))
    # model.add(Conv2D(128, (3, 3), activation='relu'))
    # model.add(Conv2D(128, (3, 3), activation='relu'))
    # model.add(MaxPooling2D((2, 2)))
    # model.add(Conv2D(256, (3, 3), activation='relu'))
    # model.add(Conv2D(256, (3, 3), activation='relu'))
    # model.add(Conv2D(256, (3, 3), activation='relu'))
    # model.add(MaxPooling2D((2, 2)))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(MaxPooling2D((2, 2)))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(Conv2D(512, (3, 3), activation='relu'))
    # model.add(MaxPooling2D((2, 2)))
    # # fully convolutional head
    # model.add(Conv2D(4096, (7, 7), activation='relu', padding='valid'))
    # model.add(Dropout(0.5))
    # model.add(Conv2D(4096, (1, 1), activation='relu', padding='same'))
    # model.add(Dropout(0.5))
    # model.add(Conv2D(21, (1, 1)))
    # # 32x upsampling back toward the input resolution
    # model.add(Conv2DTranspose(21, (64, 64), strides=(32, 32)))
    return model
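

if __name__ == '__main__':
    # Minimal smoke test (a sketch; the 512x512 size is an assumption, chosen
    # so the valid-padding dilated layers leave a positive spatial extent).
    model = add_softmax(dilated_frontend(512, 512))
    model.summary()  # should end in a softmax over (None, 784, 21)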