Union-net.py
# Import the required modules.
from keras import layers
from keras.layers import Input, Activation
from keras.models import Model
from keras import backend as K
class Union_Net:
    @staticmethod
    def build(width, height, depth, classes, kernel_reg, kernel_init):
        # width: image width in pixels; height: image height; depth: number of
        # image channels; classes: number of output classes; kernel_reg:
        # regularization method (L1 or L2); kernel_init: kernel initializer.
        # Note: kernel_reg and kernel_init are accepted here but not applied
        # to any layer below.
        inputShape = (height, width, depth)
        if K.image_data_format() == "channels_first":
            inputShape = (depth, height, width)
        my_input = Input(shape=inputShape, name='my_input')
        # Block 1 (32 filters): four parallel branches of 4, 3, 2 and 1
        # Conv-BN pairs, each ending in 2x2 max pooling, merged by
        # element-wise addition.
        d1 = layers.Conv2D(32, 3, activation="relu", padding="same")(my_input)
        d1 = layers.BatchNormalization(axis=-1)(d1)
        d1 = layers.Conv2D(32, 3, activation="relu", padding="same")(d1)
        d1 = layers.BatchNormalization(axis=-1)(d1)
        d1 = layers.Conv2D(32, 3, activation="relu", padding="same")(d1)
        d1 = layers.BatchNormalization(axis=-1)(d1)
        d1 = layers.Conv2D(32, 3, activation="relu", padding="same")(d1)
        d1 = layers.BatchNormalization(axis=-1)(d1)
        d1 = layers.MaxPooling2D(2)(d1)
        c1 = layers.Conv2D(32, 3, activation="relu", padding="same")(my_input)
        c1 = layers.BatchNormalization(axis=-1)(c1)
        c1 = layers.Conv2D(32, 3, activation="relu", padding="same")(c1)
        c1 = layers.BatchNormalization(axis=-1)(c1)
        c1 = layers.Conv2D(32, 3, activation="relu", padding="same")(c1)
        c1 = layers.BatchNormalization(axis=-1)(c1)
        c1 = layers.MaxPooling2D(2)(c1)
        b1 = layers.Conv2D(32, 3, activation="relu", padding="same")(my_input)
        b1 = layers.BatchNormalization(axis=-1)(b1)
        b1 = layers.Conv2D(32, 3, activation="relu", padding="same")(b1)
        b1 = layers.BatchNormalization(axis=-1)(b1)
        b1 = layers.MaxPooling2D(2)(b1)
        a1 = layers.Conv2D(32, 3, activation="relu", padding="same")(my_input)
        a1 = layers.BatchNormalization(axis=-1)(a1)
        a1 = layers.MaxPooling2D(2)(a1)
        m1 = layers.add([a1, b1, c1, d1])
        # m1 = layers.Dropout(0.5)(m1)
        m11 = Activation('relu')(m1)
        # Block 2 (64 filters): the same four-branch pattern applied to m11,
        # without pooling.
        d2 = layers.Conv2D(64, 3, activation="relu", padding="same")(m11)
        d2 = layers.BatchNormalization(axis=-1)(d2)
        d2 = layers.Conv2D(64, 3, activation="relu", padding="same")(d2)
        d2 = layers.BatchNormalization(axis=-1)(d2)
        d2 = layers.Conv2D(64, 3, activation="relu", padding="same")(d2)
        d2 = layers.BatchNormalization(axis=-1)(d2)
        d2 = layers.Conv2D(64, 3, activation="relu", padding="same")(d2)
        d2 = layers.BatchNormalization(axis=-1)(d2)
        c2 = layers.Conv2D(64, 3, activation="relu", padding="same")(m11)
        c2 = layers.BatchNormalization(axis=-1)(c2)
        c2 = layers.Conv2D(64, 3, activation="relu", padding="same")(c2)
        c2 = layers.BatchNormalization(axis=-1)(c2)
        c2 = layers.Conv2D(64, 3, activation="relu", padding="same")(c2)
        c2 = layers.BatchNormalization(axis=-1)(c2)
        b2 = layers.Conv2D(64, 3, activation="relu", padding="same")(m11)
        b2 = layers.BatchNormalization(axis=-1)(b2)
        b2 = layers.Conv2D(64, 3, activation="relu", padding="same")(b2)
        b2 = layers.BatchNormalization(axis=-1)(b2)
        a2 = layers.Conv2D(64, 3, activation="relu", padding="same")(m11)
        a2 = layers.BatchNormalization(axis=-1)(a2)
        m2 = layers.add([a2, b2, c2, d2])
        m22 = Activation('relu')(m2)
        # Block 3 (128 filters): the same four-branch pattern applied to m22,
        # again without pooling.
        d3 = layers.Conv2D(128, 3, activation="relu", padding="same")(m22)
        d3 = layers.BatchNormalization(axis=-1)(d3)
        d3 = layers.Conv2D(128, 3, activation="relu", padding="same")(d3)
        d3 = layers.BatchNormalization(axis=-1)(d3)
        d3 = layers.Conv2D(128, 3, activation="relu", padding="same")(d3)
        d3 = layers.BatchNormalization(axis=-1)(d3)
        d3 = layers.Conv2D(128, 3, activation="relu", padding="same")(d3)
        d3 = layers.BatchNormalization(axis=-1)(d3)
        c3 = layers.Conv2D(128, 3, activation="relu", padding="same")(m22)
        c3 = layers.BatchNormalization(axis=-1)(c3)
        c3 = layers.Conv2D(128, 3, activation="relu", padding="same")(c3)
        c3 = layers.BatchNormalization(axis=-1)(c3)
        c3 = layers.Conv2D(128, 3, activation="relu", padding="same")(c3)
        c3 = layers.BatchNormalization(axis=-1)(c3)
        b3 = layers.Conv2D(128, 3, activation="relu", padding="same")(m22)
        b3 = layers.BatchNormalization(axis=-1)(b3)
        b3 = layers.Conv2D(128, 3, activation="relu", padding="same")(b3)
        b3 = layers.BatchNormalization(axis=-1)(b3)
        a3 = layers.Conv2D(128, 3, activation="relu", padding="same")(m22)
        a3 = layers.BatchNormalization(axis=-1)(a3)
        m3 = layers.add([a3, b3, c3, d3])
        # Project the three block outputs m1, m2 and m3 to 128 channels with
        # 1x1 convolutions so they can be fused by element-wise addition.
        n1 = layers.Conv2D(128, 1, padding='same')(m1)
        n2 = layers.Conv2D(128, 1, padding='same')(m2)
        n3 = layers.Conv2D(128, 1, padding='same')(m3)
        x = layers.add([n1, n2, n3])
        x = Activation('relu')(x)
        x = layers.Conv2D(256, 3, activation="relu", padding="same")(x)
        x = layers.BatchNormalization(axis=-1)(x)
        x = layers.MaxPooling2D(2)(x)
        x = layers.GlobalAveragePooling2D()(x)
        # Softmax classifier.
        output = layers.Dense(classes, activation='softmax')(x)
        model = Model(inputs=my_input, outputs=output)
        return model
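

# A minimal usage sketch, not part of the original file: it shows how build()
# might be called and the resulting model compiled. The 32x32x3 input size,
# the 10-class output, and the Adam/categorical cross-entropy settings are
# illustrative assumptions, and the "l2"/"he_normal" arguments are
# placeholders, since build() accepts kernel_reg and kernel_init but does not
# apply them to any layer.
if __name__ == "__main__":
    model = Union_Net.build(width=32, height=32, depth=3, classes=10,
                            kernel_reg="l2", kernel_init="he_normal")
    model.compile(optimizer="adam", loss="categorical_crossentropy",
                  metrics=["accuracy"])
    model.summary()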