
Commit 27c576d

modify network
1 parent 2200972 commit 27c576d

12 files changed: +168 -134 lines changed

config/cfg.py

+4 -2

@@ -8,16 +8,18 @@
 common_params = {
     'batch_size':64,
     'image_size':(32,32),
-    'learning_rate':0.01,
+    'learning_rate':0.1,
     'moment':0.9,
-    'display_step':10,
+    'display_step':100,
     'num_epochs':200,
     'predict_step':500
 }


 graph_node = {
     'input':'input:0',
+    'is_training':'is_training:0',
+    'keep_prob':'keep_prob:0',
     'output':'output:0'
 }
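The two new graph_node entries expose the training switches as named tensors, so a restored or frozen graph can be driven purely by name. Below is a minimal inference sketch, not part of this commit: the frozen-graph path, the stand-in batch, and the values fed to the switches are assumptions for illustration; the tensor names come from graph_node above, and the import path assumes the repo layout shown in this diff.

import numpy as np
import tensorflow as tf
from config.cfg import graph_node

# Assumed location of an exported GraphDef; adjust to wherever the model is frozen.
with tf.gfile.GFile('model/frozen_graph.pb', 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
tf.import_graph_def(graph_def, name='')

images = np.zeros((1, 32, 32, 3), dtype=np.float32)   # stand-in batch matching common_params['image_size']

with tf.Session() as sess:
    inputs = sess.graph.get_tensor_by_name(graph_node['input'])
    outputs = sess.graph.get_tensor_by_name(graph_node['output'])
    is_training = sess.graph.get_tensor_by_name(graph_node['is_training'])
    keep_prob = sess.graph.get_tensor_by_name(graph_node['keep_prob'])

    probs = sess.run(outputs, feed_dict={
        inputs: images,
        is_training: False,   # switch batch norm and friends to inference behaviour
        keep_prob: 1.0,       # conventional "keep everything" value; exact semantics depend on how the graph wires it
    })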

network/denseNet.py

+17 -15

@@ -8,7 +8,7 @@
 
 class DenseNet(object):
     """docstring for DenseNet"""
-    def __init__(self, k = 12,L = 40,base = True,num_classes=10,is_training=True,):
+    def __init__(self,is_training,keep_prob, k = 12,L = 40,base = True,num_classes=10):
         super(DenseNet, self).__init__()
         self.num_classes = num_classes
         self.is_training = is_training
@@ -22,6 +22,8 @@ def __init__(self, k = 12,L = 40,base = True,num_classes=10,is_training=True,):
 
         self.per_block_num = (L - 4)//3 if self.base else (L - 4)//6
 
+        self.keep_prob = keep_prob
+
     def bottle_block(self,inputs):
         internel_out = tf.identity(inputs)
         for i in range(self.per_block_num):
@@ -67,8 +69,8 @@ def forward(self,inputs):
         out = self.bottle_block(out)
         out = tf.layers.average_pooling2d(out,pool_size=8,strides=8,padding='same')
         out = tf.layers.flatten(out)
-        out = tf.layers.dropout(out,rate=0.5)
-        predicts = tf.layers.dense(out,units=self.num_classes,kernel_regularizer=self.regularizer,name='fc')
+        out = tf.layers.dropout(out,rate=self.keep_prob)
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,kernel_regularizer=self.regularizer,name='fc')
         softmax_out = tf.nn.softmax(predicts,name='output')
         return predicts,softmax_out
 
@@ -79,29 +81,29 @@ def loss(self,predicts,labels):
         return losses
 
 
-def DensetNet40_12():
-    net = DenseNet(k=12,L = 40)
+def DensetNet40_12(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k=12,L = 40)
     return net
 
 
-def DenseNet100_12():
-    net = DenseNet(k=12,L=100)
+def DenseNet100_12(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k=12,L=100)
     return net
 
-def DenseNet100_24():
-    net = DenseNet(k=24,L = 100)
+def DenseNet100_24(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k=24,L = 100)
     return net
 
-def DenseNetBC100_12():
-    net = DenseNet(k = 12,L = 100,base=False)
+def DenseNetBC100_12(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k = 12,L = 100,base=False)
     return net
 
-def DenseNetBC250_24():
-    net = DenseNet(k = 24,L = 250,base = False)
+def DenseNetBC250_24(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k = 24,L = 250,base = False)
     return net
 
-def DenseNetBC190_40():
-    net = DenseNet(k = 40,L = 190,base = False)
+def DenseNetBC190_40(is_training=True,keep_prob=0.5):
+    net = DenseNet(is_training=is_training,keep_prob = keep_prob,k = 40,L = 190,base = False)
     return net
 
 
network/mobileNet.py

+4 -4

@@ -7,15 +7,15 @@
 
 class MobileNet(object):
     """docstring for MobileNet"""
-    def __init__(self, num_classes=10,is_training=True):
+    def __init__(self,is_training=True,keep_prob=0.5,num_classes=10):
         super(MobileNet, self).__init__()
         self.num_classes = num_classes
         self.is_training = is_training
         self.conv_num = 0
         self.weight_decay = 5e-4
         self.regularizer = tf.contrib.layers.l2_regularizer(scale=self.weight_decay)
         self.initializer = tf.contrib.layers.xavier_initializer()
-
+        self.keep_prob = keep_prob
 
     def conv2d(self,inputs,out_channel,kernel_size=1,stride=1):
         inputs = tf.layers.conv2d(inputs,filters=out_channel,kernel_size=kernel_size,strides=stride,padding='same',
@@ -62,8 +62,8 @@ def forward(self,inputs):
         out = self.separable_conv2d(out,out_channel=1024,kernel_size=[3,3,1024,1],stride=1)
         out = tf.layers.average_pooling2d(out,pool_size=1,strides=1)
         out = tf.layers.flatten(out)
-        out = tf.layers.dropout(out,rate=0.5)
-        predicts = tf.layers.dense(out,units=self.num_classes,kernel_regularizer=self.regularizer,name='fc')
+        out = tf.layers.dropout(out,rate=self.keep_prob)
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,kernel_regularizer=self.regularizer,name='fc')
         softmax_out = tf.nn.softmax(predicts,name='output')
         return predicts,softmax_out
 
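One point worth flagging about the dropout rewiring (here and in the other networks): tf.layers.dropout interprets rate as the fraction of units to drop, and it only drops anything when its training argument is true. The standalone snippet below just demonstrates those semantics; it is an illustration, not code from this repository.

import tensorflow as tf

x = tf.ones([1, 8])
keep_prob = 0.5
# rate is the drop fraction, so a keep-probability convention maps to rate = 1.0 - keep_prob.
# Without training=True the layer is an identity op regardless of rate.
dropped = tf.layers.dropout(x, rate=1.0 - keep_prob, training=True)

with tf.Session() as sess:
    print(sess.run(dropped))   # roughly half the entries zeroed, survivors scaled by 1/keep_prob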

network/resnet.py

+11 -10

@@ -7,13 +7,14 @@
 
 class Resnet(object):
     """docstring for Resnet"""
-    def __init__(self, stack_num=3,num_classes=10,is_training=True):
+    def __init__(self,is_training,keep_prob,stack_num=3,num_classes=10):
         super(Resnet, self).__init__()
         self.num_classes = num_classes
         self.is_training = is_training
         self.regularizer = tf.contrib.layers.l2_regularizer(scale=1e-4)
         self.initializer = tf.contrib.layers.xavier_initializer()
         self.stack_num = stack_num
+        self.keep_prob = keep_prob
 
     def residual_block(self,inputs,output_channel,stride=[1,1]):
         residual = tf.identity(inputs)
@@ -40,7 +41,7 @@ def forward(self,inputs):
         out = self.make_layer(out,[64,64])
         out = tf.layers.average_pooling2d(out,pool_size=8,strides=1)
         out = tf.layers.flatten(out)
-        predicts = tf.layers.dense(out,units=self.num_classes,kernel_regularizer=self.regularizer)
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,kernel_regularizer=self.regularizer)
         softmax_out = tf.nn.softmax(predicts,name='output')
         return predicts,softmax_out
 
@@ -63,23 +64,23 @@ def loss(self,predicts,labels):
 
 '''
 
-def resnet20():
-    net = Resnet(stack_num=3)
+def resnet20(is_training=True,keep_prob=0.5):
+    net = Resnet(is_training=is_training,keep_prob=keep_prob,stack_num=3)
     return net
 
 
-def resnet32():
-    net = Resnet(stack_num=5)
+def resnet32(is_training=True,keep_prob=0.5):
+    net = Resnet(is_training=is_training,keep_prob=keep_prob,stack_num=5)
     return net
 
 
-def resnet44():
-    net = Resnet(stack_num=7)
+def resnet44(is_training=True,keep_prob=0.5):
+    net = Resnet(is_training=is_training,keep_prob=keep_prob,stack_num=7)
     return net
 
 
-def resnet56():
-    net = Resnet(stack_num=9)
+def resnet56(is_training=True,keep_prob=0.5):
+    net = Resnet(is_training=is_training,keep_prob=keep_prob,stack_num=9)
     return net
 
 
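Besides threading is_training/keep_prob through, each network's classifier now passes kernel_initializer=self.initializer, i.e. the xavier_initializer already created in __init__. A minimal standalone equivalent of that dense-layer call is sketched below for reference; the feature width of 64 is just an illustrative value, not taken from the repository.

import tensorflow as tf

initializer = tf.contrib.layers.xavier_initializer()
regularizer = tf.contrib.layers.l2_regularizer(scale=1e-4)

features = tf.placeholder(tf.float32, [None, 64])
logits = tf.layers.dense(features, units=10,
                         kernel_initializer=initializer,   # Glorot/Xavier weight init made explicit
                         kernel_regularizer=regularizer)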

network/resnext.py

+8 -8

@@ -8,7 +8,7 @@
 
 class resNext(object):
     """docstring for resNext"""
-    def __init__(self, block_num,num_classes=10,is_training=True):
+    def __init__(self, block_num,is_training,keep_prob,num_classes=10):
         super(resNext, self).__init__()
         self.num_classes = num_classes
         self.is_training = is_training
@@ -21,6 +21,7 @@ def __init__(self, block_num,num_classes=10,is_training=True):
         self.initializer = tf.contrib.layers.xavier_initializer()
 
         self.cardinality = 32
+        self.keep_prob = keep_prob
 
     def bottleneck_layer(self,inputs,out_channel,stride):
         residual = tf.identity(inputs)
@@ -85,8 +86,8 @@ def forward(self,inputs):
 
         out = tf.layers.average_pooling2d(out,pool_size=(pool_size,pool_size),strides=(stride,stride),name='avg_pool')
         out = tf.layers.flatten(out,name='flatten')
-        out = tf.layers.dropout(out,rate=0.5,name='dropout')
-        predicts = tf.layers.dense(out,units=self.num_classes,kernel_regularizer=self.regularizer,name='fc')
+        out = tf.layers.dropout(out,rate=self.keep_prob,name='dropout')
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,kernel_regularizer=self.regularizer,name='fc')
         softmax_out = tf.nn.softmax(predicts,name='output')
 
         return predicts,softmax_out
@@ -111,24 +112,23 @@ def loss(self,predicts,labels):
 
 
 
-def ResNext50():
+def ResNext50(is_training=True,keep_prob=0.5):
     '''
     stage1: 1x1x128->group conv->3x3x128 cardinality 32 ->1x1x256
     stage2: 1x1x256->group conv->3x3x256 cardinality 32 ->1x1x512
     stage3: 1x1x512->group conv->3x3x512 cardinality 32 ->1x1x1024
     stage4: 1x1x1024->group conv->3x3x1024 cardinality 32 ->1x1x2048
     '''
-    net = resNext([3,4,6,3])
+    net = resNext([3,4,6,3],is_training=is_training,keep_prob=keep_prob)
     return net
 
 
 
-def ResNext101():
-    net = resNext([3,4,23,3])
+def ResNext101(is_training=True,keep_prob=0.5):
+    net = resNext([3,4,23,3],is_training=is_training,keep_prob=keep_prob)
     return net
 
 
-
 if __name__=='__main__':
     with tf.device('/cpu:0'):
         net = ResNext50()
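The file's own __main__ block builds ResNext50() on the CPU; a slightly fuller smoke test of the new signature might look like the sketch below. The input placeholder shape and the printed expectation are assumptions for illustration, not part of this commit.

import tensorflow as tf
from network.resnext import ResNext50

with tf.device('/cpu:0'):
    net = ResNext50(is_training=False, keep_prob=0.5)
    images = tf.placeholder(tf.float32, [None, 32, 32, 3], name='input')
    predicts, softmax_out = net.forward(images)

print(predicts.shape)   # expected (?, 10): one logit per class (num_classes=10)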

network/seNet.py

+8 -7

@@ -8,10 +8,11 @@
 
 class SeNet(object):
     """docstring for SeNet"""
-    def __init__(self, block_num,num_classes=10,is_training=True):
+    def __init__(self, block_num,is_training,keep_prob,num_classes=10):
         super(SeNet, self).__init__()
         self.num_classes = num_classes
         self.is_training = is_training
+        self.keep_prob = keep_prob
         self.block_nums = block_num
 
         self.conv_num = 0
@@ -87,8 +88,8 @@ def forward(self,inputs):
         h,w = inputs.shape[1] // 32 ,inputs.shape[2] // 32
         out = tf.layers.average_pooling2d(out,pool_size=(h,w),strides=(h,w),padding='same',name='average_pool_'+str(self.average_num))
         out = tf.layers.flatten(out,name='flatten')
-        out = tf.layers.dropout(out,rate=0.5,name='dropout')
-        predicts = tf.layers.dense(out,units=self.num_classes,kernel_regularizer=self.regularizer,name='fc_'+str(self.fc_num))
+        out = tf.layers.dropout(out,rate=self.keep_prob,name='dropout')
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,kernel_regularizer=self.regularizer,name='fc_'+str(self.fc_num))
         softmax_out = tf.nn.softmax(predicts,name='output')
 
         return predicts,softmax_out
@@ -111,12 +112,12 @@ def loss(self,predicts,labels):
 
 
 
-def SE_Resnet_50():
-    net = SeNet([3,4,6,3])
+def SE_Resnet_50(is_training=True,keep_prob=0.5):
+    net = SeNet([3,4,6,3],is_training=is_training,keep_prob=keep_prob)
     return net
 
-def SE_Resnet_101():
-    net = SeNet([3,4,23,3])
+def SE_Resnet_101(is_training=True,keep_prob=0.5):
+    net = SeNet([3,4,23,3],is_training=is_training,keep_prob=keep_prob)
     return net
 
 

network/squeezeNet.py

+10 -7

@@ -18,7 +18,7 @@
 
 class SqueezeNet(object):
     """docstring for SqueezeNet"""
-    def __init__(self, net_config,mode='A',num_classes=10):
+    def __init__(self, net_config,is_training,keep_prob,mode='A',num_classes=10):
         super(SqueezeNet, self).__init__()
         self.num_classes = num_classes
         self.conv_num = 0
@@ -31,6 +31,9 @@ def __init__(self, net_config,mode='A',num_classes=10):
         self.pct33 = net_config['pct33']
         self.freq = net_config['freq']
 
+        self.is_training = is_training
+        self.keep_prob = keep_prob
+
         if mode=='A':
             self.make_layer = self.make_layerA
         elif mode =='B':
@@ -81,8 +84,8 @@ def forward(self,inputs):
 
         out = tf.layers.average_pooling2d(out,pool_size=(pool_size,pool_size),strides=(stride,stride),name='avg_pool_0')
         out = tf.layers.flatten(out,name='flatten')
-        out = tf.layers.dropout(out,rate=0.5,name='dropout')
-        predicts = tf.layers.dense(out,units=self.num_classes,name='fc')
+        out = tf.layers.dropout(out,rate=self.keep_prob,name='dropout')
+        predicts = tf.layers.dense(out,units=self.num_classes,kernel_initializer=self.initializer,name='fc')
         softmax_out = tf.nn.softmax(predicts,name='output')
 
         return predicts,softmax_out
@@ -122,13 +125,13 @@ def loss(self,predicts,labels):
 
 
 
-def SqueezeNetA():
-    net = SqueezeNet(net_config=net_config)
+def SqueezeNetA(is_training=True,keep_prob=0.5):
+    net = SqueezeNet(net_config=net_config,is_training=is_training,keep_prob=keep_prob)
     return net
 
 
-def SqueezeNetB():
-    net = SqueezeNet(net_config=net_config,mode='B')
+def SqueezeNetB(is_training=True,keep_prob=0.5):
+    net = SqueezeNet(net_config=net_config,is_training=is_training,keep_prob=keep_prob,mode='B')
     return net
 
 
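Because every factory gained is_training=True and keep_prob=0.5 as keyword defaults, existing call sites keep working (the defaults reproduce the previous hard-coded values) while new code can thread real switches through. A small compatibility sketch, an assumption rather than code from this commit; the import path follows the file layout shown above.

import tensorflow as tf
from network.squeezeNet import SqueezeNetA

net_old_style = SqueezeNetA()   # old call style still valid: training mode, keep_prob 0.5

is_training = tf.placeholder(tf.bool, name='is_training')
keep_prob = tf.placeholder(tf.float32, shape=[], name='keep_prob')
net_fed = SqueezeNetA(is_training=is_training, keep_prob=keep_prob)   # values supplied at session run time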
