@@ -20,6 +20,8 @@
 from __future__ import print_function
 
 
+import tensorflow as tf
+
 from tensorflow.python.keras.models import Model
 from tensorflow.python.keras import layers
 from tensorflow.python.keras.layers import Input
@@ -71,17 +73,17 @@ def SepConv_BN(x, filters, prefix, stride=1, kernel_size=3, rate=1, depth_activation
         depth_padding = 'valid'
 
     if not depth_activation:
-        x = Activation('relu')(x)
+        x = Activation('elu')(x)
     x = DepthwiseConv2D((kernel_size, kernel_size), strides=(stride, stride), dilation_rate=(rate, rate),
                         padding=depth_padding, use_bias=False, name=prefix + '_depthwise')(x)
     x = BatchNormalization(name=prefix + '_depthwise_BN', epsilon=epsilon)(x)
     if depth_activation:
-        x = Activation('relu')(x)
+        x = Activation('elu')(x)
     x = Conv2D(filters, (1, 1), padding='same',
                use_bias=False, name=prefix + '_pointwise')(x)
     x = BatchNormalization(name=prefix + '_pointwise_BN', epsilon=epsilon)(x)
     if depth_activation:
-        x = Activation('relu')(x)
+        x = Activation('elu')(x)
 
     return x
 
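The only functional change in this hunk is swapping the block's three ReLU activations for ELU. A tiny sketch of what that changes numerically (not part of the commit; assumes TensorFlow 2.x eager execution): ELU keeps a small negative output and a nonzero gradient for negative pre-activations instead of clamping them to zero.

```python
# Illustrative only: compare ReLU and ELU on negative pre-activations.
import tensorflow as tf

x = tf.constant([-2.0, -0.5, 0.0, 1.5])
print(tf.nn.relu(x).numpy())  # approx. [ 0.0,    0.0,    0.0, 1.5]
print(tf.nn.elu(x).numpy())   # approx. [-0.865, -0.393,  0.0, 1.5]
```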
@@ -273,11 +275,11 @@ def Deeplabv3(weights='pascal_voc', input_tensor=None, input_shape=(512, 512, 3)
         x = Conv2D(32, (3, 3), strides=(2, 2),
                    name='entry_flow_conv1_1', use_bias=False, padding='same')(img_input)
         x = BatchNormalization(name='entry_flow_conv1_1_BN')(x)
-        x = Activation('relu')(x)
+        x = Activation('elu')(x)
 
         x = _conv2d_same(x, 64, 'entry_flow_conv1_2', kernel_size=3, stride=1)
         x = BatchNormalization(name='entry_flow_conv1_2_BN')(x)
-        x = Activation('relu')(x)
+        x = Activation('elu')(x)
 
         x = _xception_block(x, [128, 128, 128], 'entry_flow_block1',
                             skip_connection_type='conv', stride=2,
@@ -366,14 +368,14 @@ def Deeplabv3(weights='pascal_voc', input_tensor=None, input_shape=(512, 512, 3)
     b4 = Conv2D(256, (1, 1), padding='same',
                 use_bias=False, name='image_pooling')(b4)
     b4 = BatchNormalization(name='image_pooling_BN', epsilon=1e-5)(b4)
-    b4 = Activation('relu')(b4)
+    b4 = Activation('elu')(b4)
     # upsample. have to use compat because of the option align_corners
     size_before = K.int_shape(x)
     b4 = UpSampling2D(size=(size_before[1], size_before[2]), interpolation='bilinear')(b4)
     # simple 1x1
     b0 = Conv2D(256, (1, 1), padding='same', use_bias=False, name='aspp0')(x)
     b0 = BatchNormalization(name='aspp0_BN', epsilon=1e-5)(b0)
-    b0 = Activation('relu', name='aspp0_activation')(b0)
+    b0 = Activation('elu', name='aspp0_activation')(b0)
 
     # there are only 2 branches in mobilenetV2. not sure why
     if backbone == 'xception':
@@ -395,8 +397,8 @@ def Deeplabv3(weights='pascal_voc', input_tensor=None, input_shape=(512, 512, 3)
     x = Conv2D(256, (1, 1), padding='same',
                use_bias=False, name='concat_projection')(x)
     x = BatchNormalization(name='concat_projection_BN', epsilon=1e-5)(x)
-    x = Activation('relu')(x)
-    x = Dropout(0.1)(x)
+    x = Activation('elu')(x)
+    # x = Dropout(0.1)(x)
     # DeepLab v.3+ decoder
 
     if backbone == 'xception':
@@ -409,7 +411,7 @@ def Deeplabv3(weights='pascal_voc', input_tensor=None, input_shape=(512, 512, 3)
                            use_bias=False, name='feature_projection0')(skip1)
         dec_skip1 = BatchNormalization(
             name='feature_projection0_BN', epsilon=1e-5)(dec_skip1)
-        dec_skip1 = Activation('relu')(dec_skip1)
+        dec_skip1 = Activation('elu')(dec_skip1)
         x = Concatenate()([x, dec_skip1])
         x = SepConv_BN(x, 256, 'decoder_conv0',
                        depth_activation=True, epsilon=1e-5)
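A hedged smoke test for the change as a whole (not part of this commit): rebuild the model and check that it still wires up with ELU activations and without the concat-projection dropout. The module name and constructor keywords below are assumptions based on the upstream keras-deeplab-v3-plus API; `weights=None` is used because the published checkpoints were trained with the original ReLU/dropout configuration.

```python
# Assumed module name and constructor keywords; adjust to this repo's layout.
from model import Deeplabv3

net = Deeplabv3(weights=None, input_shape=(512, 512, 3), backbone='xception')
net.summary()

# Dropout(0.1) after concat_projection is commented out in this commit,
# so no Dropout layer should remain in the graph.
assert not any(layer.__class__.__name__ == 'Dropout' for layer in net.layers)
```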