def AttentionRefinmentModule(inputs, n_filters):
    """Attention Refinement Module (ARM) from the BiSeNet paper.

    Computes a per-channel attention vector from the input feature map and
    rescales the input by it.

    Args:
        inputs: 4-D feature-map tensor (batch, height, width, channels)
            — presumably channels-last; confirm against the rest of the model.
        n_filters: number of output channels for the 1x1 conv, which also
            determines the length of the attention vector.

    Returns:
        Tensor of the same shape as ``inputs``, element-wise multiplied by
        the sigmoid attention map.
    """
    filters = n_filters
    # NOTE(review): the BiSeNet paper specifies *global* average pooling here,
    # but pool_size=(1, 1) average pooling is effectively an identity op.
    # Switching to GlobalAveragePooling2D would change the tensor rank and
    # downstream behavior, so it is left as-is — confirm intent with the author.
    poolingLayer = AveragePooling2D(pool_size=(1, 1), padding='same')
    x = poolingLayer(inputs)
    # 1x1 conv + batch norm with sigmoid activation produces the attention
    # weights in [0, 1].
    x = ConvAndBatch(x, kernel=(1, 1), n_filters=filters, activation='sigmoid')
    # Rescale the input features by the attention weights.
    return multiply([inputs, x])
def AttentionRefinmentModule(inputs, n_filters):
filters = n_filters
#the paper said it is globalAveragePooling ,but here ?????????