天天看點

原始圖檔中的ROI如何映射到feature map上?

主要參考曉雷:原始圖檔中的ROI如何映射到feature map?

寫的非常好,從卷積神經網絡物體檢測之感受野大小計算 - machineLearning - 部落格園粘了代碼之後,做了一些改動,目的是為了實作:原圖上的框圖坐标映射到網絡各個層時候相應的坐标,以及特征圖上的框圖坐标反映射到各個層上時候相應的坐标。主要用到

原始圖檔中的ROI如何映射到feature map上?

這個

原始圖檔中的ROI如何映射到feature map上?

公式,以及它的反推 

原始圖檔中的ROI如何映射到feature map上?

代碼如下:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
import copy
# Layer tables for three classic CNNs.
# Each entry of 'net' is [filter_size, stride, padding] for one conv/pool
# layer, ordered from the input (bottom) up to the top of the network;
# 'name' holds the matching layer names at the same indices.
net_struct = {
    'alexnet': {'net': [[11, 4, 0], [3, 2, 0], [5, 1, 2], [3, 2, 0], [3, 1, 1], [3, 1, 1], [3, 1, 1], [3, 2, 0]],
                'name': ['conv1', 'pool1', 'conv2', 'pool2', 'conv3', 'conv4', 'conv5', 'pool5']},
    'zf-5': {'net': [[7, 2, 3], [3, 2, 1], [5, 2, 2], [3, 2, 1], [3, 1, 1], [3, 1, 1], [3, 1, 1]],
             'name': ['conv1', 'pool1', 'conv2', 'pool2', 'conv3', 'conv4', 'conv5']},
   'vgg16': {'net': [[3, 1, 1], [3, 1, 1], [2, 2, 0], [3, 1, 1], [3, 1, 1], [2, 2, 0], [3, 1, 1], [3, 1, 1], [3, 1, 1],
                     [2, 2, 0], [3, 1, 1], [3, 1, 1], [3, 1, 1], [2, 2, 0], [3, 1, 1], [3, 1, 1], [3, 1, 1],
                     [2, 2, 0]],
             'name': ['conv1_1', 'conv1_2', 'pool1', 'conv2_1', 'conv2_2', 'pool2', 'conv3_1', 'conv3_2',
                      'conv3_3', 'pool3', 'conv4_1', 'conv4_2', 'conv4_3', 'pool4', 'conv5_1', 'conv5_2', 'conv5_3',
                      'pool5']}}

# Side length of the (square) input image, in pixels.
imsize = 224

def outFromIn(isz, net, layernum):
    """Propagate a square input of size ``isz`` through the first
    ``layernum`` layers of ``net``.

    Each layer is ``[filter_size, stride, padding]``.  Returns a tuple
    ``(outsize, totstride)``: the spatial size of the feature map after
    those layers, and the cumulative stride relative to the input.

    Fixes vs. the original: ``outsize`` was unbound when ``layernum == 0``
    (UnboundLocalError); integer floor division ``//`` keeps the result an
    int under Python 3 (identical to Python 2's int ``/``).
    """
    totstride = 1
    outsize = isz  # with zero layers the "output" is the input itself
    for fsize, stride, pad in net[:layernum]:
        # Standard conv/pool output-size formula (floor division).
        outsize = (outsize - fsize + 2 * pad) // stride + 1
        totstride *= stride
    return outsize, totstride

def inFromOut(net, layernum):
    """Return the receptive-field size (in input pixels) of one unit in the
    output of layer ``layernum``, walking back down through the first
    ``layernum`` layers of ``net`` (each ``[filter_size, stride, padding]``).
    """
    rf_size = 1
    # Walk top -> bottom: one output pixel covers `fsize` inputs, and the
    # gap between neighbouring outputs is `stride` inputs.
    for fsize, stride, _pad in reversed(net[:layernum]):
        rf_size = (rf_size - 1) * stride + fsize
    return rf_size

def back_forth_rf(net, mm, nn):
    """Map box coordinates between the top feature map and the input image.

    ``net`` is a list of ``[filter_size, stride, padding]`` layers, bottom
    to top.  ``mm`` is a box (4 coordinates) on the TOP feature map; it is
    mapped back layer by layer towards the input.  ``nn`` is a box on the
    INPUT image; it is mapped forward layer by layer towards the top.
    Uses p_in = stride * p_out + ((fsize - 1) / 2 - pad) and its inverse.

    Returns ``(mmm, nnn)`` where ``mmm[i]``/``nnn[i]`` is the box after
    layer ``i`` (both ordered bottom -> top).

    Fixes vs. the original: the result lists were module-level globals
    (NameError when called standalone) and the caller's ``mm``/``nn`` lists
    were mutated in place; ``//`` keeps coordinates integral on Python 3.
    """
    mmm = []  # per-layer boxes mapped back from the top feature map
    nnn = []  # per-layer boxes mapped forward from the input image
    box = list(mm)
    # Top -> bottom: invert each layer's coordinate transform.
    for fsize, stride, pad in reversed(net):
        offset = (fsize - 1) // 2 - pad
        box = [stride * c + offset for c in box]
        mmm.append(box)
    mmm.reverse()  # collected top->bottom; report bottom->top like nnn
    box = list(nn)
    # Bottom -> top: apply each layer's coordinate transform.
    for fsize, stride, pad in net:
        offset = (fsize - 1) // 2 - pad
        box = [(c - offset) // stride for c in box]
        nnn.append(box)
    return mmm, nnn

if __name__ == '__main__':
    # Single-argument parenthesized print(...) is valid on both Python 2
    # and Python 3 (the original Python-2-only `print x` statements broke
    # under Python 3).
    print("layer output sizes given image = %dx%d" % (imsize, imsize))
    for net in net_struct:
        print('************net structrue name is %s**************' % net)
        ppp = []  # (outsize, totstride) per layer
        rff = []  # receptive-field size per layer
        mmm = []  # kept for the global-based back_forth_rf variant
        nnn = []  # kept for the global-based back_forth_rf variant
        # Arbitrary demo box on each network's top feature map, sized to fit
        # that network's top layer.
        if net == 'zf-5':
            mm = [14, 14, 12, 13]
        elif net == 'vgg16':
            mm = [7, 7, 6, 2]
        else:
            mm = [5, 5, 2, 3]
        nn = [96, 96, 56, 65]  # arbitrary demo box on the input image
        layers = net_struct[net]['net']
        names = net_struct[net]['name']
        for i in range(len(layers)):
            ppp.append(outFromIn(imsize, layers, i + 1))
            rff.append(inFromOut(layers, i + 1))
        q = back_forth_rf(layers, mm, nn)
        for i in range(len(layers)):
            print("Layer Name = %s, Output size = %3d, Stride = % 3d, RF size = %3d, forth RF=%3s, back RF=%3s" % (
                names[i], ppp[i][0], ppp[i][1], rff[i], q[0][i], q[1][i]))

           

運作結果如下:

D:\Anaconda\python.exe E:/CNN/感受野遺留問題/RF.py
layer output sizes given image = 224x224
************net structrue name is vgg16**************
Layer Name = conv1_1, Output size = 224, Stride =   1, RF size =   3, forth RF=[224, 224, 192, 64], back RF=[96, 96, 56, 65]
Layer Name = conv1_2, Output size = 224, Stride =   1, RF size =   5, forth RF=[224, 224, 192, 64], back RF=[96, 96, 56, 65]
Layer Name = pool1, Output size = 112, Stride =   2, RF size =   6, forth RF=[224, 224, 192, 64], back RF=[48, 48, 28, 32]
Layer Name = conv2_1, Output size = 112, Stride =   2, RF size =  10, forth RF=[112, 112, 96, 32], back RF=[48, 48, 28, 32]
Layer Name = conv2_2, Output size = 112, Stride =   2, RF size =  14, forth RF=[112, 112, 96, 32], back RF=[48, 48, 28, 32]
Layer Name = pool2, Output size =  56, Stride =   4, RF size =  16, forth RF=[112, 112, 96, 32], back RF=[24, 24, 14, 16]
Layer Name = conv3_1, Output size =  56, Stride =   4, RF size =  24, forth RF=[56, 56, 48, 16], back RF=[24, 24, 14, 16]
Layer Name = conv3_2, Output size =  56, Stride =   4, RF size =  32, forth RF=[56, 56, 48, 16], back RF=[24, 24, 14, 16]
Layer Name = conv3_3, Output size =  56, Stride =   4, RF size =  40, forth RF=[56, 56, 48, 16], back RF=[24, 24, 14, 16]
Layer Name = pool3, Output size =  28, Stride =   8, RF size =  44, forth RF=[56, 56, 48, 16], back RF=[12, 12, 7, 8]
Layer Name = conv4_1, Output size =  28, Stride =   8, RF size =  60, forth RF=[28, 28, 24, 8], back RF=[12, 12, 7, 8]
Layer Name = conv4_2, Output size =  28, Stride =   8, RF size =  76, forth RF=[28, 28, 24, 8], back RF=[12, 12, 7, 8]
Layer Name = conv4_3, Output size =  28, Stride =   8, RF size =  92, forth RF=[28, 28, 24, 8], back RF=[12, 12, 7, 8]
Layer Name = pool4, Output size =  14, Stride =  16, RF size = 100, forth RF=[28, 28, 24, 8], back RF=[6, 6, 3, 4]
Layer Name = conv5_1, Output size =  14, Stride =  16, RF size = 132, forth RF=[14, 14, 12, 4], back RF=[6, 6, 3, 4]
Layer Name = conv5_2, Output size =  14, Stride =  16, RF size = 164, forth RF=[14, 14, 12, 4], back RF=[6, 6, 3, 4]
Layer Name = conv5_3, Output size =  14, Stride =  16, RF size = 196, forth RF=[14, 14, 12, 4], back RF=[6, 6, 3, 4]
Layer Name = pool5, Output size =   7, Stride =  32, RF size = 212, forth RF=[14, 14, 12, 4], back RF=[3, 3, 1, 2]
************net structrue name is zf-5**************
Layer Name = conv1, Output size = 112, Stride =   2, RF size =   7, forth RF=[224, 224, 192, 208], back RF=[48, 48, 28, 32]
Layer Name = pool1, Output size =  56, Stride =   4, RF size =  11, forth RF=[112, 112, 96, 104], back RF=[24, 24, 14, 16]
Layer Name = conv2, Output size =  28, Stride =   8, RF size =  27, forth RF=[56, 56, 48, 52], back RF=[12, 12, 7, 8]
Layer Name = pool2, Output size =  14, Stride =  16, RF size =  43, forth RF=[28, 28, 24, 26], back RF=[6, 6, 3, 4]
Layer Name = conv3, Output size =  14, Stride =  16, RF size =  75, forth RF=[14, 14, 12, 13], back RF=[6, 6, 3, 4]
Layer Name = conv4, Output size =  14, Stride =  16, RF size = 107, forth RF=[14, 14, 12, 13], back RF=[6, 6, 3, 4]
Layer Name = conv5, Output size =  14, Stride =  16, RF size = 139, forth RF=[14, 14, 12, 13], back RF=[6, 6, 3, 4]
************net structrue name is alexnet**************
Layer Name = conv1, Output size =  54, Stride =   4, RF size =  11, forth RF=[193, 193, 97, 129], back RF=[22, 22, 12, 15]
Layer Name = pool1, Output size =  26, Stride =   8, RF size =  19, forth RF=[47, 47, 23, 31], back RF=[10, 10, 5, 7]
Layer Name = conv2, Output size =  26, Stride =   8, RF size =  51, forth RF=[23, 23, 11, 15], back RF=[10, 10, 5, 7]
Layer Name = pool2, Output size =  12, Stride =  16, RF size =  67, forth RF=[23, 23, 11, 15], back RF=[4, 4, 2, 3]
Layer Name = conv3, Output size =  12, Stride =  16, RF size =  99, forth RF=[11, 11, 5, 7], back RF=[4, 4, 2, 3]
Layer Name = conv4, Output size =  12, Stride =  16, RF size = 131, forth RF=[11, 11, 5, 7], back RF=[4, 4, 2, 3]
Layer Name = conv5, Output size =  12, Stride =  16, RF size = 163, forth RF=[11, 11, 5, 7], back RF=[4, 4, 2, 3]
Layer Name = pool5, Output size =   5, Stride =  32, RF size = 195, forth RF=[11, 11, 5, 7], back RF=[1, 1, 0, 1]

Process finished with exit code 0
           

學習總結,不喜勿噴。

繼續閱讀