Paddle 动态图tensor初始化问题

kr98yfug  于 2021-11-30  发布在  Python
关注(0)|答案(2)|浏览(332)

动态图运行报错:

网络结构如下:

import os
import logging
import functools
import paddle
import paddle.fluid as fluid
from paddle.fluid.layer_helper import LayerHelper
from paddle.fluid.dygraph.nn import Conv2D, Conv2DTranspose, Pool2D, BatchNorm, FC

import numpy as np
import random

logger = logging.getLogger(__name__)

class ConvBNLayer(fluid.dygraph.Layer):
    """A Conv2D immediately followed by BatchNorm.

    The activation (``act``), if any, is applied inside the batch-norm
    layer, not the convolution.
    """

    def __init__(self,
                 name_scope,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None):
        super(ConvBNLayer, self).__init__(name_scope)

        # NOTE(review): num_channels is accepted but never forwarded --
        # the legacy fluid dygraph Conv2D infers input channels at the
        # first call.
        same_padding = (filter_size - 1) // 2
        self._conv = Conv2D(
            self.full_name(),
            num_filters=num_filters,
            filter_size=filter_size,
            stride=stride,
            padding=same_padding,
            groups=groups,
            act=None,
            bias_attr=None)
        self._batch_norm = BatchNorm(self.full_name(), num_filters, act=act)

    def forward(self, inputs):
        """Return batch_norm(conv(inputs)), with the optional activation."""
        return self._batch_norm(self._conv(inputs))

class Bottleneck(fluid.dygraph.Layer):
    """Residual bottleneck block: 1x1 -> 3x3 (strided) -> 1x1 ConvBN
    layers plus a skip connection (optionally downsampled), finished
    with a ReLU.

    NOTE(review): ``expansion = 2`` is declared but never applied --
    conv2 outputs ``num_filters`` channels, not
    ``num_filters * expansion``. ``_num_channels_out`` is computed but
    not read inside this class. Confirm against callers before
    changing either.
    """
    expansion = 2
    def __init__(self,
                 name_scope,
                 num_channels,
                 num_filters,
                 stride=1,
                 downsample=None):
        super(Bottleneck, self).__init__(name_scope)

        # 1x1 conv (channel count unchanged here), ReLU applied in BN.
        self.conv0 = ConvBNLayer(
            self.full_name(),
            num_channels=num_channels,
            num_filters=num_channels,
            filter_size=1,
            act='relu')
        # 3x3 conv carries the stride (spatial downsampling, if any).
        self.conv1 = ConvBNLayer(
            self.full_name(),
            num_channels=num_channels,
            num_filters=num_filters,
            filter_size=3,
            stride=stride,
            act='relu')
        # 1x1 conv, no activation: the ReLU comes after the residual add.
        self.conv2 = ConvBNLayer(
            self.full_name(),
            num_channels=num_filters,
            num_filters=num_filters,
            filter_size=1,
            act=None)

        # Optional projection for the skip path (shape/channel match).
        self.downsample = downsample

        self._num_channels_out = num_filters * stride

    def forward(self, inputs):
        y = self.conv0(inputs)
        conv1 = self.conv1(y)
        conv2 = self.conv2(conv1)

        # Skip path: project the input when a downsample layer is given.
        if self.downsample is not None:
            residual = self.downsample(inputs)
        else:
            residual = inputs

        y = fluid.layers.elementwise_add(x=residual, y=conv2)

        # Apply the final ReLU via LayerHelper (legacy dygraph idiom for
        # a bare activation with no learnable parameters).
        layer_helper = LayerHelper(self.full_name(), act='relu')
        return layer_helper.append_activation(y)

def conv3x3(name, out_planes, stride=1):
    """Build a 3x3 Conv2D with padding 1 (spatial size kept at stride 1)."""
    return Conv2D(
        name,
        out_planes,
        filter_size=3,
        stride=stride,
        padding=1)

class BasicBlock(fluid.dygraph.Layer):
    """Standard two-conv residual block (conv3x3 -> BN -> conv3x3 -> BN)
    with an optional downsample branch on the skip path and a final ReLU."""

    expansion = 1

    def __init__(self, name_scope, inchannels, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__(name_scope)
        # Sub-layer creation order matters: full_name() numbers
        # parameters by construction order in the legacy fluid API.
        self.conv1 = conv3x3(self.full_name(), planes, stride)
        self.bn1 = BatchNorm(self.full_name(), planes, act="relu")
        self.conv2 = conv3x3(self.full_name(), planes)
        self.bn2 = BatchNorm(self.full_name(), planes, act="relu")
        self.downsample = downsample

    def forward(self, x):
        """Return relu(main_branch(x) + skip(x))."""
        out = self.bn1(self.conv1(x))
        out = self.bn2(self.conv2(out))

        skip = x if self.downsample is None else self.downsample(x)
        summed = fluid.layers.elementwise_add(x=skip, y=out)

        # Final ReLU via LayerHelper (legacy dygraph bare-activation idiom).
        helper = LayerHelper(self.full_name(), act='relu')
        return helper.append_activation(summed)

class Conv2dTransposeBNLayer(fluid.dygraph.Layer):
    """Transposed convolution (upsampling) followed by BatchNorm.

    NOTE(review): the ``stride`` parameter is accepted but IGNORED --
    the deconv is built with ``stride=filter_size``, i.e. it upsamples
    by exactly ``filter_size``. Callers (see SuperNet.make_layer)
    exploit this by passing the desired upsample factor as
    ``filter_size``. ``num_channels`` is likewise unused (the legacy
    fluid API infers input channels at the first call).
    """
    def __init__(self,
                 name_scope,
                 num_channels,
                 num_filters,
                 filter_size,
                 stride=1,
                 groups=1,
                 act=None):
        super(Conv2dTransposeBNLayer, self).__init__(name_scope)

        self._deconv = Conv2DTranspose(
            self.full_name(),
            num_filters=num_filters,
            filter_size=filter_size,
            stride=filter_size,  # stride tied to kernel: exact x`filter_size` upsample
            padding=0,
            groups=groups,
            act=None,
            bias_attr=None)

        # The activation, if any, is applied inside BatchNorm.
        self._batch_norm = BatchNorm(self.full_name(), num_filters, act=act)

    def forward(self, inputs):
        # deconv -> batch-norm (+ optional activation)
        y = self._deconv(inputs)
        y = self._batch_norm(y)

        return y

class Sequential(fluid.dygraph.Layer):
    """Apply a list of sub-layers in order, skipping ``None`` entries.

    Args:
        scope_name: name scope for this container layer.
        layers: list of already-constructed sub-layers; ``None`` entries
            are allowed and are skipped at call time.
    """

    def __init__(self, scope_name, layers):
        super(Sequential, self).__init__(scope_name)
        self.layers = layers

    def forward(self, x):
        """Feed ``x`` through each non-None layer and return the result."""
        for layer in self.layers:
            if layer is not None:
                # BUG FIX: call the layer itself rather than
                # layer.forward(x) so Layer.__call__ runs (pre/post
                # hooks and dygraph bookkeeping in the fluid API).
                x = layer(x)
        return x

class DirectConnect(fluid.dygraph.Layer):
    """Identity layer: returns its input unchanged."""

    def __init__(self, scope_name):
        super(DirectConnect, self).__init__(scope_name)

    def forward(self, x):
        """Identity mapping."""
        return x

class SuperNet(fluid.dygraph.Layer):
    """HRNet-style super-network whose connectivity is chosen at run time.

    ``forward`` takes an ``arch`` spec: ``arch[0]`` selects the stem
    fan-out, ``arch[1][i]`` is an (out x in) matrix for stage ``i``, and
    ``arch[-1]`` selects how branch outputs are fused. Codes per entry:
    0 = no connection, 1 = identity (DirectConnect), 2 = learned transform.
    """

    def __init__(self, scope_name, depth, width, planes, num_blocks):
        super(SuperNet, self).__init__(scope_name)
        self.depth = depth
        self.width = width
        self.planes = planes
        self.num_blocks = num_blocks
        self.dc = DirectConnect(self.full_name())
        # stem net: two stride-2 ConvBN layers -> overall 4x downsample
        self.conv1 = ConvBNLayer(self.full_name(), 3, 64, 3, 2,
                               act='relu')
        self.conv2 = ConvBNLayer(self.full_name(), 64, 64, 3, 2,
                               act='relu')

        self.layer1 = self.make_stem_layer(Bottleneck, 64, 64, 4)

        # NOTE(review): self.stages is a plain Python list, but the layers
        # inside are registered through add_sublayer() in make_layer(), so
        # their parameters are still tracked by the framework.
        self.stage_start = self.make_stage(0, 1, depth)
        self.stages=[]
        for i in range(width):
            self.stages.append(self.make_stage(i+1, depth, depth))
        self.stage_end = self.make_stage(width+1, depth, 1)

        # The first positional 3 (num_channels) is ignored by ConvBNLayer;
        # the legacy fluid Conv2D infers input channels at first call.
        self.last_layer1 = ConvBNLayer(self.full_name(), 3, 3, 1, 1, act='relu')
        self.last_layer2 = Conv2D(
                name_scope=self.full_name(),
                num_filters=3,
                filter_size=1,
                stride=1,
                padding=0)

    def make_stem_layer(self, block, inplanes, planes, blocks, stride=1):
        """Build the stem stage: ``blocks`` sequential ``block`` layers."""
        layers = []
        subblock = self.add_sublayer('stem_b_%d' % (0),block(self.full_name(), inplanes, planes, stride))
        layers.append(subblock)
        inplanes = planes * block.expansion
        for i in range(1, blocks):
            subblock = self.add_sublayer('stem_b_%d'%(i),block(self.full_name(), planes, planes, 1))
            layers.append(subblock)

        return Sequential(self.full_name(), layers)

    def make_layer(self, block, stageidx,  inidx, outidx, blocknum):
        """Build the transform from branch ``inidx`` to branch ``outidx``.

        delta == 0: same resolution, plain residual blocks.
        delta > 0 : downsample by 2**delta (strided block + 1x1 ConvBN
                    shortcut projection).
        delta < 0 : upsample by 2**|delta| via a transposed conv -- note
                    the factor is passed as ``filter_size`` because
                    Conv2dTransposeBNLayer ties its stride to it.
        """
        downsample = None
        delta = outidx-inidx
        layers = []
        inplanes = (inidx+1)*self.planes
        outplanes = (outidx+1)*self.planes
        stride = 2**(abs(delta))
        if delta ==0:
            subblock = self.add_sublayer('s_%d_%d_%d_b_%d' % (stageidx, inidx, outidx, 0), block(self.full_name(), inplanes, outplanes, stride, downsample=downsample))
        elif delta>0:
            downsample = ConvBNLayer(self.full_name(), inplanes, outplanes,
                             filter_size=1, stride=stride)
            subblock = self.add_sublayer('s_%d_%d_%d_b_%d' % (stageidx, inidx, outidx, 0), block(self.full_name(), inplanes, outplanes, stride, downsample=downsample))
        else:
            # upsample path (removed a dead `subblock = None` assignment)
            subblock = self.add_sublayer('s_%d_%d_%d_b_%d' % (stageidx, inidx, outidx, 0), Conv2dTransposeBNLayer(self.full_name(), inplanes, outplanes, stride))

        layers.append(subblock)
        for i in range(1, blocknum):
            subblock = self.add_sublayer('s_%d_%d_%d_b_%d'%(stageidx, inidx, outidx, i),block(self.full_name(), outplanes, outplanes, 1))
            layers.append(subblock)

        return Sequential(self.full_name(), layers)

    def make_stage(self, stageidx, innum, outnum):
        """Return a nested list [out][in] of candidate transform layers."""
        stage_layers=[]
        for i in range(outnum):
            stage_layer=[]
            for j in range(innum):
                layerij = self.make_layer(Bottleneck, stageidx, j, i, self.num_blocks)
                stage_layer.append(layerij)
            stage_layers.append(stage_layer)
        return stage_layers

    def forward(self, x, arch):
        """Run the super-network with connectivity ``arch`` on input ``x``.

        Returns a 3-channel sigmoid map resized to 256x256.
        """
        x = self.conv1(x)
        x = self.conv2(x)
        x = self.layer1(x)
        # Stem fan-out: one entry per branch (None = branch unused).
        vlist=arch[0]
        x_list=[]
        for i in range(len(vlist)):
            print('stem start ',i)
            v=vlist[i]
            if v==0:
                x_list.append(None)
            elif v==1:
                x_list.append(self.dc(x))
            elif v==2:
                x_list.append(self.stage_start[i][0](x))

        # NOTE(review): x_list is never swapped with y_list between
        # stages -- every stage consumes the STEM outputs and y_list keeps
        # accumulating across all stages. That looks unintended (each
        # stage should probably consume the previous stage's outputs);
        # confirm the intended dataflow before changing it.
        y_list=[]
        for m in range(len(x_list)):
            y_list.append(None)
        for i in range(len(self.stages)):
            varr=arch[1][i]
            for m in range(len(varr)):
                for n in range(len(varr[m])):
                    print('stage:', i,' m:',m, ' n:',n)
                    if y_list[m] is None:
                        print('ylist_[m] is None')
                        if varr[m][n]==1:
                            # BUG FIX: guard the tensor actually consumed
                            # (x_list[n]); the original asserted x_list[m].
                            assert(x_list[n] is not None)
                            print('1')
                            y_list[m]=self.dc(x_list[n])
                            print('1 end')
                        elif varr[m][n]==2:
                            # BUG FIX: same m -> n index correction.
                            assert(x_list[n] is not None)
                            print('2')
                            y_list[m] = self.stages[i][m][n](x_list[n])
                            print('2 end')
                    else:
                        print('ylist_[m] is not None')
                        if varr[m][n] == 1:
                            # BUG FIX: same m -> n index correction.
                            assert(x_list[n] is not None)
                            print('1')
                            y_list[m] = y_list[m] + self.dc(x_list[n])
                            print('1 end')
                        elif varr[m][n] == 2:
                            # BUG FIX: same m -> n index correction.
                            assert(x_list[n] is not None)
                            print('2')
                            y_list[m] = y_list[m] + self.stages[i][m][n](x_list[n])
                            print('2 end')

        # Fuse the surviving branches into a single output tensor.
        vlist = arch[-1]
        out=None
        for i in range(len(vlist)):
            if out is None:
                if vlist[i]==1:
                    out = self.dc(y_list[i])
                elif vlist[i]==2:
                    out = self.stage_end[0][i](y_list[i])
            else:
                if vlist[i] == 1:
                    out = out+self.dc(y_list[i])
                elif vlist[i] ==2:
                    out = out+self.stage_end[0][i](y_list[i])

        out = self.last_layer1(out)
        out = self.last_layer2(out)
        out = paddle.fluid.layers.resize_bilinear(out, out_shape=[256, 256])
        out = fluid.layers.sigmoid(out)

        return out

相关问题