The following 50 code examples, extracted from open-source Python projects, illustrate how to use torch.nn.functional.avg_pool2d().
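Before the project snippets, here is a minimal, self-contained sketch of the call itself (the tensor shapes and variable names are illustrative assumptions, not taken from any project below). It shows the two patterns that recur throughout the examples: a fixed pooling window, and global average pooling obtained by using the feature map's own spatial size as the kernel.

import torch
import torch.nn.functional as F

# A 4-D input: batch of 1, 3 channels, 8x8 spatial size.
x = torch.randn(1, 3, 8, 8)

# Fixed 2x2 window; the stride defaults to the kernel size, so the output is 1x3x4x4.
out = F.avg_pool2d(x, kernel_size=2)
print(out.shape)  # torch.Size([1, 3, 4, 4])

# Global average pooling: the kernel covers the whole feature map, giving 1x3x1x1.
gap = F.avg_pool2d(x, kernel_size=x.size()[2:])
print(gap.shape)  # torch.Size([1, 3, 1, 1])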
def forward(self, x):
    for name, module in self.base._modules.items():
        if name == 'avgpool':
            break
        x = module(x)
    if self.cut_at_pooling:
        return x
    x = F.avg_pool2d(x, x.size()[2:])
    x = x.view(x.size(0), -1)
    if self.has_embedding:
        x = self.feat(x)
        x = self.feat_bn(x)
    if self.norm:
        x = F.normalize(x)
    elif self.has_embedding:
        x = F.relu(x)
    if self.dropout > 0:
        x = self.drop(x)
    if self.num_classes > 0:
        x = self.classifier(x)
    return x
def forward(self, x):
    branch1x1 = self.branch1x1(x)

    branch7x7 = self.branch7x7_1(x)
    branch7x7 = self.branch7x7_2(branch7x7)
    branch7x7 = self.branch7x7_3(branch7x7)

    branch7x7dbl = self.branch7x7dbl_1(x)
    branch7x7dbl = self.branch7x7dbl_2(branch7x7dbl)
    branch7x7dbl = self.branch7x7dbl_3(branch7x7dbl)
    branch7x7dbl = self.branch7x7dbl_4(branch7x7dbl)
    branch7x7dbl = self.branch7x7dbl_5(branch7x7dbl)

    branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
    branch_pool = self.branch_pool(branch_pool)

    outputs = [branch1x1, branch7x7, branch7x7dbl, branch_pool]
    return torch.cat(outputs, 1)
def forward(self, x):
    branch1x1 = self.branch1x1(x)

    branch3x3 = self.branch3x3_1(x)
    branch3x3 = [
        self.branch3x3_2a(branch3x3),
        self.branch3x3_2b(branch3x3),
    ]
    branch3x3 = torch.cat(branch3x3, 1)

    branch3x3dbl = self.branch3x3dbl_1(x)
    branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
    branch3x3dbl = [
        self.branch3x3dbl_3a(branch3x3dbl),
        self.branch3x3dbl_3b(branch3x3dbl),
    ]
    branch3x3dbl = torch.cat(branch3x3dbl, 1)

    branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
    branch_pool = self.branch_pool(branch_pool)

    outputs = [branch1x1, branch3x3, branch3x3dbl, branch_pool]
    return torch.cat(outputs, 1)
def forward(self, x):
    # save for combining later with output
    residual = x

    if self.combine == 'concat':
        residual = F.avg_pool2d(residual, kernel_size=3, stride=2, padding=1)

    out = self.g_conv_1x1_compress(x)
    out = channel_shuffle(out, self.groups)
    out = self.depthwise_conv3x3(out)
    out = self.bn_after_depthwise(out)
    out = self.g_conv_1x1_expand(out)

    out = self._combine_func(residual, out)
    return F.relu(out)
def forward(self, x):
    x = self.conv1(x)
    x = self.maxpool(x)

    x = self.stage2(x)
    x = self.stage3(x)
    x = self.stage4(x)

    # global average pooling layer
    x = F.avg_pool2d(x, x.data.size()[-2:])

    # flatten for input to fully-connected layer
    x = x.view(x.size(0), -1)
    x = self.fc(x)
    return F.log_softmax(x, dim=1)
def forward(self, x):
    x = F.relu(self.conv11(x))
    x = self.bn11(x)
    x = F.relu(self.conv12(x))
    x = self.bn12(x)
    x = F.relu(self.conv21(x))
    x = self.bn21(x)
    x = F.relu(self.conv22(x))
    x = self.bn22(x)
    x = F.avg_pool2d(x, kernel_size=[x.size(2), x.size(3)])
    x = self.fc(x.view(x.size()[:2]))
    # x = F.softmax(x)
    return x
def forward(self, x):
    x = F.relu(self.conv11(x))
    x = self.bn11(x)
    x = self.offset12(x)
    x = F.relu(self.conv12(x))
    x = self.bn12(x)
    x = self.offset21(x)
    x = F.relu(self.conv21(x))
    x = self.bn21(x)
    x = self.offset22(x)
    x = F.relu(self.conv22(x))
    x = self.bn22(x)
    x = F.avg_pool2d(x, kernel_size=[x.size(2), x.size(3)])
    x = self.fc(x.view(x.size()[:2]))
    x = F.softmax(x)
    return x
def forward(self, x):
    avg = F.avg_pool2d(x, kernel_size=7, stride=1, padding=3)
    x1_1 = torch.cat([x, avg], 1)
    x1_1 = F.relu(self.conv1_1(x1_1))
    x1_2 = F.avg_pool2d(x1_1, kernel_size=2, stride=2)
    x1_2 = F.relu(self.conv1_2(x1_2))
    x1_4 = F.avg_pool2d(x1_2, kernel_size=2, stride=2)
    x1_4 = F.relu(self.conv1_3(x1_4))
    x1_2_ = F.upsample_nearest(x1_4, scale_factor=2)
    x1_2 = torch.cat([x1_2, x1_2_], 1)
    x1_2 = F.relu(self.conv1_4(x1_2))
    x1_1_ = F.upsample_nearest(x1_2, scale_factor=2)
    x1_1 = torch.cat([x1_1, x1_1_], 1)
    px = F.relu(self.conv1_5(x1_1))
    px = torch.cat([px, px, px], 1)
    px = 1 - px / 16
    return px * x + (1 - px) * avg
def forward(self, x):
    x = self.conv1(x)
    x = F.relu(x)
    x = self.bn1(x)
    x = self.conv2(x)
    x = self.bn2(x)
    x = F.max_pool2d(x, kernel_size=(1, 2), stride=(1, 2), padding=(0, 1))
    x = F.relu(x)
    x = self.conv3(x)
    x = self.bn3(x)
    x = F.max_pool2d(x, kernel_size=(1, 2), stride=(1, 2), padding=(0, 1))
    x = F.relu(x)
    x = self.conv4(x)
    x = self.bn4(x)
    x = F.avg_pool2d(x, kernel_size=(1, 2), stride=(1, 2), padding=0)
    x = F.relu(x)
    x = x.view(-1, 192)
    return x
def forward(self, x):
    x = self.conv2d_1a(x)
    x = self.conv2d_2a(x)
    x = self.conv2d_2b(x)
    x = self.maxpool_3a(x)
    x = self.conv2d_3b(x)
    x = self.conv2d_4a(x)
    x = self.maxpool_5a(x)
    x = self.mixed_5b(x)
    x = self.repeat(x)
    x = self.mixed_6a(x)
    x = self.repeat_1(x)
    x = self.mixed_7a(x)
    x = self.repeat_2(x)
    x = self.block8(x)
    x = self.conv2d_7b(x)
    # x = F.avg_pool2d(x, 8, count_include_pad=False)
    x = adaptive_avgmax_pool2d(x, self.global_pool, count_include_pad=False)
    x = x.view(x.size(0), -1)
    if self.drop_rate > 0:
        x = F.dropout(x, p=self.drop_rate, training=self.training)
    x = self.classif(x)
    return x
def adaptive_avgmax_pool2d(x, pool_type='avg', padding=0, count_include_pad=False):
    """Selectable global pooling function with dynamic input kernel size."""
    if pool_type == 'avgmaxc':
        x = torch.cat([
            F.avg_pool2d(
                x, kernel_size=(x.size(2), x.size(3)), padding=padding,
                count_include_pad=count_include_pad),
            F.max_pool2d(x, kernel_size=(x.size(2), x.size(3)), padding=padding)
        ], dim=1)
    elif pool_type == 'avgmax':
        x_avg = F.avg_pool2d(
            x, kernel_size=(x.size(2), x.size(3)), padding=padding,
            count_include_pad=count_include_pad)
        x_max = F.max_pool2d(x, kernel_size=(x.size(2), x.size(3)), padding=padding)
        x = 0.5 * (x_avg + x_max)
    elif pool_type == 'max':
        x = F.max_pool2d(x, kernel_size=(x.size(2), x.size(3)), padding=padding)
    else:
        if pool_type != 'avg':
            print('Invalid pool type %s specified. Defaulting to average pooling.' % pool_type)
        x = F.avg_pool2d(
            x, kernel_size=(x.size(2), x.size(3)), padding=padding,
            count_include_pad=count_include_pad)
    return x
def modify_densenets(model):
    # Modify attributes
    model.last_linear = model.classifier
    del model.classifier

    def logits(self, features):
        x = F.relu(features, inplace=True)
        x = F.avg_pool2d(x, kernel_size=7, stride=1)
        x = x.view(x.size(0), -1)
        x = self.last_linear(x)
        return x

    def forward(self, input):
        x = self.features(input)
        x = self.logits(x)
        return x

    # Modify methods
    setattr(model.__class__, 'logits', logits)
    setattr(model.__class__, 'forward', forward)
    return model
def forward(self, inputs):
    H = self.conv1(inputs)
    if not self.pre_act:
        H = self.bn1(H)
        H = F.relu(H)
    for section_index in range(self.num_sections):
        H = getattr(self, f'section_{section_index}')(H)
    if self.pre_act:
        H = self.bn1(H)
        H = F.relu(H)
    H = F.avg_pool2d(H, H.size()[2:])
    H = H.view(H.size(0), -1)
    outputs = self.fc(H)
    return outputs
def forward(self, x):
    out = F.relu(self.bn1(self.conv1(x)))
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    # out = self.layer4(out)
    out = F.avg_pool2d(out, 8)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, color, sketch):
    color = F.avg_pool2d(color, 16, 16)
    sketch = self.model(sketch)
    out = self.prototype(torch.cat([sketch, color], 1))
    return self.out(out.view(color.size(0), -1))
def forward(self, x):
    out = self.conv1(F.relu(self.bn1(x)))
    out = F.avg_pool2d(out, 2)
    return out
def forward(self, x):
    out = self.conv1(x)
    out = self.trans1(self.dense1(out))
    out = self.trans2(self.dense2(out))
    out = self.dense3(out)
    out = torch.squeeze(F.avg_pool2d(F.relu(self.bn1(out)), 8))
    out = F.log_softmax(self.fc(out))
    return out
def forward(self, x): """ Run the forward pass of the DenseNet model. """ out = self.conv(x) out = self.block(out) out = F.avg_pool2d(out, 8) out = out.view(-1, self.out_channels) out = self.fc(out) return out
def forward(self, input):
    out = self.init_cnn_layer(input)
    out = self.denseblocks(out)
    out = F.avg_pool2d(out, 8).squeeze()
    return self.lr(out)
def generate_model():
    class DenseModel(nn.Module):
        def __init__(self, pretrained_model):
            super(DenseModel, self).__init__()
            self.classifier = nn.Linear(pretrained_model.classifier.in_features, 17)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal(m.weight)
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()

            self.features = pretrained_model.features
            self.dense1 = pretrained_model.features._modules['denseblock1']
            self.dense2 = pretrained_model.features._modules['denseblock2']
            self.dense3 = pretrained_model.features._modules['denseblock3']
            self.dense4 = pretrained_model.features._modules['denseblock4']

        def forward(self, x):
            features = self.features(x)
            out = F.relu(features, inplace=True)
            out = F.avg_pool2d(out, kernel_size=8).view(features.size(0), -1)
            out = F.sigmoid(self.classifier(out))
            return out

    return DenseModel(torchvision.models.densenet169(pretrained=True))
def generate_model():
    class DenseModel(nn.Module):
        def __init__(self, pretrained_model):
            super(DenseModel, self).__init__()
            self.classifier = nn.Linear(pretrained_model.classifier.in_features, 17)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal(m.weight)
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()

            self.features = pretrained_model.features
            self.dense1 = pretrained_model.features._modules['denseblock1']
            self.dense2 = pretrained_model.features._modules['denseblock2']
            self.dense3 = pretrained_model.features._modules['denseblock3']
            self.dense4 = pretrained_model.features._modules['denseblock4']

        def forward(self, x):
            features = self.features(x)
            out = F.relu(features, inplace=True)
            out = F.avg_pool2d(out, kernel_size=8).view(features.size(0), -1)
            out = F.sigmoid(self.classifier(out))
            return out

    return DenseModel(torchvision.models.densenet121(pretrained=True))
def generate_model():
    class DenseModel(nn.Module):
        def __init__(self, pretrained_model):
            super(DenseModel, self).__init__()
            self.classifier = nn.Linear(pretrained_model.classifier.in_features, 17)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal(m.weight)
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()

            self.features = pretrained_model.features
            self.layer1 = pretrained_model.features._modules['denseblock1']
            self.layer2 = pretrained_model.features._modules['denseblock2']
            self.layer3 = pretrained_model.features._modules['denseblock3']
            self.layer4 = pretrained_model.features._modules['denseblock4']

        def forward(self, x):
            features = self.features(x)
            out = F.relu(features, inplace=True)
            out = F.avg_pool2d(out, kernel_size=8).view(features.size(0), -1)
            out = F.sigmoid(self.classifier(out))
            return out

    return DenseModel(torchvision.models.densenet121(pretrained=True))
def generate_model():
    class DenseModel(nn.Module):
        def __init__(self, pretrained_model):
            super(DenseModel, self).__init__()
            self.classifier = nn.Linear(pretrained_model.classifier.in_features, 17)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal(m.weight)
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()

            self.features = pretrained_model.features
            self.dense1 = pretrained_model.features._modules['denseblock1']
            self.dense2 = pretrained_model.features._modules['denseblock2']
            self.dense3 = pretrained_model.features._modules['denseblock3']
            self.dense4 = pretrained_model.features._modules['denseblock4']

        def forward(self, x):
            features = self.features(x)
            out = F.relu(features, inplace=True)
            out = F.avg_pool2d(out, kernel_size=8).view(features.size(0), -1)
            out = F.sigmoid(self.classifier(out))
            return out

    return DenseModel(torchvision.models.densenet201(pretrained=True))
def generate_model():
    class DenseModel(nn.Module):
        def __init__(self, pretrained_model):
            super(DenseModel, self).__init__()
            self.classifier = nn.Linear(pretrained_model.classifier.in_features, 17)

            for m in self.modules():
                if isinstance(m, nn.Conv2d):
                    nn.init.kaiming_normal(m.weight)
                elif isinstance(m, nn.BatchNorm2d):
                    m.weight.data.fill_(1)
                    m.bias.data.zero_()
                elif isinstance(m, nn.Linear):
                    m.bias.data.zero_()

            self.features = pretrained_model.features
            self.layer1 = pretrained_model.features._modules['denseblock1']
            self.layer2 = pretrained_model.features._modules['denseblock2']
            self.layer3 = pretrained_model.features._modules['denseblock3']
            self.layer4 = pretrained_model.features._modules['denseblock4']

        def forward(self, x):
            features = self.features(x)
            out = F.relu(features, inplace=True)
            out = F.avg_pool2d(out, kernel_size=8).view(features.size(0), -1)
            out = F.sigmoid(self.classifier(out))
            return out

    return DenseModel(torchvision.models.densenet169(pretrained=True))
def forward(self, x):
    out = self.conv1(self.relu(self.bn1(x)))
    if self.droprate > 0:
        out = F.dropout(out, p=self.droprate, inplace=False, training=self.training)
    return F.avg_pool2d(out, 2)
def forward(self, x):
    out = self.conv1(x)
    out = self.trans1(self.block1(out))
    out = self.trans2(self.block2(out))
    out = self.block3(out)
    out = self.relu(self.bn1(out))
    out = F.avg_pool2d(out, 8)
    out = out.view(-1, self.in_planes)
    return self.fc(out)
def forward(self, x):
    N = x.data.size(0)
    C = x.data.size(1)
    H = x.data.size(2)
    W = x.data.size(3)
    x = F.avg_pool2d(x, (H, W))
    x = x.view(N, C)
    return x

# for route and shortcut
def forward(self, x):
    # x = x.clone()
    x[:, 0] = (x[:, 0] - 0.485) / 0.229
    x[:, 1] = (x[:, 1] - 0.456) / 0.224
    x[:, 2] = (x[:, 2] - 0.406) / 0.225
    features = self.features(x)
    # temp_size = features.size(0)
    out = F.relu(features, inplace=True)
    out = F.avg_pool2d(out, kernel_size=7).view(features.size(0), -1)
    ft = out.clone()
    out = self.classifier(out)
    return out, ft
def forward(self, x):
    branch1x1 = self.branch1x1(x)

    branch5x5 = self.branch5x5_1(x)
    branch5x5 = self.branch5x5_2(branch5x5)

    branch3x3dbl = self.branch3x3dbl_1(x)
    branch3x3dbl = self.branch3x3dbl_2(branch3x3dbl)
    branch3x3dbl = self.branch3x3dbl_3(branch3x3dbl)

    branch_pool = F.avg_pool2d(x, kernel_size=3, stride=1, padding=1)
    branch_pool = self.branch_pool(branch_pool)

    outputs = [branch1x1, branch5x5, branch3x3dbl, branch_pool]
    return torch.cat(outputs, 1)
def forward(self, x):
    # 17 x 17 x 768
    x = F.avg_pool2d(x, kernel_size=5, stride=3)
    # 5 x 5 x 768
    x = self.conv0(x)
    # 5 x 5 x 128
    x = self.conv1(x)
    # 1 x 1 x 768
    x = x.view(x.size(0), -1)
    # 768
    x = self.group1(x)
    # 1000
    return x
def forward(self, x):
    out = F.relu(self.bn1(self.conv1(x)))
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    out = F.avg_pool2d(out, 8)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, x):
    out = self.conv1(x)
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    out = F.relu(self.bn1(out))
    out = F.avg_pool2d(out, 8)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, x):
    out = self.conv(F.relu(self.bn(x)))
    out = F.avg_pool2d(out, 2)
    return out
def forward(self, x):
    out = self.conv1(x)
    out = self.trans1(self.dense1(out))
    out = self.trans2(self.dense2(out))
    out = self.trans3(self.dense3(out))
    out = self.dense4(out)
    out = F.avg_pool2d(F.relu(self.bn(out)), 4)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, x):
    out = F.relu(self.bn1(self.conv1(x)))
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    out = F.avg_pool2d(out, 4)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, x):
    out = self.conv1(x)
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    out = self.layer4(out)
    out = F.avg_pool2d(out, 4)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out
def forward(self, x):
    out = F.relu(self.bn1(self.conv1(x)))
    out = self.layer1(out)
    out = self.layer2(out)
    out = self.layer3(out)
    out = self.layer4(out)
    out = F.avg_pool2d(out, 4)
    out = out.view(out.size(0), -1)
    out = self.linear(out)
    return out