Commit 4b2f8da

Fix C++ lint (#1584)
1 parent 10a7111 commit 4b2f8da

File tree: 3 files changed (+10, -14 lines)


torchvision/csrc/models/densenet.cpp

Lines changed: 2 additions & 4 deletions
@@ -99,10 +99,8 @@ DenseNetImpl::DenseNetImpl(
   features = torch::nn::Sequential();
   features->push_back(
       "conv0",
-      torch::nn::Conv2d(Options(3, num_init_features, 7)
-                            .stride(2)
-                            .padding(3)
-                            .bias(false)));
+      torch::nn::Conv2d(
+          Options(3, num_init_features, 7).stride(2).padding(3).bias(false)));
 
   features->push_back("norm0", torch::nn::BatchNorm(num_init_features));
   features->push_back("relu0", torch::nn::Functional(modelsimpl::relu_));

torchvision/csrc/models/mnasnet.cpp

Lines changed: 6 additions & 8 deletions
@@ -23,8 +23,7 @@ struct MNASNetInvertedResidualImpl : torch::nn::Module {
     auto mid = int64_t(input * expansion_factor);
     apply_residual = input == output && stride == 1;
 
-    layers->push_back(
-        torch::nn::Conv2d(Options(input, mid, 1).bias(false)));
+    layers->push_back(torch::nn::Conv2d(Options(input, mid, 1).bias(false)));
     layers->push_back(torch::nn::BatchNorm(
         torch::nn::BatchNormOptions(mid).momentum(bn_momentum)));
     layers->push_back(
@@ -39,8 +38,7 @@ struct MNASNetInvertedResidualImpl : torch::nn::Module {
         torch::nn::BatchNormOptions(mid).momentum(bn_momentum)));
     layers->push_back(
         torch::nn::Functional(torch::nn::Functional(modelsimpl::relu_)));
-    layers->push_back(
-        torch::nn::Conv2d(Options(mid, output, 1).bias(false)));
+    layers->push_back(torch::nn::Conv2d(Options(mid, output, 1).bias(false)));
     layers->push_back(torch::nn::BatchNorm(
         torch::nn::BatchNormOptions(output).momentum(bn_momentum)));
 
@@ -128,8 +126,8 @@ void MNASNetImpl::_initialize_weights() {
 MNASNetImpl::MNASNetImpl(double alpha, int64_t num_classes, double dropout) {
   auto depths = scale_depths({24, 40, 80, 96, 192, 320}, alpha);
 
-  layers->push_back(torch::nn::Conv2d(
-      Options(3, 32, 3).padding(1).stride(2).bias(false)));
+  layers->push_back(
+      torch::nn::Conv2d(Options(3, 32, 3).padding(1).stride(2).bias(false)));
   layers->push_back(torch::nn::BatchNorm(
       torch::nn::BatchNormOptions(32).momentum(BN_MOMENTUM)));
   layers->push_back(torch::nn::Functional(modelsimpl::relu_));
@@ -138,8 +136,8 @@ MNASNetImpl::MNASNetImpl(double alpha, int64_t num_classes, double dropout) {
   layers->push_back(torch::nn::BatchNorm(
       torch::nn::BatchNormOptions(32).momentum(BN_MOMENTUM)));
   layers->push_back(torch::nn::Functional(modelsimpl::relu_));
-  layers->push_back(torch::nn::Conv2d(
-      Options(32, 16, 1).padding(0).stride(1).bias(false)));
+  layers->push_back(
+      torch::nn::Conv2d(Options(32, 16, 1).padding(0).stride(1).bias(false)));
   layers->push_back(torch::nn::BatchNorm(
       torch::nn::BatchNormOptions(16).momentum(BN_MOMENTUM)));
 
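
The mnasnet.cpp changes are the same kind of one-line reflow around layers->push_back. A rough sketch of the pointwise-conv / batch-norm / ReLU pattern those lines assemble follows; it is not the commit's code, it assumes Options aliases torch::nn::Conv2dOptions, and it substitutes torch::nn::BatchNorm2d for the older torch::nn::BatchNorm module the file uses.

// Sketch only. Assumes Options == torch::nn::Conv2dOptions and swaps in
// BatchNorm2d for the file's BatchNorm module.
#include <torch/torch.h>

torch::nn::Sequential make_pointwise_block(
    int64_t input, int64_t mid, double bn_momentum) {
  torch::nn::Sequential layers;
  // 1x1 pointwise convolution without bias (the bias is folded into batch norm).
  layers->push_back(
      torch::nn::Conv2d(torch::nn::Conv2dOptions(input, mid, 1).bias(false)));
  // Batch norm with the caller-supplied momentum, as MNASNet does.
  layers->push_back(torch::nn::BatchNorm2d(
      torch::nn::BatchNorm2dOptions(mid).momentum(bn_momentum)));
  layers->push_back(torch::nn::Functional(torch::relu));
  return layers;
}

Calling make_pointwise_block(16, 48, 0.01)->forward(x) on an input of shape {N, 16, H, W} should produce {N, 48, H, W}.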

torchvision/csrc/models/resnet.h

Lines changed: 2 additions & 2 deletions
@@ -124,8 +124,8 @@ ResNetImpl<Block>::ResNetImpl(
     : groups(groups),
       base_width(width_per_group),
       inplanes(64),
-      conv1(torch::nn::Conv2dOptions(3, 64, 7).stride(2).padding(3).bias(
-          false)),
+      conv1(
+          torch::nn::Conv2dOptions(3, 64, 7).stride(2).padding(3).bias(false)),
       bn1(64),
       layer1(_make_layer(64, layers[0])),
       layer2(_make_layer(128, layers[1], 2)),
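
The resnet.h hunk only moves where the option chain wraps, but the underlying pattern, constructing a torch::nn::Conv2d member directly from a Conv2dOptions chain in the member-initializer list, is shown below in a minimal sketch. StemImpl is a hypothetical stand-in for ResNetImpl, not code from the repository.

// Minimal sketch; StemImpl is hypothetical and only illustrates initializing
// a Conv2d member from an options chain, as resnet.h does for conv1.
#include <torch/torch.h>

struct StemImpl : torch::nn::Module {
  StemImpl()
      : conv1(
            torch::nn::Conv2dOptions(3, 64, 7).stride(2).padding(3).bias(false)) {
    register_module("conv1", conv1);
  }

  torch::Tensor forward(torch::Tensor x) {
    return torch::relu(conv1->forward(x));
  }

  torch::nn::Conv2d conv1;
};
TORCH_MODULE(Stem);

With that in place, Stem stem; stem->forward(torch::randn({1, 3, 224, 224})) should yield a [1, 64, 112, 112] tensor.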
