3 changes: 2 additions & 1 deletion RELEASENOTES.md
@@ -6,11 +6,12 @@ Releases, starting with 9/2/2021, are listed with the most recent release at the

__Fixed Bugs:__


#715 How to implement the following code <br/>

__API Changes__:

Added functional normalizations<br/>
Added torch.utils.tensorboard.SummaryWriter. Support for scalars only.<br/>
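
A minimal usage sketch for the new writer, assuming the C# API mirrors PyTorch's `SummaryWriter`; the log directory, tag, and loop below are illustrative, not taken from this PR:

```C#
using TorchSharp;

// Log one scalar per step to a TensorBoard event file; scalars are all that is supported so far.
var writer = torch.utils.tensorboard.SummaryWriter("runs/example");
for (int step = 0; step < 10; step++) {
    var loss = 1.0f / (step + 1);            // stand-in for a real training loss
    writer.add_scalar("train/loss", loss, step);
}
```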


## NuGet Version 0.97.3
60 changes: 60 additions & 0 deletions docfx/articles/modules.md
@@ -230,3 +230,63 @@ Sometimes, a module needs to allocate tensors that are not trainable, i.e. their

Each buffer should be declared as a field of type 'Tensor' (not 'Parameter'). This will ensure that the buffer is registered properly when `RegisterComponents()` is called.
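
For illustration, here is a minimal sketch of a module with one trainable submodule and one buffer; the class and field names are only examples:

```C#
private class ModuleWithBuffer : Module
{
    public ModuleWithBuffer()
        : base("ModuleWithBuffer")
    {
        lin = Linear(10, 10);
        runningMean = torch.zeros(10);   // state that should be saved with the module, but not trained
        RegisterComponents();            // registers 'lin' as a submodule and 'runningMean' as a buffer
    }

    public override Tensor forward(Tensor input)
    {
        return lin.forward(input) + runningMean;
    }

    private Module lin;
    private Tensor runningMean;
}
```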


## Modules, 'children()' and 'named_children()'

It is sometimes necessary to create a new model from an existing one, discarding some of the final layers. The submodules appear in the 'named_children()' list in the order in which they are declared as fields within the module, not the order in which they are invoked in `forward()`. A model constructed from the children will therefore apply the layers in declaration order, so the fields must be declared in the same order that they are meant to be invoked.

So, for example:

```C#
private class TestModule1 : Module
{
public TestModule1()
: base("TestModule1")
{
lin1 = Linear(100, 10);
lin2 = Linear(10, 5);
RegisterComponents();
}

public override Tensor forward(Tensor input)
{
using (var x = lin1.forward(input))
return lin2.forward(x);
}

// Correct -- the layers are declared in the same order they are invoked.
private Module lin1;
private Module lin2;
}

private class TestModule2 : Module
{
public TestModule2()
: base("TestModule2")
{
lin1 = Linear(100, 10);
lin2 = Linear(10, 5);
RegisterComponents();
}

public override Tensor forward(Tensor input)
{
using (var x = lin1.forward(input))
return lin2.forward(x);
}

// Incorrect -- the layers are not declared in the same order they are invoked.
private Module lin2;
private Module lin1;
}

...
TestModule1 mod1 = ...
TestModule2 mod2 = ...
var seq1 = nn.Sequential(mod1.named_children());
seq1.forward(t); // Does the same as mod1.forward(t)
var seq2 = nn.Sequential(mod2.named_children());
seq2.forward(t); // This likely fails: the children come out in declaration order, so lin2 runs before lin1 and the shapes don't match.
```
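
The same mechanism can be used to trim a pre-built model down to a feature extractor. A sketch mirroring the `ValidateBug715` test added in this PR; the child count and input shape are arbitrary examples, and `Take` requires `System.Linq`:

```C#
var resnet = torchvision.models.resnet18();

// Keep only the first six named children (the stem and the early stages) and recompose them.
var backbone = nn.Sequential(resnet.named_children().Take(6));

var features = backbone.forward(torch.zeros(1, 3, 64, 160));
```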


6 changes: 3 additions & 3 deletions src/TorchSharp/TorchVision/models/AlexNet.cs
@@ -60,7 +60,7 @@ namespace Modules
public class AlexNet : Module
{
private readonly Module features;
private readonly Module avgPool;
private readonly Module avgpool;
private readonly Module classifier;

public AlexNet(int numClasses, float dropout = 0.5f, string weights_file = null, bool skipfc = true, Device device = null) : base(nameof(AlexNet))
@@ -81,7 +81,7 @@ public AlexNet(int numClasses, float dropout = 0.5f, string weights_file = null,
MaxPool2d(kernelSize: 3, stride: 2)
);

avgPool = AdaptiveAvgPool2d(new long[] { 6, 6 });
avgpool = AdaptiveAvgPool2d(new long[] { 6, 6 });

classifier = Sequential(
Dropout(probability: dropout),
@@ -108,7 +108,7 @@ public override Tensor forward(Tensor input)
{
using (var _ = NewDisposeScope()) {
var f = features.forward(input);
var avg = avgPool.forward(f);
var avg = avgpool.forward(f);
var x = avg.flatten(1);
return classifier.forward(x).MoveToOuterDisposeScope();
}
7 changes: 3 additions & 4 deletions src/TorchSharp/TorchVision/models/GoogleNet.cs
@@ -71,19 +71,18 @@ public class GoogleNet : Module

private readonly Module conv1;
private readonly Module maxpool1;
private readonly Module maxpool2;
private readonly Module maxpool3;
private readonly Module maxpool4;
private readonly Module conv2;
private readonly Module conv3;
private readonly Module maxpool2;
private readonly Module inception3a;
private readonly Module inception3b;

private readonly Module maxpool3;
private readonly Module inception4a;
private readonly Module inception4b;
private readonly Module inception4c;
private readonly Module inception4d;
private readonly Module inception4e;
private readonly Module maxpool4;
private readonly Module inception5a;
private readonly Module inception5b;
//private readonly Module aux1;
4 changes: 2 additions & 2 deletions src/TorchSharp/TorchVision/models/InceptionV3.cs
@@ -70,11 +70,11 @@ public class InceptionV3 : Module
private readonly Module Conv2d_1a_3x3;
private readonly Module Conv2d_2a_3x3;
private readonly Module Conv2d_2b_3x3;
private readonly Module maxpool1;
private readonly Module Conv2d_3b_1x1;
private readonly Module Conv2d_4a_3x3;

private readonly Module maxpool1;
private readonly Module maxpool2;

private readonly Module Mixed_5b;
private readonly Module Mixed_5c;
private readonly Module Mixed_5d;
28 changes: 14 additions & 14 deletions src/TorchSharp/TorchVision/models/ResNet.cs
@@ -3,6 +3,7 @@
using System.Collections.Generic;
using System.IO;
using System.Net;
using TorchSharp.Modules;
using TorchSharp.torchvision.Modules;
using static TorchSharp.torch;
using static TorchSharp.torch.nn;
@@ -218,19 +219,18 @@ public class ResNet : Module

private readonly Module conv1;
private readonly Module bn1;
private readonly Module relu1;
private readonly Module relu;
private readonly Module maxpool;

private readonly TorchSharp.Modules.ModuleList layer1 = new TorchSharp.Modules.ModuleList();
private readonly TorchSharp.Modules.ModuleList layer2 = new TorchSharp.Modules.ModuleList();
private readonly TorchSharp.Modules.ModuleList layer3 = new TorchSharp.Modules.ModuleList();
private readonly TorchSharp.Modules.ModuleList layer4 = new TorchSharp.Modules.ModuleList();
private readonly Sequential layer1 = Sequential();
private readonly Sequential layer2 = Sequential();
private readonly Sequential layer3 = Sequential();
private readonly Sequential layer4 = Sequential();

private readonly Module avgpool;
private readonly Module maxpool;
private readonly Module flatten;
private readonly Module fc;


private int in_planes = 64;

public static ResNet ResNet18(int numClasses,
@@ -320,7 +320,7 @@ public ResNet(string name,

conv1 = Conv2d(3, 64, kernelSize: 7, stride: 2, padding: 3, bias: false);
bn1 = BatchNorm2d(64);
relu1 = ReLU(inPlace: true);
relu = ReLU(inPlace: true);
maxpool = MaxPool2d(kernelSize: 2, stride: 2, padding: 1);
MakeLayer(layer1, block, expansion, 64, num_blocks[0], 1);
MakeLayer(layer2, block, expansion, 128, num_blocks[1], 2);
@@ -356,7 +356,7 @@ public ResNet(string name,
this.to(device);
}

private void MakeLayer(TorchSharp.Modules.ModuleList modules, Func<int, int, int, Module> block, int expansion, int planes, int num_blocks, int stride)
private void MakeLayer(Sequential modules, Func<int, int, int, Module> block, int expansion, int planes, int num_blocks, int stride)
{
var strides = new List<int>();
strides.Add(stride);
@@ -373,12 +373,12 @@ public override Tensor forward(Tensor input)
{
using (var scope = NewDisposeScope()) {

var x = maxpool.forward(relu1.forward(bn1.forward(conv1.forward(input))));
var x = maxpool.forward(relu.forward(bn1.forward(conv1.forward(input))));

foreach (var m in layer1) x = m.forward(x);
foreach (var m in layer2) x = m.forward(x);
foreach (var m in layer3) x = m.forward(x);
foreach (var m in layer4) x = m.forward(x);
x = layer1.forward(x);
x = layer2.forward(x);
x = layer3.forward(x);
x = layer4.forward(x);

var res = fc.forward(flatten.forward(avgpool.forward(x)));
scope.MoveToOuter(res);
2 changes: 1 addition & 1 deletion src/TorchSharp/TorchVision/models/VGG.cs
@@ -334,8 +334,8 @@ public class VGG : Module
};

private readonly Module features;
private readonly Module classifier;
private readonly Module avgpool;
private readonly Module classifier;

public VGG(string name,
int numClasses,
17 changes: 16 additions & 1 deletion test/TorchSharpTest/TestTorchTensorBugs.cs
@@ -5,12 +5,13 @@
using System.IO;

using System.Threading;
using System.Runtime.CompilerServices;

using static TorchSharp.torch.nn;
using Xunit;

using static TorchSharp.torch;
using System.Runtime.CompilerServices;
using static TorchSharp.torchvision.models;

#nullable enable

@@ -772,5 +773,19 @@ public void ValidateBug679()
spec = torch.rand(1, 257, 500, dtype: dtype);
x = torch.istft(spec, 512, 160, 400, null);
}

[Fact]
public void ValidateBug715()
{
var resnet = resnet18();
// Take the first six named children (the stem and early stages) and recompose them into a Sequential.
var resnetlist = resnet.named_children();
var list = resnetlist.Take(6);
var bone = nn.Sequential(list);

var x = torch.zeros(1, 3, 64, 160);

// This should not blow up.
var tmp = bone.forward(x);
}
}
}