|
1 | | -""" |
| 1 | +r""" |
2 | 2 | PyTorch: Tensors and autograd |
3 | 3 | ------------------------------- |
4 | 4 |
|
|
27 | 27 | # Create Tensors to hold input and outputs. |
28 | 28 | # By default, requires_grad=False, which indicates that we do not need to |
29 | 29 | # compute gradients with respect to these Tensors during the backward pass. |
30 | | -x = torch.linspace(-math.pi, math.pi, 2000, dtype=dtype) |
31 | | -y = torch.sin(x) |
| 30 | +x = torch.linspace(-1, 1, 2000, dtype=dtype) |
| 31 | +y = torch.exp(x)  # A Taylor expansion would be 1 + x + (1/2!) x**2 + (1/3!) x**3 + ... |
32 | 32 |
|
33 | 33 | # Create random Tensors for weights. For a third order polynomial, we need |
34 | 34 | # 4 weights: y = a + b x + c x^2 + d x^3 |
|
39 | 39 | c = torch.randn((), dtype=dtype, requires_grad=True) |
40 | 40 | d = torch.randn((), dtype=dtype, requires_grad=True) |
41 | 41 |
|
42 | | -learning_rate = 1e-6 |
43 | | -for t in range(2000): |
| 42 | +initial_loss = 1. |
| 43 | +learning_rate = 1e-5 |
| 44 | +for t in range(5000): |
44 | 45 | # Forward pass: compute predicted y using operations on Tensors. |
45 | 46 | y_pred = a + b * x + c * x ** 2 + d * x ** 3 |
46 | 47 |
|
47 | 48 | # Compute and print loss using operations on Tensors. |
48 | 49 | # Now loss is a Tensor of shape (1,) |
49 | 50 | # loss.item() gets the scalar value held in the loss. |
50 | 51 | loss = (y_pred - y).pow(2).sum() |
| 52 | + |
| 53 | +    # Calculate initial loss, so we can report loss relative to it |
| 54 | + if t==0: |
| 55 | + initial_loss=loss.item() |
| 56 | + |
51 | 57 | if t % 100 == 99: |
52 | | - print(t, loss.item()) |
| 58 | + print(f'Iteration t = {t:4d} loss(t)/loss(0) = {round(loss.item()/initial_loss, 6):10.6f} a = {a.item():10.6f} b = {b.item():10.6f} c = {c.item():10.6f} d = {d.item():10.6f}') |
53 | 59 |
|
54 | 60 | # Use autograd to compute the backward pass. This call will compute the |
55 | 61 | # gradient of loss with respect to all Tensors with requires_grad=True. |
|
0 commit comments