Commit cb37e7a

peterbell10 authored and pytorchmergebot committed
Remove F.pad python implementation
Pull Request resolved: #73433
Approved by: https://github.com/albanD, https://github.com/jbschlosser
1 parent 7a80fc2 commit cb37e7a
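
For context: the user-facing API is unchanged by this commit; F.pad keeps its Python signature and only the backing implementation moves from Python to a native builtin. A minimal sketch of an ordinary call (shapes are illustrative):

import torch
import torch.nn.functional as F

# Valid calls behave as before; only the implementation behind F.pad moved.
x = torch.randn(1, 1, 4)
y = F.pad(x, (1, 1), mode='constant', value=0.0)
print(y.shape)  # torch.Size([1, 1, 6])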

File tree

7 files changed: +140 −278 lines

test/onnx/expect/TestOperators.test_pad.expect

Lines changed: 2 additions & 2 deletions
@@ -161,7 +161,7 @@ graph {
   }
  }
  node {
-    input: "input"
+    input: "onnx::Pad_0"
    input: "onnx::Pad_22"
    output: "23"
    name: "Pad_13"
@@ -186,7 +186,7 @@ graph {
    raw_data: "\002\000\000\000\000\000\000\000\003\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\000\000"
  }
  input {
-    name: "input"
+    name: "onnx::Pad_0"
    type {
      tensor_type {
        elem_type: 1
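
A hedged repro sketch for the expect-file change above: once pad is traced as a native op, the exported graph input keeps a tracer-assigned name such as "onnx::Pad_0" rather than "input". The module and shapes below are illustrative, and exact node/input names depend on the exporter version:

import io
import torch
import torch.nn.functional as F

class PadModule(torch.nn.Module):
    def forward(self, x):
        # Traced as a native Pad op after this change.
        return F.pad(x, (2, 3), mode='constant', value=0.0)

buf = io.BytesIO()
torch.onnx.export(PadModule(), torch.randn(1, 3, 4), buf)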

test/test_fx.py

Lines changed: 1 addition & 1 deletion
@@ -3810,6 +3810,7 @@ def tearDown(self):
         "linear": BUILT_IN_FUNC,
         "logsigmoid": BUILT_IN_FUNC,
         "one_hot": BUILT_IN_FUNC,
+        "pad": BUILT_IN_FUNC,
         "pairwise_distance": BUILT_IN_FUNC,
         "pdist": BUILT_IN_FUNC,
         "pixel_shuffle": BUILT_IN_FUNC,
@@ -3827,7 +3828,6 @@ def tearDown(self):
         "adaptive_max_pool2d_with_indices": LEN_ERROR,
         "adaptive_max_pool3d_with_indices": LEN_ERROR,
         "instance_norm": CONTROL_FLOW,
-        "pad": LEN_ERROR,

         "adaptive_max_pool1d": PROXY_ITERABLE,
         "adaptive_max_pool2d": PROXY_ITERABLE,

test/test_jit.py

Lines changed: 1 addition & 1 deletion
@@ -15513,7 +15513,7 @@ def forward(self, x):
         self.assertEqual(m.int64_min, imported.int64_min)

     def test_script_scope(self):
-        scripted = torch.jit.script(torch.nn.functional.pad)
+        scripted = torch.jit.script(torch.nn.functional.triplet_margin_loss)

     @unittest.skipIf(IS_WINDOWS, "NYI: TemporaryFileName on Windows")
     def test_serialization_sharing(self):
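
The test_script_scope edit swaps in triplet_margin_loss because torch.jit.script needs a Python function to compile, and F.pad no longer is one. Scripting code that calls F.pad is expected to keep working, as in this hedged sketch:

import torch
import torch.nn.functional as F

@torch.jit.script
def pad_edges(x: torch.Tensor) -> torch.Tensor:
    # F.pad is resolved as a builtin op by the script compiler.
    return F.pad(x, [1, 1], mode='constant', value=0.0)

print(pad_edges(torch.randn(1, 4)).shape)  # torch.Size([1, 6])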

test/test_nn.py

Lines changed: 5 additions & 5 deletions
@@ -5430,8 +5430,8 @@ def test_FeatureAlphaDropout(self):

     def test_pad_scalar_error(self):
         inputs = torch.tensor(0., requires_grad=True)
-        self.assertRaises(AssertionError, lambda: F.pad(inputs, (1, 1)))
-        self.assertRaises(AssertionError, lambda: F.pad(inputs, (1,)))
+        self.assertRaises(RuntimeError, lambda: F.pad(inputs, (1, 1)))
+        self.assertRaises(RuntimeError, lambda: F.pad(inputs, (1,)))

     @unittest.skipIf(not TEST_NUMPY, "numpy not found")
     @parametrize_test("average_attn_weights", [True, False])
@@ -14345,10 +14345,10 @@ def test_pad(self, device, dtype):
         # Assert assertion errors are raised for invalid circular padding values
         inputs = torch.randn(1, 1, 4, device=device, dtype=dtype, requires_grad=True)
         # Should raise error when trying to wrap around more than once
-        self.assertRaises(AssertionError, lambda: F.pad(inputs, (5, 4), mode='circular'))
-        self.assertRaises(AssertionError, lambda: F.pad(inputs, (3, 6), mode='circular'))
+        self.assertRaises(RuntimeError, lambda: F.pad(inputs, (5, 4), mode='circular'))
+        self.assertRaises(RuntimeError, lambda: F.pad(inputs, (3, 6), mode='circular'))
         # Should raise error when negative padding results in negative output shape
-        self.assertRaises(AssertionError, lambda: F.pad(inputs, (-3, -2), mode='circular'))
+        self.assertRaises(RuntimeError, lambda: F.pad(inputs, (-3, -2), mode='circular'))

         # assert that relfection padding errors when pad >= input size
         expected_err_msg = r"Padding size should be less than the corresponding input dimension"
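
The assertion-type changes above encode the user-visible effect of the move to native code: invalid padding now surfaces as a RuntimeError from the C++ checks rather than a Python-level AssertionError. A hedged sketch of the new behavior:

import torch
import torch.nn.functional as F

inputs = torch.randn(1, 1, 4)
try:
    # Circular padding may not wrap around more than once.
    F.pad(inputs, (5, 4), mode='circular')
except RuntimeError as e:
    print("RuntimeError:", e)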
