
Commit d0331ee

Fix MAC address pagination by ensuring deterministic ordering
Add 'pk' to the MACAddress model's ordering so that results are deterministic when multiple MAC addresses share the same value. Ordering by 'mac_address' alone leaves the relative order of tied records undefined, so the same object could appear on more than one page (and other objects could be skipped) during pagination.

Fixes #19917
1 parent 043dc4c commit d0331ee
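
Why the tie-breaker matters: with LIMIT/OFFSET pagination, any ordering that is not a total order lets the database return tied rows in a different order on every query. A minimal sketch of the failure mode (illustration only, not code from this commit; assumes the NetBox ORM models are importable):

    from dcim.models import MACAddress

    # Pre-fix behavior: ordering by a non-unique column alone leaves the
    # relative order of rows sharing a MAC value undefined, so each page
    # query may serialize the ties differently.
    page_1 = MACAddress.objects.order_by('mac_address')[0:100]    # ORDER BY mac_address LIMIT 100
    page_2 = MACAddress.objects.order_by('mac_address')[100:200]  # ... LIMIT 100 OFFSET 100
    # An object returned on page 1 can reappear on page 2, and another object
    # can be skipped entirely. Appending the primary key makes the sort key
    # unique, hence stable across queries:
    pages = MACAddress.objects.order_by('mac_address', 'pk')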

4 files changed (+43, -174 lines)
Lines changed: 15 additions & 0 deletions

@@ -0,0 +1,15 @@
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('dcim', '0208_devicerole_uniqueness'),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name='macaddress',
+            options={'ordering': ('mac_address', 'pk'), 'verbose_name': 'MAC address', 'verbose_name_plural': 'MAC addresses'},
+        ),
+    ]

netbox/dcim/models/devices.py

Lines changed: 1 addition & 1 deletion

@@ -1272,7 +1272,7 @@ class MACAddress(PrimaryModel):
     )
 
     class Meta:
-        ordering = ('mac_address',)
+        ordering = ('mac_address', 'pk',)
         verbose_name = _('MAC address')
         verbose_name_plural = _('MAC addresses')
 

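Because the change lives in Meta.ordering, it becomes the default for every queryset that does not call order_by() itself. A quick Django-shell check (a sketch; exact SQL quoting and column names vary by backend):

    from dcim.models import MACAddress

    print(MACAddress._meta.ordering)       # ('mac_address', 'pk')
    # The default queryset now carries the tie-breaker into its SQL:
    print(MACAddress.objects.all().query)  # ... ORDER BY ... "mac_address" ASC, ... "id" ASC
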
netbox/dcim/tests/test_api.py

Lines changed: 13 additions & 109 deletions

@@ -2675,120 +2675,24 @@ def setUpTestData(cls):
             },
         ]
 
-    def test_device_filter_pagination_distinct(self):
-        """
-        Test that filtering MAC addresses by device_id via API returns distinct results
-        across pagination boundaries. Creates many interfaces with the same MAC address
-        to force the pagination duplication bug to manifest.
-        """
-        # Add the required permission for this test
+    def test_mac_address_pagination_duplicates(self):
         self.add_permissions('dcim.view_macaddress')
-
-        # Create a device with many interfaces all having the same MAC address
-        device = create_test_device('Pagination Test Device')
-        shared_mac_address = '18:2A:D3:65:90:2E'  # Use the same MAC from your original issue
-
-        # Create many interfaces with the same MAC address to trigger the pagination bug
-        # This simulates the scenario where the same MAC address appears on multiple interfaces
-        # Without .distinct(), the same MAC address object could appear multiple times across pages
-        for i in range(500):  # Create more to really stress test pagination
+        device = create_test_device('Device 2')
+        shared_mac = '00:00:00:00:00:07'
+        for i in range(5):
             interface = Interface.objects.create(
-                device=device,
-                name=f'dummy{i:03d}',
-                type='1000base-t'
+                device=device, name=f'Interface {i:03d}', type='1000base-t'
             )
-
-            # ALL interfaces get the same MAC address - this triggers the pagination duplication bug
-            MACAddress.objects.create(
-                mac_address=shared_mac_address,
-                assigned_object=interface
-            )
-
-        # Test API pagination with device_id filter - simulate your script's behavior
+            MACAddress.objects.create(mac_address=shared_mac, assigned_object=interface)
         url = reverse('dcim-api:macaddress-list')
-        expected_count = 500  # Should be exactly 500 unique MAC address objects
-
-        # Test the exact pagination scenario from your script
-        # Your script uses limit=100, so let's test with that
         all_mac_ids = set()
-        total_collected = 0
-
-        # Page 1: limit=100 (like your dummy-test.py)
-        response = self.client.get(
-            f'{url}?device_id={device.pk}&limit=100',
-            format='json',
-            **self.header
-        )
-        self.assertHttpStatus(response, status.HTTP_200_OK)
-        self.assertEqual(response.data['count'], expected_count)
-        self.assertEqual(len(response.data['results']), 100)
-
-        page_1_ids = {result['id'] for result in response.data['results']}
-        all_mac_ids.update(page_1_ids)
-        total_collected += len(response.data['results'])
-
-        # Continue pagination like your script does
-        next_url = response.data.get('next')
-        page_count = 1
-
-        while next_url and page_count < 10:  # Limit to prevent infinite loop
-            page_count += 1
-            # Extract just the query parameters from the next URL
-            from urllib.parse import urlparse, parse_qs
-            parsed = urlparse(next_url)
-            query_params = parse_qs(parsed.query)
-
-            # Build the request
-            params = {}
-            for key, value_list in query_params.items():
-                if value_list:
-                    params[key] = value_list[0]
-
-            response = self.client.get(url, params, format='json', **self.header)
+        for page in range(1, 6):
+            response = self.client.get(
+                f'{url}?device_id={device.pk}&limit=1&offset={1*(page-1)}',
+                format='json', **self.header
+            )
             self.assertHttpStatus(response, status.HTTP_200_OK)
-
             page_ids = {result['id'] for result in response.data['results']}
-
-            # Check for duplicate IDs across pages - this is where the bug would show
-            duplicates = all_mac_ids & page_ids
-            self.assertEqual(len(duplicates), 0,
-                f"Found duplicate MAC address IDs between pages: {duplicates}")
-
+            self.assertEqual(len(all_mac_ids & page_ids), 0)
             all_mac_ids.update(page_ids)
-            total_collected += len(response.data['results'])
-            next_url = response.data.get('next')
-
-        # The critical test: without .distinct(), total_collected could be > expected_count
-        # because the same MAC address object appears multiple times
-        self.assertEqual(total_collected, expected_count,
-            f"Total MAC addresses collected ({total_collected}) should equal expected ({expected_count})")
-
-        # Verify all collected IDs are unique
-        self.assertEqual(len(all_mac_ids), expected_count,
-            f"Should have {expected_count} unique MAC address IDs, got {len(all_mac_ids)}")
-
-        # Additional verification: all MAC addresses should have the same value
-        # Get all results in one go to verify
-        response_all = self.client.get(
-            f'{url}?device_id={device.pk}&limit=0',  # No pagination limit
-            format='json',
-            **self.header
-        )
-        self.assertHttpStatus(response, status.HTTP_200_OK)
-
-        # Count interfaces like your original script does
-        interface_mac_count = {}
-        for result in response_all.data['results']:
-            if result['assigned_object']:
-                interface_name = result['assigned_object']['name']
-                if interface_name not in interface_mac_count:
-                    interface_mac_count[interface_name] = []
-                interface_mac_count[interface_name].append(result)
-
-        # In your scenario, you had 1000 MACs on 997 interfaces due to the bug
-        # Here, we should have exactly 500 MACs on 500 interfaces
-        total_macs_via_interfaces = sum(len(macs) for macs in interface_mac_count.values())
-        self.assertEqual(total_macs_via_interfaces, expected_count,
-            f"Total MACs via interface grouping should be {expected_count}")
-        self.assertEqual(len(interface_mac_count), expected_count,
-            f"Should have exactly {expected_count} interfaces with MAC addresses")
+        self.assertEqual(len(all_mac_ids), 5)

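The regression test uses limit=1 so that every object lands on its own page. The same check can be run by hand against a live instance by following the DRF `next` links, much as the script in the original report did. A sketch (host, token, and device ID are placeholders; the endpoint path assumes NetBox's usual kebab-case API naming):

    import requests

    BASE = 'https://netbox.example.com'                    # placeholder host
    HEADERS = {'Authorization': 'Token 0123456789abcdef'}  # placeholder token

    seen = set()
    url = f'{BASE}/api/dcim/mac-addresses/?device_id=123&limit=100'
    while url:
        data = requests.get(url, headers=HEADERS).json()
        ids = {result['id'] for result in data['results']}
        assert not seen & ids, 'duplicate MAC address objects across pages'
        seen.update(ids)
        url = data['next']  # DRF pagination: null on the last page
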
netbox/dcim/tests/test_filtersets.py

Lines changed: 14 additions & 64 deletions

@@ -6941,71 +6941,21 @@ def test_vminterface(self):
         params = {'vminterface': [vm_interfaces[0].name, vm_interfaces[1].name]}
         self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
 
-    def test_device_filter_pagination_distinct(self):
-        """
-        Test that filtering MAC addresses by device_id returns distinct results.
-        Creates many interfaces with the same MAC address to force the pagination
-        duplication bug to manifest across page boundaries.
-        """
-        # Create a device with multiple interfaces
-        device = create_test_device('Test Device')
-        shared_mac_address = '18:2A:D3:65:90:2E'  # Use the same MAC from your original issue
-
-        # Create 500 interfaces ALL with the same MAC address
-        # This ensures the duplication bug will manifest across pagination boundaries
-        # More interfaces = more likely to trigger pagination duplication
-        for i in range(500):
+    def test_mac_address_pagination_duplicates(self):
+        device = create_test_device('Device 4')
+        shared_mac = '00:00:00:00:00:07'
+        for i in range(5):
             interface = Interface.objects.create(
-                device=device,
-                name=f'dummy{i:03d}',
-                type=InterfaceTypeChoices.TYPE_1GE_FIXED
+                device=device, name=f'Interface {i:03d}', type=InterfaceTypeChoices.TYPE_1GE_FIXED
             )
-
-            # ALL interfaces get the same MAC address - this forces the pagination bug
-            MACAddress.objects.create(
-                mac_address=shared_mac_address,
-                assigned_object=interface
-            )
-
-        # Test filtering by device_id
+            MACAddress.objects.create(mac_address=shared_mac, assigned_object=interface)
         params = {'device_id': [device.pk]}
         filtered_queryset = self.filterset(params, self.queryset).qs
-
-        # Without .distinct(), the bug would cause the same MAC to appear multiple times
-        # With .distinct(), this should return exactly 500 unique MAC address objects
-        expected_count = 500
-        self.assertEqual(filtered_queryset.count(), expected_count)
-
-        # Verify all MAC addresses are unique objects (no duplicate IDs)
-        mac_ids = list(filtered_queryset.values_list('id', flat=True))
-        self.assertEqual(len(mac_ids), len(set(mac_ids)), "Duplicate MAC address objects found in results")
-
-        # Verify all returned MAC addresses belong to our test device
-        for mac in filtered_queryset:
-            self.assertEqual(mac.assigned_object.device, device)
-
-        # Test pagination-like slicing to simulate the bug scenario
-        # This simulates what happens during API pagination with large datasets
-        page_size = 50
-        first_page = filtered_queryset[:page_size]
-        second_page = filtered_queryset[page_size:page_size*2]
-        third_page = filtered_queryset[page_size*2:]
-
-        # Collect IDs from each page
-        first_page_ids = set(mac.id for mac in first_page)
-        second_page_ids = set(mac.id for mac in second_page)
-        third_page_ids = set(mac.id for mac in third_page)
-
-        # Ensure no overlap between pages (no duplicate MAC address objects)
-        self.assertEqual(len(first_page_ids & second_page_ids), 0, "Duplicate MAC objects between page 1 and 2")
-        self.assertEqual(len(first_page_ids & third_page_ids), 0, "Duplicate MAC objects between page 1 and 3")
-        self.assertEqual(len(second_page_ids & third_page_ids), 0, "Duplicate MAC objects between page 2 and 3")
-
-        # Verify total count across all pages
-        all_ids = first_page_ids | second_page_ids | third_page_ids
-        self.assertEqual(len(all_ids), expected_count, "Total count mismatch across pages")
-
-        # Verify all MACs have the same address but different IDs (different objects)
-        all_mac_addresses = filtered_queryset.values_list('mac_address', flat=True)
-        self.assertTrue(all(addr == shared_mac_address for addr in all_mac_addresses),
-            "Not all MAC addresses match the expected shared address")
+        self.assertEqual(filtered_queryset.count(), 5)
+        all_ids = set()
+        for page_start in range(0, 5, 1):
+            page = filtered_queryset[page_start:page_start + 1]
+            page_ids = set(mac.id for mac in page)
+            self.assertEqual(len(all_ids & page_ids), 0)
+            all_ids.update(page_ids)
+        self.assertEqual(len(all_ids), 5)

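The slicing in this test exercises the same code path as API pagination: each slice of an unevaluated queryset issues its own LIMIT/OFFSET query. A minimal sketch of that behavior (illustration only):

    from dcim.models import MACAddress

    qs = MACAddress.objects.all()  # no query executed yet
    first = list(qs[0:1])          # SELECT ... LIMIT 1
    second = list(qs[1:2])         # SELECT ... LIMIT 1 OFFSET 1  (a separate query)
    # Under a non-deterministic ORDER BY, those two queries could return the
    # same row; the 'pk' tie-breaker rules that out.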