Compare commits

15 Commits

Author SHA1 Message Date
adionit7
f5f0c19860 Remove obsolete pre-commit hook script
The legacy pre-commit hook script was scheduled for removal in NetBox v4.3, as noted in the TODO comment within the file. Users should now use the pre-commit tool instead.
2026-01-16 09:03:08 -05:00
bctiemann
8da9b11ab8 Merge pull request #21154 from netbox-community/21124-moduletype-front-ports
Fixes #21124: Fix rear port selection when creating front ports on a module type
2026-01-16 08:28:39 -05:00
Arthur Hanson
ca67fa9999 Fix #21134: fix bulk rename ModuleType (#21180) 2026-01-16 03:23:28 -06:00
Jeremy Stretch
eff768192e Fixes #21140: Ensure default panel attribute labels are translated (#21153) 2026-01-16 01:35:35 -06:00
github-actions
1e297d55ee Update source translation strings
2026-01-16 05:04:49 +00:00
bctiemann
fdb987ef91 Merge pull request #21183 from netbox-community/21178-change-rack-dimensions-display-to-be-more-consistent
Fixes #21178: Add spacing in mounting depth format string
2026-01-15 17:48:39 -05:00
bctiemann
b5a23db43c Merge pull request #21164 from netbox-community/21118-site
fix performance regression for Site save, use bulk_update for cached fields
2026-01-15 17:48:01 -05:00
bctiemann
366b69aff7 Merge pull request #21143 from netbox-community/21050-device-oob-ip-may-become-orphaned
Fixes #21050: Prevent reassignment of OOB IPs
2026-01-15 17:47:00 -05:00
bctiemann
c3e8c5e69c Merge pull request #21100 from netbox-community/21097-graphql-id-lookups
Fixes #21097: Fix comparison lookups for ID filters in GraphQL API
2026-01-15 17:44:22 -05:00
adionit7
b55f36469d Update CodeQL Action from v3 to v4
- Update github/codeql-action/init from @v3 to @v4
- Update github/codeql-action/analyze from @v3 to @v4

Fixes #21156
2026-01-15 16:46:25 -05:00
Martin Hauser
1c46215cd5 feat(extras): Allow updates to data_source and data_file via API
Adds support for PATCHing ConfigContext and ConfigContextProfile with
integer IDs for `data_source` and `data_file`.
Adds regression tests to validate assignment and API functionality.

Fixes #20933
2026-01-15 14:37:16 -05:00
Arthur
8cbfe94fba fix performance regression for Site save, use bulk_update for cached fields 2026-01-14 16:30:40 -08:00
Jeremy Stretch
fff99fd3ff Fixes #21124: Fix rear port selection when creating front ports on a module type 2026-01-14 09:46:04 -05:00
Martin Hauser
f4892caa51 fix(ipam): Prevent reassignment of OOB IPs
Disable reassignment of IP addresses designated as primary or OOB for
parent objects. Adds validation to block changes when an IP is marked as
the OOB IP.

Fixes #21050
2026-01-13 18:13:31 +01:00
Jeremy Stretch
a54ad24b47 Fixes #21097: Fix comparison lookups for ID filters in GraphQL API 2026-01-08 16:34:13 -05:00
14 changed files with 178 additions and 103 deletions

View File

@@ -30,13 +30,13 @@ jobs:
uses: actions/checkout@v4
- name: Initialize CodeQL
uses: github/codeql-action/init@v3
uses: github/codeql-action/init@v4
with:
languages: ${{ matrix.language }}
build-mode: ${{ matrix.build-mode }}
config-file: .github/codeql/codeql-config.yml
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3
uses: github/codeql-action/analyze@v4
with:
category: "/language:${{matrix.language}}"

View File

@@ -44,3 +44,4 @@ class DataFileSerializer(NetBoxModelSerializer):
'id', 'url', 'display_url', 'display', 'source', 'path', 'last_updated', 'size', 'hash',
]
brief_fields = ('id', 'url', 'display', 'path')
read_only_fields = ['path', 'last_updated', 'size', 'hash']

View File

@@ -140,9 +140,6 @@ class FrontPortFormMixin(forms.Form):
widget=forms.SelectMultiple(attrs={'size': 8})
)
port_mapping_model = PortMapping
parent_field = 'device'
def clean(self):
super().clean()
@@ -203,3 +200,22 @@ class FrontPortFormMixin(forms.Form):
using=connection,
update_fields=None
)
def _get_rear_port_choices(self, parent_filter, front_port):
"""
Return a list of choices representing each available rear port & position pair on the parent object (identified
by a Q filter), excluding those assigned to the specified instance.
"""
occupied_rear_port_positions = [
f'{mapping.rear_port_id}:{mapping.rear_port_position}'
for mapping in self.port_mapping_model.objects.filter(parent_filter).exclude(front_port=front_port.pk)
]
choices = []
for rear_port in self.rear_port_model.objects.filter(parent_filter):
for i in range(1, rear_port.positions + 1):
pair_id = f'{rear_port.pk}:{i}'
if pair_id not in occupied_rear_port_positions:
pair_label = f'{rear_port.name}:{i}'
choices.append((pair_id, pair_label))
return choices

View File

@@ -1124,9 +1124,8 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
),
)
# Override FrontPortFormMixin attrs
port_mapping_model = PortTemplateMapping
parent_field = 'device_type'
rear_port_model = RearPortTemplate
class Meta:
model = FrontPortTemplate
@@ -1137,13 +1136,14 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Populate rear port choices based on parent DeviceType or ModuleType
if device_type_id := self.data.get('device_type') or self.initial.get('device_type'):
device_type = DeviceType.objects.get(pk=device_type_id)
parent_filter = Q(device_type=device_type_id)
elif module_type_id := self.data.get('module_type') or self.initial.get('module_type'):
parent_filter = Q(module_type=module_type_id)
else:
return
# Populate rear port choices
self.fields['rear_ports'].choices = self._get_rear_port_choices(device_type, self.instance)
self.fields['rear_ports'].choices = self._get_rear_port_choices(parent_filter, self.instance)
# Set initial rear port mappings
if self.instance.pk:
@@ -1152,27 +1152,6 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
for mapping in PortTemplateMapping.objects.filter(front_port_id=self.instance.pk)
]
def _get_rear_port_choices(self, device_type, front_port):
"""
Return a list of choices representing each available rear port & position pair on the device type, excluding
those assigned to the specified instance.
"""
occupied_rear_port_positions = [
f'{mapping.rear_port_id}:{mapping.rear_port_position}'
for mapping in device_type.port_mappings.exclude(front_port=front_port.pk)
]
choices = []
for rear_port in RearPortTemplate.objects.filter(device_type=device_type):
for i in range(1, rear_port.positions + 1):
pair_id = f'{rear_port.pk}:{i}'
if pair_id not in occupied_rear_port_positions:
pair_label = f'{rear_port.name}:{i}'
choices.append(
(pair_id, pair_label)
)
return choices
class RearPortTemplateForm(ModularComponentTemplateForm):
fieldsets = (
@@ -1619,6 +1598,9 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
),
)
port_mapping_model = PortMapping
rear_port_model = RearPort
class Meta:
model = FrontPort
fields = [
@@ -1629,13 +1611,12 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Populate rear port choices
if device_id := self.data.get('device') or self.initial.get('device'):
device = Device.objects.get(pk=device_id)
parent_filter = Q(device=device_id)
else:
return
# Populate rear port choices
self.fields['rear_ports'].choices = self._get_rear_port_choices(device, self.instance)
self.fields['rear_ports'].choices = self._get_rear_port_choices(parent_filter, self.instance)
# Set initial rear port mappings
if self.instance.pk:
@@ -1644,27 +1625,6 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
for mapping in PortMapping.objects.filter(front_port_id=self.instance.pk)
]
def _get_rear_port_choices(self, device, front_port):
"""
Return a list of choices representing each available rear port & position pair on the device, excluding those
assigned to the specified instance.
"""
occupied_rear_port_positions = [
f'{mapping.rear_port_id}:{mapping.rear_port_position}'
for mapping in device.port_mappings.exclude(front_port=front_port.pk)
]
choices = []
for rear_port in RearPort.objects.filter(device=device):
for i in range(1, rear_port.positions + 1):
pair_id = f'{rear_port.pk}:{i}'
if pair_id not in occupied_rear_port_positions:
pair_label = f'{rear_port.name}:{i}'
choices.append(
(pair_id, pair_label)
)
return choices
class RearPortForm(ModularDeviceComponentForm):
fieldsets = (
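Note on the refactor above: the per-model _get_rear_port_choices() implementations are collapsed into FrontPortFormMixin, and callers now pass a Q filter describing the parent (Q(device=...), Q(device_type=...) or Q(module_type=...)) instead of a concrete object, which is what lets front ports on a module type find their rear ports. The helper returns a flat list of ("<rear_port_pk>:<position>", "<name>:<position>") pairs with already-mapped positions removed; a toy illustration with made-up IDs, not data from this PR:

# Illustration only -- the shape of the choices produced by _get_rear_port_choices().
# Assume a rear port with pk=17, name="Rear Port 1" and positions=4, where position 2
# is already mapped to another front port. The helper would yield:
choices = [
    ("17:1", "Rear Port 1:1"),
    ("17:3", "Rear Port 1:3"),
    ("17:4", "Rear Port 1:4"),
]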

View File

@@ -211,12 +211,16 @@ def sync_cached_scope_fields(instance, created, **kwargs):
for model in (Prefix, Cluster, WirelessLAN):
qs = model.objects.filter(**filters)
# Bulk update cached fields to avoid O(N) performance issues with large datasets.
# This does not trigger post_save signals, avoiding spurious change log entries.
objects_to_update = []
for obj in qs:
# Recompute cache using the same logic as save()
obj.cache_related_objects()
obj.save(update_fields=[
'_location',
'_site',
'_site_group',
'_region',
])
objects_to_update.append(obj)
if objects_to_update:
model.objects.bulk_update(
objects_to_update,
['_location', '_site', '_site_group', '_region']
)

View File

@@ -1845,6 +1845,7 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
class ModuleTypeBulkRenameView(generic.BulkRenameView):
queryset = ModuleType.objects.all()
filterset = filtersets.ModuleTypeFilterSet
field_name = 'model'
@register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)

View File

@@ -28,7 +28,7 @@ class ConfigContextProfileSerializer(PrimaryModelSerializer):
)
data_file = DataFileSerializer(
nested=True,
read_only=True
required=False
)
class Meta:
@@ -143,7 +143,7 @@ class ConfigContextSerializer(OwnerMixin, ChangeLogMessageSerializer, ValidatedM
)
data_file = DataFileSerializer(
nested=True,
read_only=True
required=False
)
class Meta:
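With data_source and data_file now accepted on write (required=False instead of read_only=True), a plain integer-ID PATCH works against both config contexts and config context profiles. A minimal sketch of such a request; the host, token, and object IDs below are placeholders, not values from this PR:

# Minimal sketch (not part of the PR): assign an existing DataSource (pk=1) and
# DataFile (pk=3) to the config context with pk=42 via the REST API.
import requests

NETBOX_URL = "https://netbox.example.com"  # placeholder
HEADERS = {
    "Authorization": "Token 0123456789abcdef0123456789abcdef01234567",  # placeholder token
    "Content-Type": "application/json",
}

resp = requests.patch(
    f"{NETBOX_URL}/api/extras/config-contexts/42/",
    json={"data_source": 1, "data_file": 3},
    headers=HEADERS,
    timeout=10,
)
resp.raise_for_status()
print(resp.json()["data_file"]["id"])  # the nested representation is still returned on read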

View File

@@ -1,4 +1,5 @@
import datetime
import hashlib
from django.contrib.contenttypes.models import ContentType
from django.urls import reverse
@@ -7,7 +8,7 @@ from rest_framework import status
from core.choices import ManagedFileRootPathChoices
from core.events import *
from core.models import ObjectType
from core.models import DataFile, DataSource, ObjectType
from dcim.models import Device, DeviceRole, DeviceType, Manufacturer, Rack, Location, RackRole, Site
from extras.choices import *
from extras.models import *
@@ -731,6 +732,51 @@ class ConfigContextProfileTest(APIViewTestCases.APIViewTestCase):
)
ConfigContextProfile.objects.bulk_create(profiles)
def test_update_data_source_and_data_file(self):
"""
Regression test: Ensure data_source and data_file can be assigned via the API.
This specifically covers PATCHing a ConfigContext with integer IDs for both fields.
"""
self.add_permissions(
'core.view_datafile',
'core.view_datasource',
'extras.view_configcontextprofile',
'extras.change_configcontextprofile',
)
config_context_profile = ConfigContextProfile.objects.first()
# Create a data source and file
datasource = DataSource.objects.create(
name='Data Source 1',
type='local',
source_url='file:///tmp/netbox-datasource/',
)
# Generate a valid dummy YAML file
file_data = b'profile: configcontext\n'
datafile = DataFile.objects.create(
source=datasource,
path='dir1/file1.yml',
last_updated=now(),
size=len(file_data),
hash=hashlib.sha256(file_data).hexdigest(),
data=file_data,
)
url = self._get_detail_url(config_context_profile)
payload = {
'data_source': datasource.pk,
'data_file': datafile.pk,
}
response = self.client.patch(url, payload, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
config_context_profile.refresh_from_db()
self.assertEqual(config_context_profile.data_source_id, datasource.pk)
self.assertEqual(config_context_profile.data_file_id, datafile.pk)
self.assertEqual(response.data['data_source']['id'], datasource.pk)
self.assertEqual(response.data['data_file']['id'], datafile.pk)
class ConfigContextTest(APIViewTestCases.APIViewTestCase):
model = ConfigContext
@@ -812,6 +858,51 @@ class ConfigContextTest(APIViewTestCases.APIViewTestCase):
rendered_context = device.get_config_context()
self.assertEqual(rendered_context['bar'], 456)
def test_update_data_source_and_data_file(self):
"""
Regression test: Ensure data_source and data_file can be assigned via the API.
This specifically covers PATCHing a ConfigContext with integer IDs for both fields.
"""
self.add_permissions(
'core.view_datafile',
'core.view_datasource',
'extras.view_configcontext',
'extras.change_configcontext',
)
config_context = ConfigContext.objects.first()
# Create a data source and file
datasource = DataSource.objects.create(
name='Data Source 1',
type='local',
source_url='file:///tmp/netbox-datasource/',
)
# Generate a valid dummy YAML file
file_data = b'context: config\n'
datafile = DataFile.objects.create(
source=datasource,
path='dir1/file1.yml',
last_updated=now(),
size=len(file_data),
hash=hashlib.sha256(file_data).hexdigest(),
data=file_data,
)
url = self._get_detail_url(config_context)
payload = {
'data_source': datasource.pk,
'data_file': datafile.pk,
}
response = self.client.patch(url, payload, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
config_context.refresh_from_db()
self.assertEqual(config_context.data_source_id, datasource.pk)
self.assertEqual(config_context.data_file_id, datafile.pk)
self.assertEqual(response.data['data_source']['id'], datasource.pk)
self.assertEqual(response.data['data_file']['id'], datafile.pk)
class ConfigTemplateTest(APIViewTestCases.APIViewTestCase):
model = ConfigTemplate

View File

@@ -372,8 +372,8 @@ class IPAddressForm(TenancyForm, PrimaryModelForm):
'virtual_machine_id': instance.assigned_object.virtual_machine.pk,
})
# Disable object assignment fields if the IP address is designated as primary
if self.initial.get('primary_for_parent'):
# Disable object assignment fields if the IP address is designated as primary or OOB
if self.initial.get('primary_for_parent') or self.initial.get('oob_for_parent'):
self.fields['interface'].disabled = True
self.fields['vminterface'].disabled = True
self.fields['fhrpgroup'].disabled = True

View File

@@ -940,6 +940,13 @@ class IPAddress(ContactsMixin, PrimaryModel):
_("Cannot reassign IP address while it is designated as the primary IP for the parent object")
)
# can't use is_oob_ip as self.assigned_object might be changed
if hasattr(original_parent, 'oob_ip') and original_parent.oob_ip_id == self.pk:
if parent != original_parent:
raise ValidationError(
_("Cannot reassign IP address while it is designated as the OOB IP for the parent object")
)
# Validate IP status selection
if self.status == IPAddressStatusChoices.STATUS_SLAAC and self.family != 6:
raise ValidationError({
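The new clean() guard mirrors the existing primary-IP check directly above it: once an address is designated as a parent's oob_ip, moving it to a different parent fails validation. A rough shell-session sketch of the behaviour; the object names are illustrative, not taken from this diff:

# Illustrative only -- exercises the validation added above via full_clean().
from django.core.exceptions import ValidationError
from dcim.models import Device, Interface

device = Device.objects.get(name="edge-router-1")                           # hypothetical device
other_if = Interface.objects.get(device__name="edge-router-2", name="eth0")  # hypothetical target

ip = device.oob_ip                   # the address currently designated as the device's OOB IP
ip.assigned_object = other_if        # attempt to reassign it to another parent

try:
    ip.full_clean()                  # invokes IPAddress.clean(), which now rejects this
except ValidationError as exc:
    print(exc)  # "Cannot reassign IP address while it is designated as the OOB IP for the parent object"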

View File

@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING
import strawberry_django
from strawberry import ID
from strawberry_django import FilterLookup
from strawberry_django import ComparisonFilterLookup, FilterLookup
from core.graphql.filter_mixins import ChangeLoggingMixin
from extras.graphql.filter_mixins import CustomFieldsFilterMixin, JournalEntriesFilterMixin, TagsFilterMixin
@@ -23,7 +23,7 @@ __all__ = (
@dataclass
class BaseModelFilter:
id: FilterLookup[ID] | None = strawberry_django.filter_field()
id: ComparisonFilterLookup[ID] | None = strawberry_django.filter_field()
class ChangeLoggedModelFilter(ChangeLoggingMixin, BaseModelFilter):
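For context, switching the id field from FilterLookup to ComparisonFilterLookup is what makes comparison operators usable on ID filters. A hedged sketch of the kind of GraphQL query this enables, issued over HTTP; the endpoint, token, and the exact lookup names exposed by strawberry-django are assumptions, not taken from this diff:

# Hypothetical query sketch -- filter a device list by ID using a comparison lookup.
import requests

query = """
{
  device_list(filters: {id: {gte: "100"}}) {
    id
    name
  }
}
"""

resp = requests.post(
    "https://netbox.example.com/graphql/",  # placeholder host
    json={"query": query},
    headers={"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"},  # placeholder token
    timeout=10,
)
print(resp.json())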

View File

@@ -164,7 +164,7 @@ class ObjectAttributesPanel(ObjectPanel, metaclass=ObjectAttributesPanelMeta):
"""
label = name[:1].upper() + name[1:]
label = label.replace('_', ' ')
return label
return _(label)
def get_context(self, context):
# Determine which attributes to display in the panel based on only/exclude args

View File

@@ -8,7 +8,7 @@ msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2026-01-15 05:05+0000\n"
"POT-Creation-Date: 2026-01-16 05:04+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
@@ -2435,7 +2435,7 @@ msgstr ""
msgid "Change logging is not supported for this object type ({type})."
msgstr ""
#: netbox/core/models/config.py:21 netbox/core/models/data.py:282
#: netbox/core/models/config.py:21 netbox/core/models/data.py:284
#: netbox/core/models/files.py:29 netbox/core/models/jobs.py:60
#: netbox/extras/models/models.py:847 netbox/extras/models/notifications.py:39
#: netbox/extras/models/notifications.py:195
@@ -2541,58 +2541,63 @@ msgstr ""
msgid "Unknown backend type: {type}"
msgstr ""
#: netbox/core/models/data.py:180
#: netbox/core/models/data.py:131
#, python-brace-format
msgid "URLs for local sources must start with {scheme} (or specify no scheme)"
msgstr ""
#: netbox/core/models/data.py:182
msgid "Cannot initiate sync; syncing already in progress."
msgstr ""
#: netbox/core/models/data.py:193
#: netbox/core/models/data.py:195
msgid ""
"There was an error initializing the backend. A dependency needs to be "
"installed: "
msgstr ""
#: netbox/core/models/data.py:286 netbox/core/models/files.py:33
#: netbox/core/models/data.py:288 netbox/core/models/files.py:33
#: netbox/netbox/models/features.py:67
msgid "last updated"
msgstr ""
#: netbox/core/models/data.py:296 netbox/dcim/models/cables.py:622
#: netbox/core/models/data.py:298 netbox/dcim/models/cables.py:622
msgid "path"
msgstr ""
#: netbox/core/models/data.py:299
#: netbox/core/models/data.py:301
msgid "File path relative to the data source's root"
msgstr ""
#: netbox/core/models/data.py:303 netbox/ipam/models/ip.py:507
#: netbox/core/models/data.py:305 netbox/ipam/models/ip.py:507
msgid "size"
msgstr ""
#: netbox/core/models/data.py:306
#: netbox/core/models/data.py:308
msgid "hash"
msgstr ""
#: netbox/core/models/data.py:310
#: netbox/core/models/data.py:312
msgid "Length must be 64 hexadecimal characters."
msgstr ""
#: netbox/core/models/data.py:312
#: netbox/core/models/data.py:314
msgid "SHA256 hash of the file data"
msgstr ""
#: netbox/core/models/data.py:326
#: netbox/core/models/data.py:328
msgid "data file"
msgstr ""
#: netbox/core/models/data.py:327
#: netbox/core/models/data.py:329
msgid "data files"
msgstr ""
#: netbox/core/models/data.py:400
#: netbox/core/models/data.py:402
msgid "auto sync record"
msgstr ""
#: netbox/core/models/data.py:401
#: netbox/core/models/data.py:403
msgid "auto sync records"
msgstr ""
@@ -11240,7 +11245,13 @@ msgid ""
"parent object"
msgstr ""
#: netbox/ipam/models/ip.py:946
#: netbox/ipam/models/ip.py:947
msgid ""
"Cannot reassign IP address while it is designated as the OOB IP for the "
"parent object"
msgstr ""
#: netbox/ipam/models/ip.py:953
msgid "Only IPv6 addresses can be assigned SLAAC status"
msgstr ""

View File

@@ -1,16 +0,0 @@
#!/bin/sh
# TODO: Remove this file in NetBox v4.3
# This script has been maintained to ease transition to the pre-commit tool.
exec 1>&2
EXIT=0
RED='\033[0;31m'
YELLOW='\033[0;33m'
NOCOLOR='\033[0m'
printf "${YELLOW}The pre-commit hook script is obsolete. Please use pre-commit instead:${NOCOLOR}\n"
printf " pip install pre-commit\n"
printf " pre-commit install${NOCOLOR}\n"
exit 1