Mirror of https://github.com/netbox-community/netbox.git — synced 2026-01-16 08:42:17 -06:00

Compare commits: 18 commits, 20911-drop...main
| SHA1 |
|---|
| f5f0c19860 |
| 8da9b11ab8 |
| ca67fa9999 |
| eff768192e |
| 1e297d55ee |
| fdb987ef91 |
| b5a23db43c |
| 366b69aff7 |
| c3e8c5e69c |
| b55f36469d |
| 1c46215cd5 |
| 7fded2fd87 |
| 0ddc5805c4 |
| c1bbc026e2 |
| 8cbfe94fba |
| fff99fd3ff |
| f4892caa51 |
| a54ad24b47 |
.github/workflows/codeql.yml (4 changes)
@@ -30,13 +30,13 @@ jobs:
       uses: actions/checkout@v4
 
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v3
+      uses: github/codeql-action/init@v4
       with:
         languages: ${{ matrix.language }}
        build-mode: ${{ matrix.build-mode }}
         config-file: .github/codeql/codeql-config.yml
 
     - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v3
+      uses: github/codeql-action/analyze@v4
       with:
         category: "/language:${{matrix.language}}"
@@ -44,3 +44,4 @@ class DataFileSerializer(NetBoxModelSerializer):
             'id', 'url', 'display_url', 'display', 'source', 'path', 'last_updated', 'size', 'hash',
         ]
         brief_fields = ('id', 'url', 'display', 'path')
+        read_only_fields = ['path', 'last_updated', 'size', 'hash']
@@ -12,7 +12,7 @@ from django.core.validators import RegexValidator
 from django.db import models
 from django.urls import reverse
 from django.utils import timezone
-from django.utils.translation import gettext as _
+from django.utils.translation import gettext_lazy as _
 
 from netbox.constants import CENSOR_TOKEN, CENSOR_TOKEN_CHANGED
 from netbox.models import PrimaryModel
@@ -128,7 +128,9 @@ class DataSource(JobsMixin, PrimaryModel):
         # Ensure URL scheme matches selected type
         if self.backend_class.is_local and self.url_scheme not in ('file', ''):
             raise ValidationError({
-                'source_url': "URLs for local sources must start with file:// (or specify no scheme)"
+                'source_url': _("URLs for local sources must start with {scheme} (or specify no scheme)").format(
+                    scheme='file://'
+                )
             })
 
     def save(self, *args, **kwargs):
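For context on the check being translated above: a local data source may only use a `file` scheme or no scheme at all. Below is a minimal standalone sketch of that scheme check, using only the standard library and a hypothetical `is_local` flag in place of NetBox's `DataSource` model:

```python
from urllib.parse import urlsplit


def validate_source_url(source_url: str, is_local: bool) -> None:
    """Reject local data-source URLs whose scheme is neither 'file' nor empty."""
    scheme = urlsplit(source_url).scheme
    if is_local and scheme not in ('file', ''):
        raise ValueError(
            "URLs for local sources must start with {scheme} (or specify no scheme)".format(scheme='file://')
        )


validate_source_url('file:///opt/netbox-data/', is_local=True)   # accepted
validate_source_url('/opt/netbox-data/', is_local=True)          # accepted (no scheme)
try:
    validate_source_url('https://example.com/repo.git', is_local=True)
except ValueError as exc:
    print(exc)  # rejected: wrong scheme for a local source
```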
@@ -140,9 +140,6 @@ class FrontPortFormMixin(forms.Form):
         widget=forms.SelectMultiple(attrs={'size': 8})
     )
 
-    port_mapping_model = PortMapping
-    parent_field = 'device'
-
     def clean(self):
         super().clean()
 
@@ -203,3 +200,22 @@ class FrontPortFormMixin(forms.Form):
                 using=connection,
                 update_fields=None
             )
+
+    def _get_rear_port_choices(self, parent_filter, front_port):
+        """
+        Return a list of choices representing each available rear port & position pair on the parent object (identified
+        by a Q filter), excluding those assigned to the specified instance.
+        """
+        occupied_rear_port_positions = [
+            f'{mapping.rear_port_id}:{mapping.rear_port_position}'
+            for mapping in self.port_mapping_model.objects.filter(parent_filter).exclude(front_port=front_port.pk)
+        ]
+
+        choices = []
+        for rear_port in self.rear_port_model.objects.filter(parent_filter):
+            for i in range(1, rear_port.positions + 1):
+                pair_id = f'{rear_port.pk}:{i}'
+                if pair_id not in occupied_rear_port_positions:
+                    pair_label = f'{rear_port.name}:{i}'
+                    choices.append((pair_id, pair_label))
+        return choices
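The new mixin method enumerates every `rear_port:position` pair on the parent and drops pairs already claimed by another front port. The following is an ORM-free sketch of that pairing logic, using made-up `RearPort` and `Mapping` stand-ins rather than the NetBox models; it mirrors the loop above:

```python
from dataclasses import dataclass


@dataclass
class RearPort:          # stand-in for RearPort / RearPortTemplate
    pk: int
    name: str
    positions: int


@dataclass
class Mapping:           # stand-in for PortMapping / PortTemplateMapping
    rear_port_id: int
    rear_port_position: int


def get_rear_port_choices(rear_ports, mappings):
    """Return (value, label) pairs for every rear port position not already mapped."""
    occupied = {f'{m.rear_port_id}:{m.rear_port_position}' for m in mappings}
    choices = []
    for rear_port in rear_ports:
        for i in range(1, rear_port.positions + 1):
            pair_id = f'{rear_port.pk}:{i}'
            if pair_id not in occupied:
                choices.append((pair_id, f'{rear_port.name}:{i}'))
    return choices


rear_ports = [RearPort(pk=1, name='Rear Port 1', positions=2)]
mappings = [Mapping(rear_port_id=1, rear_port_position=1)]   # position 1 already in use
print(get_rear_port_choices(rear_ports, mappings))           # [('1:2', 'Rear Port 1:2')]
```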
@@ -1124,9 +1124,8 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
         ),
     )
 
-    # Override FrontPortFormMixin attrs
     port_mapping_model = PortTemplateMapping
-    parent_field = 'device_type'
+    rear_port_model = RearPortTemplate
 
     class Meta:
         model = FrontPortTemplate
@@ -1137,13 +1136,14 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
+        # Populate rear port choices based on parent DeviceType or ModuleType
         if device_type_id := self.data.get('device_type') or self.initial.get('device_type'):
-            device_type = DeviceType.objects.get(pk=device_type_id)
+            parent_filter = Q(device_type=device_type_id)
+        elif module_type_id := self.data.get('module_type') or self.initial.get('module_type'):
+            parent_filter = Q(module_type=module_type_id)
         else:
             return
-
-        # Populate rear port choices
-        self.fields['rear_ports'].choices = self._get_rear_port_choices(device_type, self.instance)
+        self.fields['rear_ports'].choices = self._get_rear_port_choices(parent_filter, self.instance)
 
         # Set initial rear port mappings
         if self.instance.pk:
@@ -1152,27 +1152,6 @@ class FrontPortTemplateForm(FrontPortFormMixin, ModularComponentTemplateForm):
                 for mapping in PortTemplateMapping.objects.filter(front_port_id=self.instance.pk)
             ]
 
-    def _get_rear_port_choices(self, device_type, front_port):
-        """
-        Return a list of choices representing each available rear port & position pair on the device type, excluding
-        those assigned to the specified instance.
-        """
-        occupied_rear_port_positions = [
-            f'{mapping.rear_port_id}:{mapping.rear_port_position}'
-            for mapping in device_type.port_mappings.exclude(front_port=front_port.pk)
-        ]
-
-        choices = []
-        for rear_port in RearPortTemplate.objects.filter(device_type=device_type):
-            for i in range(1, rear_port.positions + 1):
-                pair_id = f'{rear_port.pk}:{i}'
-                if pair_id not in occupied_rear_port_positions:
-                    pair_label = f'{rear_port.name}:{i}'
-                    choices.append(
-                        (pair_id, pair_label)
-                    )
-        return choices
-
 
 class RearPortTemplateForm(ModularComponentTemplateForm):
     fieldsets = (
@@ -1619,6 +1598,9 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
         ),
     )
 
+    port_mapping_model = PortMapping
+    rear_port_model = RearPort
+
     class Meta:
         model = FrontPort
         fields = [
@@ -1629,13 +1611,12 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
 
+        # Populate rear port choices
         if device_id := self.data.get('device') or self.initial.get('device'):
-            device = Device.objects.get(pk=device_id)
+            parent_filter = Q(device=device_id)
         else:
             return
-
-        # Populate rear port choices
-        self.fields['rear_ports'].choices = self._get_rear_port_choices(device, self.instance)
+        self.fields['rear_ports'].choices = self._get_rear_port_choices(parent_filter, self.instance)
 
         # Set initial rear port mappings
         if self.instance.pk:
@@ -1644,27 +1625,6 @@ class FrontPortForm(FrontPortFormMixin, ModularDeviceComponentForm):
                 for mapping in PortMapping.objects.filter(front_port_id=self.instance.pk)
             ]
 
-    def _get_rear_port_choices(self, device, front_port):
-        """
-        Return a list of choices representing each available rear port & position pair on the device, excluding those
-        assigned to the specified instance.
-        """
-        occupied_rear_port_positions = [
-            f'{mapping.rear_port_id}:{mapping.rear_port_position}'
-            for mapping in device.port_mappings.exclude(front_port=front_port.pk)
-        ]
-
-        choices = []
-        for rear_port in RearPort.objects.filter(device=device):
-            for i in range(1, rear_port.positions + 1):
-                pair_id = f'{rear_port.pk}:{i}'
-                if pair_id not in occupied_rear_port_positions:
-                    pair_label = f'{rear_port.name}:{i}'
-                    choices.append(
-                        (pair_id, pair_label)
-                    )
-        return choices
-
 
 class RearPortForm(ModularDeviceComponentForm):
     fieldsets = (
@@ -211,12 +211,16 @@ def sync_cached_scope_fields(instance, created, **kwargs):
     for model in (Prefix, Cluster, WirelessLAN):
         qs = model.objects.filter(**filters)
 
+        # Bulk update cached fields to avoid O(N) performance issues with large datasets.
+        # This does not trigger post_save signals, avoiding spurious change log entries.
+        objects_to_update = []
         for obj in qs:
             # Recompute cache using the same logic as save()
             obj.cache_related_objects()
-            obj.save(update_fields=[
-                '_location',
-                '_site',
-                '_site_group',
-                '_region',
-            ])
+            objects_to_update.append(obj)
+
+        if objects_to_update:
+            model.objects.bulk_update(
+                objects_to_update,
+                ['_location', '_site', '_site_group', '_region']
+            )
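The change above replaces one `save()` per matched object with a single `bulk_update()` call and, per the diff comment, deliberately bypasses `post_save` signals to avoid spurious change log entries. Django's `bulk_update()` only works inside a configured project, so the following is just a plain-Python sketch of the collect-then-flush pattern, with a hypothetical `FakeStore` standing in for the database layer:

```python
class FakeStore:
    """Stand-in for the database layer: counts how many write calls are issued."""

    def __init__(self):
        self.write_calls = 0

    def bulk_update(self, objs, fields):
        self.write_calls += 1          # one round trip for the whole batch
        return len(objs)


def sync_cached_fields(objs, store):
    objects_to_update = []
    for obj in objs:
        obj['_site'] = obj['site']     # recompute the cached value in memory
        objects_to_update.append(obj)

    if objects_to_update:              # a single write instead of len(objs) writes
        store.bulk_update(objects_to_update, ['_site'])


store = FakeStore()
prefixes = [{'site': f'site-{i}', '_site': None} for i in range(1000)]
sync_cached_fields(prefixes, store)
print(store.write_calls)               # 1
```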
@@ -1845,6 +1845,7 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
 class ModuleTypeBulkRenameView(generic.BulkRenameView):
     queryset = ModuleType.objects.all()
     filterset = filtersets.ModuleTypeFilterSet
+    field_name = 'model'
 
 
 @register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)
@@ -28,7 +28,7 @@ class ConfigContextProfileSerializer(PrimaryModelSerializer):
     )
     data_file = DataFileSerializer(
         nested=True,
-        read_only=True
+        required=False
     )
 
     class Meta:
@@ -143,7 +143,7 @@ class ConfigContextSerializer(OwnerMixin, ChangeLogMessageSerializer, ValidatedM
     )
     data_file = DataFileSerializer(
         nested=True,
-        read_only=True
+        required=False
     )
 
     class Meta:
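Both serializer hunks swap `read_only=True` for `required=False` on `data_file`, which is what lets the regression tests further down PATCH the field: a read-only field is silently dropped from write payloads, while an optional field is accepted and validated when supplied. A minimal sketch with plain DRF fields, assuming djangorestframework is installed; the `settings.configure()` call and `SketchSerializer` are illustrative only, not NetBox code:

```python
import django
from django.conf import settings

# Just enough configuration to use DRF serializers outside of a project.
settings.configure()
django.setup()

from rest_framework import serializers


class SketchSerializer(serializers.Serializer):
    # read_only=True: rendered in output, but silently dropped from any write payload
    locked = serializers.IntegerField(read_only=True)
    # required=False: may be omitted, but is validated and kept when supplied
    optional = serializers.IntegerField(required=False)


s = SketchSerializer(data={'locked': 1, 'optional': 2})
s.is_valid(raise_exception=True)
print(dict(s.validated_data))  # {'optional': 2} -- 'locked' ignored, 'optional' accepted
```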
@@ -1,4 +1,5 @@
 import datetime
+import hashlib
 
 from django.contrib.contenttypes.models import ContentType
 from django.urls import reverse
@@ -7,7 +8,7 @@ from rest_framework import status
 
 from core.choices import ManagedFileRootPathChoices
 from core.events import *
-from core.models import ObjectType
+from core.models import DataFile, DataSource, ObjectType
 from dcim.models import Device, DeviceRole, DeviceType, Manufacturer, Rack, Location, RackRole, Site
 from extras.choices import *
 from extras.models import *
@@ -731,6 +732,51 @@ class ConfigContextProfileTest(APIViewTestCases.APIViewTestCase):
         )
         ConfigContextProfile.objects.bulk_create(profiles)
 
+    def test_update_data_source_and_data_file(self):
+        """
+        Regression test: Ensure data_source and data_file can be assigned via the API.
+
+        This specifically covers PATCHing a ConfigContext with integer IDs for both fields.
+        """
+        self.add_permissions(
+            'core.view_datafile',
+            'core.view_datasource',
+            'extras.view_configcontextprofile',
+            'extras.change_configcontextprofile',
+        )
+        config_context_profile = ConfigContextProfile.objects.first()
+
+        # Create a data source and file
+        datasource = DataSource.objects.create(
+            name='Data Source 1',
+            type='local',
+            source_url='file:///tmp/netbox-datasource/',
+        )
+        # Generate a valid dummy YAML file
+        file_data = b'profile: configcontext\n'
+        datafile = DataFile.objects.create(
+            source=datasource,
+            path='dir1/file1.yml',
+            last_updated=now(),
+            size=len(file_data),
+            hash=hashlib.sha256(file_data).hexdigest(),
+            data=file_data,
+        )
+
+        url = self._get_detail_url(config_context_profile)
+        payload = {
+            'data_source': datasource.pk,
+            'data_file': datafile.pk,
+        }
+        response = self.client.patch(url, payload, format='json', **self.header)
+        self.assertHttpStatus(response, status.HTTP_200_OK)
+
+        config_context_profile.refresh_from_db()
+        self.assertEqual(config_context_profile.data_source_id, datasource.pk)
+        self.assertEqual(config_context_profile.data_file_id, datafile.pk)
+        self.assertEqual(response.data['data_source']['id'], datasource.pk)
+        self.assertEqual(response.data['data_file']['id'], datafile.pk)
+
 
 class ConfigContextTest(APIViewTestCases.APIViewTestCase):
     model = ConfigContext
@@ -812,6 +858,51 @@ class ConfigContextTest(APIViewTestCases.APIViewTestCase):
         rendered_context = device.get_config_context()
         self.assertEqual(rendered_context['bar'], 456)
 
+    def test_update_data_source_and_data_file(self):
+        """
+        Regression test: Ensure data_source and data_file can be assigned via the API.
+
+        This specifically covers PATCHing a ConfigContext with integer IDs for both fields.
+        """
+        self.add_permissions(
+            'core.view_datafile',
+            'core.view_datasource',
+            'extras.view_configcontext',
+            'extras.change_configcontext',
+        )
+        config_context = ConfigContext.objects.first()
+
+        # Create a data source and file
+        datasource = DataSource.objects.create(
+            name='Data Source 1',
+            type='local',
+            source_url='file:///tmp/netbox-datasource/',
+        )
+        # Generate a valid dummy YAML file
+        file_data = b'context: config\n'
+        datafile = DataFile.objects.create(
+            source=datasource,
+            path='dir1/file1.yml',
+            last_updated=now(),
+            size=len(file_data),
+            hash=hashlib.sha256(file_data).hexdigest(),
+            data=file_data,
+        )
+
+        url = self._get_detail_url(config_context)
+        payload = {
+            'data_source': datasource.pk,
+            'data_file': datafile.pk,
+        }
+        response = self.client.patch(url, payload, format='json', **self.header)
+        self.assertHttpStatus(response, status.HTTP_200_OK)
+
+        config_context.refresh_from_db()
+        self.assertEqual(config_context.data_source_id, datasource.pk)
+        self.assertEqual(config_context.data_file_id, datafile.pk)
+        self.assertEqual(response.data['data_source']['id'], datasource.pk)
+        self.assertEqual(response.data['data_file']['id'], datafile.pk)
+
 
 class ConfigTemplateTest(APIViewTestCases.APIViewTestCase):
     model = ConfigTemplate
@@ -372,8 +372,8 @@ class IPAddressForm(TenancyForm, PrimaryModelForm):
                 'virtual_machine_id': instance.assigned_object.virtual_machine.pk,
             })
 
-        # Disable object assignment fields if the IP address is designated as primary
-        if self.initial.get('primary_for_parent'):
+        # Disable object assignment fields if the IP address is designated as primary or OOB
+        if self.initial.get('primary_for_parent') or self.initial.get('oob_for_parent'):
             self.fields['interface'].disabled = True
             self.fields['vminterface'].disabled = True
             self.fields['fhrpgroup'].disabled = True
@@ -940,6 +940,13 @@ class IPAddress(ContactsMixin, PrimaryModel):
                         _("Cannot reassign IP address while it is designated as the primary IP for the parent object")
                     )
 
+            # can't use is_oob_ip as self.assigned_object might be changed
+            if hasattr(original_parent, 'oob_ip') and original_parent.oob_ip_id == self.pk:
+                if parent != original_parent:
+                    raise ValidationError(
+                        _("Cannot reassign IP address while it is designated as the OOB IP for the parent object")
+                    )
+
         # Validate IP status selection
         if self.status == IPAddressStatusChoices.STATUS_SLAAC and self.family != 6:
             raise ValidationError({
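As the added comment notes, `is_oob_ip` can't be used here because `self.assigned_object` may already hold the new (unsaved) assignment, so the guard compares the parent loaded from the database against the parent about to be saved. A plain-Python sketch of that guard, with a hypothetical `Device` stand-in rather than the NetBox models:

```python
class Device:                       # stand-in for the parent object
    def __init__(self, name, oob_ip_id=None):
        self.name = name
        self.oob_ip_id = oob_ip_id


def check_oob_reassignment(ip_pk, original_parent, new_parent):
    """Mirror of the clean() guard: block moving an IP that is its parent's OOB IP."""
    if hasattr(original_parent, 'oob_ip_id') and original_parent.oob_ip_id == ip_pk:
        if new_parent is not original_parent:
            raise ValueError(
                "Cannot reassign IP address while it is designated as the OOB IP for the parent object"
            )


router_a = Device('router-a', oob_ip_id=42)
router_b = Device('router-b')

check_oob_reassignment(42, router_a, router_a)      # parent unchanged: allowed
try:
    check_oob_reassignment(42, router_a, router_b)  # moving the OOB IP: rejected
except ValueError as exc:
    print(exc)
```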
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING
 
 import strawberry_django
 from strawberry import ID
-from strawberry_django import FilterLookup
+from strawberry_django import ComparisonFilterLookup, FilterLookup
 
 from core.graphql.filter_mixins import ChangeLoggingMixin
 from extras.graphql.filter_mixins import CustomFieldsFilterMixin, JournalEntriesFilterMixin, TagsFilterMixin
@@ -23,7 +23,7 @@ __all__ = (
 
 @dataclass
 class BaseModelFilter:
-    id: FilterLookup[ID] | None = strawberry_django.filter_field()
+    id: ComparisonFilterLookup[ID] | None = strawberry_django.filter_field()
 
 
 class ChangeLoggedModelFilter(ChangeLoggingMixin, BaseModelFilter):
@@ -164,7 +164,7 @@ class ObjectAttributesPanel(ObjectPanel, metaclass=ObjectAttributesPanelMeta):
         """
         label = name[:1].upper() + name[1:]
         label = label.replace('_', ' ')
-        return label
+        return _(label)
 
     def get_context(self, context):
         # Determine which attributes to display in the panel based on only/exclude args
@@ -8,7 +8,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: PACKAGE VERSION\n"
 "Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2026-01-13 05:05+0000\n"
+"POT-Creation-Date: 2026-01-16 05:04+0000\n"
 "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
 "Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
 "Language-Team: LANGUAGE <LL@li.org>\n"
@@ -2435,7 +2435,7 @@ msgstr ""
 msgid "Change logging is not supported for this object type ({type})."
 msgstr ""
 
-#: netbox/core/models/config.py:21 netbox/core/models/data.py:282
+#: netbox/core/models/config.py:21 netbox/core/models/data.py:284
 #: netbox/core/models/files.py:29 netbox/core/models/jobs.py:60
 #: netbox/extras/models/models.py:847 netbox/extras/models/notifications.py:39
 #: netbox/extras/models/notifications.py:195
@@ -2541,58 +2541,63 @@ msgstr ""
 msgid "Unknown backend type: {type}"
 msgstr ""
 
-#: netbox/core/models/data.py:180
+#: netbox/core/models/data.py:131
+#, python-brace-format
+msgid "URLs for local sources must start with {scheme} (or specify no scheme)"
+msgstr ""
+
+#: netbox/core/models/data.py:182
 msgid "Cannot initiate sync; syncing already in progress."
 msgstr ""
 
-#: netbox/core/models/data.py:193
+#: netbox/core/models/data.py:195
 msgid ""
 "There was an error initializing the backend. A dependency needs to be "
 "installed: "
 msgstr ""
 
-#: netbox/core/models/data.py:286 netbox/core/models/files.py:33
+#: netbox/core/models/data.py:288 netbox/core/models/files.py:33
 #: netbox/netbox/models/features.py:67
 msgid "last updated"
 msgstr ""
 
-#: netbox/core/models/data.py:296 netbox/dcim/models/cables.py:622
+#: netbox/core/models/data.py:298 netbox/dcim/models/cables.py:622
 msgid "path"
 msgstr ""
 
-#: netbox/core/models/data.py:299
+#: netbox/core/models/data.py:301
 msgid "File path relative to the data source's root"
 msgstr ""
 
-#: netbox/core/models/data.py:303 netbox/ipam/models/ip.py:507
+#: netbox/core/models/data.py:305 netbox/ipam/models/ip.py:507
 msgid "size"
 msgstr ""
 
-#: netbox/core/models/data.py:306
+#: netbox/core/models/data.py:308
 msgid "hash"
 msgstr ""
 
-#: netbox/core/models/data.py:310
+#: netbox/core/models/data.py:312
 msgid "Length must be 64 hexadecimal characters."
 msgstr ""
 
-#: netbox/core/models/data.py:312
+#: netbox/core/models/data.py:314
 msgid "SHA256 hash of the file data"
 msgstr ""
 
-#: netbox/core/models/data.py:326
+#: netbox/core/models/data.py:328
 msgid "data file"
 msgstr ""
 
-#: netbox/core/models/data.py:327
+#: netbox/core/models/data.py:329
 msgid "data files"
 msgstr ""
 
-#: netbox/core/models/data.py:400
+#: netbox/core/models/data.py:402
 msgid "auto sync record"
 msgstr ""
 
-#: netbox/core/models/data.py:401
+#: netbox/core/models/data.py:403
 msgid "auto sync records"
 msgstr ""
 
@@ -11240,7 +11245,13 @@ msgid ""
 "parent object"
 msgstr ""
 
-#: netbox/ipam/models/ip.py:946
+#: netbox/ipam/models/ip.py:947
+msgid ""
+"Cannot reassign IP address while it is designated as the OOB IP for the "
+"parent object"
+msgstr ""
+
+#: netbox/ipam/models/ip.py:953
 msgid "Only IPv6 addresses can be assigned SLAAC status"
 msgstr ""
 
@@ -12489,8 +12500,8 @@ msgstr ""
 msgid "Delete Selected"
 msgstr ""
 
-#: netbox/netbox/plugins/navigation.py:55
-#: netbox/netbox/plugins/navigation.py:88
+#: netbox/netbox/plugins/navigation.py:53
+#: netbox/netbox/plugins/navigation.py:89
 msgid "Permissions must be passed as a tuple or list."
 msgstr ""
 
@@ -12498,7 +12509,7 @@ msgstr ""
 msgid "Buttons must be passed as a tuple or list."
 msgstr ""
 
-#: netbox/netbox/plugins/navigation.py:92
+#: netbox/netbox/plugins/navigation.py:95
 msgid "Button color must be a choice within ButtonColorChoices."
 msgstr ""
 
@@ -1,16 +0,0 @@
-#!/bin/sh
-# TODO: Remove this file in NetBox v4.3
-# This script has been maintained to ease transition to the pre-commit tool.
-
-exec 1>&2
-
-EXIT=0
-RED='\033[0;31m'
-YELLOW='\033[0;33m'
-NOCOLOR='\033[0m'
-
-printf "${YELLOW}The pre-commit hook script is obsolete. Please use pre-commit instead:${NOCOLOR}\n"
-printf "  pip install pre-commit\n"
-printf "  pre-commit install${NOCOLOR}\n"
-
-exit 1