Mirror of https://github.com/netbox-community/netbox.git (synced 2025-12-27 15:47:46 -06:00)

Compare commits

32 Commits
| SHA1 |
|---|
| 7c63d001b1 |
| 93119f52c3 |
| ee2aa35cba |
| 7896a48075 |
| eb87c3f304 |
| 3acbb0a08c |
| f67cc47def |
| f7219e0672 |
| e5a975176d |
| 83ee4fb593 |
| db8271c904 |
| 5a24f99c9d |
| 9318c91405 |
| 5c6aaf2388 |
| 265f375595 |
| d95fa8dbb2 |
| 2699149016 |
| f371004809 |
| ad29402b87 |
| 598f8d034d |
| ec13a79907 |
| 21f4036782 |
| ce3738572c |
| cbb979934e |
| 642d83a4c6 |
| a06c12c6b8 |
| 60fce84c96 |
| 59afa0b41d |
| 14b246cb8a |
| f0507d00bf |
| 77b389f105 |
| 9ae53fc232 |
@@ -15,7 +15,7 @@ body:
attributes:
  label: NetBox version
  description: What version of NetBox are you currently running?
-  placeholder: v4.4.8
+  placeholder: v4.4.9
validations:
  required: true
- type: dropdown

.github/ISSUE_TEMPLATE/02-bug_report.yaml (vendored)
@@ -27,7 +27,7 @@ body:
attributes:
  label: NetBox Version
  description: What version of NetBox are you currently running?
-  placeholder: v4.4.8
+  placeholder: v4.4.9
validations:
  required: true
- type: dropdown
@@ -5,7 +5,7 @@
<a href="https://github.com/netbox-community/netbox/blob/main/LICENSE.txt"><img src="https://img.shields.io/badge/license-Apache_2.0-blue.svg" alt="License" /></a>
<a href="https://github.com/netbox-community/netbox/graphs/contributors"><img src="https://img.shields.io/github/contributors/netbox-community/netbox?color=blue" alt="Contributors" /></a>
<a href="https://github.com/netbox-community/netbox/stargazers"><img src="https://img.shields.io/github/stars/netbox-community/netbox?style=flat" alt="GitHub stars" /></a>
-<a href="https://explore.transifex.com/netbox-community/netbox/"><img src="https://img.shields.io/badge/languages-15-blue" alt="Languages supported" /></a>
+<a href="https://explore.transifex.com/netbox-community/netbox/"><img src="https://img.shields.io/badge/languages-16-blue" alt="Languages supported" /></a>
<a href="https://github.com/netbox-community/netbox/actions/workflows/ci.yml"><img src="https://github.com/netbox-community/netbox/actions/workflows/ci.yml/badge.svg" alt="CI status" /></a>
<p>
<strong><a href="https://netboxlabs.com/community/">NetBox Community</a></strong> |
@@ -2,7 +2,7 @@
"openapi": "3.0.3",
"info": {
    "title": "NetBox REST API",
-    "version": "4.4.8",
+    "version": "4.4.9",
    "license": {
        "name": "Apache v2 License"
    }
@@ -158511,6 +158511,7 @@
"fr",
"it",
"ja",
+"lv",
"nl",
"pl",
"pt",
@@ -205630,15 +205631,9 @@
"description": {
    "type": "string",
    "maxLength": 200
},
-"devicetype_count": {
-    "type": "integer",
-    "format": "int64",
-    "readOnly": true
-}
},
"required": [
-    "devicetype_count",
    "display",
    "id",
    "name",
@@ -1,5 +1,31 @@
# NetBox v4.4

+## v4.4.9 (2025-12-23)

+### Enhancements

+* [#20309](https://github.com/netbox-community/netbox/issues/20309) - Support ASDOT notation for ASN ranges
+* [#20720](https://github.com/netbox-community/netbox/issues/20720) - Add Latvian translations
+* [#20900](https://github.com/netbox-community/netbox/issues/20900) - Allow filtering custom choice fields by multiple values in the UI

+### Bug Fixes

+* [#17976](https://github.com/netbox-community/netbox/issues/17976) - Remove `devicetype_count` from nested manufacturer to correct OpenAPI schema
+* [#20011](https://github.com/netbox-community/netbox/issues/20011) - Provide a clear message when encountering duplicate object IDs during bulk import
+* [#20114](https://github.com/netbox-community/netbox/issues/20114) - Preserve `parent_bay` during device bulk import when tags are present
+* [#20491](https://github.com/netbox-community/netbox/issues/20491) - Improve handling of numeric ranges in tests
+* [#20873](https://github.com/netbox-community/netbox/issues/20873) - Fix `AttributeError` exception triggered by event rules associated with an object that supports file attachments
+* [#20875](https://github.com/netbox-community/netbox/issues/20875) - Ensure that parent object relations are cached (for filtering) on device/module components during instantiation
+* [#20876](https://github.com/netbox-community/netbox/issues/20876) - Allow editing an IP address that resides within a range marked as populated
+* [#20912](https://github.com/netbox-community/netbox/issues/20912) - Fix inconsistent clearing of `module` field on ModuleBay
+* [#20944](https://github.com/netbox-community/netbox/issues/20944) - Ensure cached scope is updated on child objects when a parent region/site/location is changed
+* [#20948](https://github.com/netbox-community/netbox/issues/20948) - Handle the deletion of related objects with `on_delete=RESTRICT` the same as `CASCADE`
+* [#20969](https://github.com/netbox-community/netbox/issues/20969) - Fix querying of front port templates by `rear_port_id`
+* [#21011](https://github.com/netbox-community/netbox/issues/21011) - Avoid writing to the database when loading active ConfigRevision
+* [#21032](https://github.com/netbox-community/netbox/issues/21032) - Avoid SQL subquery in RestrictedQuerySet where unnecessary

+---

## v4.4.8 (2025-12-09)

### Enhancements
@@ -63,16 +63,20 @@ class ConfigRevision(models.Model):
        return reverse('core:config')  # Default config view
    return reverse('core:configrevision', args=[self.pk])

-def activate(self):
+def activate(self, update_db=True):
    """
    Cache the configuration data.
+
+    Parameters:
+        update_db: Mark the ConfigRevision as active in the database (default: True)
    """
    cache.set('config', self.data, None)
    cache.set('config_version', self.pk, None)

-    # Set all instances of ConfigRevision to false and set this instance to true
-    ConfigRevision.objects.all().update(active=False)
-    ConfigRevision.objects.filter(pk=self.pk).update(active=True)
+    if update_db:
+        # Set all instances of ConfigRevision to false and set this instance to true
+        ConfigRevision.objects.all().update(active=False)
+        ConfigRevision.objects.filter(pk=self.pk).update(active=True)

activate.alters_data = True
@@ -3,7 +3,7 @@ from threading import local

from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
-from django.db.models import CASCADE
+from django.db.models import CASCADE, RESTRICT
from django.db.models.fields.reverse_related import ManyToManyRel, ManyToOneRel
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from django.dispatch import receiver, Signal

@@ -221,7 +221,7 @@ def handle_deleted_object(sender, instance, **kwargs):
obj.snapshot() # Ensure the change record includes the "before" state
if type(relation) is ManyToManyRel:
    getattr(obj, related_field_name).remove(instance)
-elif type(relation) is ManyToOneRel and relation.null and relation.on_delete is not CASCADE:
+elif type(relation) is ManyToOneRel and relation.null and relation.on_delete not in (CASCADE, RESTRICT):
    setattr(obj, related_field_name, None)
    obj.save()
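The branch above decides how the change-logging handler treats objects that still reference a deleted instance. As a minimal, framework-free sketch (the names below are illustrative stand-ins, not NetBox or Django objects), the decision now works roughly like this, with RESTRICT joining CASCADE as a case the handler leaves alone:

```python
# Illustrative sketch only: plain strings stand in for Django's on_delete handlers.
CASCADE, RESTRICT, SET_NULL = 'CASCADE', 'RESTRICT', 'SET_NULL'

def action_for_relation(kind: str, null: bool, on_delete: str) -> str:
    """Mimic the branching above for a related object of a deleted instance."""
    if kind == 'many_to_many':
        return 'remove the link to the deleted object'
    if kind == 'many_to_one' and null and on_delete not in (CASCADE, RESTRICT):
        return 'clear the reference and save a change record'
    return 'leave it to the ORM'

# A nullable relation declared with RESTRICT is no longer nulled out here (#20948):
assert action_for_relation('many_to_one', True, SET_NULL) == 'clear the reference and save a change record'
assert action_for_relation('many_to_one', True, RESTRICT) == 'leave it to the ORM'
```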
@@ -20,4 +20,4 @@ class ManufacturerSerializer(NetBoxModelSerializer):
    'id', 'url', 'display_url', 'display', 'name', 'slug', 'description', 'tags', 'custom_fields',
    'created', 'last_updated', 'devicetype_count', 'inventoryitem_count', 'platform_count',
]
-brief_fields = ('id', 'url', 'display', 'name', 'slug', 'description', 'devicetype_count')
+brief_fields = ('id', 'url', 'display', 'name', 'slug', 'description')
@@ -875,7 +875,7 @@ class FrontPortTemplateFilterSet(ChangeLoggedModelFilterSet, ModularDeviceTypeCo
    null_value=None
)
rear_port_id = django_filters.ModelMultipleChoiceFilter(
-    queryset=RearPort.objects.all()
+    queryset=RearPortTemplate.objects.all()
)

class Meta:
@@ -1222,6 +1222,8 @@ class ModuleBay(ModularComponentModel, TrackingModelMixin, MPTTModel):
def save(self, *args, **kwargs):
    if self.module:
        self.parent = self.module.module_bay
+    else:
+        self.parent = None
    super().save(*args, **kwargs)
@@ -957,6 +957,11 @@ class Device(
if cf_defaults := CustomField.objects.get_defaults_for_model(model):
    for component in components:
        component.custom_field_data = cf_defaults
+# Set denormalized references
+for component in components:
+    component._site = self.site
+    component._location = self.location
+    component._rack = self.rack
components = model.objects.bulk_create(components)
# Prefetch related objects to minimize queries needed during post_save
prefetch_fields = get_prefetchable_fields(model)
@@ -315,6 +315,12 @@ class Module(PrimaryModel, ConfigContextModel):
for component in create_instances:
    component.custom_field_data = cf_defaults

+# Set denormalized references
+for component in create_instances:
+    component._site = self.device.site
+    component._location = self.device.location
+    component._rack = self.device.rack

if component_model is not ModuleBay:
    component_model.objects.bulk_create(create_instances)
    # Emit the post_save signal for each newly created object
@@ -1,13 +1,15 @@
import logging

-from django.db.models.signals import post_save, post_delete
+from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver

from dcim.choices import CableEndChoices, LinkStatusChoices
-from virtualization.models import VMInterface
+from ipam.models import Prefix
+from virtualization.models import Cluster, VMInterface
+from wireless.models import WirelessLAN
from .models import (
    Cable, CablePath, CableTermination, ConsolePort, ConsoleServerPort, Device, DeviceBay, FrontPort, Interface,
-    InventoryItem, ModuleBay, PathEndpoint, PowerOutlet, PowerPanel, PowerPort, Rack, RearPort, Location,
+    InventoryItem, Location, ModuleBay, PathEndpoint, PowerOutlet, PowerPanel, PowerPort, Rack, RearPort, Site,
    VirtualChassis,
)
from .models.cables import trace_paths

@@ -44,6 +46,9 @@ def handle_location_site_change(instance, created, **kwargs):
Device.objects.filter(location__in=locations).update(site=instance.site)
PowerPanel.objects.filter(location__in=locations).update(site=instance.site)
CableTermination.objects.filter(_location__in=locations).update(_site=instance.site)
+# Update component models for devices in these locations
+for model in COMPONENT_MODELS:
+    model.objects.filter(device__location__in=locations).update(_site=instance.site)


@receiver(post_save, sender=Rack)

@@ -53,6 +58,12 @@ def handle_rack_site_change(instance, created, **kwargs):
"""
if not created:
    Device.objects.filter(rack=instance).update(site=instance.site, location=instance.location)
+    # Update component models for devices in this rack
+    for model in COMPONENT_MODELS:
+        model.objects.filter(device__rack=instance).update(
+            _site=instance.site,
+            _location=instance.location,
+        )


@receiver(post_save, sender=Device)

@@ -171,3 +182,40 @@ def update_mac_address_interface(instance, created, raw, **kwargs):
if created and not raw and instance.primary_mac_address:
    instance.primary_mac_address.assigned_object = instance
    instance.primary_mac_address.save()


+@receiver(post_save, sender=Location)
+@receiver(post_save, sender=Site)
+def sync_cached_scope_fields(instance, created, **kwargs):
+    """
+    Rebuild cached scope fields for all CachedScopeMixin-based models
+    affected by a change in a Region, SiteGroup, Site, or Location.
+
+    This method is safe to run for objects created in the past and does
+    not rely on incremental updates. Cached fields are recomputed from
+    authoritative relationships.
+    """
+    if created:
+        return
+
+    if isinstance(instance, Location):
+        filters = {'_location': instance}
+    elif isinstance(instance, Site):
+        filters = {'_site': instance}
+    else:
+        return
+
+    # These models are explicitly listed because they all subclass CachedScopeMixin
+    # and therefore require their cached scope fields to be recomputed.
+    for model in (Prefix, Cluster, WirelessLAN):
+        qs = model.objects.filter(**filters)
+
+        for obj in qs.only('id'):
+            # Recompute cache using the same logic as save()
+            obj.cache_related_objects()
+            obj.save(update_fields=[
+                '_location',
+                '_site',
+                '_site_group',
+                '_region',
+            ])
@@ -531,7 +531,7 @@ class RackReservationTest(APIViewTestCases.APIViewTestCase):

class ManufacturerTest(APIViewTestCases.APIViewTestCase):
    model = Manufacturer
-    brief_fields = ['description', 'devicetype_count', 'display', 'id', 'name', 'slug', 'url']
+    brief_fields = ['description', 'display', 'id', 'name', 'slug', 'url']
    create_data = [
        {
            'name': 'Manufacturer 4',
@@ -841,6 +841,32 @@ class ModuleBayTestCase(TestCase):
nested_bay = module.modulebays.get(name='SFP A-21')
self.assertEqual(nested_bay.label, 'A-21')

+@tag('regression') # #20912
+def test_module_bay_parent_cleared_when_module_removed(self):
+    """Test that the parent field is properly cleared when a module bay's module assignment is removed"""
+    device = Device.objects.first()
+    manufacturer = Manufacturer.objects.first()
+    module_type = ModuleType.objects.create(manufacturer=manufacturer, model='Test Module Type')
+    bay1 = ModuleBay.objects.create(device=device, name='Test Bay 1')
+    bay2 = ModuleBay.objects.create(device=device, name='Test Bay 2')
+
+    # Install a module in bay1
+    module1 = Module.objects.create(device=device, module_bay=bay1, module_type=module_type)
+
+    # Assign bay2 to module1 and verify parent is now set to bay1 (module1's bay)
+    bay2.module = module1
+    bay2.save()
+    bay2.refresh_from_db()
+    self.assertEqual(bay2.parent, bay1)
+    self.assertEqual(bay2.module, module1)
+
+    # Clear the module assignment (return bay2 to device level) and verify parent is cleared
+    bay2.module = None
+    bay2.save()
+    bay2.refresh_from_db()
+    self.assertIsNone(bay2.parent)
+    self.assertIsNone(bay2.module)


class CableTestCase(TestCase):
@@ -2322,6 +2322,32 @@ class DeviceTestCase(ViewTestCases.PrimaryObjectViewTestCase):
url = reverse('dcim:device_inventory', kwargs={'pk': device.pk})
self.assertHttpStatus(self.client.get(url), 200)

+@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
+def test_bulk_import_duplicate_ids_error_message(self):
+    device = Device.objects.first()
+    csv_data = (
+        "id,role",
+        f"{device.pk},Device Role 1",
+        f"{device.pk},Device Role 2",
+    )
+
+    self.add_permissions('dcim.add_device', 'dcim.change_device')
+    response = self.client.post(
+        self._get_url('bulk_import'),
+        {
+            'data': '\n'.join(csv_data),
+            'format': ImportFormatChoices.CSV,
+            'csv_delimiter': CSVDelimiterChoices.AUTO,
+        },
+        follow=True
+    )
+
+    self.assertEqual(response.status_code, 200)
+    self.assertIn(
+        f'Duplicate objects found: Device with ID(s) {device.pk} appears multiple times',
+        response.content.decode('utf-8')
+    )


class ModuleTestCase(
    # Module does not support bulk renaming (no name field) or
@@ -2454,11 +2454,12 @@ class DeviceBulkImportView(generic.BulkImportView):
model_form = forms.DeviceImportForm

def save_object(self, object_form, request):
+    parent_bay = getattr(object_form.instance, 'parent_bay', None)
    obj = object_form.save()

    # For child devices, save the reverse relation to the parent device bay
-    if getattr(obj, 'parent_bay', None):
-        device_bay = obj.parent_bay
+    if parent_bay:
+        device_bay = parent_bay
        device_bay.installed_device = obj
        device_bay.save()
@@ -119,7 +119,9 @@ def process_event_rules(event_rules, object_type, event_type, data, username=Non
if snapshots:
    params["snapshots"] = snapshots
if request:
-    params["request"] = copy_safe_request(request)
+    # Exclude FILES - webhooks don't need uploaded files,
+    # which can cause pickle errors with Pillow.
+    params["request"] = copy_safe_request(request, include_files=False)

# Enqueue the task
rq_queue.enqueue(
@@ -449,7 +449,14 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
        return model.objects.filter(pk__in=value)
    return value

-def to_form_field(self, set_initial=True, enforce_required=True, enforce_visibility=True, for_csv_import=False):
+def to_form_field(
+    self,
+    set_initial=True,
+    enforce_required=True,
+    enforce_visibility=True,
+    for_csv_import=False,
+    for_filterset_form=False,
+):
    """
    Return a form field suitable for setting a CustomField's value for an object.

@@ -457,6 +464,7 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
    enforce_required: Honor the value of CustomField.required. Set to False for filtering/bulk editing.
    enforce_visibility: Honor the value of CustomField.ui_visible. Set to False for filtering.
    for_csv_import: Return a form field suitable for bulk import of objects in CSV format.
+    for_filterset_form: Return a form field suitable for use in a FilterSet form.
    """
    initial = self.default if set_initial else None
    required = self.required if enforce_required else False

@@ -519,7 +527,7 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
        field_class = CSVMultipleChoiceField
    field = field_class(choices=choices, required=required, initial=initial)
else:
-    if self.type == CustomFieldTypeChoices.TYPE_SELECT:
+    if self.type == CustomFieldTypeChoices.TYPE_SELECT and not for_filterset_form:
        field_class = DynamicChoiceField
        widget_class = APISelect
    else:
@@ -16,6 +16,7 @@ __all__ = (
# BGP ASN bounds
BGP_ASN_MIN = 1
BGP_ASN_MAX = 2**32 - 1
+BGP_ASN_ASDOT_BASE = 2**16


class BaseIPField(models.Field):

@@ -126,3 +127,16 @@ class ASNField(models.BigIntegerField):
    }
    defaults.update(**kwargs)
    return super().formfield(**defaults)

+@staticmethod
+def to_asdot(value) -> str:
+    """
+    Return ASDOT notation for AS numbers greater than 16 bits.
+    """
+    if value is None:
+        return ''
+
+    if value >= BGP_ASN_ASDOT_BASE:
+        hi, lo = divmod(value, BGP_ASN_ASDOT_BASE)
+        return f'{hi}.{lo}'
+    return str(value)
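ASDOT notation writes a 32-bit AS number as two 16-bit halves separated by a dot, which is what the new `ASNField.to_asdot()` helper above produces. Below is a minimal standalone sketch of the same conversion; the specific ASN values are illustrative examples, not taken from the diff.

```python
# Standalone sketch of the conversion performed by ASNField.to_asdot() above.
ASDOT_BASE = 2**16  # mirrors BGP_ASN_ASDOT_BASE

def to_asdot(value: int) -> str:
    """Render an AS number in ASDOT notation once it no longer fits in 16 bits."""
    if value >= ASDOT_BASE:
        hi, lo = divmod(value, ASDOT_BASE)
        return f'{hi}.{lo}'
    return str(value)

assert to_asdot(65000) == '65000'              # 16-bit ASNs are unchanged
assert to_asdot(65536) == '1.0'                # the first 32-bit ASN
assert to_asdot(4200000000) == '64086.59904'   # a 32-bit private-use ASN
```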
@@ -230,10 +230,6 @@ class PrefixImportForm(ScopedImportForm, NetBoxModelImportForm):
    query |= Q(**{
        f"site__{self.fields['vlan_site'].to_field_name}": vlan_site
    })
-    # Don't Forget to include VLANs without a site in the filter
-    query |= Q(**{
-        f"site__{self.fields['vlan_site'].to_field_name}__isnull": True
-    })

if vlan_group:
    query &= Q(**{
@@ -55,13 +55,6 @@ class ASNRange(OrganizationalModel):
def __str__(self):
    return f'{self.name} ({self.range_as_string()})'

-@property
-def range(self):
-    return range(self.start, self.end + 1)

-def range_as_string(self):
-    return f'{self.start}-{self.end}'

def clean(self):
    super().clean()

@@ -72,7 +65,45 @@ class ASNRange(OrganizationalModel):
        )
    )

+@property
+def range(self):
+    """
+    Return a range of integers representing the ASN range.
+    """
+    return range(self.start, self.end + 1)

+@property
+def start_asdot(self):
+    """
+    Return ASDOT notation for AS numbers greater than 16 bits.
+    """
+    return ASNField.to_asdot(self.start)

+@property
+def end_asdot(self):
+    """
+    Return ASDOT notation for AS numbers greater than 16 bits.
+    """
+    return ASNField.to_asdot(self.end)

+def range_as_string(self):
+    """
+    Return a string representation of the ASN range.
+    """
+    return f'{self.start}-{self.end}'

+def range_as_string_with_asdot(self):
+    """
+    Return a string representation of the ASN range, including ASDOT notation.
+    """
+    if self.end >= 65536:
+        return f'{self.range_as_string()} ({self.start_asdot}-{self.end_asdot})'
+    return self.range_as_string()

def get_child_asns(self):
    """
    Return all child ASNs (ASNs within the range).
    """
    return ASN.objects.filter(
        asn__gte=self.start,
        asn__lte=self.end
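For an ASN range whose upper bound crosses the 16-bit boundary, `range_as_string_with_asdot()` above appends the ASDOT form of both endpoints. A small worked example with made-up endpoints (not values from the NetBox test data), mirroring the arithmetic of the properties above:

```python
# Illustrative endpoints only; the math matches start_asdot / end_asdot above.
start, end = 4200000000, 4200000099

def asdot(value: int) -> str:
    hi, lo = divmod(value, 2**16)
    return f'{hi}.{lo}' if value >= 2**16 else str(value)

plain = f'{start}-{end}'                               # range_as_string()
with_asdot = f'{plain} ({asdot(start)}-{asdot(end)})'  # range_as_string_with_asdot()
print(with_asdot)  # 4200000000-4200000099 (64086.59904-64086.60003)
```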
@@ -131,20 +162,20 @@ class ASN(ContactsMixin, PrimaryModel):
    """
    Return ASDOT notation for AS numbers greater than 16 bits.
    """
-    if self.asn > 65535:
-        return f'{self.asn // 65536}.{self.asn % 65536}'
-    return self.asn
+    return ASNField.to_asdot(self.asn)

@property
def asn_with_asdot(self):
    """
    Return both plain and ASDOT notation, where applicable.
    """
-    if self.asn > 65535:
-        return f'{self.asn} ({self.asn // 65536}.{self.asn % 65536})'
-    else:
-        return self.asn
+    if self.asn >= 65536:
+        return f'{self.asn} ({self.asn_asdot})'
+    return str(self.asn)

@property
def prefixed_name(self):
    """
    Return the ASN with ASDOT notation prefixed with "AS".
    """
    return f'AS{self.asn_with_asdot}'
@@ -910,13 +910,13 @@ class IPAddress(ContactsMixin, PrimaryModel):
})

# Disallow the creation of IPAddresses within an IPRange with mark_populated=True
-parent_range = IPRange.objects.filter(
+parent_range_qs = IPRange.objects.filter(
    start_address__lte=self.address,
    end_address__gte=self.address,
    vrf=self.vrf,
    mark_populated=True
-).first()
-if parent_range:
+)
+if not self.pk and (parent_range := parent_range_qs.first()):
    raise ValidationError({
        'address': _(
            "Cannot create IP address {ip} inside range {range}."
@@ -20,6 +20,16 @@ class ASNRangeTable(TenancyColumnsMixin, NetBoxTable):
    verbose_name=_('RIR'),
    linkify=True
)
+start_asdot = tables.Column(
+    accessor=tables.A('start_asdot'),
+    order_by=tables.A('start'),
+    verbose_name=_('Start (ASDOT)')
+)
+end_asdot = tables.Column(
+    accessor=tables.A('end_asdot'),
+    order_by=tables.A('end'),
+    verbose_name=_('End (ASDOT)')
+)
tags = columns.TagColumn(
    url_name='ipam:asnrange_list'
)

@@ -30,8 +40,8 @@ class ASNRangeTable(TenancyColumnsMixin, NetBoxTable):
class Meta(NetBoxTable.Meta):
    model = ASNRange
    fields = (
-        'pk', 'name', 'slug', 'rir', 'start', 'end', 'asn_count', 'tenant', 'tenant_group', 'description', 'tags',
-        'created', 'last_updated', 'actions',
+        'pk', 'name', 'slug', 'rir', 'start', 'start_asdot', 'end', 'end_asdot', 'asn_count', 'tenant',
+        'tenant_group', 'description', 'tags', 'created', 'last_updated', 'actions',
    )
    default_columns = ('pk', 'name', 'rir', 'start', 'end', 'tenant', 'asn_count', 'description')
@@ -1071,14 +1071,17 @@ class VLANGroupTest(APIViewTestCases.APIViewTestCase):
{
    'name': 'VLAN Group 4',
    'slug': 'vlan-group-4',
+    'vid_ranges': [[1, 4094]]
},
{
    'name': 'VLAN Group 5',
    'slug': 'vlan-group-5',
+    'vid_ranges': [[1, 4094]]
},
{
    'name': 'VLAN Group 6',
    'slug': 'vlan-group-6',
+    'vid_ranges': [[1, 4094]]
},
]
bulk_update_data = {
@@ -564,6 +564,82 @@ vlan: 102
self.assertEqual(prefix.vlan.vid, 102)
self.assertEqual(prefix.scope, site)

+@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
+def test_prefix_import_with_vlan_site_multiple_vlans_same_vid(self):
+    """
+    Test import when multiple VLANs exist with the same vid but different sites.
+    Ref: #20560
+    """
+    site1 = Site.objects.get(name='Site 1')
+    site2 = Site.objects.get(name='Site 2')
+
+    # Create VLANs with the same vid but different sites
+    vlan1 = VLAN.objects.create(vid=1, name='VLAN1-Site1', site=site1)
+    VLAN.objects.create(vid=1, name='VLAN1-Site2', site=site2) # Create ambiguity
+
+    # Import prefix with vlan_site specified
+    IMPORT_DATA = f"""
+prefix: 10.11.0.0/22
+status: active
+scope_type: dcim.site
+scope_id: {site1.pk}
+vlan_site: {site1.name}
+vlan: 1
+description: LOC02-MGMT
+"""
+
+    # Add all required permissions to the test user
+    self.add_permissions('ipam.view_prefix', 'ipam.add_prefix')
+
+    form_data = {
+        'data': IMPORT_DATA,
+        'format': 'yaml'
+    }
+    response = self.client.post(reverse('ipam:prefix_bulk_import'), data=form_data, follow=True)
+    self.assertHttpStatus(response, 200)
+
+    # Verify the prefix was created with the correct VLAN
+    prefix = Prefix.objects.get(prefix='10.11.0.0/22')
+    self.assertEqual(prefix.vlan, vlan1)
+
+@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
+def test_prefix_import_with_vlan_site_and_global_vlan(self):
+    """
+    Test import when a global VLAN (no site) and site-specific VLAN exist with same vid.
+    When vlan_site is specified, should prefer the site-specific VLAN.
+    Ref: #20560
+    """
+    site1 = Site.objects.get(name='Site 1')
+
+    # Create a global VLAN (no site) and a site-specific VLAN with the same vid
+    VLAN.objects.create(vid=10, name='VLAN10-Global', site=None) # Create ambiguity
+    vlan_site = VLAN.objects.create(vid=10, name='VLAN10-Site1', site=site1)
+
+    # Import prefix with vlan_site specified
+    IMPORT_DATA = f"""
+prefix: 10.12.0.0/22
+status: active
+scope_type: dcim.site
+scope_id: {site1.pk}
+vlan_site: {site1.name}
+vlan: 10
+description: Test Site-Specific VLAN
+"""
+
+    # Add all required permissions to the test user
+    self.add_permissions('ipam.view_prefix', 'ipam.add_prefix')
+
+    form_data = {
+        'data': IMPORT_DATA,
+        'format': 'yaml'
+    }
+    response = self.client.post(reverse('ipam:prefix_bulk_import'), data=form_data, follow=True)
+    self.assertHttpStatus(response, 200)
+
+    # Verify the prefix was created with the site-specific VLAN (not the global one)
+    prefix = Prefix.objects.get(prefix='10.12.0.0/22')
+    self.assertEqual(prefix.vlan, vlan_site)


class IPRangeTestCase(ViewTestCases.PrimaryObjectViewTestCase):
    model = IPRange
@@ -80,22 +80,21 @@ class Config:
try:
    # Enforce the creation date as the ordering parameter
    revision = ConfigRevision.objects.get(active=True)
-    logger.debug(f"Loaded active configuration revision #{revision.pk}")
+    logger.debug(f"Loaded active configuration revision (#{revision.pk})")
except (ConfigRevision.DoesNotExist, ConfigRevision.MultipleObjectsReturned):
-    logger.debug("No active configuration revision found - falling back to most recent")
    revision = ConfigRevision.objects.order_by('-created').first()
    if revision is None:
-        logger.debug("No previous configuration found in database; proceeding with default values")
+        logger.debug("No configuration found in database; proceeding with default values")
        return
-    logger.debug(f"Using fallback configuration revision #{revision.pk}")
+    logger.debug(f"No active configuration revision found; falling back to most recent (#{revision.pk})")
except DatabaseError:
    # The database may not be available yet (e.g. when running a management command)
    logger.warning("Skipping config initialization (database unavailable)")
    return

-revision.activate()
-logger.debug("Filled cache with data from latest ConfigRevision")
+revision.activate(update_db=False)
+self._populate_from_cache()
+logger.debug("Filled cache with data from latest ConfigRevision")


class ConfigItem:
@@ -205,4 +205,6 @@ class NetBoxModelFilterSetForm(CustomFieldsMixin, SavedFiltersMixin, forms.Form)
)

def _get_form_field(self, customfield):
-    return customfield.to_form_field(set_initial=False, enforce_required=False, enforce_visibility=False)
+    return customfield.to_form_field(
+        set_initial=False, enforce_required=False, enforce_visibility=False, for_filterset_form=True
+    )
@@ -827,6 +827,7 @@ LANGUAGES = (
('fr', _('French')),
('it', _('Italian')),
('ja', _('Japanese')),
+('lv', _('Latvian')),
('nl', _('Dutch')),
('pl', _('Polish')),
('pt', _('Portuguese')),
@@ -1,5 +1,6 @@
import logging
import re
+from collections import Counter
from copy import deepcopy

from django.contrib import messages

@@ -33,6 +34,7 @@ from utilities.jobs import is_background_request, process_request_as_job
from utilities.permissions import get_permission_for_model
from utilities.query import reapply_model_ordering
from utilities.request import safe_for_redirect
+from utilities.string import title
from utilities.tables import get_table_configs
from utilities.views import GetReturnURLMixin, get_action_url
from .base import BaseMultiObjectView

@@ -443,6 +445,18 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):

# Prefetch objects to be updated, if any
prefetch_ids = [int(record['id']) for record in records if record.get('id')]

+# check for duplicate IDs
+duplicate_pks = [pk for pk, count in Counter(prefetch_ids).items() if count > 1]
+if duplicate_pks:
+    error_msg = _(
+        "Duplicate objects found: {model} with ID(s) {ids} appears multiple times"
+    ).format(
+        model=title(self.queryset.model._meta.verbose_name),
+        ids=', '.join(str(pk) for pk in sorted(duplicate_pks))
+    )
+    raise ValidationError(error_msg)

prefetched_objects = {
    obj.pk: obj
    for obj in self.queryset.model.objects.filter(id__in=prefetch_ids)
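The duplicate-ID check added above boils down to counting the `id` column of the parsed import records. A toy, self-contained illustration follows; the record values are invented, and only the Counter logic and the message format come from the diff.

```python
# Toy input standing in for parsed CSV rows; NetBox builds prefetch_ids the same way.
from collections import Counter

records = [{'id': '42'}, {'id': '17'}, {'id': '42'}, {'name': 'no id here'}]
prefetch_ids = [int(record['id']) for record in records if record.get('id')]

duplicate_pks = [pk for pk, count in Counter(prefetch_ids).items() if count > 1]
if duplicate_pks:
    ids = ', '.join(str(pk) for pk in sorted(duplicate_pks))
    # Mirrors the error raised above, shown here for a Device import:
    print(f"Duplicate objects found: Device with ID(s) {ids} appears multiple times")
```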
netbox/project-static/dist/netbox.css (vendored): file diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js (vendored): file diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js.map (vendored): file diff suppressed because one or more lines are too long
@@ -27,10 +27,10 @@
"bootstrap": "5.3.8",
"clipboard": "2.0.11",
"flatpickr": "4.6.13",
-"gridstack": "12.3.3",
+"gridstack": "12.4.1",
"htmx.org": "2.0.8",
"query-string": "9.3.1",
-"sass": "1.95.0",
+"sass": "1.97.1",
"tom-select": "2.4.3",
"typeface-inter": "3.18.1",
"typeface-roboto-mono": "1.1.13"
@@ -36,7 +36,6 @@ form.object-edit {
// Make optgroup labels sticky when scrolling through select elements
select[multiple] {
    optgroup {
        position: sticky;
        top: 0;
        background-color: var(--bs-body-bg);
        font-style: normal;
@@ -2178,10 +2178,10 @@ graphql@16.10.0:
  resolved "https://registry.yarnpkg.com/graphql/-/graphql-16.10.0.tgz#24c01ae0af6b11ea87bf55694429198aaa8e220c"
  integrity sha512-AjqGKbDGUFRKIRCP9tCKiIGHyriz2oHEbPIbEtcSLSs4YjReZOIPQQWek4+6hjw62H9QShXHyaGivGiYVLeYFQ==

-gridstack@12.3.3:
-  version "12.3.3"
-  resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.3.3.tgz#0c4fc3cdf6e1c16e6095bc79ff7240a590d2c200"
-  integrity sha512-Bboi4gj7HXGnx1VFXQNde4Nwi5srdUSuCCnOSszKhFjBs8EtMEWhsKX02BjIKkErq/FjQUkNUbXUYeQaVMQ0jQ==
+gridstack@12.4.1:
+  version "12.4.1"
+  resolved "https://registry.yarnpkg.com/gridstack/-/gridstack-12.4.1.tgz#4a44511e5da33016e731f00bee279bed550d4ab9"
+  integrity sha512-dYBNVEDw2zwnz0bCDouHk8rMclrMoMn4r6rtNyyWSeYsV3RF8QV2KFRTj4c86T2FsZPr3iQv+/LD/ae29FcpHQ==

has-bigints@^1.0.1, has-bigints@^1.0.2:
  version "1.0.2"

@@ -3190,10 +3190,10 @@ safe-regex-test@^1.1.0:
    es-errors "^1.3.0"
    is-regex "^1.2.1"

-sass@1.95.0:
-  version "1.95.0"
-  resolved "https://registry.yarnpkg.com/sass/-/sass-1.95.0.tgz#3a3a4d4d954313ab50eaf16f6e2548a2f6ec0811"
-  integrity sha512-9QMjhLq+UkOg/4bb8Lt8A+hJZvY3t+9xeZMKSBtBEgxrXA3ed5Ts4NDreUkYgJP1BTmrscQE/xYhf7iShow6lw==
+sass@1.97.1:
+  version "1.97.1"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.97.1.tgz#f36e492baf8ccdd08d591b58d3d8b53ea35ab905"
+  integrity sha512-uf6HoO8fy6ClsrShvMgaKUn14f2EHQLQRtpsZZLeU/Mv0Q1K5P0+x2uvH6Cub39TVVbWNSrraUhDAoFph6vh0A==
  dependencies:
    chokidar "^4.0.0"
    immutable "^5.0.2"
@@ -1,3 +1,3 @@
-version: "4.4.8"
+version: "4.4.9"
edition: "Community"
-published: "2025-12-09"
+published: "2025-12-23"
@@ -23,7 +23,7 @@
</tr>
<tr>
  <th scope="row">{% trans "Range" %}</th>
-  <td>{{ object.range_as_string }}</td>
+  <td>{{ object.range_as_string_with_asdot }}</td>
</tr>
<tr>
  <th scope="row">{% trans "Tenant" %}</th>
netbox/translations/lv/LC_MESSAGES/django.mo (new binary file, not shown)
netbox/translations/lv/LC_MESSAGES/django.po (new file, 17696 lines; diff suppressed because it is too large)
Diffs for several other files in this comparison are suppressed because they are too large or are binary files.
@@ -50,21 +50,21 @@ class RestrictedQuerySet(QuerySet):

# Bypass restriction for superusers and exempt views
if user and user.is_superuser or permission_is_exempt(permission_required):
-    qs = self
+    return self

# User is anonymous or has not been granted the requisite permission
-elif user is None or not user.is_authenticated or permission_required not in user.get_all_permissions():
-    qs = self.none()
+if user is None or not user.is_authenticated or permission_required not in user.get_all_permissions():
+    return self.none()

# Filter the queryset to include only objects with allowed attributes
-else:
-    tokens = {
-        CONSTRAINT_TOKEN_USER: user,
-    }
-    attrs = qs_filter_from_constraints(user._object_perm_cache[permission_required], tokens)
+constraints = user._object_perm_cache[permission_required]
+tokens = {
+    CONSTRAINT_TOKEN_USER: user,
+}
+if attrs := qs_filter_from_constraints(constraints, tokens):
    # #8715: Avoid duplicates when JOIN on many-to-many fields without using DISTINCT.
    # DISTINCT acts globally on the entire request, which may not be desirable.
    allowed_objects = self.model.objects.filter(attrs)
-    qs = self.filter(pk__in=allowed_objects)
+    return self.filter(pk__in=allowed_objects)

-return qs
+return self
@@ -35,27 +35,34 @@ class NetBoxFakeRequest:
# Utility functions
#

-def copy_safe_request(request):
+def copy_safe_request(request, include_files=True):
    """
    Copy selected attributes from a request object into a new fake request object. This is needed in places where
    thread safe pickling of the useful request data is needed.
+
+    Args:
+        request: The original request object
+        include_files: Whether to include request.FILES.
    """
    meta = {
        k: request.META[k]
        for k in HTTP_REQUEST_META_SAFE_COPY
        if k in request.META and isinstance(request.META[k], str)
    }
-    return NetBoxFakeRequest({
+    data = {
        'META': meta,
        'COOKIES': request.COOKIES,
        'POST': request.POST,
        'GET': request.GET,
-        'FILES': request.FILES,
        'user': request.user,
        'method': request.method,
        'path': request.path,
        'id': getattr(request, 'id', None), # UUID assigned by middleware
-    })
+    }
+    if include_files:
+        data['FILES'] = request.FILES
+
+    return NetBoxFakeRequest(data)


def get_client_ip(request, additional_headers=()):
@@ -141,8 +141,8 @@ class ModelTestCase(TestCase):
elif value and type(field) is GenericForeignKey:
    model_dict[key] = value.pk

# Handle API output
elif api:

    # Replace ContentType numeric IDs with <app_label>.<model>
    if type(getattr(instance, key)) in (ContentType, ObjectType):
        object_type = ObjectType.objects.get(pk=value)

@@ -152,9 +152,13 @@ class ModelTestCase(TestCase):
elif type(value) is IPNetwork:
    model_dict[key] = str(value)

else:
    field = instance._meta.get_field(key)
+# Normalize arrays of numeric ranges (e.g. VLAN IDs or port ranges).
+# DB uses canonical half-open [lo, hi) via NumericRange; API uses inclusive [lo, hi].
+# Convert to inclusive pairs for stable API comparisons.
+elif type(field) is ArrayField and issubclass(type(field.base_field), RangeField):
+    model_dict[key] = [[r.lower, r.upper - 1] for r in value]

else:
    # Convert ArrayFields to CSV strings
    if type(field) is ArrayField:
        if getattr(field.base_field, 'choices', None):
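The comment added above points at the mismatch this test helper bridges: PostgreSQL numeric ranges are stored half-open, `[lower, upper)`, while the REST API reports them inclusive, `[lower, upper]`. A tiny self-contained illustration follows, using a stand-in class rather than psycopg's NumericRange.

```python
# FakeNumericRange is a stand-in for psycopg's NumericRange, used here only to
# show the half-open to inclusive conversion performed by the helper above.
from dataclasses import dataclass

@dataclass
class FakeNumericRange:
    lower: int
    upper: int  # exclusive upper bound, as stored in the database

db_value = [FakeNumericRange(1, 4095)]                  # half-open [1, 4095)
api_value = [[r.lower, r.upper - 1] for r in db_value]  # inclusive [1, 4094]
assert api_value == [[1, 4094]]
```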
@@ -3,7 +3,7 @@

[project]
name = "netbox"
-version = "4.4.7"
+version = "4.4.9"
requires-python = ">=3.10"
description = "The premier source of truth powering network automation."
readme = "README.md"
@@ -23,7 +23,7 @@ gunicorn==23.0.0
Jinja2==3.1.6
jsonschema==4.25.1
Markdown==3.10
-mkdocs-material==9.7.0
+mkdocs-material==9.7.1
mkdocstrings==1.0.0
mkdocstrings-python==2.0.1
netaddr==1.3.0

@@ -33,11 +33,11 @@ psycopg[c,pool]==3.3.2
PyYAML==6.0.3
requests==2.32.5
rq==2.6.1
-social-auth-app-django==5.6.0
-social-auth-core==4.8.1
+social-auth-app-django==5.7.0
+social-auth-core==4.8.3
sorl-thumbnail==12.11.0
-strawberry-graphql==0.287.2
+strawberry-graphql==0.287.3
strawberry-graphql-django==0.70.1
svgwrite==1.4.3
tablib==3.9.0
-tzdata==2025.2
+tzdata==2025.3