Compare commits

..

1 Commit

Author SHA1 Message Date
Martin Hauser
a320df9926 fix(misc): Handle cache unpickling failure in release check
Guard `cache.get('latest_release')` during release checks to prevent a
500 when stale cached data can't be unpickled after dependency upgrades.
On failure, log at debug level and delete the affected cache key.

Fixes #21254
2026-01-23 18:02:02 +01:00
9 changed files with 78 additions and 139 deletions
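
The change guards the release-check cache read in HomeView (shown in the fourth file below). A minimal standalone sketch of the same pattern, for reference; the helper name get_latest_release and the module-level logger are illustrative, not part of the commit:

    import logging

    from django.core.cache import cache

    logger = logging.getLogger('netbox.views.misc')  # illustrative logger name


    def get_latest_release():
        # cache.get() can raise if a stale value can't be unpickled after dependency upgrades
        try:
            return cache.get('latest_release')
        except Exception:
            # Log quietly and drop the key so the next release check repopulates it
            logger.debug("Failed to read 'latest_release' from cache; deleting key", exc_info=True)
            cache.delete('latest_release')
            return None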

View File

@@ -20,9 +20,7 @@ from utilities.forms.fields import (
     DynamicModelChoiceField, DynamicModelMultipleChoiceField, JSONField, NumericArrayField, SlugField,
 )
 from utilities.forms.rendering import FieldSet, InlineFields, TabbedGroups
-from utilities.forms.widgets import (
-    APISelect, ClearableFileInput, ClearableSelect, HTMXSelect, NumberWithOptions, SelectWithPK,
-)
+from utilities.forms.widgets import APISelect, ClearableFileInput, HTMXSelect, NumberWithOptions, SelectWithPK
 from utilities.jsonschema import JSONSchemaProperty
 from virtualization.models import Cluster, VMInterface
 from wireless.models import WirelessLAN, WirelessLANGroup
@@ -594,14 +592,6 @@ class DeviceForm(TenancyForm, PrimaryModelForm):
             },
         )
     )
-    face = forms.ChoiceField(
-        label=_('Face'),
-        choices=add_blank_choice(DeviceFaceChoices),
-        required=False,
-        widget=ClearableSelect(
-            requires_fields=['rack']
-        )
-    )
     device_type = DynamicModelChoiceField(
         label=_('Device type'),
         queryset=DeviceType.objects.all(),

View File

@@ -1,5 +1,5 @@
 import logging
-from collections import UserDict, defaultdict
+from collections import defaultdict

 from django.conf import settings
 from django.utils import timezone
@@ -12,6 +12,7 @@ from core.models import ObjectType
 from netbox.config import get_config
 from netbox.constants import RQ_QUEUE_DEFAULT
 from netbox.models.features import has_feature
+from users.models import User
 from utilities.api import get_serializer_for_model
 from utilities.request import copy_safe_request
 from utilities.rqworker import get_rq_retry
@@ -22,19 +23,6 @@ from .models import EventRule
 logger = logging.getLogger('netbox.events_processor')


-class EventContext(UserDict):
-    """
-    A custom dictionary that automatically serializes its associated object on demand.
-    """
-    def __getitem__(self, item):
-        if item == 'data' and 'data' not in self:
-            data = serialize_for_event(self['object'])
-            self.__setitem__('data', data)
-            return data
-        return super().__getitem__(item)
-
-
 def serialize_for_event(instance):
     """
     Return a serialized representation of the given instance suitable for use in a queued event.
@@ -78,42 +66,37 @@ def enqueue_event(queue, instance, request, event_type):
     assert instance.pk is not None
     key = f'{app_label}.{model_name}:{instance.pk}'
     if key in queue:
+        queue[key]['data'] = serialize_for_event(instance)
         queue[key]['snapshots']['postchange'] = get_snapshots(instance, event_type)['postchange']
         # If the object is being deleted, update any prior "update" event to "delete"
         if event_type == OBJECT_DELETED:
             queue[key]['event_type'] = event_type
     else:
-        queue[key] = EventContext(
-            object_type=ObjectType.objects.get_for_model(instance),
-            object_id=instance.pk,
-            object=instance,
-            event_type=event_type,
-            snapshots=get_snapshots(instance, event_type),
-            request=request,
-            user=request.user,
+        queue[key] = {
+            'object_type': ObjectType.objects.get_for_model(instance),
+            'object_id': instance.pk,
+            'event_type': event_type,
+            'data': serialize_for_event(instance),
+            'snapshots': get_snapshots(instance, event_type),
+            'request': request,
             # Legacy request attributes for backward compatibility
-            username=request.user.username,
-            request_id=request.id,
-        )
-
-    # Force serialization of objects prior to them actually being deleted
-    if event_type == OBJECT_DELETED:
-        queue[key]['data'] = serialize_for_event(instance)
+            'username': request.user.username,
+            'request_id': request.id,
+        }


-def process_event_rules(event_rules, object_type, event):
-    """
-    Process a list of EventRules against an event.
-    """
+def process_event_rules(event_rules, object_type, event_type, data, username=None, snapshots=None, request=None):
+    user = None  # To be resolved from the username if needed
+
     for event_rule in event_rules:

         # Evaluate event rule conditions (if any)
-        if not event_rule.eval_conditions(event['data']):
+        if not event_rule.eval_conditions(data):
             continue

         # Compile event data
         event_data = event_rule.action_data or {}
-        event_data.update(event['data'])
+        event_data.update(data)

         # Webhooks
         if event_rule.action_type == EventRuleActionChoices.WEBHOOK:
@@ -126,43 +109,50 @@ def process_event_rules(event_rules, object_type, event):
             params = {
                 "event_rule": event_rule,
                 "object_type": object_type,
-                "event_type": event['event_type'],
+                "event_type": event_type,
                 "data": event_data,
-                "snapshots": event['snapshots'],
+                "snapshots": snapshots,
                 "timestamp": timezone.now().isoformat(),
-                "username": event['username'],
+                "username": username,
                 "retry": get_rq_retry()
             }
-            if 'snapshots' in event:
-                params['snapshots'] = event['snapshots']
-            if 'request' in event:
+            if snapshots:
+                params["snapshots"] = snapshots
+            if request:
                 # Exclude FILES - webhooks don't need uploaded files,
                 # which can cause pickle errors with Pillow.
-                params['request'] = copy_safe_request(event['request'], include_files=False)
+                params["request"] = copy_safe_request(request, include_files=False)

             # Enqueue the task
-            rq_queue.enqueue('extras.webhooks.send_webhook', **params)
+            rq_queue.enqueue(
+                "extras.webhooks.send_webhook",
+                **params
+            )

         # Scripts
         elif event_rule.action_type == EventRuleActionChoices.SCRIPT:

             # Resolve the script from action parameters
             script = event_rule.action_object.python_class()

+            # Retrieve the User if not already resolved
+            if user is None:
+                user = User.objects.get(username=username)
+
             # Enqueue a Job to record the script's execution
             from extras.jobs import ScriptJob
             params = {
                 "instance": event_rule.action_object,
                 "name": script.name,
-                "user": event['user'],
+                "user": user,
                 "data": event_data
             }
-            if 'snapshots' in event:
-                params['snapshots'] = event['snapshots']
-            if 'request' in event:
-                params['request'] = copy_safe_request(event['request'])
+            if snapshots:
+                params["snapshots"] = snapshots
+            if request:
+                params["request"] = copy_safe_request(request)

-            # Enqueue the job
-            ScriptJob.enqueue(**params)
+            ScriptJob.enqueue(
+                **params
+            )

         # Notification groups
         elif event_rule.action_type == EventRuleActionChoices.NOTIFICATION:
@@ -171,7 +161,7 @@ def process_event_rules(event_rules, object_type, event):
                 object_type=object_type,
                 object_id=event_data['id'],
                 object_repr=event_data.get('display'),
-                event_type=event['event_type']
+                event_type=event_type
             )

         else:
@@ -183,8 +173,6 @@ def process_event_rules(event_rules, object_type, event):
 def process_event_queue(events):
     """
     Flush a list of object representation to RQ for EventRule processing.
-    This is the default processor listed in EVENTS_PIPELINE.
     """
     events_cache = defaultdict(dict)
@@ -204,7 +192,11 @@ def process_event_queue(events):
                 process_event_rules(
                     event_rules=event_rules,
                     object_type=object_type,
-                    event=event,
+                    event_type=event['event_type'],
+                    data=event['data'],
+                    username=event['username'],
+                    snapshots=event['snapshots'],
+                    request=event['request'],
                 )

View File

@@ -4,7 +4,7 @@ from django.dispatch import receiver
 from core.events import *
 from core.signals import job_end, job_start
-from extras.events import EventContext, process_event_rules
+from extras.events import process_event_rules
 from extras.models import EventRule, Notification, Subscription
 from netbox.config import get_config
 from netbox.models.features import has_feature
@@ -102,12 +102,14 @@ def process_job_start_event_rules(sender, **kwargs):
         enabled=True,
         object_types=sender.object_type
     )
-    event = EventContext(
-        event_type=JOB_STARTED,
-        data=sender.data,
-        user=sender.user,
-    )
-    process_event_rules(event_rules, sender.object_type, event)
+    username = sender.user.username if sender.user else None
+    process_event_rules(
+        event_rules=event_rules,
+        object_type=sender.object_type,
+        event_type=JOB_STARTED,
+        data=sender.data,
+        username=username
+    )


 @receiver(job_end)
@@ -120,12 +122,14 @@ def process_job_end_event_rules(sender, **kwargs):
         enabled=True,
         object_types=sender.object_type
     )
-    event = EventContext(
-        event_type=JOB_COMPLETED,
-        data=sender.data,
-        user=sender.user,
-    )
-    process_event_rules(event_rules, sender.object_type, event)
+    username = sender.user.username if sender.user else None
+    process_event_rules(
+        event_rules=event_rules,
+        object_type=sender.object_type,
+        event_type=JOB_COMPLETED,
+        data=sender.data,
+        username=username
+    )


 #

View File

@@ -1,5 +1,6 @@
 import re
 from collections import namedtuple
+import logging

 from django.conf import settings
 from django.contrib import messages
@@ -28,6 +29,8 @@ __all__ = (
     'SearchView',
 )


+logger = logging.getLogger(f'netbox.{__name__}')
+
 Link = namedtuple('Link', ('label', 'viewname', 'permission', 'count'))
@@ -50,7 +53,14 @@ class HomeView(ConditionalLoginRequiredMixin, View):
         # Check whether a new release is available. (Only for superusers.)
         new_release = None
         if request.user.is_superuser:
-            latest_release = cache.get('latest_release')
+            # cache.get() can raise if the cached value can't be unpickled after dependency upgrades
+            try:
+                latest_release = cache.get('latest_release')
+            except Exception:
+                logger.debug("Failed to read 'latest_release' from cache; deleting key", exc_info=True)
+                cache.delete('latest_release')
+                latest_release = None
             if latest_release:
                 release_version, release_url = latest_release
                 if release_version > version.parse(settings.RELEASE.version):

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,40 +0,0 @@
-import TomSelect from 'tom-select';
-
-import { getElements } from '../util';
-
-/**
- * Initialize clear-field dependencies.
- * When a required field is cleared, dependent fields with data-requires-fields attribute will also be cleared.
- */
-export function initClearField(): void {
-  // Find all fields with data-requires-fields attribute
-  for (const field of getElements<HTMLSelectElement>('[data-requires-fields]')) {
-    const requiredFieldsAttr = field.getAttribute('data-requires-fields');
-    if (!requiredFieldsAttr) continue;
-
-    // Parse the comma-separated list of required field names
-    const requiredFields = requiredFieldsAttr.split(',').map(name => name.trim());
-
-    // Set up listeners for each required field
-    for (const requiredFieldName of requiredFields) {
-      const requiredField = document.querySelector<HTMLSelectElement>(
-        `[name="${requiredFieldName}"]`,
-      );
-      if (!requiredField) continue;
-
-      // Listen for changes on the required field
-      requiredField.addEventListener('change', () => {
-        // If required field is cleared, also clear this dependent field
-        if (!requiredField.value || requiredField.value === '') {
-          // Check if this field uses TomSelect
-          const tomselect = (field as HTMLSelectElement & { tomselect?: TomSelect }).tomselect;
-          if (tomselect) {
-            tomselect.clear();
-          } else {
-            // Regular select field
-            field.value = '';
-          }
-        }
-      });
-    }
-  }
-}

View File

@@ -1,10 +1,9 @@
-import { initClearField } from './clearField';
 import { initFormElements } from './elements';
 import { initFilterModifiers } from './filterModifiers';
 import { initSpeedSelector } from './speedSelector';

 export function initForms(): void {
-  for (const func of [initFormElements, initSpeedSelector, initFilterModifiers, initClearField]) {
+  for (const func of [initFormElements, initSpeedSelector, initFilterModifiers]) {
     func();
   }
 }

View File

@@ -5,7 +5,6 @@ from ..utils import add_blank_choice
 __all__ = (
     'BulkEditNullBooleanSelect',
-    'ClearableSelect',
     'ColorSelect',
     'HTMXSelect',
     'SelectWithPK',
@@ -29,21 +28,6 @@ class BulkEditNullBooleanSelect(forms.NullBooleanSelect):
     )


-class ClearableSelect(forms.Select):
-    """
-    A Select widget that will be automatically cleared when one or more required fields are cleared.
-
-    Args:
-        requires_fields: A list of field names that this field depends on. When any of these fields
-            are cleared, this field will also be cleared automatically via JavaScript.
-    """
-    def __init__(self, *args, requires_fields=None, **kwargs):
-        super().__init__(*args, **kwargs)
-
-        if requires_fields:
-            self.attrs['data-requires-fields'] = ','.join(requires_fields)
-
-
 class ColorSelect(forms.Select):
     """
     Extends the built-in Select widget to colorize each <option>.