Mirror of https://github.com/netbox-community/netbox.git (synced 2025-07-13 08:44:51 -06:00)

Compare commits: 3 commits (112ee4f629 ... 5d90fb8ce4)

Commits:
- 5d90fb8ce4
- 0d31449df8
- 875a641687
@@ -116,7 +116,7 @@ class Job(models.Model):
         verbose_name_plural = _('jobs')

     def __str__(self):
-        return str(self.job_id)
+        return self.name

     def get_absolute_url(self):
         # TODO: Employ dynamic registration
@@ -786,6 +786,14 @@ class IPRange(ContactsMixin, PrimaryModel):

         return min(float(child_count) / self.size * 100, 100)

+    @classmethod
+    def find_prefix(self, address):
+        prefixes = Prefix.objects.filter(
+            models.Q(prefix__net_contains=address.start_address) & Q(prefix__net_contains=address.end_address),
+            vrf=address.vrf,
+        )
+        return prefixes.last()
+

 class IPAddress(ContactsMixin, PrimaryModel):
     """
@@ -1085,3 +1093,8 @@ class IPAddress(ContactsMixin, PrimaryModel):

     def get_role_color(self):
         return IPAddressRoleChoices.colors.get(self.role)
+
+    @classmethod
+    def find_prefix(self, address):
+        prefixes = Prefix.objects.filter(prefix__net_contains=address.address, vrf=address.vrf)
+        return prefixes.last()
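The find_prefix() helpers above rely on the netaddr-backed net_contains queryset lookup: a candidate parent prefix must contain the address (or, for a range, both endpoints). A minimal standalone sketch of the same containment test using netaddr directly, with no Django involved (the function name is illustrative, not part of NetBox):

from netaddr import IPNetwork

def prefix_contains_range(prefix: IPNetwork, start: IPNetwork, end: IPNetwork) -> bool:
    # Mirrors the IPRange.find_prefix() filter: the prefix must contain
    # both the start and end addresses of the range.
    return start.ip in prefix and end.ip in prefix

parent = IPNetwork('192.0.2.0/24')
print(prefix_contains_range(parent, IPNetwork('192.0.2.10/24'), IPNetwork('192.0.2.90/24')))  # True
print(prefix_contains_range(parent, IPNetwork('192.0.2.10/24'), IPNetwork('192.0.3.5/24')))   # False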
@@ -29,12 +29,11 @@ def update_children_depth(prefix):
     Prefix.objects.bulk_update(children, ['_depth'], batch_size=100)


-def update_ipaddress_prefix(prefix, delete=False):
+def update_object_prefix(prefix, delete=False, parent_model=Prefix, child_model=IPAddress):
     if delete:
         # Get all possible addresses
-        addresses = IPAddress.objects.filter(prefix=prefix)
+        addresses = child_model.objects.filter(prefix=prefix)
         # Find a new containing prefix
-        prefix = Prefix.objects.filter(
+        prefix = parent_model.objects.filter(
             prefix__net_contains_or_equals=prefix.prefix,
             vrf=prefix.vrf
         ).exclude(pk=prefix.pk).last()
@@ -43,71 +42,38 @@ def update_ipaddress_prefix(prefix, delete=False):
             # Set contained addresses to the containing prefix if it exists
             address.prefix = prefix
     else:
         # Get all possible modified addresses
-        addresses = IPAddress.objects.filter(
-            Q(address__net_contained_or_equal=prefix.prefix, vrf=prefix.vrf) |
-            Q(prefix=prefix)
-        )
+        filter = Q(prefix=prefix)
+
+        if child_model == IPAddress:
+            filter |= Q(address__net_contained_or_equal=prefix.prefix, vrf=prefix.vrf)
+        elif child_model == IPRange:
+            filter |= Q(
+                start_address__net_contained_or_equal=prefix.prefix,
+                end_address__net_contained_or_equal=prefix.prefix,
+                vrf=prefix.vrf
+            )
+
+        addresses = child_model.objects.filter(filter)

         for address in addresses:
-            if not address.prefix or (prefix.prefix in address.prefix.prefix and address.address in prefix.prefix):
-                # Set to new Prefix as the prefix is a child of the old prefix and the address is contained in the
-                # prefix
+            # If the address's prefix is not set then this model is the only option
+            if not address.prefix:
                 address.prefix = prefix
-            elif address.prefix and address.address not in prefix.prefix:
-                # Find a new prefix as the prefix no longer contains the address
-                address.prefix = Prefix.objects.filter(
-                    prefix__net_contains_or_equals=address.address,
-                    vrf=prefix.vrf
-                ).last()
+            # This address has a different VRF so the prefix cannot be the parent prefix
+            elif address.prefix != address.find_prefix(address):
+                address.prefix = address.find_prefix(address)
             else:
                 # No-OP as the prefix does not require modification
                 pass

         # Update the addresses
-        IPAddress.objects.bulk_update(addresses, ['prefix'], batch_size=100)
+        child_model.objects.bulk_update(addresses, ['prefix'], batch_size=100)
+
+
+def update_ipaddress_prefix(prefix, delete=False):
+    update_object_prefix(prefix, delete, child_model=IPAddress)


 def update_iprange_prefix(prefix, delete=False):
-    if delete:
-        # Get all possible addresses
-        addresses = IPRange.objects.filter(prefix=prefix)
-        # Find a new containing prefix
-        prefix = Prefix.objects.filter(
-            prefix__net_contains_or_equals=prefix.prefix,
-            vrf=prefix.vrf
-        ).exclude(pk=prefix.pk).last()
-
-        for address in addresses:
-            # Set contained addresses to the containing prefix if it exists
-            address.prefix = prefix
-    else:
-        # Get all possible modified addresses
-        addresses = IPRange.objects.filter(
-            Q(start_address__net_contained_or_equal=prefix.prefix, vrf=prefix.vrf) |
-            Q(prefix=prefix)
-        )
-
-        for address in addresses:
-            if not address.prefix or (
-                prefix.prefix in address.prefix.prefix and address.start_address in prefix.prefix and
-                address.end_address in prefix.prefix
-            ):
-                # Set to new Prefix as the prefix is a child of the old prefix and the address is contained in the
-                # prefix
-                address.prefix = prefix
-            elif address.prefix and address.address not in prefix.prefix:
-                # Find a new prefix as the prefix no longer contains the address
-                address.prefix = Prefix.objects.filter(
-                    Q(prefix__net_contains_or_equals=address.start_address) &
-                    Q(prefix__net_contains_or_equals=address.end_address),
-                    vrf=prefix.vrf
-                ).last()
-            else:
-                # No-OP as the prefix does not require modification
-                pass
-
-        # Update the addresses
-        IPAddress.objects.bulk_update(addresses, ['prefix'], batch_size=100)
+    update_object_prefix(prefix, delete, child_model=IPRange)


 def update_prefix_parents(prefix, delete=False):
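This refactor collapses two near-duplicate functions into update_object_prefix(), which varies only in the Q filter built for each child model. A hedged sketch of that composition pattern, isolated from the models (assumes only that Django is installed; Q objects can be built and combined without a database, and the helper name and string keys here are illustrative):

from django.db.models import Q

def build_child_filter(prefix_value, vrf, child_model_name):
    # Start with children already assigned to this prefix...
    q = Q(prefix=prefix_value)
    # ...then OR in the containment test specific to the child model,
    # mirroring the child_model == IPAddress / IPRange branches above.
    if child_model_name == 'ipaddress':
        q |= Q(address__net_contained_or_equal=prefix_value, vrf=vrf)
    elif child_model_name == 'iprange':
        q |= Q(
            start_address__net_contained_or_equal=prefix_value,
            end_address__net_contained_or_equal=prefix_value,
            vrf=vrf,
        )
    return q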
@@ -156,7 +156,7 @@ class TestIPRange(TestCase):
         range.clean()
         range.save()

-        prefix = Prefix(prefix='192.0.1.0/17')
+        prefix = Prefix(prefix='192.0.0.0/17')
         prefix.clean()
         prefix.save()
@@ -264,6 +264,8 @@ class TestPrefix(TestCase):
         parent_prefix.vrf = vrfs[0]
         parent_prefix.save()

+        parent_prefix.refresh_from_db()
+        child_ip_pks = {p.pk for p in parent_prefix.ip_addresses.all()}

         # VRF container is limited to its own VRF
@@ -741,13 +743,20 @@ class TestIPAddress(TestCase):
         self.assertRaises(ValidationError, duplicate_ip.clean)

     def test_duplicate_vrf(self):
-        vrf = VRF.objects.create(name='Test', rd='1:1', enforce_unique=False)
+        vrf = VRF.objects.get(rd='1:1')
+        vrf.enforce_unique = False
+        vrf.clean()
+        vrf.save()
+
         IPAddress.objects.create(vrf=vrf, address=IPNetwork('192.0.2.1/24'))
         duplicate_ip = IPAddress(vrf=vrf, address=IPNetwork('192.0.2.1/24'))
         self.assertIsNone(duplicate_ip.clean())

     def test_duplicate_vrf_unique(self):
-        vrf = VRF.objects.create(name='Test', rd='1:1', enforce_unique=True)
+        vrf = VRF.objects.get(rd='1:1')
+        vrf.enforce_unique = True
+        vrf.clean()
+        vrf.save()
         IPAddress.objects.create(vrf=vrf, address=IPNetwork('192.0.2.1/24'))
         duplicate_ip = IPAddress(vrf=vrf, address=IPNetwork('192.0.2.1/24'))
         self.assertRaises(ValidationError, duplicate_ip.clean)
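Both tests now fetch a pre-existing VRF rather than creating one inline, so the class-level fixture must provide a VRF with rd='1:1'. A minimal sketch of what that fixture would need to contain (illustrative; the actual setUpTestData lives elsewhere in this test module, and the VRF name here is an assumption):

from django.test import TestCase
from ipam.models import VRF

class TestIPAddress(TestCase):

    @classmethod
    def setUpTestData(cls):
        # rd='1:1' is assumed from the VRF.objects.get(rd='1:1') calls above.
        VRF.objects.create(name='VRF 1', rd='1:1')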
@@ -8,11 +8,15 @@ from django_pglocks import advisory_lock
 from rq.timeouts import JobTimeoutException

 from core.choices import JobStatusChoices
+from core.events import JOB_COMPLETED, JOB_FAILED
 from core.models import Job, ObjectType
+from extras.models import Notification
 from netbox.constants import ADVISORY_LOCK_KEYS
 from netbox.registry import registry
+from utilities.request import apply_request_processors

 __all__ = (
+    'AsyncViewJob',
     'JobRunner',
     'system_job',
 )
@@ -154,3 +158,35 @@ class JobRunner(ABC):
             job.delete()

         return cls.enqueue(instance=instance, schedule_at=schedule_at, interval=interval, *args, **kwargs)
+
+
+class AsyncViewJob(JobRunner):
+    """
+    Execute a view as a background job.
+    """
+    class Meta:
+        name = 'Async View'
+
+    def run(self, view_cls, request, **kwargs):
+        view = view_cls.as_view()
+
+        # Apply all registered request processors (e.g. event_tracking)
+        with apply_request_processors(request):
+            data = view(request)
+
+        self.job.data = {
+            'log': data.log,
+            'errors': data.errors,
+        }
+
+        # Notify the user
+        notification = Notification(
+            user=request.user,
+            object=self.job,
+            event_type=JOB_COMPLETED if not data.errors else JOB_FAILED,
+        )
+        notification.save()
+
+        # TODO: Waiting on fix for bug #19806
+        # if errors:
+        #     raise JobFailed()
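Note the contract AsyncViewJob.run() imposes: when dispatched under the job, the view must return an object exposing .log and .errors (the AsyncJobData dataclass introduced in utilities/jobs.py below) rather than an HttpResponse. A runnable pure-Python illustration of that shape (all names here are stand-ins, not NetBox code):

from dataclasses import dataclass, field
from typing import List

@dataclass
class DemoJobData:
    # Stand-in for utilities.jobs.AsyncJobData: the shape run() reads back.
    log: List[str] = field(default_factory=list)
    errors: List[str] = field(default_factory=list)

def demo_background_view(request):
    # A background-aware view returns raw result data instead of an HttpResponse.
    return DemoJobData(log=['Created device1', 'Created device2'], errors=[])

data = demo_background_view(None)
assert not data.errors  # would map to JOB_COMPLETED in the notification above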
@@ -1,8 +1,5 @@
-from contextlib import ExitStack
-
 import logging
 import uuid
-import warnings

 from django.conf import settings
 from django.contrib import auth, messages
@@ -13,10 +10,10 @@ from django.db.utils import InternalError
 from django.http import Http404, HttpResponseRedirect

 from netbox.config import clear_config, get_config
-from netbox.registry import registry
 from netbox.views import handler_500
 from utilities.api import is_api_request
 from utilities.error_handlers import handle_rest_api_exception
+from utilities.request import apply_request_processors

 __all__ = (
     'CoreMiddleware',
@@ -36,12 +33,7 @@ class CoreMiddleware:
         request.id = uuid.uuid4()

         # Apply all registered request processors
-        with ExitStack() as stack:
-            for request_processor in registry['request_processors']:
-                try:
-                    stack.enter_context(request_processor(request))
-                except Exception as e:
-                    warnings.warn(f'Failed to initialize request processor {request_processor}: {e}')
+        with apply_request_processors(request):
             response = self.get_response(request)

         # Check if language cookie should be renewed
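The inlined ExitStack loop the middleware is dropping here (and which now lives in utilities.request.apply_request_processors, shown further down) enters a variable number of context managers and unwinds all of them when the block exits, even on error. A runnable pure-Python demo of that pattern (processor names are illustrative; only event_tracking is named in this diff):

from contextlib import ExitStack, contextmanager

@contextmanager
def demo_processor(name):
    print(f'enter {name}')
    try:
        yield
    finally:
        print(f'exit {name}')

with ExitStack() as stack:
    # Enter each registered processor; all are exited in reverse order,
    # even if handling the request raises.
    for name in ('event_tracking', 'demo_processor_2'):
        stack.enter_context(demo_processor(name))
    print('handle request')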
@@ -28,6 +28,7 @@ from utilities.export import TableExport
 from utilities.forms import BulkRenameForm, ConfirmationForm, restrict_form_fields
 from utilities.forms.bulk_import import BulkImportForm
 from utilities.htmx import htmx_partial
+from utilities.jobs import AsyncJobData, is_background_request, process_request_as_job
 from utilities.permissions import get_permission_for_model
 from utilities.query import reapply_model_ordering
 from utilities.request import safe_for_redirect
@@ -503,25 +504,32 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
         if form.is_valid():
             logger.debug("Import form validation was successful")
+            redirect_url = reverse(get_viewname(model, action='list'))
+            new_objects = []
+
+            # If indicated, defer this request to a background job & redirect the user
+            if form.cleaned_data['background_job']:
+                job_name = _('Bulk import {count} {object_type}').format(
+                    count=len(form.cleaned_data['data']),
+                    object_type=model._meta.verbose_name_plural,
+                )
+                if job := process_request_as_job(self.__class__, request, name=job_name):
+                    msg = _('Created background job {job.pk}: <a href="{url}">{job.name}</a>').format(
+                        url=job.get_absolute_url(),
+                        job=job
+                    )
+                    messages.info(request, mark_safe(msg))
+                    return redirect(redirect_url)

             try:
                 # Iterate through data and bind each record to a new model form instance.
                 with transaction.atomic(using=router.db_for_write(model)):
-                    new_objs = self.create_and_update_objects(form, request)
+                    new_objects = self.create_and_update_objects(form, request)

                 # Enforce object-level permissions
-                if self.queryset.filter(pk__in=[obj.pk for obj in new_objs]).count() != len(new_objs):
+                if self.queryset.filter(pk__in=[obj.pk for obj in new_objects]).count() != len(new_objects):
                     raise PermissionsViolation

-                if new_objs:
-                    msg = f"Imported {len(new_objs)} {model._meta.verbose_name_plural}"
-                    logger.info(msg)
-                    messages.success(request, msg)
-
-                    view_name = get_viewname(model, action='list')
-                    results_url = f"{reverse(view_name)}?modified_by_request={request.id}"
-                    return redirect(results_url)
-
             except (AbortTransaction, ValidationError):
                 clear_events.send(sender=self)
@@ -530,6 +538,25 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 form.add_error(None, e.message)
                 clear_events.send(sender=self)

+            # If this request was executed via a background job, return the raw data for logging
+            if is_background_request(request):
+                return AsyncJobData(
+                    log=[
+                        _('Created {object}').format(object=str(obj))
+                        for obj in new_objects
+                    ],
+                    errors=form.errors
+                )
+
+            if new_objects:
+                msg = _("Imported {count} {object_type}").format(
+                    count=len(new_objects),
+                    object_type=model._meta.verbose_name_plural
+                )
+                logger.info(msg)
+                messages.success(request, msg)
+                return redirect(f"{redirect_url}?modified_by_request={request.id}")
+
         else:
             logger.debug("Form validation failed")
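After these two hunks the same view body serves both interactive requests and background executions; only the terminal step differs. A hedged sketch of that branch in isolation (the function name and arguments are illustrative, not the actual view API; AsyncJobData and is_background_request come from the new utilities/jobs.py below):

from utilities.jobs import AsyncJobData, is_background_request

def finish_import(request, new_objects, form_errors, redirect_url):
    if is_background_request(request):
        # Running inside AsyncViewJob: hand back raw data for the job log.
        return AsyncJobData(
            log=[f'Created {obj}' for obj in new_objects],
            errors=form_errors,
        )
    # Interactive request: fall through to the usual message + redirect flow.
    return f'{redirect_url}?modified_by_request={request.id}'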
@@ -50,6 +50,7 @@ Context:
 {% render_field form.data %}
 {% render_field form.format %}
 {% render_field form.csv_delimiter %}
+{% render_field form.background_job %}
 <div class="form-group">
     <div class="col col-md-12 text-end">
         {% if return_url %}
@@ -94,6 +95,7 @@ Context:
 {% render_field form.data_file %}
 {% render_field form.format %}
 {% render_field form.csv_delimiter %}
+{% render_field form.background_job %}
 <div class="form-group">
     <div class="col col-md-12 text-end">
         {% if return_url %}
@@ -37,6 +37,11 @@ class BulkImportForm(SyncedDataMixin, forms.Form):
         help_text=_("The character which delimits CSV fields. Applies only to CSV format."),
         required=False
     )
+    background_job = forms.BooleanField(
+        label=_('Background job'),
+        help_text=_("Enqueue a background job to complete the bulk import/update."),
+        required=False,
+    )

     data_field = 'data'
netbox/utilities/jobs.py (new file, 46 lines)
@@ -0,0 +1,46 @@
+from dataclasses import dataclass
+from typing import List
+
+from netbox.jobs import AsyncViewJob
+from utilities.request import copy_safe_request
+
+__all__ = (
+    'AsyncJobData',
+    'is_background_request',
+    'process_request_as_job',
+)
+
+
+@dataclass
+class AsyncJobData:
+    log: List[str]
+    errors: List[str]
+
+
+def is_background_request(request):
+    """
+    Return True if the request is being processed as a background job.
+    """
+    return getattr(request, '_background', False)
+
+
+def process_request_as_job(view, request, name=None):
+    """
+    Process a request using a view as a background job.
+    """
+    # Check that the request is not already being processed as a background job (which would be a loop)
+    if is_background_request(request):
+        return
+
+    # Create a serializable copy of the original request
+    request_copy = copy_safe_request(request)
+    request_copy._background = True
+
+    # Enqueue a job to perform the work in the background
+    return AsyncViewJob.enqueue(
+        name=name,
+        user=request.user,
+        view_cls=view,
+        request=request_copy,
+    )
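process_request_as_job() copies the request before enqueueing because RQ pickles job arguments, and a live Django WSGIRequest is not picklable; copy_safe_request() rebuilds a plain object carrying only the safe, serializable parts. A runnable pure-Python illustration of the flag-and-pickle pattern (MiniFakeRequest is a hypothetical stand-in for NetBoxFakeRequest, and the path is an example value):

import pickle

class MiniFakeRequest:
    # Hypothetical stand-in for utilities.request.NetBoxFakeRequest: a plain
    # object carrying only serializable request attributes.
    def __init__(self, data):
        self.__dict__.update(data)

request_copy = MiniFakeRequest({'method': 'POST', 'path': '/dcim/devices/import/'})
request_copy._background = True  # the flag that is_background_request() checks

restored = pickle.loads(pickle.dumps(request_copy))
assert restored._background is True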
@@ -1,13 +1,17 @@
+import warnings
+from contextlib import ExitStack, contextmanager
 from urllib.parse import urlparse

 from django.utils.http import url_has_allowed_host_and_scheme
 from django.utils.translation import gettext_lazy as _
 from netaddr import AddrFormatError, IPAddress

+from netbox.registry import registry
 from .constants import HTTP_REQUEST_META_SAFE_COPY

 __all__ = (
     'NetBoxFakeRequest',
+    'apply_request_processors',
     'copy_safe_request',
     'get_client_ip',
     'safe_for_redirect',
@@ -48,6 +52,7 @@ def copy_safe_request(request):
         'GET': request.GET,
         'FILES': request.FILES,
         'user': request.user,
+        'method': request.method,
         'path': request.path,
         'id': getattr(request, 'id', None),  # UUID assigned by middleware
     })
@@ -87,3 +92,17 @@
     Returns True if the given URL is safe to use as an HTTP redirect; otherwise returns False.
     """
     return url_has_allowed_host_and_scheme(url, allowed_hosts=None)
+
+
+@contextmanager
+def apply_request_processors(request):
+    """
+    A context manager which applies all registered request processors (such as event_tracking).
+    """
+    with ExitStack() as stack:
+        for request_processor in registry['request_processors']:
+            try:
+                stack.enter_context(request_processor(request))
+            except Exception as e:
+                warnings.warn(f'Failed to initialize request processor {request_processor.__name__}: {e}')
+        yield
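A request processor, as consumed by apply_request_processors(), is simply a context manager that accepts the request; event_tracking is the one example named in this diff. A hedged sketch of a custom processor (the function below is hypothetical, and the mechanism for adding entries to registry['request_processors'] is not shown in this diff):

from contextlib import contextmanager

@contextmanager
def audit_context(request):  # hypothetical processor
    # Set up per-request state before the view (or background job) runs
    print(f'auditing request {getattr(request, "id", None)}')
    try:
        yield
    finally:
        # Tear down once the response has been generated
        print('audit complete')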