Mirror of https://github.com/netbox-community/netbox.git
Add tooling for handling background requests

commit bcb0c6ccbc
parent da77b7c41a
@@ -172,20 +172,21 @@ class AsyncViewJob(JobRunner):
         # Apply all registered request processors (e.g. event_tracking)
         with apply_request_processors(request):
-            result, errors = view(request)
+            data = view(request)
 
         self.job.data = {
-            'result': result,
-            'errors': errors,
+            'log': data.log,
+            'errors': data.errors,
         }
-        # TODO: Figure out how to mark a job as "failed"
-        # if errors:
-        #     self.job.terminate(status=JobStatusChoices.STATUS_FAILED, error=errors[0])
 
         # Notify the user
         notification = Notification(
             user=request.user,
             object=self.job,
-            event_type=JOB_COMPLETED if not errors else JOB_FAILED,
+            event_type=JOB_COMPLETED if not data.errors else JOB_FAILED,
         )
         notification.save()
 
+        # TODO: Waiting on fix for bug #19806
+        # if errors:
+        #     raise JobFailed()
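For orientation, a minimal sketch of the contract this hunk relies on: a view replayed under AsyncViewJob is expected to return the AsyncJobData dataclass introduced later in this commit, whose log and errors are then stored on the job. The view below is a hypothetical placeholder, not NetBox code.

from utilities.jobs import AsyncJobData, is_background_request

def example_view(request):
    # Hypothetical view body: when replayed inside AsyncViewJob, return
    # AsyncJobData so the job can record a log and any errors; interactive
    # requests would return a normal HttpResponse instead.
    created = ['device1', 'device2']  # placeholder results
    if is_background_request(request):
        return AsyncJobData(
            log=[f'Created {name}' for name in created],
            errors=[],
        )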
@@ -22,16 +22,16 @@ from core.models import ObjectType
 from core.signals import clear_events
 from extras.choices import CustomFieldUIEditableChoices
 from extras.models import CustomField, ExportTemplate
-from netbox.jobs import AsyncViewJob
 from netbox.object_actions import AddObject, BulkDelete, BulkEdit, BulkExport, BulkImport
 from utilities.error_handlers import handle_protectederror
 from utilities.exceptions import AbortRequest, AbortTransaction, PermissionsViolation
 from utilities.forms import BulkRenameForm, ConfirmationForm, restrict_form_fields
 from utilities.forms.bulk_import import BulkImportForm
 from utilities.htmx import htmx_partial
+from utilities.jobs import AsyncJobData, is_background_request, process_request_as_job
 from utilities.permissions import get_permission_for_model
 from utilities.query import reapply_model_ordering
-from utilities.request import copy_safe_request, safe_for_redirect
+from utilities.request import safe_for_redirect
 from utilities.tables import get_table_configs
 from utilities.views import GetReturnURLMixin, get_viewname
 from .base import BaseMultiObjectView
@@ -504,25 +504,11 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
             redirect_url = reverse(get_viewname(model, action='list'))
             new_objects = []
 
-            # Defer the request to a background job?
-            if form.cleaned_data['background_job'] and not getattr(request, '_background', False):
-
-                # Create a serializable copy of the original request
-                request_copy = copy_safe_request(request)
-                request_copy._background = True
-
-                # Enqueue a job to perform the work in the background
-                job = AsyncViewJob.enqueue(
-                    user=request.user,
-                    view_cls=self.__class__,
-                    request=request_copy,
-                )
-                msg = _("Background job enqueued: {job}").format(job=job.pk)
-                logger.info(msg)
-                messages.info(request, msg)
-
-                # Redirect to the model's list view
-                return redirect(redirect_url)
+            # If indicated, defer this request to a background job & redirect the user
+            if form.cleaned_data['background_job']:
+                if job := process_request_as_job(self.__class__, request):
+                    messages.info(request, _("Background job enqueued: {job}").format(job=job.pk))
+                    return redirect(redirect_url)
 
             try:
                 # Iterate through data and bind each record to a new model form instance.
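A hedged note on the walrus check above: process_request_as_job() (added below in utilities/jobs.py) returns None when the request is already flagged as a background request, so when AsyncViewJob replays the view this branch falls through and the import runs inline instead of re-enqueueing itself. A minimal sketch of that control flow, with placeholder names:

from utilities.jobs import process_request_as_job

def maybe_defer(view_cls, request):
    # First (interactive) pass: a job is returned and the caller redirects.
    if job := process_request_as_job(view_cls, request):
        return job
    # Second (background) pass: process_request_as_job() returned None,
    # so the caller proceeds to do the actual work inline.
    return None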
@@ -542,15 +528,20 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 clear_events.send(sender=self)
 
             # If this request was executed via a background job, return the raw data for logging
-            if getattr(request, '_background', False):
-                result = [
-                    _('Created {object}').format(object=str(obj))
-                    for obj in new_objects
-                ]
-                return result, form.errors
+            if is_background_request(request):
+                return AsyncJobData(
+                    log=[
+                        _('Created {object}').format(object=str(obj))
+                        for obj in new_objects
+                    ],
+                    errors=form.errors
+                )
 
             if new_objects:
-                msg = f"Imported {len(new_objects)} {model._meta.verbose_name_plural}"
+                msg = _("Imported {count} {object_type}").format(
+                    count=len(new_objects),
+                    object_type=model._meta.verbose_name_plural
+                )
                 logger.info(msg)
                 messages.success(request, msg)
                 return redirect(f"{redirect_url}?modified_by_request={request.id}")
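This hunk also swaps the f-string for a gettext-marked template so the message can be translated; interpolation has to happen via str.format() after translation, since an f-string would be rendered before gettext ever sees the template. A minimal sketch of the pattern (assumes a configured Django environment; the values are placeholders):

from django.utils.translation import gettext as _

count = 3
object_type = 'devices'  # e.g. model._meta.verbose_name_plural

# Mark the template for translation first, then interpolate the values.
msg = _("Imported {count} {object_type}").format(count=count, object_type=object_type)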
netbox/utilities/jobs.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+from dataclasses import dataclass
+from typing import List
+
+from netbox.jobs import AsyncViewJob
+from utilities.request import copy_safe_request
+
+__all__ = (
+    'AsyncJobData',
+    'is_background_request',
+    'process_request_as_job',
+)
+
+
+@dataclass
+class AsyncJobData:
+    log: List[str]
+    errors: List[str]
+
+
+def is_background_request(request):
+    """
+    Return True if the request is being processed as a background job.
+    """
+    return getattr(request, '_background', False)
+
+
+def process_request_as_job(view, request):
+    """
+    Process a request using a view as a background job.
+    """
+
+    # Check that the request is not already being processed as a background job (would be a loop)
+    if is_background_request(request):
+        return
+
+    # Create a serializable copy of the original request
+    request_copy = copy_safe_request(request)
+    request_copy._background = True
+
+    # Enqueue a job to perform the work in the background
+    return AsyncViewJob.enqueue(
+        user=request.user,
+        view_cls=view,
+        request=request_copy,
+    )
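Taken together, a minimal usage sketch of the three new helpers from a view's post() method. Only the helper APIs (process_request_as_job, is_background_request, AsyncJobData) come from this commit; the view, its form handling, and perform_import() are hypothetical stand-ins.

from django.contrib import messages
from django.shortcuts import redirect
from django.utils.translation import gettext as _

from utilities.jobs import AsyncJobData, is_background_request, process_request_as_job


class ExampleBulkView:  # placeholder view, not part of this commit
    def post(self, request):
        form = self.get_form(request)  # hypothetical form handling
        if not form.is_valid():
            return redirect('example_list')

        # On the interactive pass, hand the request off to a background job.
        if form.cleaned_data.get('background_job'):
            if job := process_request_as_job(self.__class__, request):
                messages.info(request, _("Background job enqueued: {job}").format(job=job.pk))
                return redirect('example_list')

        created = self.perform_import(form)  # hypothetical import logic

        # On the background pass, return structured data for the job log.
        if is_background_request(request):
            return AsyncJobData(
                log=[_('Created {object}').format(object=obj) for obj in created],
                errors=form.errors,
            )
        return redirect('example_list')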