mirror of https://github.com/netbox-community/netbox.git
* Reference database object by GFK when running scripts & reports via UI
* Reference database object by GFK when running scripts & reports via API
* Remove old enqueue_job() method (see the sketch below)
* Enable filtering jobs by object
* Introduce ObjectJobsView
* Add tabbed views for report & script jobs
* Add object_id to JobSerializer
* Move generic relation to JobsMixin
* Clean up old naming
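The central API change replaces Job.enqueue_job() with Job.enqueue(), which derives the ContentType from the instance itself and records both object_type and object_id on the Job. A minimal before/after sketch of a caller, based on the DataSource sync call that appears later in this diff (the datasource and request variables are illustrative context, not part of the diff):

    from django.contrib.contenttypes.models import ContentType
    from django.utils.module_loading import import_string

    from core.models import DataSource, Job

    # Before: the caller supplied a name and an explicit ContentType
    job = Job.enqueue_job(
        import_string('core.jobs.sync_datasource'),
        name=datasource.name,
        obj_type=ContentType.objects.get_for_model(DataSource),
        user=request.user,
    )

    # After: the related object is passed directly and linked via a generic FK
    job = Job.enqueue(
        import_string('core.jobs.sync_datasource'),
        instance=datasource,
        user=request.user,
    )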
@@ -67,6 +67,6 @@ class JobSerializer(BaseModelSerializer):
     class Meta:
         model = Job
         fields = [
-            'id', 'url', 'display', 'status', 'created', 'scheduled', 'interval', 'started', 'completed', 'name',
-            'object_type', 'user', 'data', 'job_id',
+            'id', 'url', 'display', 'object_type', 'object_id', 'name', 'status', 'created', 'scheduled', 'interval',
+            'started', 'completed', 'user', 'data', 'job_id',
         ]
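With object_type and object_id now exposed by JobSerializer, an API consumer can tell which object a job belongs to without parsing its name. A hedged sketch of reading a job over the REST API (the endpoint path, token, and job ID are assumptions for illustration; only the field names come from this diff):

    import requests

    resp = requests.get(
        'https://netbox.example.com/api/core/jobs/123/',
        headers={'Authorization': 'Token 0123456789abcdef0123456789abcdef01234567'},
    )
    job = resp.json()

    # Field names taken from Meta.fields above; values depend on the job.
    print(job['object_type'], job['object_id'], job['name'], job['status'])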
@@ -113,7 +113,7 @@ class JobFilterSet(BaseFilterSet):
 
     class Meta:
         model = Job
-        fields = ('id', 'interval', 'status', 'user', 'object_type', 'name')
+        fields = ('id', 'object_type', 'object_id', 'name', 'interval', 'status', 'user')
 
     def search(self, queryset, name, value):
         if not value.strip():
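Adding object_type and object_id to the filter set is what enables filtering jobs by their parent object. A sketch of the equivalent ORM query that the filter maps onto (the DataSource lookup and import paths are illustrative assumptions):

    from django.contrib.contenttypes.models import ContentType

    from core.models import DataSource, Job

    datasource = DataSource.objects.get(name='my-repo')
    object_type = ContentType.objects.get_for_model(DataSource)

    # All jobs linked to this one DataSource via the new generic FK fields
    jobs = Job.objects.filter(object_type=object_type, object_id=datasource.pk)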
@@ -1,6 +1,5 @@
 import logging
 
-from .choices import JobStatusChoices
 from netbox.search.backends import search_backend
 from .choices import *
 from .exceptions import SyncError
@@ -9,22 +8,22 @@ from .models import DataSource
 logger = logging.getLogger(__name__)
 
 
-def sync_datasource(job_result, *args, **kwargs):
+def sync_datasource(job, *args, **kwargs):
     """
     Call sync() on a DataSource.
     """
-    datasource = DataSource.objects.get(name=job_result.name)
+    datasource = DataSource.objects.get(pk=job.object_id)
 
     try:
-        job_result.start()
+        job.start()
         datasource.sync()
 
         # Update the search cache for DataFiles belonging to this source
         search_backend.cache(datasource.datafiles.iterator())
 
-        job_result.terminate()
+        job.terminate()
 
     except SyncError as e:
-        job_result.terminate(status=JobStatusChoices.STATUS_ERRORED)
+        job.terminate(status=JobStatusChoices.STATUS_ERRORED)
         DataSource.objects.filter(pk=datasource.pk).update(status=DataSourceStatusChoices.FAILED)
         logging.error(e)
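Callables executed by the worker now receive the Job as the job keyword argument and locate their parent object through it. A sketch of a custom callable following the same pattern; the job.object accessor assumes the Job model exposes a GenericForeignKey over object_type/object_id, which this diff implies but does not show:

    from core.choices import JobStatusChoices


    def sync_something(job, *args, **kwargs):
        # Resolve the parent object from the generic FK (assumed accessor).
        obj = job.object

        job.start()
        try:
            obj.sync()           # whatever work the job performs on its object
            job.terminate()
        except Exception:
            job.terminate(status=JobStatusChoices.STATUS_ERRORED)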
@@ -5,7 +5,7 @@ from fnmatch import fnmatchcase
 from urllib.parse import urlparse
 
 from django.conf import settings
-from django.contrib.contenttypes.models import ContentType
+from django.contrib.contenttypes.fields import GenericRelation
 from django.core.exceptions import ValidationError
 from django.core.validators import RegexValidator
 from django.db import models
@@ -15,6 +15,7 @@ from django.utils.module_loading import import_string
 from django.utils.translation import gettext as _
 
 from netbox.models import PrimaryModel
+from netbox.models.features import JobsMixin
 from netbox.registry import registry
 from utilities.files import sha256_hash
 from utilities.querysets import RestrictedQuerySet
@@ -31,7 +32,7 @@ __all__ = (
 logger = logging.getLogger('netbox.core.data')
 
 
-class DataSource(PrimaryModel):
+class DataSource(JobsMixin, PrimaryModel):
     """
     A remote source, such as a git repository, from which DataFiles are synchronized.
     """
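JobsMixin carries the generic relation referred to by the commit message; the mixin itself is not part of this diff. A sketch of the shape it presumably has and the reverse access it enables on DataSource (field and relation names are assumptions consistent with the fields introduced here):

    from django.contrib.contenttypes.fields import GenericRelation
    from django.db import models


    class JobsMixin(models.Model):
        # Assumed: a reverse generic relation to core.Job keyed on the
        # object_type/object_id pair added by this commit.
        jobs = GenericRelation(
            to='core.Job',
            content_type_field='object_type',
            object_id_field='object_id',
        )

        class Meta:
            abstract = True


    # With DataSource(JobsMixin, PrimaryModel), a source's sync jobs become reachable as:
    #     datasource.jobs.all()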
@@ -118,15 +119,12 @@ class DataSource(PrimaryModel):
         DataSource.objects.filter(pk=self.pk).update(status=self.status)
 
         # Enqueue a sync job
-        job_result = Job.enqueue_job(
+        return Job.enqueue(
             import_string('core.jobs.sync_datasource'),
-            name=self.name,
-            obj_type=ContentType.objects.get_for_model(DataSource),
-            user=request.user,
+            instance=self,
+            user=request.user
         )
 
-        return job_result
-
     def get_backend(self):
         backend_cls = registry['data_backends'].get(self.type)
         backend_params = self.parameters or {}
@@ -7,7 +7,6 @@ from django.contrib.contenttypes.models import ContentType
 from django.core.validators import MinValueValidator
 from django.db import models
 from django.urls import reverse
-from django.urls.exceptions import NoReverseMatch
 from django.utils import timezone
 from django.utils.translation import gettext as _
 
@@ -96,21 +95,12 @@ class Job(models.Model):
     def __str__(self):
         return str(self.job_id)
 
-    def delete(self, *args, **kwargs):
-        super().delete(*args, **kwargs)
-
-        rq_queue_name = get_config().QUEUE_MAPPINGS.get(self.object_type.model, RQ_QUEUE_DEFAULT)
-        queue = django_rq.get_queue(rq_queue_name)
-        job = queue.fetch_job(str(self.job_id))
-
-        if job:
-            job.cancel()
-
     def get_absolute_url(self):
-        try:
-            return reverse(f'extras:{self.object_type.model}_result', args=[self.pk])
-        except NoReverseMatch:
-            return None
+        # TODO: Employ dynamic registration
+        if self.object_type.model == 'reportmodule':
+            return reverse(f'extras:report_result', kwargs={'job_pk': self.pk})
+        if self.object_type.model == 'scriptmodule':
+            return reverse(f'extras:script_result', kwargs={'job_pk': self.pk})
 
     def get_status_color(self):
         return JobStatusChoices.colors.get(self.status)
@@ -130,6 +120,16 @@
 
         return f"{int(minutes)} minutes, {seconds:.2f} seconds"
 
+    def delete(self, *args, **kwargs):
+        super().delete(*args, **kwargs)
+
+        rq_queue_name = get_config().QUEUE_MAPPINGS.get(self.object_type.model, RQ_QUEUE_DEFAULT)
+        queue = django_rq.get_queue(rq_queue_name)
+        job = queue.fetch_job(str(self.job_id))
+
+        if job:
+            job.cancel()
+
     def start(self):
         """
         Record the job's start time and update its status to "running."
@@ -162,25 +162,27 @@
         self.trigger_webhooks(event=EVENT_JOB_END)
 
     @classmethod
-    def enqueue_job(cls, func, name, obj_type, user, schedule_at=None, interval=None, *args, **kwargs):
+    def enqueue(cls, func, instance, name='', user=None, schedule_at=None, interval=None, **kwargs):
         """
         Create a Job instance and enqueue a job using the given callable
 
         Args:
             func: The callable object to be enqueued for execution
+            instance: The NetBox object to which this job pertains
             name: Name for the job (optional)
-            obj_type: ContentType to link to the Job instance object_type
-            user: User object to link to the Job instance
+            user: The user responsible for running the job
             schedule_at: Schedule the job to be executed at the passed date and time
             interval: Recurrence interval (in minutes)
         """
-        rq_queue_name = get_queue_for_model(obj_type.model)
+        object_type = ContentType.objects.get_for_model(instance, for_concrete_model=False)
+        rq_queue_name = get_queue_for_model(object_type.model)
         queue = django_rq.get_queue(rq_queue_name)
         status = JobStatusChoices.STATUS_SCHEDULED if schedule_at else JobStatusChoices.STATUS_PENDING
         job = Job.objects.create(
+            object_type=object_type,
+            object_id=instance.pk,
             name=name,
             status=status,
-            object_type=obj_type,
             scheduled=schedule_at,
             interval=interval,
             user=user,
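Because only the instance is required, scheduled and recurring jobs can be queued the same way for any model. An illustrative call combining the optional arguments (import paths and the surrounding datasource/request variables are assumptions, not part of this diff):

    from datetime import timedelta

    from django.utils import timezone
    from django.utils.module_loading import import_string

    from core.models import Job

    # Start the sync in one hour and repeat daily (interval is in minutes).
    job = Job.enqueue(
        import_string('core.jobs.sync_datasource'),
        instance=datasource,
        user=request.user,
        schedule_at=timezone.now() + timedelta(hours=1),
        interval=60 * 24,
    )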
@@ -188,9 +190,9 @@ class Job(models.Model):
         )
 
         if schedule_at:
-            queue.enqueue_at(schedule_at, func, job_id=str(job.job_id), job_result=job, **kwargs)
+            queue.enqueue_at(schedule_at, func, job_id=str(job.job_id), job=job, **kwargs)
         else:
-            queue.enqueue(func, job_id=str(job.job_id), job_result=job, **kwargs)
+            queue.enqueue(func, job_id=str(job.job_id), job=job, **kwargs)
 
         return job
 
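Since the callable is now enqueued with job= rather than job_result=, any externally maintained callable written against the old keyword needs updating. Purely as an illustration, one way such a callable could accept both names during a transition:

    def legacy_task(job=None, job_result=None, **kwargs):
        # job_result was the keyword used before this commit; prefer the new name.
        job = job or job_result
        job.start()
        ...
        job.terminate()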
@@ -6,12 +6,18 @@ from ..models import Job
 
 
 class JobTable(NetBoxTable):
     id = tables.Column(
         linkify=True
     )
+    name = tables.Column(
+        linkify=True
+    )
     object_type = columns.ContentTypeColumn(
         verbose_name=_('Type')
     )
+    object = tables.Column(
+        linkify=True
+    )
     status = columns.ChoiceFieldColumn()
     created = columns.DateTimeColumn()
     scheduled = columns.DateTimeColumn()
@@ -25,10 +31,9 @@
     class Meta(NetBoxTable.Meta):
         model = Job
         fields = (
-            'pk', 'id', 'object_type', 'name', 'status', 'created', 'scheduled', 'interval', 'started', 'completed',
-            'user', 'job_id',
+            'pk', 'id', 'object_type', 'object', 'name', 'status', 'created', 'scheduled', 'interval', 'started',
+            'completed', 'user', 'job_id',
        )
         default_columns = (
-            'pk', 'id', 'object_type', 'name', 'status', 'created', 'scheduled', 'interval', 'started', 'completed',
-            'user',
+            'pk', 'id', 'object_type', 'object', 'name', 'status', 'created', 'started', 'completed', 'user',
         )
@@ -55,9 +55,9 @@ class DataSourceSyncView(BaseObjectView):
 
     def post(self, request, pk):
        datasource = get_object_or_404(self.queryset, pk=pk)
-        job_result = datasource.enqueue_sync_job(request)
+        job = datasource.enqueue_sync_job(request)
 
-        messages.success(request, f"Queued job #{job_result.pk} to sync {datasource}")
+        messages.success(request, f"Queued job #{job.pk} to sync {datasource}")
         return redirect(datasource.get_absolute_url())
 
 