Mirror of https://github.com/netbox-community/netbox.git (synced 2025-07-29 11:56:25 -06:00)

Commit ff9c8e1a9b: "7848 viewset"
Parent: ff24a3abe4
@@ -12,11 +12,10 @@ router.register('data-sources', views.DataSourceViewSet)
 router.register('data-files', views.DataFileViewSet)
 router.register('jobs', views.JobViewSet)
 router.register('object-changes', views.ObjectChangeViewSet)
+router.register('background-queues', views.QueueViewSet, basename='RQQueue')
+router.register('background-workers', views.WorkerViewSet, basename='RQWorker')

 urlpatterns = (
-    path('background-queues/', views.QueueListView.as_view(), name="background_queue_list"),
-    path('background-workers/', views.WorkerListView.as_view(), name="background_worker_list"),
-    path('background-workers/<str:worker_name>/', views.WorkerDetailView.as_view(), name="background_worker_detail"),
     path('background-tasks/<str:queue_name>/', views.TaskListView.as_view(), name="background_task_list"),
     path('background-tasks/<str:queue_name>/deferred/', views.DeferredTaskListView.as_view(), name="background_tasks_deferred"),
     path('background-tasks/<str:queue_name>/failed/', views.FailedTaskListView.as_view(), name="background_tasks_failed"),
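The queue and worker endpoints move from hand-written path() entries onto the API router. As a minimal, self-contained sketch of the router mechanics this relies on (not part of the diff; the viewsets below are stand-ins, assuming a plain DRF DefaultRouter rather than NetBox's router subclass):

    # Illustrative only: how DRF derives routes and URL names from register().
    from rest_framework import viewsets
    from rest_framework.response import Response
    from rest_framework.routers import DefaultRouter


    class QueueViewSet(viewsets.ViewSet):
        def list(self, request):
            return Response([])


    class WorkerViewSet(viewsets.ViewSet):
        def list(self, request):
            return Response([])

        def retrieve(self, request, pk=None):
            return Response({'name': pk})


    router = DefaultRouter()
    router.register('background-queues', QueueViewSet, basename='RQQueue')
    router.register('background-workers', WorkerViewSet, basename='RQWorker')

    # The router only emits routes for actions the viewset defines:
    #   background-queues/        -> name 'RQQueue-list'   (list only, no retrieve)
    #   background-workers/       -> name 'RQWorker-list'
    #   background-workers/<pk>/  -> name 'RQWorker-detail'
    urlpatterns = router.urls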
@@ -20,6 +20,8 @@ from django_rq.utils import get_statistics
 from django_rq.settings import QUEUES_LIST
 from netbox.api.metadata import ContentTypeMetadata
 from netbox.api.viewsets import NetBoxModelViewSet, NetBoxReadOnlyModelViewSet
+from rest_framework import viewsets
+from rest_framework.pagination import LimitOffsetPagination
 from rest_framework.permissions import IsAdminUser
 from rq.job import Job as RQ_Job
 from rq.worker import Worker
@@ -84,55 +86,82 @@ class ObjectChangeViewSet(ReadOnlyModelViewSet):
     filterset_class = filtersets.ObjectChangeFilterSet


-class QueueListView(APIView):
+class LimitOffsetListPagination(LimitOffsetPagination):
+    """
+    DRF LimitOffset Paginator but for list instead of queryset
+    """
+    count = 0
+    offset = 0
+
+    def paginate_list(self, data, request, view=None):
+        self.request = request
+        self.limit = self.get_limit(request)
+        self.count = len(data)
+        self.offset = self.get_offset(request)
+
+        if self.limit is None:
+            self.limit = self.count
+
+        if self.count == 0 or self.offset > self.count:
+            return []
+
+        if self.count > self.limit and self.template is not None:
+            self.display_page_controls = True
+
+        return data[self.offset:self.offset + self.limit]
+
+
+class BaseRQListView(viewsets.ViewSet):
     """
     Retrieve a list of RQ Queues.
     """
     permission_classes = [IsAdminUser]
+    serializer_class = None
+
+    @extend_schema(responses={200: OpenApiTypes.OBJECT})
+    def list(self, request):
+        data = self.get_data()
+        paginator = LimitOffsetListPagination()
+        data = paginator.paginate_list(data, request)
+
+        serializer = self.serializer_class(data, many=True)
+        return paginator.get_paginated_response(serializer.data)
+
+
+class QueueViewSet(BaseRQListView):
+    """
+    Retrieve a list of RQ Queues.
+    """
+    serializer_class = serializers.BackgroundQueueSerializer

     def get_view_name(self):
         return "Background Queues"

-    @extend_schema(responses={200: OpenApiTypes.OBJECT})
-    def get(self, request, format=None):
-        data = get_statistics(run_maintenance_tasks=True)["queues"]
-        serializer = serializers.BackgroundQueueSerializer(data, many=True)
-        return Response(serializer.data)
+    def get_data(self):
+        return get_statistics(run_maintenance_tasks=True)["queues"]


-class WorkerListView(APIView):
+class WorkerViewSet(BaseRQListView):
     """
     Retrieve a list of RQ Workers.
     """
-    permission_classes = [IsAdminUser]
+    serializer_class = serializers.BackgroundWorkerSerializer

     def get_view_name(self):
         return "Background Workers"

-    @extend_schema(responses={200: OpenApiTypes.OBJECT})
-    def get(self, request, format=None):
-        # all the RQ queues should use the same connection
-        config = QUEUES_LIST[0]
-        workers = Worker.all(get_redis_connection(config['connection_config']))
-        serializer = serializers.BackgroundWorkerSerializer(workers, many=True)
-        return Response(serializer.data)
-
-
-class WorkerDetailView(APIView):
-    """
-    Retrieve the details of the specified RQ Worker.
-    """
-    permission_classes = [IsAdminUser]
-
-    def get_view_name(self):
-        return "Background Worker"
-
-    @extend_schema(responses={200: OpenApiTypes.OBJECT})
-    def get(self, request, worker_name, format=None):
+    def get_data(self):
+        config = QUEUES_LIST[0]
+        return Worker.all(get_redis_connection(config['connection_config']))
+
+    def retrieve(self, request, pk=None):
         # all the RQ queues should use the same connection
+        if not pk:
+            raise Http404
+
         config = QUEUES_LIST[0]
         workers = Worker.all(get_redis_connection(config['connection_config']))
-        worker = next((item for item in workers if item.name == worker_name), None)
+        worker = next((item for item in workers if item.name == pk), None)
         if not worker:
             raise Http404
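The new paginator applies DRF's limit/offset semantics to a plain Python list, since RQ queue and worker statistics are not querysets. A minimal standalone sketch of the same slicing behaviour, independent of DRF and RQ (not part of the diff; names are illustrative):

    # Illustrative stand-in for the limit/offset slicing above; not NetBox code.
    def paginate_list(data, limit=None, offset=0):
        count = len(data)
        if limit is None:
            limit = count                 # no limit requested: return everything
        if count == 0 or offset > count:
            return []                     # empty input or offset past the end
        return data[offset:offset + limit]


    workers = ['worker-1', 'worker-2', 'worker-3', 'worker-4']
    assert paginate_list(workers, limit=2) == ['worker-1', 'worker-2']
    assert paginate_list(workers, limit=2, offset=2) == ['worker-3', 'worker-4']
    assert paginate_list(workers, offset=10) == []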
@@ -204,7 +204,7 @@ class BackgroundTaskTestCase(TestCase):
         queue = get_queue('default')
         job = queue.enqueue(self.dummy_job_default)

-        response = self.client.get(reverse('core-api:background_task_delete', args=[job.id]), **self.header)
+        response = self.client.post(reverse('core-api:background_task_delete', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         self.assertFalse(RQ_Job.exists(job.id, connection=queue.connection))
         queue = get_queue('default')
@@ -220,7 +220,7 @@ class BackgroundTaskTestCase(TestCase):
         self.assertTrue(job.is_failed)

         # Re-enqueue the failed job and check that its status has been reset
-        response = self.client.get(reverse('core-api:background_task_requeue', args=[job.id]), **self.header)
+        response = self.client.post(reverse('core-api:background_task_requeue', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         job = RQ_Job.fetch(job.id, queue.connection)
         self.assertFalse(job.is_failed)
@@ -240,7 +240,7 @@ class BackgroundTaskTestCase(TestCase):
         self.assertIsNone(job.enqueued_at)

         # Force-enqueue the deferred job
-        response = self.client.get(reverse('core-api:background_task_enqueue', args=[job.id]), **self.header)
+        response = self.client.post(reverse('core-api:background_task_enqueue', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)

         # Check that job's status is updated correctly
@@ -256,7 +256,7 @@ class BackgroundTaskTestCase(TestCase):
         worker.prepare_job_execution(job)

         self.assertEqual(job.get_status(), JobStatus.STARTED)
-        response = self.client.get(reverse('core-api:background_task_stop', args=[job.id]), **self.header)
+        response = self.client.post(reverse('core-api:background_task_stop', args=[job.id]), **self.header)
         self.assertEqual(response.status_code, 200)
         worker.monitor_work_horse(job, queue)  # Sets the job as Failed and removes from Started
         started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection)
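The test hunks above switch the task actions (delete, requeue, enqueue, stop) from GET to POST requests, consistent with exposing state-changing operations as POST-only endpoints. A hypothetical sketch of that pattern in DRF (not the commit's implementation; the viewset, action name, and response shape are illustrative):

    # Hypothetical sketch: a state-changing endpoint exposed as a POST-only
    # viewset action, mirroring why the tests above now issue POST requests.
    from rest_framework import viewsets
    from rest_framework.decorators import action
    from rest_framework.permissions import IsAdminUser
    from rest_framework.response import Response


    class TaskViewSet(viewsets.ViewSet):
        permission_classes = [IsAdminUser]

        @action(detail=True, methods=['post'])
        def requeue(self, request, pk=None):
            # A real implementation would re-enqueue the RQ job identified by pk.
            return Response({'id': pk, 'requeued': True})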