diff --git a/docs/configuration/system.md b/docs/configuration/system.md index 20143276c..fda1ca7ed 100644 --- a/docs/configuration/system.md +++ b/docs/configuration/system.md @@ -158,6 +158,7 @@ LOGGING = { * `netbox..` - Generic form for model-specific log messages * `netbox.auth.*` - Authentication events * `netbox.api.views.*` - Views which handle business logic for the REST API +* `netbox.jobs.*` - Background jobs * `netbox.reports.*` - Report execution (`module.name`) * `netbox.scripts.*` - Custom script execution (`module.name`) * `netbox.views.*` - Views which handle business logic for the web UI diff --git a/docs/plugins/development/background-jobs.md b/docs/plugins/development/background-jobs.md index 9be52c3ca..376df6dd2 100644 --- a/docs/plugins/development/background-jobs.md +++ b/docs/plugins/development/background-jobs.md @@ -38,6 +38,27 @@ You can schedule the background job from within your code (e.g. from a model's ` This is the human-friendly names of your background job. If omitted, the class name will be used. +### Logging + +!!! info "This feature was introduced in NetBox v4.4." + +A Python logger is instantiated by the runner for each job. It can be utilized within a job's `run()` method as needed: + +```python +def run(self, *args, **kwargs): + obj = MyModel.objects.get(pk=kwargs.get('pk')) + self.logger.info(f"Retrieved object {obj}") +``` + +Four of the standard Python logging levels are supported: + +* `debug()` +* `info()` +* `warning()` +* `error()` + +Log entries recorded using the runner's logger will be saved in the job's log in the database in addition to being processed by other [system logging handlers](../../configuration/system.md#logging). + ### Scheduled Jobs As described above, jobs can be scheduled for immediate execution or at any later time using the `enqueue()` method. However, for management purposes, the `enqueue_once()` method allows a job to be scheduled exactly once avoiding duplicates. 
If a job is already scheduled for a particular instance, a second one won't be scheduled, respecting thread safety. An example use case would be to schedule a periodic task that is bound to an instance in general, but not to any event of that instance (such as updates). The parameters of the `enqueue_once()` method are identical to those of `enqueue()`. diff --git a/netbox/core/api/serializers_/jobs.py b/netbox/core/api/serializers_/jobs.py index 306287e88..dd0dd1245 100644 --- a/netbox/core/api/serializers_/jobs.py +++ b/netbox/core/api/serializers_/jobs.py @@ -23,6 +23,6 @@ class JobSerializer(BaseModelSerializer): model = Job fields = [ 'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'name', 'status', 'created', 'scheduled', - 'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', + 'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries', ] brief_fields = ('url', 'created', 'completed', 'user', 'status') diff --git a/netbox/core/constants.py b/netbox/core/constants.py index 3c3382dcc..582768186 100644 --- a/netbox/core/constants.py +++ b/netbox/core/constants.py @@ -4,23 +4,31 @@ from django.utils.translation import gettext_lazy as _ from rq.job import JobStatus __all__ = ( + 'JOB_LOG_ENTRY_LEVELS', 'RQ_TASK_STATUSES', ) @dataclass -class Status: +class Badge: label: str color: str RQ_TASK_STATUSES = { - JobStatus.QUEUED: Status(_('Queued'), 'cyan'), - JobStatus.FINISHED: Status(_('Finished'), 'green'), - JobStatus.FAILED: Status(_('Failed'), 'red'), - JobStatus.STARTED: Status(_('Started'), 'blue'), - JobStatus.DEFERRED: Status(_('Deferred'), 'gray'), - JobStatus.SCHEDULED: Status(_('Scheduled'), 'purple'), - JobStatus.STOPPED: Status(_('Stopped'), 'orange'), - JobStatus.CANCELED: Status(_('Cancelled'), 'yellow'), + JobStatus.QUEUED: Badge(_('Queued'), 'cyan'), + JobStatus.FINISHED: Badge(_('Finished'), 'green'), + JobStatus.FAILED: Badge(_('Failed'), 'red'), + JobStatus.STARTED: 
Badge(_('Started'), 'blue'), + JobStatus.DEFERRED: Badge(_('Deferred'), 'gray'), + JobStatus.SCHEDULED: Badge(_('Scheduled'), 'purple'), + JobStatus.STOPPED: Badge(_('Stopped'), 'orange'), + JobStatus.CANCELED: Badge(_('Cancelled'), 'yellow'), +} + +JOB_LOG_ENTRY_LEVELS = { + 'debug': Badge(_('Debug'), 'gray'), + 'info': Badge(_('Info'), 'blue'), + 'warning': Badge(_('Warning'), 'orange'), + 'error': Badge(_('Error'), 'red'), +} diff --git a/netbox/core/dataclasses.py b/netbox/core/dataclasses.py new file mode 100644 index 000000000..21f97d01d --- /dev/null +++ b/netbox/core/dataclasses.py @@ -0,0 +1,21 @@ +import logging + +from dataclasses import dataclass, field +from datetime import datetime + +from django.utils import timezone + +__all__ = ( + 'JobLogEntry', +) + + +@dataclass +class JobLogEntry: + level: str + message: str + timestamp: datetime = field(default_factory=timezone.now) + + @classmethod + def from_logrecord(cls, record: logging.LogRecord): + return cls(record.levelname.lower(), record.getMessage()) diff --git a/netbox/core/jobs.py b/netbox/core/jobs.py index b3dfaf1e7..d82ba5886 100644 --- a/netbox/core/jobs.py +++ b/netbox/core/jobs.py @@ -7,7 +7,6 @@ from netbox.jobs import JobRunner, system_job from netbox.search.backends import search_backend from utilities.proxy import resolve_proxies from .choices import DataSourceStatusChoices, JobIntervalChoices -from .exceptions import SyncError from .models import DataSource logger = logging.getLogger(__name__) @@ -23,19 +22,23 @@ class SyncDataSourceJob(JobRunner): def run(self, *args, **kwargs): datasource = DataSource.objects.get(pk=self.job.object_id) + self.logger.debug(f"Found DataSource ID {datasource.pk}") try: + self.logger.info(f"Syncing data source {datasource}") datasource.sync() # Update the search cache for DataFiles belonging to this source + self.logger.debug("Updating search cache for data files") search_backend.cache(datasource.datafiles.iterator()) except Exception as e: + 
self.logger.error(f"Error syncing data source: {e}") DataSource.objects.filter(pk=datasource.pk).update(status=DataSourceStatusChoices.FAILED) - if type(e) is SyncError: - logging.error(e) raise e + self.logger.info("Syncing completed successfully") + @system_job(interval=JobIntervalChoices.INTERVAL_DAILY) class SystemHousekeepingJob(JobRunner): diff --git a/netbox/core/migrations/0016_job_log_entries.py b/netbox/core/migrations/0016_job_log_entries.py new file mode 100644 index 000000000..030bd4e38 --- /dev/null +++ b/netbox/core/migrations/0016_job_log_entries.py @@ -0,0 +1,28 @@ +import django.contrib.postgres.fields +import django.core.serializers.json +from django.db import migrations, models + +import utilities.json + + +class Migration(migrations.Migration): + + dependencies = [ + ('core', '0015_remove_redundant_indexes'), + ] + + operations = [ + migrations.AddField( + model_name='job', + name='log_entries', + field=django.contrib.postgres.fields.ArrayField( + base_field=models.JSONField( + decoder=utilities.json.JobLogDecoder, + encoder=django.core.serializers.json.DjangoJSONEncoder + ), + blank=True, + default=list, + size=None + ), + ), + ] diff --git a/netbox/core/models/jobs.py b/netbox/core/models/jobs.py index b36cc16f5..ee4c3db12 100644 --- a/netbox/core/models/jobs.py +++ b/netbox/core/models/jobs.py @@ -1,9 +1,12 @@ +import logging import uuid +from dataclasses import asdict from functools import partial import django_rq from django.conf import settings from django.contrib.contenttypes.fields import GenericForeignKey +from django.contrib.postgres.fields import ArrayField from django.core.exceptions import ValidationError from django.core.serializers.json import DjangoJSONEncoder from django.core.validators import MinValueValidator @@ -14,8 +17,10 @@ from django.utils.translation import gettext as _ from rq.exceptions import InvalidJobOperation from core.choices import JobStatusChoices +from core.dataclasses import JobLogEntry from core.models 
import ObjectType from core.signals import job_end, job_start +from utilities.json import JobLogDecoder from utilities.querysets import RestrictedQuerySet from utilities.rqworker import get_queue_for_model @@ -104,6 +109,15 @@ class Job(models.Model): verbose_name=_('job ID'), unique=True ) + log_entries = ArrayField( + verbose_name=_('log entries'), + base_field=models.JSONField( + encoder=DjangoJSONEncoder, + decoder=JobLogDecoder, + ), + blank=True, + default=list, + ) objects = RestrictedQuerySet.as_manager() @@ -205,6 +219,13 @@ class Job(models.Model): # Send signal job_end.send(self) + def log(self, record: logging.LogRecord): + """ + Record a LogRecord from Python's native logging in the job's log. + """ + entry = JobLogEntry.from_logrecord(record) + self.log_entries.append(asdict(entry)) + @classmethod def enqueue( cls, diff --git a/netbox/core/tables/columns.py b/netbox/core/tables/columns.py index f3d985bc3..84187b0fa 100644 --- a/netbox/core/tables/columns.py +++ b/netbox/core/tables/columns.py @@ -1,12 +1,11 @@ import django_tables2 as tables from django.utils.safestring import mark_safe -from core.constants import RQ_TASK_STATUSES from netbox.registry import registry __all__ = ( 'BackendTypeColumn', - 'RQJobStatusColumn', + 'BadgeColumn', ) @@ -23,14 +22,21 @@ class BackendTypeColumn(tables.Column): return value -class RQJobStatusColumn(tables.Column): +class BadgeColumn(tables.Column): """ - Render a colored label for the status of an RQ job. + Render a colored badge for a value. + + Args: + badges: A dictionary mapping of values to core.constants.Badge instances. 
""" + def __init__(self, badges, *args, **kwargs): + super().__init__(*args, **kwargs) + self.badges = badges + def render(self, value): - status = RQ_TASK_STATUSES.get(value) - return mark_safe(f'{status.label}') + badge = self.badges.get(value) + return mark_safe(f'{badge.label}') def value(self, value): - status = RQ_TASK_STATUSES.get(value) - return status.label + badge = self.badges.get(value) + return badge.label diff --git a/netbox/core/tables/jobs.py b/netbox/core/tables/jobs.py index ac27224b3..00032057f 100644 --- a/netbox/core/tables/jobs.py +++ b/netbox/core/tables/jobs.py @@ -1,8 +1,10 @@ import django_tables2 as tables from django.utils.translation import gettext_lazy as _ -from netbox.tables import NetBoxTable, columns -from ..models import Job +from netbox.tables import BaseTable, NetBoxTable, columns +from core.constants import JOB_LOG_ENTRY_LEVELS +from core.models import Job +from core.tables.columns import BadgeColumn class JobTable(NetBoxTable): @@ -40,6 +42,9 @@ class JobTable(NetBoxTable): completed = columns.DateTimeColumn( verbose_name=_('Completed'), ) + log_entries = tables.Column( + verbose_name=_('Log Entries'), + ) actions = columns.ActionsColumn( actions=('delete',) ) @@ -53,3 +58,24 @@ class JobTable(NetBoxTable): default_columns = ( 'pk', 'id', 'object_type', 'object', 'name', 'status', 'created', 'started', 'completed', 'user', ) + + def render_log_entries(self, value): + return len(value) + + +class JobLogEntryTable(BaseTable): + timestamp = columns.DateTimeColumn( + timespec='milliseconds', + verbose_name=_('Time'), + ) + level = BadgeColumn( + badges=JOB_LOG_ENTRY_LEVELS, + verbose_name=_('Level'), + ) + message = tables.Column( + verbose_name=_('Message'), + ) + + class Meta(BaseTable.Meta): + empty_text = _('No log entries') + fields = ('timestamp', 'level', 'message') diff --git a/netbox/core/tables/tasks.py b/netbox/core/tables/tasks.py index f53e598b5..64641b282 100644 --- a/netbox/core/tables/tasks.py +++ 
b/netbox/core/tables/tasks.py @@ -2,7 +2,8 @@ import django_tables2 as tables from django.utils.translation import gettext_lazy as _ from django_tables2.utils import A -from core.tables.columns import RQJobStatusColumn +from core.constants import RQ_TASK_STATUSES +from core.tables.columns import BadgeColumn from netbox.tables import BaseTable, columns @@ -84,7 +85,8 @@ class BackgroundTaskTable(BaseTable): ended_at = columns.DateTimeColumn( verbose_name=_("Ended") ) - status = RQJobStatusColumn( + status = BadgeColumn( + badges=RQ_TASK_STATUSES, verbose_name=_("Status"), accessor='get_status' ) diff --git a/netbox/core/views.py b/netbox/core/views.py index 5729e5f2c..91c5cd6d1 100644 --- a/netbox/core/views.py +++ b/netbox/core/views.py @@ -32,13 +32,13 @@ from utilities.forms import ConfirmationForm from utilities.htmx import htmx_partial from utilities.json import ConfigJSONEncoder from utilities.query import count_related -from utilities.views import ContentTypePermissionRequiredMixin, GetRelatedModelsMixin, register_model_view +from utilities.views import ContentTypePermissionRequiredMixin, GetRelatedModelsMixin, ViewTab, register_model_view from . 
import filtersets, forms, tables from .choices import DataSourceStatusChoices from .jobs import SyncDataSourceJob from .models import * from .plugins import get_catalog_plugins, get_local_plugins -from .tables import CatalogPluginTable, PluginVersionTable +from .tables import CatalogPluginTable, JobLogEntryTable, PluginVersionTable # @@ -184,6 +184,25 @@ class JobView(generic.ObjectView): actions = (DeleteObject,) +@register_model_view(Job, 'log') +class JobLogView(generic.ObjectView): + queryset = Job.objects.all() + actions = (DeleteObject,) + template_name = 'core/job/log.html' + tab = ViewTab( + label=_('Log'), + badge=lambda obj: len(obj.log_entries), + weight=500, + ) + + def get_extra_context(self, request, instance): + table = JobLogEntryTable(instance.log_entries) + table.configure(request) + return { + 'table': table, + } + + @register_model_view(Job, 'delete') class JobDeleteView(generic.ObjectDeleteView): queryset = Job.objects.defer('data') diff --git a/netbox/extras/jobs.py b/netbox/extras/jobs.py index 733654198..c4a1b3b26 100644 --- a/netbox/extras/jobs.py +++ b/netbox/extras/jobs.py @@ -90,7 +90,10 @@ class ScriptJob(JobRunner): request: The WSGI request associated with this execution (if any) commit: Passed through to Script.run() """ - script = ScriptModel.objects.get(pk=self.job.object_id).python_class() + script_model = ScriptModel.objects.get(pk=self.job.object_id) + self.logger.debug(f"Found ScriptModel ID {script_model.pk}") + script = script_model.python_class() + self.logger.debug(f"Loaded script {script.full_name}") # Add files to form data if request: @@ -100,6 +103,7 @@ class ScriptJob(JobRunner): # Add the current request as a property of the script script.request = request + self.logger.debug(f"Request ID: {request.id}") # Execute the script. If commit is True, wrap it with the event_tracking context manager to ensure we process # change logging, event rules, etc. 
diff --git a/netbox/netbox/jobs.py b/netbox/netbox/jobs.py index b7a6a6db1..76f2e4feb 100644 --- a/netbox/netbox/jobs.py +++ b/netbox/netbox/jobs.py @@ -38,6 +38,19 @@ def system_job(interval): return _wrapper +class JobLogHandler(logging.Handler): + """ + A logging handler which records entries on a Job. + """ + def __init__(self, job, *args, **kwargs): + super().__init__(*args, **kwargs) + self.job = job + + def emit(self, record): + # Enter the record in the log of the associated Job + self.job.log(record) + + class JobRunner(ABC): """ Background Job helper class. @@ -56,6 +69,11 @@ class JobRunner(ABC): """ self.job = job + # Initiate the system logger + self.logger = logging.getLogger(f"netbox.jobs.{self.__class__.__name__}") + self.logger.setLevel(logging.DEBUG) + self.logger.addHandler(JobLogHandler(job)) + @classproperty def name(cls): return getattr(cls.Meta, 'name', cls.__name__) diff --git a/netbox/netbox/tests/test_jobs.py b/netbox/netbox/tests/test_jobs.py index e3e24a235..0920ef32b 100644 --- a/netbox/netbox/tests/test_jobs.py +++ b/netbox/netbox/tests/test_jobs.py @@ -11,7 +11,10 @@ from core.choices import JobStatusChoices class TestJobRunner(JobRunner): def run(self, *args, **kwargs): - pass + self.logger.debug("Debug message") + self.logger.info("Info message") + self.logger.warning("Warning message") + self.logger.error("Error message") class JobRunnerTestCase(TestCase): @@ -47,8 +50,16 @@ class JobRunnerTest(JobRunnerTestCase): def test_handle(self): job = TestJobRunner.enqueue(immediate=True) + # Check job status self.assertEqual(job.status, JobStatusChoices.STATUS_COMPLETED) + # Check logging + self.assertEqual(len(job.log_entries), 4) + self.assertEqual(job.log_entries[0]['message'], "Debug message") + self.assertEqual(job.log_entries[1]['message'], "Info message") + self.assertEqual(job.log_entries[2]['message'], "Warning message") + self.assertEqual(job.log_entries[3]['message'], "Error message") + def test_handle_errored(self): class 
ErroredJobRunner(TestJobRunner): EXP = Exception('Test error') diff --git a/netbox/templates/core/job.html b/netbox/templates/core/job.html index 49fa0231a..ae6e4d63d 100644 --- a/netbox/templates/core/job.html +++ b/netbox/templates/core/job.html @@ -1,27 +1,6 @@ -{% extends 'generic/object.html' %} -{% load buttons %} -{% load helpers %} -{% load perms %} +{% extends 'core/job/base.html' %} {% load i18n %} -{% block breadcrumbs %} - {{ block.super }} - {% if object.object %} - - {% with parent_jobs_viewname=object.object|viewname:"jobs" %} - - {% endwith %} - {% else %} - - {% endif %} -{% endblock breadcrumbs %} - {% block content %}
diff --git a/netbox/templates/core/job/base.html b/netbox/templates/core/job/base.html new file mode 100644 index 000000000..a31c53a8a --- /dev/null +++ b/netbox/templates/core/job/base.html @@ -0,0 +1,23 @@ +{% extends 'generic/object.html' %} +{% load buttons %} +{% load helpers %} +{% load perms %} +{% load i18n %} + +{% block breadcrumbs %} + {{ block.super }} + {% if object.object %} + + {% with parent_jobs_viewname=object.object|viewname:"jobs" %} + + {% endwith %} + {% else %} + + {% endif %} +{% endblock breadcrumbs %} diff --git a/netbox/templates/core/job/log.html b/netbox/templates/core/job/log.html new file mode 100644 index 000000000..b8c727299 --- /dev/null +++ b/netbox/templates/core/job/log.html @@ -0,0 +1,12 @@ +{% extends 'core/job/base.html' %} +{% load render_table from django_tables2 %} + +{% block content %} +
+
+
+ {% render_table table %} +
+
+
+{% endblock %} diff --git a/netbox/utilities/json.py b/netbox/utilities/json.py index 3114be1bf..eaf45feea 100644 --- a/netbox/utilities/json.py +++ b/netbox/utilities/json.py @@ -1,10 +1,14 @@ import decimal +import json from django.core.serializers.json import DjangoJSONEncoder +from utilities.datetime import datetime_from_timestamp + __all__ = ( 'ConfigJSONEncoder', 'CustomFieldJSONEncoder', + 'JobLogDecoder', ) @@ -29,3 +33,21 @@ class ConfigJSONEncoder(DjangoJSONEncoder): return type(o).__name__ return super().default(o) + + +class JobLogDecoder(json.JSONDecoder): + """ + Deserialize JobLogEntry timestamps. + """ + def __init__(self, *args, **kwargs): + kwargs['object_hook'] = self._deserialize_entry + super().__init__(*args, **kwargs) + + def _deserialize_entry(self, obj: dict) -> dict: + if obj.get('timestamp'): + # Deserialize a timestamp string to a native datetime object + try: + obj['timestamp'] = datetime_from_timestamp(obj['timestamp']) + except ValueError: + pass + return obj