#18349: Adopt new job logging functionality (#19816)

Jeremy Stretch 2025-08-12 14:44:05 -04:00
parent a175030655
commit cd3f930e19
2 changed files with 36 additions and 40 deletions
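In short, this commit swaps the module-level logging calls in SystemHousekeepingJob for the per-job self.logger provided by the JobRunner base class, turning the housekeeping helpers from static methods into instance methods so they can reach it. Below is a minimal sketch of the resulting pattern, not code from the commit: the import path and the ReportDiskUsageJob class are illustrative assumptions; the diff only confirms that JobRunner subclasses define run(self, *args, **kwargs) and log via self.logger.

# Hypothetical example of the job logging pattern this commit adopts.
# Assumption: JobRunner is importable from netbox.jobs (the import is not shown in this diff).
from netbox.jobs import JobRunner


class ReportDiskUsageJob(JobRunner):
    """Illustrative job; not part of NetBox or of this commit."""

    def run(self, *args, **kwargs):
        # Old style: a module-level logger, detached from the job.
        #   logging.info("Checking disk usage...")
        # New style: the job's own logger, as used throughout the diff below.
        self.logger.info("Checking disk usage...")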


@@ -1,4 +1,3 @@
-import logging
 import sys
 from datetime import timedelta
 from importlib import import_module
@@ -17,8 +16,6 @@ from utilities.proxy import resolve_proxies
 from .choices import DataSourceStatusChoices, JobIntervalChoices
 from .models import DataSource
-logger = logging.getLogger(__name__)
 class SyncDataSourceJob(JobRunner):
     """
@@ -69,7 +66,11 @@ class SystemHousekeepingJob(JobRunner):
     def run(self, *args, **kwargs):
         # Skip if running in development or test mode
-        if settings.DEBUG or 'test' in sys.argv:
+        if settings.DEBUG:
+            self.logger.warning("Aborting execution: Debug is enabled")
+            return
+        if 'test' in sys.argv:
+            self.logger.warning("Aborting execution: Tests are running")
             return
         self.send_census_report()
@@ -78,17 +79,16 @@ class SystemHousekeepingJob(JobRunner):
         self.delete_expired_jobs()
         self.check_for_new_releases()
-    @staticmethod
-    def send_census_report():
+    def send_census_report(self):
         """
         Send a census report (if enabled).
         """
-        logging.info("Reporting census data...")
+        self.logger.info("Reporting census data...")
         if settings.ISOLATED_DEPLOYMENT:
-            logging.info("ISOLATED_DEPLOYMENT is enabled; skipping")
+            self.logger.info("ISOLATED_DEPLOYMENT is enabled; skipping")
             return
         if not settings.CENSUS_REPORTING_ENABLED:
-            logging.info("CENSUS_REPORTING_ENABLED is disabled; skipping")
+            self.logger.info("CENSUS_REPORTING_ENABLED is disabled; skipping")
             return
         census_data = {
@@ -106,73 +106,71 @@ class SystemHousekeepingJob(JobRunner):
         except requests.exceptions.RequestException:
             pass
-    @staticmethod
-    def clear_expired_sessions():
+    def clear_expired_sessions(self):
         """
         Clear any expired sessions from the database.
         """
-        logging.info("Clearing expired sessions...")
+        self.logger.info("Clearing expired sessions...")
         engine = import_module(settings.SESSION_ENGINE)
         try:
             engine.SessionStore.clear_expired()
-            logging.info("Sessions cleared.")
+            self.logger.info("Sessions cleared.")
         except NotImplementedError:
-            logging.warning(
+            self.logger.warning(
                 f"The configured session engine ({settings.SESSION_ENGINE}) does not support "
                 f"clearing sessions; skipping."
             )
-    @staticmethod
-    def prune_changelog():
+    def prune_changelog(self):
         """
         Delete any ObjectChange records older than the configured changelog retention time (if any).
         """
-        logging.info("Pruning old changelog entries...")
+        self.logger.info("Pruning old changelog entries...")
         config = Config()
         if not config.CHANGELOG_RETENTION:
-            logging.info("No retention period specified; skipping.")
+            self.logger.info("No retention period specified; skipping.")
             return
         cutoff = timezone.now() - timedelta(days=config.CHANGELOG_RETENTION)
-        logging.debug(f"Retention period: {config.CHANGELOG_RETENTION} days")
-        logging.debug(f"Cut-off time: {cutoff}")
+        self.logger.debug(
+            f"Changelog retention period: {config.CHANGELOG_RETENTION} days ({cutoff:%Y-%m-%d %H:%M:%S})"
+        )
         count = ObjectChange.objects.filter(time__lt=cutoff).delete()[0]
-        logging.info(f"Deleted {count} expired records")
+        self.logger.info(f"Deleted {count} expired changelog records")
-    @staticmethod
-    def delete_expired_jobs():
+    def delete_expired_jobs(self):
         """
         Delete any jobs older than the configured retention period (if any).
         """
-        logging.info("Deleting expired jobs...")
+        self.logger.info("Deleting expired jobs...")
         config = Config()
         if not config.JOB_RETENTION:
-            logging.info("No retention period specified; skipping.")
+            self.logger.info("No retention period specified; skipping.")
             return
         cutoff = timezone.now() - timedelta(days=config.JOB_RETENTION)
-        logging.debug(f"Retention period: {config.CHANGELOG_RETENTION} days")
-        logging.debug(f"Cut-off time: {cutoff}")
+        self.logger.debug(
+            f"Job retention period: {config.JOB_RETENTION} days ({cutoff:%Y-%m-%d %H:%M:%S})"
+        )
         count = Job.objects.filter(created__lt=cutoff).delete()[0]
-        logging.info(f"Deleted {count} expired records")
+        self.logger.info(f"Deleted {count} expired jobs")
-    @staticmethod
-    def check_for_new_releases():
+    def check_for_new_releases(self):
         """
         Check for new releases and cache the latest release.
         """
-        logging.info("Checking for new releases...")
+        self.logger.info("Checking for new releases...")
         if settings.ISOLATED_DEPLOYMENT:
-            logging.info("ISOLATED_DEPLOYMENT is enabled; skipping")
+            self.logger.info("ISOLATED_DEPLOYMENT is enabled; skipping")
             return
         if not settings.RELEASE_CHECK_URL:
-            logging.info("RELEASE_CHECK_URL is not set; skipping")
+            self.logger.info("RELEASE_CHECK_URL is not set; skipping")
             return
         # Fetch the latest releases
-        logging.debug(f"Release check URL: {settings.RELEASE_CHECK_URL}")
+        self.logger.debug(f"Release check URL: {settings.RELEASE_CHECK_URL}")
         try:
             response = requests.get(
                 url=settings.RELEASE_CHECK_URL,
@@ -181,7 +179,7 @@ class SystemHousekeepingJob(JobRunner):
             )
             response.raise_for_status()
         except requests.exceptions.RequestException as exc:
-            logging.error(f"Error fetching release: {exc}")
+            self.logger.error(f"Error fetching release: {exc}")
             return
         # Determine the most recent stable release
@@ -191,8 +189,8 @@ class SystemHousekeepingJob(JobRunner):
                 continue
             releases.append((version.parse(release['tag_name']), release.get('html_url')))
         latest_release = max(releases)
-        logging.debug(f"Found {len(response.json())} releases; {len(releases)} usable")
-        logging.info(f"Latest release: {latest_release[0]}")
+        self.logger.debug(f"Found {len(response.json())} releases; {len(releases)} usable")
+        self.logger.info(f"Latest release: {latest_release[0]}")
         # Cache the most recent release
         cache.set('latest_release', latest_release, None)


@@ -67,9 +67,7 @@
         <div class="col col-12">
             <div class="card">
                 <h2 class="card-header">{% trans "Data" %}</h2>
-                <div class="card-body">
-                    <pre>{{ object.data|json }}</pre>
-                </div>
+                <pre class="card-body m-0">{{ object.data|json }}</pre>
             </div>
         </div>
     </div>