diff --git a/docs/configuration/optional-settings.md b/docs/configuration/optional-settings.md index 337b41b1b..83a56d2c4 100644 --- a/docs/configuration/optional-settings.md +++ b/docs/configuration/optional-settings.md @@ -299,6 +299,24 @@ When determining the primary IP address for a device, IPv6 is preferred over IPv --- +## RELEASE_CHECK_TIMEOUT + +Default: 86,400 (24 hours) + +The number of seconds to retain the latest version that is fetched from the GitHub API before automatically invalidating it and fetching it from the API again. This must be set to at least one hour (3600 seconds). + +--- + +## RELEASE_CHECK_URL + +Default: None + +The releases of this repository are checked to detect new releases, which are shown on the home page of the web interface. You can change this to your own fork of the NetBox repository, or set it to `None` to disable the check. The URL provided **must** be compatible with the GitHub API. + +Use `'https://api.github.com/repos/netbox-community/netbox/releases'` to check for release in the official NetBox repository. + +--- + ## REPORTS_ROOT Default: $BASE_DIR/netbox/reports/ diff --git a/docs/configuration/required-settings.md b/docs/configuration/required-settings.md index e86b2810a..053e2d3d4 100644 --- a/docs/configuration/required-settings.md +++ b/docs/configuration/required-settings.md @@ -46,9 +46,9 @@ DATABASE = { [Redis](https://redis.io/) is an in-memory data store similar to memcached. While Redis has been an optional component of NetBox since the introduction of webhooks in version 2.4, it is required starting in 2.6 to support NetBox's caching functionality (as well as other planned features). In 2.7, the connection settings were broken down into two sections for -webhooks and caching, allowing the user to connect to different Redis instances/databases per feature. +task queuing and caching, allowing the user to connect to different Redis instances/databases per feature. 
-Redis is configured using a configuration setting similar to `DATABASE` and these settings are the same for both of the `webhooks` and `caching` subsections: +Redis is configured using a configuration setting similar to `DATABASE` and these settings are the same for both of the `tasks` and `caching` subsections: * `HOST` - Name or IP address of the Redis server (use `localhost` if running locally) * `PORT` - TCP port of the Redis service; leave blank for default port (6379) @@ -61,7 +61,7 @@ Example: ```python REDIS = { - 'webhooks': { + 'tasks': { 'HOST': 'redis.example.com', 'PORT': 1234, 'PASSWORD': 'foobar', @@ -84,9 +84,9 @@ REDIS = { If you are upgrading from a version prior to v2.7, please note that the Redis connection configuration settings have changed. Manual modification to bring the `REDIS` section inline with the above specification is necessary -!!! note - It is highly recommended to keep the webhook and cache databases separate. Using the same database number on the - same Redis instance for both may result in webhook processing data being lost during cache flushing events. +!!! warning + It is highly recommended to keep the task and cache databases separate. Using the same database number on the + same Redis instance for both may result in queued background tasks being lost during cache flushing events. ### Using Redis Sentinel @@ -102,7 +102,7 @@ Example: ```python REDIS = { - 'webhooks': { + 'tasks': { 'SENTINELS': [('mysentinel.redis.example.com', 6379)], 'SENTINEL_SERVICE': 'netbox', 'PASSWORD': '', @@ -126,7 +126,7 @@ REDIS = { !!! note It is possible to have only one or the other Redis configurations to use Sentinel functionality. It is possible - for example to have the webhook use sentinel via `HOST`/`PORT` and for caching to use Sentinel via + for example to have the tasks database use sentinel via `HOST`/`PORT` and for caching to use Sentinel via `SENTINELS`/`SENTINEL_SERVICE`. 
diff --git a/docs/installation/3-netbox.md b/docs/installation/3-netbox.md index fabad20eb..b9b68be1b 100644 --- a/docs/installation/3-netbox.md +++ b/docs/installation/3-netbox.md @@ -172,7 +172,7 @@ Redis is a in-memory key-value store required as part of the NetBox installation ```python REDIS = { - 'webhooks': { + 'tasks': { 'HOST': 'redis.example.com', 'PORT': 1234, 'PASSWORD': 'foobar', diff --git a/netbox/extras/management/commands/rqworker.py b/netbox/extras/management/commands/rqworker.py new file mode 100644 index 000000000..02e93c4ef --- /dev/null +++ b/netbox/extras/management/commands/rqworker.py @@ -0,0 +1,16 @@ +from django.conf import settings +from django_rq.management.commands.rqworker import Command as _Command + + +class Command(_Command): + """ + Subclass django_rq's built-in rqworker to listen on all configured queues if none are specified (instead + of only the 'default' queue). + """ + def handle(self, *args, **options): + + # If no queues have been specified on the command line, listen on all configured queues. + if len(args) < 1: + args = settings.RQ_QUEUES + + super().handle(*args, **options) diff --git a/netbox/netbox/configuration.example.py b/netbox/netbox/configuration.example.py index 7002def9b..f41c6892a 100644 --- a/netbox/netbox/configuration.example.py +++ b/netbox/netbox/configuration.example.py @@ -21,11 +21,11 @@ DATABASE = { 'CONN_MAX_AGE': 300, # Max database connection age } -# Redis database settings. The Redis database is used for caching and background processing such as webhooks -# Seperate sections for webhooks and caching allow for connecting to seperate Redis instances/datbases if desired. -# Full connection details are required in both sections, even if they are the same. +# Redis database settings. Redis is used for caching and for queuing background tasks such as webhook events. A separate +# configuration exists for each. 
Full connection details are required in both sections, and it is strongly recommended +# to use two separate database IDs. REDIS = { - 'webhooks': { + 'tasks': { 'HOST': 'localhost', 'PORT': 6379, # Comment out `HOST` and `PORT` lines and uncomment the following if using Redis Sentinel @@ -179,6 +179,14 @@ PAGINATE_COUNT = 50 # prefer IPv4 instead. PREFER_IPV4 = False +# This determines how often the GitHub API is called to check the latest release of NetBox. Must be at least 1 hour. +RELEASE_CHECK_TIMEOUT = 24 * 3600 + +# This repository is used to check whether there is a new release of NetBox available. Set to None to disable the +# version check or use the URL below to check for release in the official NetBox repository. +RELEASE_CHECK_URL = None +# RELEASE_CHECK_URL = 'https://api.github.com/repos/netbox-community/netbox/releases' + # The file path where custom reports will be stored. A trailing slash is not needed. Note that the default value of # this setting is derived from the installed location. # REPORTS_ROOT = '/opt/netbox/netbox/reports' diff --git a/netbox/netbox/releases.py b/netbox/netbox/releases.py new file mode 100644 index 000000000..27279cc16 --- /dev/null +++ b/netbox/netbox/releases.py @@ -0,0 +1,33 @@ +import logging + +from cacheops import CacheMiss, cache +from django.conf import settings +from django_rq import get_queue + +from utilities.background_tasks import get_releases + +logger = logging.getLogger('netbox.releases') + + +def get_latest_release(pre_releases=False): + if settings.RELEASE_CHECK_URL: + logger.debug("Checking for most recent release") + try: + latest_release = cache.get('latest_release') + if latest_release: + logger.debug("Found cached release: {}".format(latest_release)) + return latest_release + except CacheMiss: + # Check for an existing job. This can happen if the RQ worker process is not running. 
+            queue = get_queue('check_releases')
+            if queue.jobs:
+                logger.warning("Job to check for new releases is already queued; skipping")
+            else:
+                # Get the releases in the background worker; it will fill the cache
+                logger.info("Initiating background task to retrieve updated releases list")
+                get_releases.delay(pre_releases=pre_releases)
+
+    else:
+        logger.debug("Skipping release check; RELEASE_CHECK_URL not defined")
+
+    return 'unknown', None
diff --git a/netbox/netbox/settings.py b/netbox/netbox/settings.py
index 3fcc9fbf3..7e4e8a1d9 100644
--- a/netbox/netbox/settings.py
+++ b/netbox/netbox/settings.py
@@ -1,11 +1,14 @@
 import logging
 import os
 import platform
+import re
 import socket
 import warnings
+from urllib.parse import urlsplit
 
 from django.contrib.messages import constants as messages
-from django.core.exceptions import ImproperlyConfigured
+from django.core.exceptions import ImproperlyConfigured, ValidationError
+from django.core.validators import URLValidator
 
 
 #
@@ -94,6 +97,8 @@ NAPALM_TIMEOUT = getattr(configuration, 'NAPALM_TIMEOUT', 30)
 NAPALM_USERNAME = getattr(configuration, 'NAPALM_USERNAME', '')
 PAGINATE_COUNT = getattr(configuration, 'PAGINATE_COUNT', 50)
 PREFER_IPV4 = getattr(configuration, 'PREFER_IPV4', False)
+RELEASE_CHECK_URL = getattr(configuration, 'RELEASE_CHECK_URL', None)
+RELEASE_CHECK_TIMEOUT = getattr(configuration, 'RELEASE_CHECK_TIMEOUT', 24 * 3600)
 REPORTS_ROOT = getattr(configuration, 'REPORTS_ROOT', os.path.join(BASE_DIR, 'reports')).rstrip('/')
 SCRIPTS_ROOT = getattr(configuration, 'SCRIPTS_ROOT', os.path.join(BASE_DIR, 'scripts')).rstrip('/')
 SESSION_FILE_PATH = getattr(configuration, 'SESSION_FILE_PATH', None)
@@ -103,6 +108,20 @@ SHORT_TIME_FORMAT = getattr(configuration, 'SHORT_TIME_FORMAT', 'H:i:s')
 TIME_FORMAT = getattr(configuration, 'TIME_FORMAT', 'g:i a')
 TIME_ZONE = getattr(configuration, 'TIME_ZONE', 'UTC')
 
+# Validate update repo URL and timeout
+if RELEASE_CHECK_URL:
+    try:
+        URLValidator()(RELEASE_CHECK_URL)
+    except ValidationError:
+        raise ImproperlyConfigured(
+            "RELEASE_CHECK_URL must be a valid API URL. Example: "
+            "https://api.github.com/repos/netbox-community/netbox"
+        )
+
+# Enforce a minimum cache timeout for update checks
+if RELEASE_CHECK_TIMEOUT < 3600:
+    raise ImproperlyConfigured("RELEASE_CHECK_TIMEOUT has to be at least 3600 seconds (1 hour)")
+
 
 #
 # Database
@@ -159,31 +178,40 @@ if STORAGE_CONFIG and STORAGE_BACKEND is None:
 # Redis
 #
 
-if 'webhooks' not in REDIS:
-    raise ImproperlyConfigured(
-        "REDIS section in configuration.py is missing webhooks subsection."
+# Background task queuing
+if 'tasks' in REDIS:
+    TASKS_REDIS = REDIS['tasks']
+elif 'webhooks' in REDIS:
+    # TODO: Remove support for 'webhooks' name in v2.9
+    warnings.warn(
+        "The 'webhooks' REDIS configuration section has been renamed to 'tasks'. Please update your configuration as "
+        "support for the old name will be removed in a future release."
     )
-if 'caching' not in REDIS:
+    TASKS_REDIS = REDIS['webhooks']
+else:
+    raise ImproperlyConfigured(
+        "REDIS section in configuration.py is missing the 'tasks' subsection."
+    )
+TASKS_REDIS_HOST = TASKS_REDIS.get('HOST', 'localhost')
+TASKS_REDIS_PORT = TASKS_REDIS.get('PORT', 6379)
+TASKS_REDIS_SENTINELS = TASKS_REDIS.get('SENTINELS', [])
+TASKS_REDIS_USING_SENTINEL = all([
+    isinstance(TASKS_REDIS_SENTINELS, (list, tuple)),
+    len(TASKS_REDIS_SENTINELS) > 0
+])
+TASKS_REDIS_SENTINEL_SERVICE = TASKS_REDIS.get('SENTINEL_SERVICE', 'default')
+TASKS_REDIS_PASSWORD = TASKS_REDIS.get('PASSWORD', '')
+TASKS_REDIS_DATABASE = TASKS_REDIS.get('DATABASE', 0)
+TASKS_REDIS_DEFAULT_TIMEOUT = TASKS_REDIS.get('DEFAULT_TIMEOUT', 300)
+TASKS_REDIS_SSL = TASKS_REDIS.get('SSL', False)
+
+# Caching
+if 'caching' in REDIS:
+    CACHING_REDIS = REDIS['caching']
+else:
     raise ImproperlyConfigured(
         "REDIS section in configuration.py is missing caching subsection."
) - -WEBHOOKS_REDIS = REDIS.get('webhooks', {}) -WEBHOOKS_REDIS_HOST = WEBHOOKS_REDIS.get('HOST', 'localhost') -WEBHOOKS_REDIS_PORT = WEBHOOKS_REDIS.get('PORT', 6379) -WEBHOOKS_REDIS_SENTINELS = WEBHOOKS_REDIS.get('SENTINELS', []) -WEBHOOKS_REDIS_USING_SENTINEL = all([ - isinstance(WEBHOOKS_REDIS_SENTINELS, (list, tuple)), - len(WEBHOOKS_REDIS_SENTINELS) > 0 -]) -WEBHOOKS_REDIS_SENTINEL_SERVICE = WEBHOOKS_REDIS.get('SENTINEL_SERVICE', 'default') -WEBHOOKS_REDIS_PASSWORD = WEBHOOKS_REDIS.get('PASSWORD', '') -WEBHOOKS_REDIS_DATABASE = WEBHOOKS_REDIS.get('DATABASE', 0) -WEBHOOKS_REDIS_DEFAULT_TIMEOUT = WEBHOOKS_REDIS.get('DEFAULT_TIMEOUT', 300) -WEBHOOKS_REDIS_SSL = WEBHOOKS_REDIS.get('SSL', False) - - -CACHING_REDIS = REDIS.get('caching', {}) CACHING_REDIS_HOST = CACHING_REDIS.get('HOST', 'localhost') CACHING_REDIS_PORT = CACHING_REDIS.get('PORT', 6379) CACHING_REDIS_SENTINELS = CACHING_REDIS.get('SENTINELS', []) @@ -238,7 +266,6 @@ INSTALLED_APPS = [ 'corsheaders', 'debug_toolbar', 'django_filters', - 'django_rq', 'django_tables2', 'django_prometheus', 'mptt', @@ -255,6 +282,7 @@ INSTALLED_APPS = [ 'users', 'utilities', 'virtualization', + 'django_rq', # Must come after extras to allow overriding management commands 'drf_yasg', ] @@ -548,26 +576,31 @@ SWAGGER_SETTINGS = { # Django RQ (Webhooks backend) # -RQ_QUEUES = { - 'default': { - 'HOST': WEBHOOKS_REDIS_HOST, - 'PORT': WEBHOOKS_REDIS_PORT, - 'DB': WEBHOOKS_REDIS_DATABASE, - 'PASSWORD': WEBHOOKS_REDIS_PASSWORD, - 'DEFAULT_TIMEOUT': WEBHOOKS_REDIS_DEFAULT_TIMEOUT, - 'SSL': WEBHOOKS_REDIS_SSL, - } if not WEBHOOKS_REDIS_USING_SENTINEL else { - 'SENTINELS': WEBHOOKS_REDIS_SENTINELS, - 'MASTER_NAME': WEBHOOKS_REDIS_SENTINEL_SERVICE, - 'DB': WEBHOOKS_REDIS_DATABASE, - 'PASSWORD': WEBHOOKS_REDIS_PASSWORD, +if TASKS_REDIS_USING_SENTINEL: + RQ_PARAMS = { + 'SENTINELS': TASKS_REDIS_SENTINELS, + 'MASTER_NAME': TASKS_REDIS_SENTINEL_SERVICE, + 'DB': TASKS_REDIS_DATABASE, + 'PASSWORD': TASKS_REDIS_PASSWORD, 'SOCKET_TIMEOUT': 
None, 'CONNECTION_KWARGS': { - 'socket_connect_timeout': WEBHOOKS_REDIS_DEFAULT_TIMEOUT + 'socket_connect_timeout': TASKS_REDIS_DEFAULT_TIMEOUT }, } -} +else: + RQ_PARAMS = { + 'HOST': TASKS_REDIS_HOST, + 'PORT': TASKS_REDIS_PORT, + 'DB': TASKS_REDIS_DATABASE, + 'PASSWORD': TASKS_REDIS_PASSWORD, + 'DEFAULT_TIMEOUT': TASKS_REDIS_DEFAULT_TIMEOUT, + 'SSL': TASKS_REDIS_SSL, + } +RQ_QUEUES = { + 'default': RQ_PARAMS, # Webhooks + 'check_releases': RQ_PARAMS, +} # # Django debug toolbar diff --git a/netbox/netbox/tests/test_releases.py b/netbox/netbox/tests/test_releases.py new file mode 100644 index 000000000..635a6782b --- /dev/null +++ b/netbox/netbox/tests/test_releases.py @@ -0,0 +1,166 @@ +from io import BytesIO +from logging import ERROR +from unittest.mock import Mock, patch + +import requests +from cacheops import CacheMiss, RedisCache +from django.test import SimpleTestCase, override_settings +from packaging.version import Version +from requests import Response + +from utilities.background_tasks import get_releases + + +def successful_github_response(url, *_args, **_kwargs): + r = Response() + r.url = url + r.status_code = 200 + r.reason = 'OK' + r.headers = { + 'Content-Type': 'application/json; charset=utf-8', + } + r.raw = BytesIO(b'''[ + { + "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.7.8", + "tag_name": "v2.7.8", + "prerelease": false + }, + { + "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.6-beta1", + "tag_name": "v2.6-beta1", + "prerelease": true + }, + { + "html_url": "https://github.com/netbox-community/netbox/releases/tag/v2.5.9", + "tag_name": "v2.5.9", + "prerelease": false + } + ] + ''') + return r + + +def unsuccessful_github_response(url, *_args, **_kwargs): + r = Response() + r.url = url + r.status_code = 404 + r.reason = 'Not Found' + r.headers = { + 'Content-Type': 'application/json; charset=utf-8', + } + r.raw = BytesIO(b'''{ + "message": "Not Found", + "documentation_url": 
"https://developer.github.com/v3/repos/releases/#list-releases-for-a-repository" + } + ''') + return r + + +@override_settings(RELEASE_CHECK_URL='https://localhost/unittest/releases', RELEASE_CHECK_TIMEOUT=160876) +class GetReleasesTestCase(SimpleTestCase): + @patch.object(requests, 'get') + @patch.object(RedisCache, 'set') + @patch.object(RedisCache, 'get') + def test_pre_releases(self, dummy_cache_get: Mock, dummy_cache_set: Mock, dummy_request_get: Mock): + dummy_cache_get.side_effect = CacheMiss() + dummy_request_get.side_effect = successful_github_response + + releases = get_releases(pre_releases=True) + + # Check result + self.assertListEqual(releases, [ + (Version('2.7.8'), 'https://github.com/netbox-community/netbox/releases/tag/v2.7.8'), + (Version('2.6b1'), 'https://github.com/netbox-community/netbox/releases/tag/v2.6-beta1'), + (Version('2.5.9'), 'https://github.com/netbox-community/netbox/releases/tag/v2.5.9') + ]) + + # Check if correct request is made + dummy_request_get.assert_called_once_with( + 'https://localhost/unittest/releases', + headers={'Accept': 'application/vnd.github.v3+json'} + ) + + # Check if result is put in cache + dummy_cache_set.assert_called_once_with( + 'latest_release', + max(releases), + 160876 + ) + + @patch.object(requests, 'get') + @patch.object(RedisCache, 'set') + @patch.object(RedisCache, 'get') + def test_no_pre_releases(self, dummy_cache_get: Mock, dummy_cache_set: Mock, dummy_request_get: Mock): + dummy_cache_get.side_effect = CacheMiss() + dummy_request_get.side_effect = successful_github_response + + releases = get_releases(pre_releases=False) + + # Check result + self.assertListEqual(releases, [ + (Version('2.7.8'), 'https://github.com/netbox-community/netbox/releases/tag/v2.7.8'), + (Version('2.5.9'), 'https://github.com/netbox-community/netbox/releases/tag/v2.5.9') + ]) + + # Check if correct request is made + dummy_request_get.assert_called_once_with( + 'https://localhost/unittest/releases', + headers={'Accept': 
'application/vnd.github.v3+json'} + ) + + # Check if result is put in cache + dummy_cache_set.assert_called_once_with( + 'latest_release', + max(releases), + 160876 + ) + + @patch.object(requests, 'get') + @patch.object(RedisCache, 'set') + @patch.object(RedisCache, 'get') + def test_failed_request(self, dummy_cache_get: Mock, dummy_cache_set: Mock, dummy_request_get: Mock): + dummy_cache_get.side_effect = CacheMiss() + dummy_request_get.side_effect = unsuccessful_github_response + + with self.assertLogs(level=ERROR) as cm: + releases = get_releases() + + # Check log entry + self.assertEqual(len(cm.output), 1) + log_output = cm.output[0] + last_log_line = log_output.split('\n')[-1] + self.assertRegex(last_log_line, '404 .* Not Found') + + # Check result + self.assertListEqual(releases, []) + + # Check if correct request is made + dummy_request_get.assert_called_once_with( + 'https://localhost/unittest/releases', + headers={'Accept': 'application/vnd.github.v3+json'} + ) + + # Check if failure is put in cache + dummy_cache_set.assert_called_once_with( + 'latest_release_no_retry', + 'https://localhost/unittest/releases', + 900 + ) + + @patch.object(requests, 'get') + @patch.object(RedisCache, 'set') + @patch.object(RedisCache, 'get') + def test_blocked_retry(self, dummy_cache_get: Mock, dummy_cache_set: Mock, dummy_request_get: Mock): + dummy_cache_get.return_value = 'https://localhost/unittest/releases' + dummy_request_get.side_effect = successful_github_response + + releases = get_releases() + + # Check result + self.assertListEqual(releases, []) + + # Check if request is NOT made + dummy_request_get.assert_not_called() + + # Check if cache is not updated + dummy_cache_set.assert_not_called() diff --git a/netbox/netbox/views.py b/netbox/netbox/views.py index b0f5b6022..bc87a825b 100644 --- a/netbox/netbox/views.py +++ b/netbox/netbox/views.py @@ -1,8 +1,10 @@ from collections import OrderedDict +from django.conf import settings from django.db.models import Count, F 
from django.shortcuts import render from django.views.generic import View +from packaging import version from rest_framework.response import Response from rest_framework.reverse import reverse from rest_framework.views import APIView @@ -25,6 +27,7 @@ from extras.models import ObjectChange, ReportResult from ipam.filters import AggregateFilterSet, IPAddressFilterSet, PrefixFilterSet, VLANFilterSet, VRFFilterSet from ipam.models import Aggregate, IPAddress, Prefix, VLAN, VRF from ipam.tables import AggregateTable, IPAddressTable, PrefixTable, VLANTable, VRFTable +from netbox.releases import get_latest_release from secrets.filters import SecretFilterSet from secrets.models import Secret from secrets.tables import SecretTable @@ -240,11 +243,24 @@ class HomeView(View): } + # Check whether a new release is available. (Only for staff/superusers.) + new_release = None + if request.user.is_staff or request.user.is_superuser: + latest_release, release_url = get_latest_release() + if isinstance(latest_release, version.Version): + current_version = version.parse(settings.VERSION) + if latest_release > current_version: + new_release = { + 'version': str(latest_release), + 'url': release_url, + } + return render(request, self.template_name, { 'search_form': SearchForm(), 'stats': stats, 'report_results': ReportResult.objects.order_by('-created')[:10], - 'changelog': ObjectChange.objects.prefetch_related('user', 'changed_object_type')[:15] + 'changelog': ObjectChange.objects.prefetch_related('user', 'changed_object_type')[:15], + 'new_release': new_release, }) diff --git a/netbox/templates/home.html b/netbox/templates/home.html index 6977bba4c..d3885b88f 100644 --- a/netbox/templates/home.html +++ b/netbox/templates/home.html @@ -1,6 +1,19 @@ {% extends '_base.html' %} {% load helpers %} +{% block header %} + {{ block.super }} + {% if new_release %} + {# new_release is set only if the current user is a superuser or staff member #} + + {% endif %} +{% endblock %} + + {% block 
content %} {% include 'search_form.html' %}
diff --git a/netbox/utilities/background_tasks.py b/netbox/utilities/background_tasks.py
new file mode 100644
index 000000000..1255846b7
--- /dev/null
+++ b/netbox/utilities/background_tasks.py
@@ -0,0 +1,53 @@
+import logging
+
+import requests
+from cacheops.simple import cache, CacheMiss
+from django.conf import settings
+from django_rq import job
+from packaging import version
+
+# Get an instance of a logger
+logger = logging.getLogger('netbox.releases')
+
+
+@job('check_releases')
+def get_releases(pre_releases=False):
+    url = settings.RELEASE_CHECK_URL
+    headers = {
+        'Accept': 'application/vnd.github.v3+json',
+    }
+    releases = []
+
+    # Check whether this URL has failed recently and shouldn't be retried yet
+    try:
+        if url == cache.get('latest_release_no_retry'):
+            logger.info("Skipping release check; URL failed recently: {}".format(url))
+            return []
+    except CacheMiss:
+        pass
+
+    try:
+        logger.debug("Fetching new releases from {}".format(url))
+        response = requests.get(url, headers=headers)
+        response.raise_for_status()
+        total_releases = len(response.json())
+
+        for release in response.json():
+            if 'tag_name' not in release:
+                continue
+            if not pre_releases and (release.get('devrelease') or release.get('prerelease')):
+                continue
+            releases.append((version.parse(release['tag_name']), release.get('html_url')))
+        logger.debug("Found {} releases; {} usable".format(total_releases, len(releases)))
+
+    except requests.exceptions.RequestException:
+        # The request failed. Set a flag in the cache to disable future checks to this URL for 15 minutes.
+        logger.exception("Error while fetching {}. Disabling checks for 15 minutes.".format(url))
+        cache.set('latest_release_no_retry', url, 900)
+        return []
+
+    # Cache the most recent release, if any were found (max() raises ValueError on an empty list)
+    if releases:
+        cache.set('latest_release', max(releases), settings.RELEASE_CHECK_TIMEOUT)
+
+    return releases