From da1fb4f969b4454dda592b2e5bf6c561808fe153 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Tue, 25 May 2021 15:05:02 -0400 Subject: [PATCH 01/28] Replace references to v2.12 with v3.0 --- docs/additional-features/custom-scripts.md | 2 +- docs/installation/index.md | 2 +- docs/release-notes/version-2.11.md | 4 ++-- netbox/extras/scripts.py | 4 ++-- netbox/netbox/settings.py | 4 ++-- netbox/secrets/views.py | 2 +- netbox/utilities/forms/fields.py | 2 +- 7 files changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/additional-features/custom-scripts.md b/docs/additional-features/custom-scripts.md index 3ed910791..8fe3661ed 100644 --- a/docs/additional-features/custom-scripts.md +++ b/docs/additional-features/custom-scripts.md @@ -175,7 +175,7 @@ A particular object within NetBox. Each ObjectVar must specify a particular mode * `null_option` - A label representing a "null" or empty choice (optional) !!! warning - The `display_field` parameter is now deprecated, and will be removed in NetBox v2.12. All ObjectVar instances will + The `display_field` parameter is now deprecated, and will be removed in NetBox v3.0. All ObjectVar instances will instead use the new standard `display` field for all serializers (introduced in NetBox v2.11). To limit the selections available within the list, additional query parameters can be passed as the `query_params` dictionary. For example, to show only devices with an "active" status: diff --git a/docs/installation/index.md b/docs/installation/index.md index 2ae5e5299..1c2fcf567 100644 --- a/docs/installation/index.md +++ b/docs/installation/index.md @@ -24,7 +24,7 @@ The video below demonstrates the installation of NetBox v2.10.3 on Ubuntu 20.04 | Redis | 4.0 | !!! note - Python 3.7 or later will be required in NetBox v2.12. Users are strongly encouraged to install NetBox using Python 3.7 or later for new deployments. + Python 3.7 or later will be required in NetBox v3.0. Users are strongly encouraged to install NetBox using Python 3.7 or later for new deployments. Below is a simplified overview of the NetBox application stack for reference: diff --git a/docs/release-notes/version-2.11.md b/docs/release-notes/version-2.11.md index 4a0fcef8f..65f81a2b5 100644 --- a/docs/release-notes/version-2.11.md +++ b/docs/release-notes/version-2.11.md @@ -93,7 +93,7 @@ ## v2.11.0 (2021-04-16) -**Note:** NetBox v2.11 is the last major release that will support Python 3.6. Beginning with NetBox v2.12, Python 3.7 or later will be required. +**Note:** NetBox v2.11 is the last major release that will support Python 3.6. Beginning with NetBox v3.0, Python 3.7 or later will be required. ### Breaking Changes @@ -151,7 +151,7 @@ Devices can now be assigned to locations (formerly known as rack groups) within When exporting a list of objects in NetBox, users now have the option of selecting the "current view". This will render CSV output matching the current configuration of the table being viewed. For example, if you modify the sites list to display only the site name, tenant, and status, the rendered CSV will include only these columns, and they will appear in the order chosen. -The legacy static export behavior has been retained to ensure backward compatibility for dependent integrations. However, users are strongly encouraged to adapt custom export templates where needed as this functionality will be removed in v2.12. +The legacy static export behavior has been retained to ensure backward compatibility for dependent integrations. 
However, users are strongly encouraged to adapt custom export templates where needed as this functionality will be removed in v3.0. #### Variable Scope Support for VLAN Groups ([#5284](https://github.com/netbox-community/netbox/issues/5284)) diff --git a/netbox/extras/scripts.py b/netbox/extras/scripts.py index 29ecc3ef3..a4040bdf4 100644 --- a/netbox/extras/scripts.py +++ b/netbox/extras/scripts.py @@ -188,10 +188,10 @@ class ObjectVar(ScriptVariable): def __init__(self, model, query_params=None, null_option=None, *args, **kwargs): - # TODO: Remove display_field in v2.12 + # TODO: Remove display_field in v3.0 if 'display_field' in kwargs: warnings.warn( - "The 'display_field' parameter has been deprecated, and will be removed in NetBox v2.12. Object " + "The 'display_field' parameter has been deprecated, and will be removed in NetBox v3.0. Object " "variables will now reference the 'display' attribute available on all model serializers by default." ) display_field = kwargs.pop('display_field', 'display') diff --git a/netbox/netbox/settings.py b/netbox/netbox/settings.py index 2d3f8570d..bf42a64c2 100644 --- a/netbox/netbox/settings.py +++ b/netbox/netbox/settings.py @@ -29,10 +29,10 @@ if platform.python_version_tuple() < ('3', '6'): raise RuntimeError( "NetBox requires Python 3.6 or higher (current: Python {})".format(platform.python_version()) ) -# TODO: Remove in NetBox v2.12 +# TODO: Remove in NetBox v3.0 if platform.python_version_tuple() < ('3', '7'): warnings.warn( - "Support for Python 3.6 will be dropped in NetBox v2.12. Please upgrade to Python 3.7 or later at your " + "Support for Python 3.6 will be dropped in NetBox v3.0. Please upgrade to Python 3.7 or later at your " "earliest convenience." ) diff --git a/netbox/secrets/views.py b/netbox/secrets/views.py index 88f647225..f10f0e4d5 100644 --- a/netbox/secrets/views.py +++ b/netbox/secrets/views.py @@ -92,7 +92,7 @@ def inject_deprecation_warning(request): """ messages.warning( request, - mark_safe(' The secrets functionality will be moved to a plugin in NetBox v2.12. ' + mark_safe(' The secrets functionality will be moved to a plugin in NetBox v3.0. 
' 'Please see issue #5278 for ' 'more information.') ) diff --git a/netbox/utilities/forms/fields.py b/netbox/utilities/forms/fields.py index 9bc0e3df7..93b9e6c44 100644 --- a/netbox/utilities/forms/fields.py +++ b/netbox/utilities/forms/fields.py @@ -338,7 +338,7 @@ class DynamicModelChoiceMixin: filter = django_filters.ModelChoiceFilter widget = widgets.APISelect - # TODO: Remove display_field in v2.12 + # TODO: Remove display_field in v3.0 def __init__(self, display_field='display', query_params=None, initial_params=None, null_option=None, disabled_indicator=None, *args, **kwargs): self.display_field = display_field From da558de76913caec478835a84cf7ff71e2d61be7 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Mon, 24 May 2021 16:51:05 -0400 Subject: [PATCH 02/28] Initial work on #6087 --- netbox/ipam/filtersets.py | 6 + .../migrations/0047_prefix_depth_children.py | 21 + .../0048_prefix_populate_depth_children.py | 86 +++++ netbox/ipam/models/ip.py | 45 +++ netbox/ipam/querysets.py | 33 +- netbox/ipam/signals.py | 47 ++- netbox/ipam/tables.py | 13 +- netbox/ipam/tests/test_models.py | 358 ++++++++++++++---- netbox/ipam/views.py | 8 +- 9 files changed, 515 insertions(+), 102 deletions(-) create mode 100644 netbox/ipam/migrations/0047_prefix_depth_children.py create mode 100644 netbox/ipam/migrations/0048_prefix_populate_depth_children.py diff --git a/netbox/ipam/filtersets.py b/netbox/ipam/filtersets.py index d618c8eab..bca3f08c9 100644 --- a/netbox/ipam/filtersets.py +++ b/netbox/ipam/filtersets.py @@ -209,6 +209,12 @@ class PrefixFilterSet(PrimaryModelFilterSet, TenancyFilterSet): method='search_contains', label='Prefixes which contain this prefix or IP', ) + depth = django_filters.NumberFilter( + field_name='_depth' + ) + children = django_filters.NumberFilter( + field_name='_children' + ) mask_length = django_filters.NumberFilter( field_name='prefix', lookup_expr='net_mask_length' diff --git a/netbox/ipam/migrations/0047_prefix_depth_children.py b/netbox/ipam/migrations/0047_prefix_depth_children.py new file mode 100644 index 000000000..4c49b1358 --- /dev/null +++ b/netbox/ipam/migrations/0047_prefix_depth_children.py @@ -0,0 +1,21 @@ +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('ipam', '0046_set_vlangroup_scope_types'), + ] + + operations = [ + migrations.AddField( + model_name='prefix', + name='_children', + field=models.PositiveBigIntegerField(default=0, editable=False), + ), + migrations.AddField( + model_name='prefix', + name='_depth', + field=models.PositiveSmallIntegerField(default=0, editable=False), + ), + ] diff --git a/netbox/ipam/migrations/0048_prefix_populate_depth_children.py b/netbox/ipam/migrations/0048_prefix_populate_depth_children.py new file mode 100644 index 000000000..b265e7f6f --- /dev/null +++ b/netbox/ipam/migrations/0048_prefix_populate_depth_children.py @@ -0,0 +1,86 @@ +from django.db import migrations + + +def push_to_stack(stack, prefix): + # Increment child count on parent nodes + for n in stack: + n['children'] += 1 + stack.append({ + 'pk': prefix['pk'], + 'prefix': prefix['prefix'], + 'children': 0, + }) + + +def populate_prefix_hierarchy(apps, schema_editor): + """ + Populate _depth and _children attrs for all Prefixes. 
+ """ + Prefix = apps.get_model('ipam', 'Prefix') + VRF = apps.get_model('ipam', 'VRF') + + total_count = Prefix.objects.count() + print(f'\nUpdating {total_count} prefixes...') + + # Iterate through all VRFs and the global table + vrfs = [None] + list(VRF.objects.values_list('pk', flat=True)) + for vrf in vrfs: + + stack = [] + update_queue = [] + + # Iterate through all Prefixes in the VRF, growing and shrinking the stack as we go + prefixes = Prefix.objects.filter(vrf=vrf).values('pk', 'prefix') + for i, p in enumerate(prefixes): + + # Grow the stack if this is a child of the most recent prefix + if not stack or p['prefix'] in stack[-1]['prefix']: + push_to_stack(stack, p) + + # If this is a sibling or parent of the most recent prefix, pop nodes from the + # stack until we reach a parent prefix (or the root) + else: + while stack and p['prefix'] not in stack[-1]['prefix'] and p['prefix'] != stack[-1]['prefix']: + node = stack.pop() + update_queue.append( + Prefix( + pk=node['pk'], + _depth=len(stack), + _children=node['children'] + ) + ) + push_to_stack(stack, p) + + # Flush the update queue once it reaches 100 Prefixes + if len(update_queue) >= 100: + Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) + update_queue = [] + print(f' [{i}/{total_count}]') + + # Clear out any prefixes remaining in the stack + while stack: + node = stack.pop() + update_queue.append( + Prefix( + pk=node['pk'], + _depth=len(stack), + _children=node['children'] + ) + ) + + # Final flush of any remaining Prefixes + Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) + + +class Migration(migrations.Migration): + + dependencies = [ + ('ipam', '0047_prefix_depth_children'), + ] + + operations = [ + migrations.RunPython( + code=populate_prefix_hierarchy, + reverse_code=migrations.RunPython.noop + ), + ] diff --git a/netbox/ipam/models/ip.py b/netbox/ipam/models/ip.py index 7df84c98b..c6c8cf74c 100644 --- a/netbox/ipam/models/ip.py +++ b/netbox/ipam/models/ip.py @@ -293,6 +293,16 @@ class Prefix(PrimaryModel): blank=True ) + # Cached depth & child counts + _depth = models.PositiveSmallIntegerField( + default=0, + editable=False + ) + _children = models.PositiveBigIntegerField( + default=0, + editable=False + ) + objects = PrefixQuerySet.as_manager() csv_headers = [ @@ -306,6 +316,13 @@ class Prefix(PrimaryModel): ordering = (F('vrf').asc(nulls_first=True), 'prefix', 'pk') # (vrf, prefix) may be non-unique verbose_name_plural = 'prefixes' + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + + # Cache the original prefix and VRF so we can check if they have changed on post_save + self._prefix = self.prefix + self._vrf = self.vrf + def __str__(self): return str(self.prefix) @@ -373,6 +390,14 @@ class Prefix(PrimaryModel): return self.prefix.version return None + @property + def depth(self): + return self._depth + + @property + def children(self): + return self._children + def _set_prefix_length(self, value): """ Expose the IPNetwork object's prefixlen attribute on the parent model so that it can be manipulated directly, @@ -385,6 +410,26 @@ class Prefix(PrimaryModel): def get_status_class(self): return PrefixStatusChoices.CSS_CLASSES.get(self.status) + def get_parents(self, include_self=False): + """ + Return all containing Prefixes in the hierarchy. 
+ """ + lookup = 'net_contains_or_equals' if include_self else 'net_contains' + return Prefix.objects.filter(**{ + 'vrf': self.vrf, + f'prefix__{lookup}': self.prefix + }) + + def get_children(self, include_self=False): + """ + Return all covered Prefixes in the hierarchy. + """ + lookup = 'net_contained_or_equal' if include_self else 'net_contained' + return Prefix.objects.filter(**{ + 'vrf': self.vrf, + f'prefix__{lookup}': self.prefix + }) + def get_duplicates(self): return Prefix.objects.filter(vrf=self.vrf, prefix=str(self.prefix)).exclude(pk=self.pk) diff --git a/netbox/ipam/querysets.py b/netbox/ipam/querysets.py index 784d58342..7edac2eff 100644 --- a/netbox/ipam/querysets.py +++ b/netbox/ipam/querysets.py @@ -1,27 +1,32 @@ from django.contrib.contenttypes.models import ContentType from django.db.models import Q +from django.db.models.expressions import RawSQL from utilities.querysets import RestrictedQuerySet class PrefixQuerySet(RestrictedQuerySet): - def annotate_tree(self): + def annotate_hierarchy(self): """ - Annotate the number of parent and child prefixes for each Prefix. Raw SQL is needed for these subqueries - because we need to cast NULL VRF values to integers for comparison. (NULL != NULL). + Annotate the depth and number of child prefixes for each Prefix. Cast null VRF values to zero for + comparison. (NULL != NULL). """ - return self.extra( - select={ - 'parents': 'SELECT COUNT(U0."prefix") AS "c" ' - 'FROM "ipam_prefix" U0 ' - 'WHERE (U0."prefix" >> "ipam_prefix"."prefix" ' - 'AND COALESCE(U0."vrf_id", 0) = COALESCE("ipam_prefix"."vrf_id", 0))', - 'children': 'SELECT COUNT(U1."prefix") AS "c" ' - 'FROM "ipam_prefix" U1 ' - 'WHERE (U1."prefix" << "ipam_prefix"."prefix" ' - 'AND COALESCE(U1."vrf_id", 0) = COALESCE("ipam_prefix"."vrf_id", 0))', - } + return self.annotate( + hierarchy_depth=RawSQL( + 'SELECT COUNT(DISTINCT U0."prefix") AS "c" ' + 'FROM "ipam_prefix" U0 ' + 'WHERE (U0."prefix" >> "ipam_prefix"."prefix" ' + 'AND COALESCE(U0."vrf_id", 0) = COALESCE("ipam_prefix"."vrf_id", 0))', + () + ), + hierarchy_children=RawSQL( + 'SELECT COUNT(U1."prefix") AS "c" ' + 'FROM "ipam_prefix" U1 ' + 'WHERE (U1."prefix" << "ipam_prefix"."prefix" ' + 'AND COALESCE(U1."vrf_id", 0) = COALESCE("ipam_prefix"."vrf_id", 0))', + () + ) ) diff --git a/netbox/ipam/signals.py b/netbox/ipam/signals.py index a8fce8310..f8673b10e 100644 --- a/netbox/ipam/signals.py +++ b/netbox/ipam/signals.py @@ -1,9 +1,52 @@ -from django.db.models.signals import pre_delete +from django.db.models.signals import post_delete, post_save, pre_delete from django.dispatch import receiver from dcim.models import Device from virtualization.models import VirtualMachine -from .models import IPAddress +from .models import IPAddress, Prefix + + +def update_parents_children(prefix): + """ + Update depth on prefix & containing prefixes + """ + parents = prefix.get_parents(include_self=True).annotate_hierarchy() + for parent in parents: + parent._children = parent.hierarchy_children + Prefix.objects.bulk_update(parents, ['_children']) + + +def update_children_depth(prefix): + """ + Update children count on prefix & contained prefixes + """ + children = prefix.get_children(include_self=True).annotate_hierarchy() + for child in children: + child._depth = child.hierarchy_depth + Prefix.objects.bulk_update(children, ['_depth']) + + +@receiver(post_save, sender=Prefix) +def handle_prefix_saved(instance, created, **kwargs): + + # Prefix has changed (or new instance has been created) + if created or instance.vrf != 
instance._vrf or instance.prefix != instance._prefix: + + update_parents_children(instance) + update_children_depth(instance) + + # If this is not a new prefix, clean up parent/children of previous prefix + if not created: + old_prefix = Prefix(vrf=instance._vrf, prefix=instance._prefix) + update_parents_children(old_prefix) + update_children_depth(old_prefix) + + +@receiver(post_delete, sender=Prefix) +def handle_prefix_deleted(instance, **kwargs): + + update_parents_children(instance) + update_children_depth(instance) @receiver(pre_delete, sender=IPAddress) diff --git a/netbox/ipam/tables.py b/netbox/ipam/tables.py index 82e8751a9..12c835e6c 100644 --- a/netbox/ipam/tables.py +++ b/netbox/ipam/tables.py @@ -15,7 +15,7 @@ AVAILABLE_LABEL = mark_safe('Available' PREFIX_LINK = """ {% load helpers %} -{% for i in record.parents|as_range %} +{% for i in record.depth|as_range %} {% endfor %} {{ record.prefix }} @@ -262,6 +262,14 @@ class PrefixTable(BaseTable): template_code=PREFIX_LINK, attrs={'td': {'class': 'text-nowrap'}} ) + depth = tables.Column( + accessor=Accessor('_depth'), + verbose_name='Depth' + ) + children = tables.Column( + accessor=Accessor('_children'), + verbose_name='Children' + ) status = ChoiceFieldColumn( default=AVAILABLE_LABEL ) @@ -287,7 +295,8 @@ class PrefixTable(BaseTable): class Meta(BaseTable.Meta): model = Prefix fields = ( - 'pk', 'prefix', 'status', 'children', 'vrf', 'tenant', 'site', 'vlan', 'role', 'is_pool', 'description', + 'pk', 'prefix', 'status', 'depth', 'children', 'vrf', 'tenant', 'site', 'vlan', 'role', 'is_pool', + 'description', ) default_columns = ('pk', 'prefix', 'status', 'vrf', 'tenant', 'site', 'vlan', 'role', 'description') row_attrs = { diff --git a/netbox/ipam/tests/test_models.py b/netbox/ipam/tests/test_models.py index a47862165..4fefdec54 100644 --- a/netbox/ipam/tests/test_models.py +++ b/netbox/ipam/tests/test_models.py @@ -1,4 +1,4 @@ -import netaddr +from netaddr import IPNetwork, IPSet from django.core.exceptions import ValidationError from django.test import TestCase, override_settings @@ -10,27 +10,27 @@ class TestAggregate(TestCase): def test_get_utilization(self): rir = RIR.objects.create(name='RIR 1', slug='rir-1') - aggregate = Aggregate(prefix=netaddr.IPNetwork('10.0.0.0/8'), rir=rir) + aggregate = Aggregate(prefix=IPNetwork('10.0.0.0/8'), rir=rir) aggregate.save() # 25% utilization Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/12')), - Prefix(prefix=netaddr.IPNetwork('10.16.0.0/12')), - Prefix(prefix=netaddr.IPNetwork('10.32.0.0/12')), - Prefix(prefix=netaddr.IPNetwork('10.48.0.0/12')), + Prefix(prefix=IPNetwork('10.0.0.0/12')), + Prefix(prefix=IPNetwork('10.16.0.0/12')), + Prefix(prefix=IPNetwork('10.32.0.0/12')), + Prefix(prefix=IPNetwork('10.48.0.0/12')), )) self.assertEqual(aggregate.get_utilization(), 25) # 50% utilization Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.64.0.0/10')), + Prefix(prefix=IPNetwork('10.64.0.0/10')), )) self.assertEqual(aggregate.get_utilization(), 50) # 100% utilization Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.128.0.0/9')), + Prefix(prefix=IPNetwork('10.128.0.0/9')), )) self.assertEqual(aggregate.get_utilization(), 100) @@ -39,9 +39,9 @@ class TestPrefix(TestCase): def test_get_duplicates(self): prefixes = Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('192.0.2.0/24')), - Prefix(prefix=netaddr.IPNetwork('192.0.2.0/24')), - Prefix(prefix=netaddr.IPNetwork('192.0.2.0/24')), + 
Prefix(prefix=IPNetwork('192.0.2.0/24')), + Prefix(prefix=IPNetwork('192.0.2.0/24')), + Prefix(prefix=IPNetwork('192.0.2.0/24')), )) duplicate_prefix_pks = [p.pk for p in prefixes[0].get_duplicates()] @@ -54,11 +54,11 @@ class TestPrefix(TestCase): VRF(name='VRF 3'), )) prefixes = Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/16'), status=PrefixStatusChoices.STATUS_CONTAINER), - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/24'), vrf=None), - Prefix(prefix=netaddr.IPNetwork('10.0.1.0/24'), vrf=vrfs[0]), - Prefix(prefix=netaddr.IPNetwork('10.0.2.0/24'), vrf=vrfs[1]), - Prefix(prefix=netaddr.IPNetwork('10.0.3.0/24'), vrf=vrfs[2]), + Prefix(prefix=IPNetwork('10.0.0.0/16'), status=PrefixStatusChoices.STATUS_CONTAINER), + Prefix(prefix=IPNetwork('10.0.0.0/24'), vrf=None), + Prefix(prefix=IPNetwork('10.0.1.0/24'), vrf=vrfs[0]), + Prefix(prefix=IPNetwork('10.0.2.0/24'), vrf=vrfs[1]), + Prefix(prefix=IPNetwork('10.0.3.0/24'), vrf=vrfs[2]), )) child_prefix_pks = {p.pk for p in prefixes[0].get_child_prefixes()} @@ -79,13 +79,13 @@ class TestPrefix(TestCase): VRF(name='VRF 3'), )) parent_prefix = Prefix.objects.create( - prefix=netaddr.IPNetwork('10.0.0.0/16'), status=PrefixStatusChoices.STATUS_CONTAINER + prefix=IPNetwork('10.0.0.0/16'), status=PrefixStatusChoices.STATUS_CONTAINER ) ips = IPAddress.objects.bulk_create(( - IPAddress(address=netaddr.IPNetwork('10.0.0.1/24'), vrf=None), - IPAddress(address=netaddr.IPNetwork('10.0.1.1/24'), vrf=vrfs[0]), - IPAddress(address=netaddr.IPNetwork('10.0.2.1/24'), vrf=vrfs[1]), - IPAddress(address=netaddr.IPNetwork('10.0.3.1/24'), vrf=vrfs[2]), + IPAddress(address=IPNetwork('10.0.0.1/24'), vrf=None), + IPAddress(address=IPNetwork('10.0.1.1/24'), vrf=vrfs[0]), + IPAddress(address=IPNetwork('10.0.2.1/24'), vrf=vrfs[1]), + IPAddress(address=IPNetwork('10.0.3.1/24'), vrf=vrfs[2]), )) child_ip_pks = {p.pk for p in parent_prefix.get_child_ips()} @@ -102,16 +102,16 @@ class TestPrefix(TestCase): def test_get_available_prefixes(self): prefixes = Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/16')), # Parent prefix - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/20')), - Prefix(prefix=netaddr.IPNetwork('10.0.32.0/20')), - Prefix(prefix=netaddr.IPNetwork('10.0.128.0/18')), + Prefix(prefix=IPNetwork('10.0.0.0/16')), # Parent prefix + Prefix(prefix=IPNetwork('10.0.0.0/20')), + Prefix(prefix=IPNetwork('10.0.32.0/20')), + Prefix(prefix=IPNetwork('10.0.128.0/18')), )) - missing_prefixes = netaddr.IPSet([ - netaddr.IPNetwork('10.0.16.0/20'), - netaddr.IPNetwork('10.0.48.0/20'), - netaddr.IPNetwork('10.0.64.0/18'), - netaddr.IPNetwork('10.0.192.0/18'), + missing_prefixes = IPSet([ + IPNetwork('10.0.16.0/20'), + IPNetwork('10.0.48.0/20'), + IPNetwork('10.0.64.0/18'), + IPNetwork('10.0.192.0/18'), ]) available_prefixes = prefixes[0].get_available_prefixes() @@ -119,17 +119,17 @@ class TestPrefix(TestCase): def test_get_available_ips(self): - parent_prefix = Prefix.objects.create(prefix=netaddr.IPNetwork('10.0.0.0/28')) + parent_prefix = Prefix.objects.create(prefix=IPNetwork('10.0.0.0/28')) IPAddress.objects.bulk_create(( - IPAddress(address=netaddr.IPNetwork('10.0.0.1/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.3/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.5/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.7/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.9/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.11/26')), - IPAddress(address=netaddr.IPNetwork('10.0.0.13/26')), + 
IPAddress(address=IPNetwork('10.0.0.1/26')), + IPAddress(address=IPNetwork('10.0.0.3/26')), + IPAddress(address=IPNetwork('10.0.0.5/26')), + IPAddress(address=IPNetwork('10.0.0.7/26')), + IPAddress(address=IPNetwork('10.0.0.9/26')), + IPAddress(address=IPNetwork('10.0.0.11/26')), + IPAddress(address=IPNetwork('10.0.0.13/26')), )) - missing_ips = netaddr.IPSet([ + missing_ips = IPSet([ '10.0.0.2/32', '10.0.0.4/32', '10.0.0.6/32', @@ -145,39 +145,39 @@ class TestPrefix(TestCase): def test_get_first_available_prefix(self): prefixes = Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/16')), # Parent prefix - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/24')), - Prefix(prefix=netaddr.IPNetwork('10.0.1.0/24')), - Prefix(prefix=netaddr.IPNetwork('10.0.2.0/24')), + Prefix(prefix=IPNetwork('10.0.0.0/16')), # Parent prefix + Prefix(prefix=IPNetwork('10.0.0.0/24')), + Prefix(prefix=IPNetwork('10.0.1.0/24')), + Prefix(prefix=IPNetwork('10.0.2.0/24')), )) - self.assertEqual(prefixes[0].get_first_available_prefix(), netaddr.IPNetwork('10.0.3.0/24')) + self.assertEqual(prefixes[0].get_first_available_prefix(), IPNetwork('10.0.3.0/24')) - Prefix.objects.create(prefix=netaddr.IPNetwork('10.0.3.0/24')) - self.assertEqual(prefixes[0].get_first_available_prefix(), netaddr.IPNetwork('10.0.4.0/22')) + Prefix.objects.create(prefix=IPNetwork('10.0.3.0/24')) + self.assertEqual(prefixes[0].get_first_available_prefix(), IPNetwork('10.0.4.0/22')) def test_get_first_available_ip(self): - parent_prefix = Prefix.objects.create(prefix=netaddr.IPNetwork('10.0.0.0/24')) + parent_prefix = Prefix.objects.create(prefix=IPNetwork('10.0.0.0/24')) IPAddress.objects.bulk_create(( - IPAddress(address=netaddr.IPNetwork('10.0.0.1/24')), - IPAddress(address=netaddr.IPNetwork('10.0.0.2/24')), - IPAddress(address=netaddr.IPNetwork('10.0.0.3/24')), + IPAddress(address=IPNetwork('10.0.0.1/24')), + IPAddress(address=IPNetwork('10.0.0.2/24')), + IPAddress(address=IPNetwork('10.0.0.3/24')), )) self.assertEqual(parent_prefix.get_first_available_ip(), '10.0.0.4/24') - IPAddress.objects.create(address=netaddr.IPNetwork('10.0.0.4/24')) + IPAddress.objects.create(address=IPNetwork('10.0.0.4/24')) self.assertEqual(parent_prefix.get_first_available_ip(), '10.0.0.5/24') def test_get_utilization(self): # Container Prefix prefix = Prefix.objects.create( - prefix=netaddr.IPNetwork('10.0.0.0/24'), + prefix=IPNetwork('10.0.0.0/24'), status=PrefixStatusChoices.STATUS_CONTAINER ) Prefix.objects.bulk_create(( - Prefix(prefix=netaddr.IPNetwork('10.0.0.0/26')), - Prefix(prefix=netaddr.IPNetwork('10.0.0.128/26')), + Prefix(prefix=IPNetwork('10.0.0.0/26')), + Prefix(prefix=IPNetwork('10.0.0.128/26')), )) self.assertEqual(prefix.get_utilization(), 50) @@ -186,7 +186,7 @@ class TestPrefix(TestCase): prefix.save() IPAddress.objects.bulk_create( # Create 32 IPAddresses within the Prefix - [IPAddress(address=netaddr.IPNetwork('10.0.0.{}/24'.format(i))) for i in range(1, 33)] + [IPAddress(address=IPNetwork('10.0.0.{}/24'.format(i))) for i in range(1, 33)] ) self.assertEqual(prefix.get_utilization(), 12) # ~= 12% @@ -196,36 +196,234 @@ class TestPrefix(TestCase): @override_settings(ENFORCE_GLOBAL_UNIQUE=False) def test_duplicate_global(self): - Prefix.objects.create(prefix=netaddr.IPNetwork('192.0.2.0/24')) - duplicate_prefix = Prefix(prefix=netaddr.IPNetwork('192.0.2.0/24')) + Prefix.objects.create(prefix=IPNetwork('192.0.2.0/24')) + duplicate_prefix = Prefix(prefix=IPNetwork('192.0.2.0/24')) self.assertIsNone(duplicate_prefix.clean()) 
@override_settings(ENFORCE_GLOBAL_UNIQUE=True) def test_duplicate_global_unique(self): - Prefix.objects.create(prefix=netaddr.IPNetwork('192.0.2.0/24')) - duplicate_prefix = Prefix(prefix=netaddr.IPNetwork('192.0.2.0/24')) + Prefix.objects.create(prefix=IPNetwork('192.0.2.0/24')) + duplicate_prefix = Prefix(prefix=IPNetwork('192.0.2.0/24')) self.assertRaises(ValidationError, duplicate_prefix.clean) def test_duplicate_vrf(self): vrf = VRF.objects.create(name='Test', rd='1:1', enforce_unique=False) - Prefix.objects.create(vrf=vrf, prefix=netaddr.IPNetwork('192.0.2.0/24')) - duplicate_prefix = Prefix(vrf=vrf, prefix=netaddr.IPNetwork('192.0.2.0/24')) + Prefix.objects.create(vrf=vrf, prefix=IPNetwork('192.0.2.0/24')) + duplicate_prefix = Prefix(vrf=vrf, prefix=IPNetwork('192.0.2.0/24')) self.assertIsNone(duplicate_prefix.clean()) def test_duplicate_vrf_unique(self): vrf = VRF.objects.create(name='Test', rd='1:1', enforce_unique=True) - Prefix.objects.create(vrf=vrf, prefix=netaddr.IPNetwork('192.0.2.0/24')) - duplicate_prefix = Prefix(vrf=vrf, prefix=netaddr.IPNetwork('192.0.2.0/24')) + Prefix.objects.create(vrf=vrf, prefix=IPNetwork('192.0.2.0/24')) + duplicate_prefix = Prefix(vrf=vrf, prefix=IPNetwork('192.0.2.0/24')) self.assertRaises(ValidationError, duplicate_prefix.clean) +class TestPrefixHierarchy(TestCase): + """ + Test the automatic updating of depth and child count in response to changes made within + the prefix hierarchy. + """ + @classmethod + def setUpTestData(cls): + + prefixes = ( + + # IPv4 + Prefix(prefix='10.0.0.0/8', _depth=0, _children=2), + Prefix(prefix='10.0.0.0/16', _depth=1, _children=1), + Prefix(prefix='10.0.0.0/24', _depth=2, _children=0), + + # IPv6 + Prefix(prefix='2001:db8::/32', _depth=0, _children=2), + Prefix(prefix='2001:db8::/40', _depth=1, _children=1), + Prefix(prefix='2001:db8::/48', _depth=2, _children=0), + + ) + Prefix.objects.bulk_create(prefixes) + + def test_create_prefix4(self): + # Create 10.0.0.0/12 + Prefix(prefix='10.0.0.0/12').save() + + prefixes = Prefix.objects.filter(prefix__family=4) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/8')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 3) + self.assertEqual(prefixes[1].prefix, IPNetwork('10.0.0.0/12')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 2) + self.assertEqual(prefixes[2].prefix, IPNetwork('10.0.0.0/16')) + self.assertEqual(prefixes[2]._depth, 2) + self.assertEqual(prefixes[2]._children, 1) + self.assertEqual(prefixes[3].prefix, IPNetwork('10.0.0.0/24')) + self.assertEqual(prefixes[3]._depth, 3) + self.assertEqual(prefixes[3]._children, 0) + + def test_create_prefix6(self): + # Create 2001:db8::/36 + Prefix(prefix='2001:db8::/36').save() + + prefixes = Prefix.objects.filter(prefix__family=6) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/32')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 3) + self.assertEqual(prefixes[1].prefix, IPNetwork('2001:db8::/36')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 2) + self.assertEqual(prefixes[2].prefix, IPNetwork('2001:db8::/40')) + self.assertEqual(prefixes[2]._depth, 2) + self.assertEqual(prefixes[2]._children, 1) + self.assertEqual(prefixes[3].prefix, IPNetwork('2001:db8::/48')) + self.assertEqual(prefixes[3]._depth, 3) + self.assertEqual(prefixes[3]._children, 0) + + def test_update_prefix4(self): + # Change 10.0.0.0/24 to 10.0.0.0/12 + p = 
Prefix.objects.get(prefix='10.0.0.0/24') + p.prefix = '10.0.0.0/12' + p.save() + + prefixes = Prefix.objects.filter(prefix__family=4) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/8')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 2) + self.assertEqual(prefixes[1].prefix, IPNetwork('10.0.0.0/12')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 1) + self.assertEqual(prefixes[2].prefix, IPNetwork('10.0.0.0/16')) + self.assertEqual(prefixes[2]._depth, 2) + self.assertEqual(prefixes[2]._children, 0) + + def test_update_prefix6(self): + # Change 2001:db8::/48 to 2001:db8::/36 + p = Prefix.objects.get(prefix='2001:db8::/48') + p.prefix = '2001:db8::/36' + p.save() + + prefixes = Prefix.objects.filter(prefix__family=6) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/32')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 2) + self.assertEqual(prefixes[1].prefix, IPNetwork('2001:db8::/36')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 1) + self.assertEqual(prefixes[2].prefix, IPNetwork('2001:db8::/40')) + self.assertEqual(prefixes[2]._depth, 2) + self.assertEqual(prefixes[2]._children, 0) + + def test_update_prefix_vrf4(self): + vrf = VRF(name='VRF A') + vrf.save() + + # Move 10.0.0.0/16 to a VRF + p = Prefix.objects.get(prefix='10.0.0.0/16') + p.vrf = vrf + p.save() + + prefixes = Prefix.objects.filter(vrf__isnull=True, prefix__family=4) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/8')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 1) + self.assertEqual(prefixes[1].prefix, IPNetwork('10.0.0.0/24')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 0) + + prefixes = Prefix.objects.filter(vrf=vrf) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/16')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 0) + + def test_update_prefix_vrf6(self): + vrf = VRF(name='VRF A') + vrf.save() + + # Move 2001:db8::/40 to a VRF + p = Prefix.objects.get(prefix='2001:db8::/40') + p.vrf = vrf + p.save() + + prefixes = Prefix.objects.filter(vrf__isnull=True, prefix__family=6) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/32')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 1) + self.assertEqual(prefixes[1].prefix, IPNetwork('2001:db8::/48')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 0) + + prefixes = Prefix.objects.filter(vrf=vrf) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/40')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 0) + + def test_delete_prefix4(self): + # Delete 10.0.0.0/16 + Prefix.objects.filter(prefix='10.0.0.0/16').delete() + + prefixes = Prefix.objects.filter(prefix__family=4) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/8')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 1) + self.assertEqual(prefixes[1].prefix, IPNetwork('10.0.0.0/24')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 0) + + def test_delete_prefix6(self): + # Delete 2001:db8::/40 + Prefix.objects.filter(prefix='2001:db8::/40').delete() + + prefixes = Prefix.objects.filter(prefix__family=6) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/32')) + self.assertEqual(prefixes[0]._depth, 0) + 
self.assertEqual(prefixes[0]._children, 1) + self.assertEqual(prefixes[1].prefix, IPNetwork('2001:db8::/48')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 0) + + def test_duplicate_prefix4(self): + # Duplicate 10.0.0.0/16 + Prefix(prefix='10.0.0.0/16').save() + + prefixes = Prefix.objects.filter(prefix__family=4) + self.assertEqual(prefixes[0].prefix, IPNetwork('10.0.0.0/8')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 3) + self.assertEqual(prefixes[1].prefix, IPNetwork('10.0.0.0/16')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 1) + self.assertEqual(prefixes[2].prefix, IPNetwork('10.0.0.0/16')) + self.assertEqual(prefixes[2]._depth, 1) + self.assertEqual(prefixes[2]._children, 1) + self.assertEqual(prefixes[3].prefix, IPNetwork('10.0.0.0/24')) + self.assertEqual(prefixes[3]._depth, 2) + self.assertEqual(prefixes[3]._children, 0) + + def test_duplicate_prefix6(self): + # Duplicate 2001:db8::/40 + Prefix(prefix='2001:db8::/40').save() + + prefixes = Prefix.objects.filter(prefix__family=6) + self.assertEqual(prefixes[0].prefix, IPNetwork('2001:db8::/32')) + self.assertEqual(prefixes[0]._depth, 0) + self.assertEqual(prefixes[0]._children, 3) + self.assertEqual(prefixes[1].prefix, IPNetwork('2001:db8::/40')) + self.assertEqual(prefixes[1]._depth, 1) + self.assertEqual(prefixes[1]._children, 1) + self.assertEqual(prefixes[2].prefix, IPNetwork('2001:db8::/40')) + self.assertEqual(prefixes[2]._depth, 1) + self.assertEqual(prefixes[2]._children, 1) + self.assertEqual(prefixes[3].prefix, IPNetwork('2001:db8::/48')) + self.assertEqual(prefixes[3]._depth, 2) + self.assertEqual(prefixes[3]._children, 0) + + class TestIPAddress(TestCase): def test_get_duplicates(self): ips = IPAddress.objects.bulk_create(( - IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')), - IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')), - IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')), + IPAddress(address=IPNetwork('192.0.2.1/24')), + IPAddress(address=IPNetwork('192.0.2.1/24')), + IPAddress(address=IPNetwork('192.0.2.1/24')), )) duplicate_ip_pks = [p.pk for p in ips[0].get_duplicates()] @@ -237,44 +435,44 @@ class TestIPAddress(TestCase): @override_settings(ENFORCE_GLOBAL_UNIQUE=False) def test_duplicate_global(self): - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24')) - duplicate_ip = IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24')) + duplicate_ip = IPAddress(address=IPNetwork('192.0.2.1/24')) self.assertIsNone(duplicate_ip.clean()) @override_settings(ENFORCE_GLOBAL_UNIQUE=True) def test_duplicate_global_unique(self): - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24')) - duplicate_ip = IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24')) + duplicate_ip = IPAddress(address=IPNetwork('192.0.2.1/24')) self.assertRaises(ValidationError, duplicate_ip.clean) def test_duplicate_vrf(self): vrf = VRF.objects.create(name='Test', rd='1:1', enforce_unique=False) - IPAddress.objects.create(vrf=vrf, address=netaddr.IPNetwork('192.0.2.1/24')) - duplicate_ip = IPAddress(vrf=vrf, address=netaddr.IPNetwork('192.0.2.1/24')) + IPAddress.objects.create(vrf=vrf, address=IPNetwork('192.0.2.1/24')) + duplicate_ip = IPAddress(vrf=vrf, address=IPNetwork('192.0.2.1/24')) self.assertIsNone(duplicate_ip.clean()) def test_duplicate_vrf_unique(self): vrf = 
VRF.objects.create(name='Test', rd='1:1', enforce_unique=True) - IPAddress.objects.create(vrf=vrf, address=netaddr.IPNetwork('192.0.2.1/24')) - duplicate_ip = IPAddress(vrf=vrf, address=netaddr.IPNetwork('192.0.2.1/24')) + IPAddress.objects.create(vrf=vrf, address=IPNetwork('192.0.2.1/24')) + duplicate_ip = IPAddress(vrf=vrf, address=IPNetwork('192.0.2.1/24')) self.assertRaises(ValidationError, duplicate_ip.clean) @override_settings(ENFORCE_GLOBAL_UNIQUE=True) def test_duplicate_nonunique_nonrole_role(self): - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24')) - duplicate_ip = IPAddress(address=netaddr.IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24')) + duplicate_ip = IPAddress(address=IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) self.assertRaises(ValidationError, duplicate_ip.clean) @override_settings(ENFORCE_GLOBAL_UNIQUE=True) def test_duplicate_nonunique_role_nonrole(self): - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) - duplicate_ip = IPAddress(address=netaddr.IPNetwork('192.0.2.1/24')) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) + duplicate_ip = IPAddress(address=IPNetwork('192.0.2.1/24')) self.assertRaises(ValidationError, duplicate_ip.clean) @override_settings(ENFORCE_GLOBAL_UNIQUE=True) def test_duplicate_nonunique_role(self): - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) - IPAddress.objects.create(address=netaddr.IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) + IPAddress.objects.create(address=IPNetwork('192.0.2.1/24'), role=IPAddressRoleChoices.ROLE_VIP) class TestVLANGroup(TestCase): diff --git a/netbox/ipam/views.py b/netbox/ipam/views.py index 168933af7..7c1a06cf3 100644 --- a/netbox/ipam/views.py +++ b/netbox/ipam/views.py @@ -238,7 +238,7 @@ class AggregateView(generic.ObjectView): 'site', 'role' ).order_by( 'prefix' - ).annotate_tree() + ) # Add available prefixes to the table if requested if request.GET.get('show_available', 'true') == 'true': @@ -352,7 +352,7 @@ class RoleBulkDeleteView(generic.BulkDeleteView): # class PrefixListView(generic.ObjectListView): - queryset = Prefix.objects.annotate_tree() + queryset = Prefix.objects.all() filterset = filtersets.PrefixFilterSet filterset_form = forms.PrefixFilterForm table = tables.PrefixDetailTable @@ -377,7 +377,7 @@ class PrefixView(generic.ObjectView): prefix__net_contains=str(instance.prefix) ).prefetch_related( 'site', 'role' - ).annotate_tree() + ) parent_prefix_table = tables.PrefixTable(list(parent_prefixes), orderable=False) parent_prefix_table.exclude = ('vrf',) @@ -407,7 +407,7 @@ class PrefixPrefixesView(generic.ObjectView): # Child prefixes table child_prefixes = instance.get_child_prefixes().restrict(request.user, 'view').prefetch_related( 'site', 'vlan', 'role', - ).annotate_tree() + ) # Add available prefixes to the table if requested if child_prefixes and request.GET.get('show_available', 'true') == 'true': From 8c1b681391c455a59f2bd8237933db0a5684cb85 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Wed, 26 May 2021 21:39:42 -0400 Subject: [PATCH 03/28] Add GitHub discussions link; replace Google Group with netdev.chat --- .github/ISSUE_TEMPLATE/config.yml | 11 +++++++---- 1 file changed, 7 insertions(+), 4 
deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3e1d0167d..d83eecc0c 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,9 +1,12 @@ # Reference: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser blank_issues_enabled: false contact_links: + - name: ❓ Discussion + url: https://github.com/netbox-community/netbox/discussions + about: "If you're just looking for help, try starting a discussion instead" - name: 📖 Contributing Policy url: https://github.com/netbox-community/netbox/blob/develop/CONTRIBUTING.md - about: Please read through our contributing policy before opening an issue or pull request - - name: 💬 Discussion Group - url: https://groups.google.com/g/netbox-discuss - about: Join our discussion group for assistance with installation issues and other problems + about: "Please read through our contributing policy before opening an issue or pull request" + - name: 💬 Community Slack + url: https://netdev.chat/ + about: "Join #netbox on the NetDev Community Slack for assistance with installation issues and other problems" From 5ac6a307bfb76dc415f7fc1c73315db674ad9842 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Wed, 26 May 2021 21:45:18 -0400 Subject: [PATCH 04/28] Rearrange contact links --- .github/ISSUE_TEMPLATE/config.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index d83eecc0c..1f8fdebd4 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,12 +1,12 @@ # Reference: https://help.github.com/en/github/building-a-strong-community/configuring-issue-templates-for-your-repository#configuring-the-template-chooser blank_issues_enabled: false contact_links: - - name: ❓ Discussion - url: https://github.com/netbox-community/netbox/discussions - about: "If you're just looking for help, try starting a discussion instead" - name: 📖 Contributing Policy url: https://github.com/netbox-community/netbox/blob/develop/CONTRIBUTING.md about: "Please read through our contributing policy before opening an issue or pull request" + - name: ❓ Discussion + url: https://github.com/netbox-community/netbox/discussions + about: "If you're just looking for help, try starting a discussion instead" - name: 💬 Community Slack url: https://netdev.chat/ about: "Join #netbox on the NetDev Community Slack for assistance with installation issues and other problems" From 34e673f7d619445d1746a52f422b41aa7c6268f4 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Thu, 27 May 2021 09:24:29 -0400 Subject: [PATCH 05/28] Introduce rebuild_prefixes management command --- netbox/ipam/management/__init__.py | 0 netbox/ipam/management/commands/__init__.py | 0 .../management/commands/rebuild_prefixes.py | 27 +++++++++ .../0048_prefix_populate_depth_children.py | 54 +++-------------- netbox/ipam/utils.py | 60 +++++++++++++++++++ 5 files changed, 94 insertions(+), 47 deletions(-) create mode 100644 netbox/ipam/management/__init__.py create mode 100644 netbox/ipam/management/commands/__init__.py create mode 100644 netbox/ipam/management/commands/rebuild_prefixes.py diff --git a/netbox/ipam/management/__init__.py b/netbox/ipam/management/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/netbox/ipam/management/commands/__init__.py b/netbox/ipam/management/commands/__init__.py new file mode 
100644 index 000000000..e69de29bb diff --git a/netbox/ipam/management/commands/rebuild_prefixes.py b/netbox/ipam/management/commands/rebuild_prefixes.py new file mode 100644 index 000000000..5d614b834 --- /dev/null +++ b/netbox/ipam/management/commands/rebuild_prefixes.py @@ -0,0 +1,27 @@ +from django.core.management.base import BaseCommand + +from ipam.models import Prefix, VRF +from ipam.utils import rebuild_prefixes + + +class Command(BaseCommand): + help = "Rebuild the prefix hierarchy (depth and children counts)" + + def handle(self, *model_names, **options): + self.stdout.write(f'Rebuilding {Prefix.objects.count()} prefixes...') + + # Reset existing counts + Prefix.objects.update(_depth=0, _children=0) + + # Rebuild the global table + global_count = Prefix.objects.filter(vrf__isnull=True).count() + self.stdout.write(f'Global: {global_count} prefixes...') + rebuild_prefixes(None) + + # Rebuild each VRF + for vrf in VRF.objects.all(): + vrf_count = Prefix.objects.filter(vrf=vrf).count() + self.stdout.write(f'VRF {vrf}: {vrf_count} prefixes...') + rebuild_prefixes(vrf) + + self.stdout.write(self.style.SUCCESS('Finished.')) diff --git a/netbox/ipam/migrations/0048_prefix_populate_depth_children.py b/netbox/ipam/migrations/0048_prefix_populate_depth_children.py index b265e7f6f..dc5070656 100644 --- a/netbox/ipam/migrations/0048_prefix_populate_depth_children.py +++ b/netbox/ipam/migrations/0048_prefix_populate_depth_children.py @@ -1,5 +1,7 @@ from django.db import migrations +from ipam.utils import rebuild_prefixes + def push_to_stack(stack, prefix): # Increment child count on parent nodes @@ -22,54 +24,12 @@ def populate_prefix_hierarchy(apps, schema_editor): total_count = Prefix.objects.count() print(f'\nUpdating {total_count} prefixes...') - # Iterate through all VRFs and the global table - vrfs = [None] + list(VRF.objects.values_list('pk', flat=True)) - for vrf in vrfs: + # Rebuild the global table + rebuild_prefixes(None) - stack = [] - update_queue = [] - - # Iterate through all Prefixes in the VRF, growing and shrinking the stack as we go - prefixes = Prefix.objects.filter(vrf=vrf).values('pk', 'prefix') - for i, p in enumerate(prefixes): - - # Grow the stack if this is a child of the most recent prefix - if not stack or p['prefix'] in stack[-1]['prefix']: - push_to_stack(stack, p) - - # If this is a sibling or parent of the most recent prefix, pop nodes from the - # stack until we reach a parent prefix (or the root) - else: - while stack and p['prefix'] not in stack[-1]['prefix'] and p['prefix'] != stack[-1]['prefix']: - node = stack.pop() - update_queue.append( - Prefix( - pk=node['pk'], - _depth=len(stack), - _children=node['children'] - ) - ) - push_to_stack(stack, p) - - # Flush the update queue once it reaches 100 Prefixes - if len(update_queue) >= 100: - Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) - update_queue = [] - print(f' [{i}/{total_count}]') - - # Clear out any prefixes remaining in the stack - while stack: - node = stack.pop() - update_queue.append( - Prefix( - pk=node['pk'], - _depth=len(stack), - _children=node['children'] - ) - ) - - # Final flush of any remaining Prefixes - Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) + # Iterate through all VRFs, rebuilding each + for vrf in VRF.objects.all(): + rebuild_prefixes(vrf) class Migration(migrations.Migration): diff --git a/netbox/ipam/utils.py b/netbox/ipam/utils.py index 0414a01e0..953a4a468 100644 --- a/netbox/ipam/utils.py +++ b/netbox/ipam/utils.py @@ -91,3 +91,63 @@ 
def add_available_vlans(vlan_group, vlans): vlans.sort(key=lambda v: v.vid if type(v) == VLAN else v['vid']) return vlans + + +def rebuild_prefixes(vrf): + """ + Rebuild the prefix hierarchy for all prefixes in the specified VRF (or global table). + """ + def contains(parent, child): + return child in parent and child != parent + + def push_to_stack(prefix): + # Increment child count on parent nodes + for n in stack: + n['children'] += 1 + stack.append({ + 'pk': prefix['pk'], + 'prefix': prefix['prefix'], + 'children': 0, + }) + + stack = [] + update_queue = [] + prefixes = Prefix.objects.filter(vrf=vrf).values('pk', 'prefix') + + # Iterate through all Prefixes in the VRF, growing and shrinking the stack as we go + for i, p in enumerate(prefixes): + + # Grow the stack if this is a child of the most recent prefix + if not stack or contains(stack[-1]['prefix'], p['prefix']): + push_to_stack(p) + + # Handle duplicate prefixes + elif stack[-1]['prefix'] == p['prefix']: + update_queue.append( + Prefix(pk=p['pk'], _depth=len(stack) - 1, _children=stack[-1]['children']) + ) + + # If this is a sibling or parent of the most recent prefix, pop nodes from the + # stack until we reach a parent prefix (or the root) + else: + while stack and not contains(stack[-1]['prefix'], p['prefix']): + node = stack.pop() + update_queue.append( + Prefix(pk=node['pk'], _depth=len(stack), _children=node['children']) + ) + push_to_stack(p) + + # Flush the update queue once it reaches 100 Prefixes + if len(update_queue) >= 100: + Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) + update_queue = [] + + # Clear out any prefixes remaining in the stack + while stack: + node = stack.pop() + update_queue.append( + Prefix(pk=node['pk'], _depth=len(stack), _children=node['children']) + ) + + # Final flush of any remaining Prefixes + Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) From 84017776ecdc9f9f69b36fbb7947da36a93f9799 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Thu, 27 May 2021 10:03:00 -0400 Subject: [PATCH 06/28] Fix handling of duplicate prefixes --- netbox/ipam/utils.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/netbox/ipam/utils.py b/netbox/ipam/utils.py index 953a4a468..18a470253 100644 --- a/netbox/ipam/utils.py +++ b/netbox/ipam/utils.py @@ -105,7 +105,7 @@ def rebuild_prefixes(vrf): for n in stack: n['children'] += 1 stack.append({ - 'pk': prefix['pk'], + 'pk': [prefix['pk']], 'prefix': prefix['prefix'], 'children': 0, }) @@ -123,18 +123,17 @@ def rebuild_prefixes(vrf): # Handle duplicate prefixes elif stack[-1]['prefix'] == p['prefix']: - update_queue.append( - Prefix(pk=p['pk'], _depth=len(stack) - 1, _children=stack[-1]['children']) - ) + stack[-1]['pk'].append(p['pk']) # If this is a sibling or parent of the most recent prefix, pop nodes from the # stack until we reach a parent prefix (or the root) else: while stack and not contains(stack[-1]['prefix'], p['prefix']): node = stack.pop() - update_queue.append( - Prefix(pk=node['pk'], _depth=len(stack), _children=node['children']) - ) + for pk in node['pk']: + update_queue.append( + Prefix(pk=pk, _depth=len(stack), _children=node['children']) + ) push_to_stack(p) # Flush the update queue once it reaches 100 Prefixes @@ -145,9 +144,10 @@ def rebuild_prefixes(vrf): # Clear out any prefixes remaining in the stack while stack: node = stack.pop() - update_queue.append( - Prefix(pk=node['pk'], _depth=len(stack), _children=node['children']) - ) + for pk in node['pk']: + 
update_queue.append( + Prefix(pk=pk, _depth=len(stack), _children=node['children']) + ) # Final flush of any remaining Prefixes Prefix.objects.bulk_update(update_queue, ['_depth', '_children']) From 103730a64243d3475975d6cf1e1f212c546db579 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Thu, 27 May 2021 12:54:41 -0400 Subject: [PATCH 07/28] Extend depth & children filters --- netbox/ipam/filtersets.py | 4 ++-- netbox/ipam/tests/test_filtersets.py | 15 ++++++++++++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/netbox/ipam/filtersets.py b/netbox/ipam/filtersets.py index bca3f08c9..f67317b7d 100644 --- a/netbox/ipam/filtersets.py +++ b/netbox/ipam/filtersets.py @@ -209,10 +209,10 @@ class PrefixFilterSet(PrimaryModelFilterSet, TenancyFilterSet): method='search_contains', label='Prefixes which contain this prefix or IP', ) - depth = django_filters.NumberFilter( + depth = MultiValueNumberFilter( field_name='_depth' ) - children = django_filters.NumberFilter( + children = MultiValueNumberFilter( field_name='_children' ) mask_length = django_filters.NumberFilter( diff --git a/netbox/ipam/tests/test_filtersets.py b/netbox/ipam/tests/test_filtersets.py index 282a19b66..087328189 100644 --- a/netbox/ipam/tests/test_filtersets.py +++ b/netbox/ipam/tests/test_filtersets.py @@ -400,7 +400,8 @@ class PrefixTestCase(TestCase, ChangeLoggedFilterSetTests): Prefix(prefix='10.0.0.0/16'), Prefix(prefix='2001:db8::/32'), ) - Prefix.objects.bulk_create(prefixes) + for prefix in prefixes: + prefix.save() def test_family(self): params = {'family': '6'} @@ -431,6 +432,18 @@ class PrefixTestCase(TestCase, ChangeLoggedFilterSetTests): params = {'contains': '2001:db8:0:1::/64'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) + def test_depth(self): + params = {'depth': '0'} + self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8) + params = {'depth__gt': '0'} + self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) + + def test_children(self): + params = {'children': '0'} + self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8) + params = {'children__gt': '0'} + self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2) + def test_mask_length(self): params = {'mask_length': '24'} self.assertEqual(self.filterset(params, self.queryset).qs.count(), 4) From d34b9ee00e20b9f707febea6e24a2786d2b1e49f Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Thu, 27 May 2021 13:24:31 -0400 Subject: [PATCH 08/28] Add max depth selector --- netbox/templates/ipam/prefix_list.html | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/netbox/templates/ipam/prefix_list.html b/netbox/templates/ipam/prefix_list.html index e6e2abca4..8eeb06846 100644 --- a/netbox/templates/ipam/prefix_list.html +++ b/netbox/templates/ipam/prefix_list.html @@ -2,6 +2,26 @@ {% load helpers %} {% block buttons %} +
+ +
+
+ +
+

 {{ termination_a.device.location|placeholder }}
-{{ termination_a.device.rack|default:"None" }}
+{{ termination_a.device.rack|placeholder }}
From b2b3f388b17bdc0c6f7e219c1e664d9aa690f261 Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Fri, 28 May 2021 11:15:45 -0400 Subject: [PATCH 13/28] Correct Prefix REST API test case --- netbox/ipam/tests/test_api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/netbox/ipam/tests/test_api.py b/netbox/ipam/tests/test_api.py index b2b4b9e8f..3036a8aa9 100644 --- a/netbox/ipam/tests/test_api.py +++ b/netbox/ipam/tests/test_api.py @@ -186,7 +186,7 @@ class RoleTest(APIViewTestCases.APIViewTestCase): class PrefixTest(APIViewTestCases.APIViewTestCase): model = Prefix - brief_fields = ['display', 'family', 'id', 'prefix', 'url'] + brief_fields = ['_depth', 'display', 'family', 'id', 'prefix', 'url'] create_data = [ { 'prefix': '192.168.4.0/24', From 1c0f3e1b81de36f06fef6e6605bbebad9f54b14c Mon Sep 17 00:00:00 2001 From: jeremystretch Date: Fri, 28 May 2021 13:16:25 -0400 Subject: [PATCH 14/28] Fixes #6502: Correct permissions evaluation for running a report via the REST API --- docs/release-notes/version-2.11.md | 4 ++++ netbox/extras/api/views.py | 2 +- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/release-notes/version-2.11.md b/docs/release-notes/version-2.11.md index b9b86ef91..634e47316 100644 --- a/docs/release-notes/version-2.11.md +++ b/docs/release-notes/version-2.11.md @@ -8,6 +8,10 @@ * [#6487](https://github.com/netbox-community/netbox/issues/6487) - Add location filter to cable connection form * [#6501](https://github.com/netbox-community/netbox/issues/6501) - Expose prefix depth and children on REST API serializer +### Bug Fixes + +* [#6502](https://github.com/netbox-community/netbox/issues/6502) - Correct permissions evaluation for running a report via the REST API + --- ## v2.11.4 (2021-05-25) diff --git a/netbox/extras/api/views.py b/netbox/extras/api/views.py index 7e6c97782..fbeba8328 100644 --- a/netbox/extras/api/views.py +++ b/netbox/extras/api/views.py @@ -239,7 +239,7 @@ class ReportViewSet(ViewSet): Run a Report identified as ".