Mirror of https://github.com/netbox-community/netbox.git (synced 2025-12-28 16:17:46 -06:00)

Compare commits: 49 commits, v4.3.3 ... 7604-filte
| SHA1 |
|---|
| aa9ee0e5c6 |
| 35b9d80819 |
| d4b30a64ba |
| de53fd2bd1 |
| c7b68664f9 |
| a20715f229 |
| 1b8767f1e3 |
| 5acef5038f |
| 6ca3908715 |
| c736ce3179 |
| 111fefdf9c |
| 063d1fef7a |
| 6ba6ff3fee |
| 7bb7307892 |
| c2d3363930 |
| 6e30c11017 |
| b01c75cf3a |
| ffa9a52667 |
| 47320f9958 |
| d08a1bd07d |
| 14c4aeca54 |
| 26bec1275f |
| fa2d7f6516 |
| d571cb4867 |
| 2129355c30 |
| c40bfb1445 |
| b88b5b0b1b |
| 6eeb382512 |
| e5d6c71171 |
| f777bfee2e |
| 8b63eb64c1 |
| cff29f9551 |
| a5c0cae112 |
| 2a27e475e4 |
| 44efa037cc |
| 6c17629159 |
| f13d028c98 |
| f5d32b1bf1 |
| f05897d61a |
| b5421f1cd6 |
| 23cc4f1c41 |
| 9c2cd66162 |
| f61a2964c8 |
| ee94fb0b94 |
| 8fb8f4c75b |
| e33793dc82 |
| 3b8841ee3b |
| ea4c205a37 |
| 2a5d3abafb |
@@ -15,7 +15,7 @@ body:
    attributes:
      label: NetBox version
      description: What version of NetBox are you currently running?
-      placeholder: v4.3.3
+      placeholder: v4.3.5
    validations:
      required: true
  - type: dropdown

.github/ISSUE_TEMPLATE/02-bug_report.yaml (vendored, 2 changes)
@@ -27,7 +27,7 @@ body:
    attributes:
      label: NetBox Version
      description: What version of NetBox are you currently running?
-      placeholder: v4.3.3
+      placeholder: v4.3.5
    validations:
      required: true
  - type: dropdown

.github/codeql/codeql-config.yml (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
paths-ignore:
  # Ignore compiled JS
  - netbox/project-static/dist

.github/workflows/codeql.yml (vendored, new file, 42 lines)
@@ -0,0 +1,42 @@
name: "CodeQL"

on:
  push:
    branches: [ "main", "feature" ]
  pull_request:
    branches: [ "main", "feature" ]
  schedule:
    - cron: '38 16 * * 4'

jobs:
  analyze:
    name: Analyze (${{ matrix.language }})
    runs-on: ubuntu-latest
    permissions:
      security-events: write

    strategy:
      fail-fast: false
      matrix:
        include:
          - language: actions
            build-mode: none
          - language: javascript-typescript
            build-mode: none
          - language: python
            build-mode: none
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Initialize CodeQL
        uses: github/codeql-action/init@v3
        with:
          languages: ${{ matrix.language }}
          build-mode: ${{ matrix.build-mode }}
          config-file: .github/codeql/codeql-config.yml

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v3
        with:
          category: "/language:${{matrix.language}}"
@@ -6,7 +6,7 @@
<a href="https://github.com/netbox-community/netbox/graphs/contributors"><img src="https://img.shields.io/github/contributors/netbox-community/netbox?color=blue" alt="Contributors" /></a>
<a href="https://github.com/netbox-community/netbox/stargazers"><img src="https://img.shields.io/github/stars/netbox-community/netbox?style=flat" alt="GitHub stars" /></a>
<a href="https://explore.transifex.com/netbox-community/netbox/"><img src="https://img.shields.io/badge/languages-15-blue" alt="Languages supported" /></a>
-<a href="https://github.com/netbox-community/netbox/actions/workflows/ci.yml"><img src="https://github.com/netbox-community/netbox/workflows/CI/badge.svg?branch=main" alt="CI status" /></a>
+<a href="https://github.com/netbox-community/netbox/actions/workflows/ci.yml"><img src="https://github.com/netbox-community/netbox/actions/workflows/ci.yml/badge.svg" alt="CI status" /></a>
<p>
<strong><a href="https://netboxlabs.com/community/">NetBox Community</a></strong> |
<strong><a href="https://netboxlabs.com/netbox-cloud/">NetBox Cloud</a></strong> |
@@ -8,12 +8,18 @@ django-cors-headers

# Runtime UI tool for debugging Django
# https://github.com/jazzband/django-debug-toolbar/blob/main/docs/changes.rst
-django-debug-toolbar
+# django-debug-toolbar v6.0.0 raises "Attribute Error at /: 'function' object has no attribute 'set'"
+# see https://github.com/netbox-community/netbox/issues/19974
+django-debug-toolbar==5.2.0

# Library for writing reusable URL query filters
# https://github.com/carltongibson/django-filter/blob/main/CHANGES.rst
django-filter

# Django Debug Toolbar extension for GraphiQL
# https://github.com/flavors/django-graphiql-debug-toolbar/blob/main/CHANGES.rst
django-graphiql-debug-toolbar

# HTMX utilities for Django
# https://django-htmx.readthedocs.io/en/latest/changelog.html
django-htmx

@@ -108,6 +114,7 @@ nh3

# Fork of PIL (Python Imaging Library) for image processing
# https://github.com/python-pillow/Pillow/releases
+# https://pillow.readthedocs.io/en/stable/releasenotes/
Pillow

# PostgreSQL database adapter for Python

@@ -126,22 +133,21 @@ requests
# https://github.com/rq/rq/blob/master/CHANGES.md
rq

-# Social authentication framework
-# https://github.com/python-social-auth/social-core/blob/master/CHANGELOG.md
-social-auth-core
-
# Django app for social-auth-core
# https://github.com/python-social-auth/social-app-django/blob/master/CHANGELOG.md
social-auth-app-django

+# Social authentication framework
+# https://github.com/python-social-auth/social-core/blob/master/CHANGELOG.md
+social-auth-core
+
# Strawberry GraphQL
# https://github.com/strawberry-graphql/strawberry/blob/main/CHANGELOG.md
strawberry-graphql

# Strawberry GraphQL Django extension
# https://github.com/strawberry-graphql/strawberry-django/releases
-# See #19771
-strawberry-graphql-django==0.60.0
+strawberry-graphql-django

# SVG image rendering (used for rack elevations)
# https://github.com/mozman/svgwrite/blob/master/NEWS.rst
@@ -18,10 +18,10 @@ pg_dump --username netbox --password --host localhost netbox > netbox.sql
!!! note
    You may need to change the username, host, and/or database in the command above to match your installation.

-When replicating a production database for development purposes, you may find it convenient to exclude changelog data, which can easily account for the bulk of a database's size. To do this, exclude the `extras_objectchange` table data from the export. The table will still be included in the output file, but will not be populated with any data.
+When replicating a production database for development purposes, you may find it convenient to exclude changelog data, which can easily account for the bulk of a database's size. To do this, exclude the `core_objectchange` table data from the export. The table will still be included in the output file, but will not be populated with any data.

```no-highlight
-pg_dump ... --exclude-table-data=extras_objectchange netbox > netbox.sql
+pg_dump ... --exclude-table-data=core_objectchange netbox > netbox.sql
```

### Load an Exported Database
@@ -158,6 +158,7 @@ LOGGING = {
* `netbox.<app>.<model>` - Generic form for model-specific log messages
* `netbox.auth.*` - Authentication events
* `netbox.api.views.*` - Views which handle business logic for the REST API
+* `netbox.event_rules` - Event rules
* `netbox.reports.*` - Report execution (`module.name`)
* `netbox.scripts.*` - Custom script execution (`module.name`)
* `netbox.views.*` - Views which handle business logic for the web UI
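For readers less familiar with Django logging, the sketch below shows how one of the loggers listed above could be attached to a handler in `configuration.py`. The handler name, file path, and log level are assumptions for illustration and are not part of the documented change.

```python
# Minimal sketch: route event-rule log messages to a dedicated file.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'event_rules_file': {
            'class': 'logging.FileHandler',
            'filename': '/var/log/netbox/event_rules.log',  # assumed path
        },
    },
    'loggers': {
        # Logger names follow the netbox.* hierarchy described above
        'netbox.event_rules': {
            'handlers': ['event_rules_file'],
            'level': 'INFO',
        },
    },
}
```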
@@ -147,7 +147,7 @@ For UI development you will need to review the [Web UI Development Guide](web-ui

## Populating Demo Data

-Once you have your development environment up and running, it might be helpful to populate some "dummy" data to make interacting with the UI and APIs more convenient. Check out the [netbox-demo-data](https://github.com/netbox-community/netbox-demo-data) repo on GitHub, which houses a collection of sample data that can be easily imported to any new NetBox deployment. (This sample data is used to populate the public demo instance at <https://demo.netbox.dev>.)
+Once you have your development environment up and running, it might be helpful to populate some "dummy" data to make interacting with the UI and APIs more convenient. Check out the [netbox-demo-data](https://github.com/netbox-community/netbox-demo-data) repo on GitHub, which houses a collection of sample data that can be easily imported to any new NetBox deployment. This sample data is used to populate the [public demo instance](https://demo.netbox.dev).

The demo data is provided in JSON format and loaded into an empty database using Django's `loaddata` management command. Consult the demo data repo's `README` file for complete instructions on populating the data.
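As a rough illustration of the `loaddata` step described above, a fixture could be loaded from a Django shell as sketched below; the fixture path is an assumption, and the actual file names are given in the demo-data repo's README.

```python
# Run inside the NetBox virtual environment (e.g. via `manage.py shell` or nbshell).
from django.core.management import call_command

# Hypothetical fixture path; substitute the JSON file(s) shipped in netbox-demo-data.
call_command('loaddata', '/opt/netbox-demo-data/netbox-demo-v4.3.json')
```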
@@ -2,9 +2,9 @@

NetBox includes the ability to execute certain functions as background tasks. These include:

* [Report](../customization/reports.md) execution
* [Custom script](../customization/custom-scripts.md) execution
* Synchronization of [remote data sources](../integrations/synchronized-data.md)
* Housekeeping tasks

Additionally, NetBox plugins can enqueue their own background tasks. This is accomplished using the [Job model](../models/core/job.md). Background tasks are executed by the `rqworker` process(es).
@@ -302,13 +302,6 @@ Quit the server with CONTROL-C.

Next, connect to the name or IP of the server (as defined in `ALLOWED_HOSTS`) on port 8000; for example, <http://127.0.0.1:8000/>. You should be greeted with the NetBox home page. Try logging in using the username and password specified when creating a superuser.

-!!! note
-    By default RHEL based distros will likely block your testing attempts with firewalld. The development server port can be opened with `firewall-cmd` (add `--permanent` if you want the rule to survive server restarts):
-
-    ```no-highlight
-    firewall-cmd --zone=public --add-port=8000/tcp
-    ```
-
!!! danger "Not for production use"
    The development server is for development and testing purposes only. It is neither performant nor secure enough for production use. **Do not use it in production.**
@@ -135,7 +135,7 @@ Check out the desired release by specifying its tag. For example:

```
cd /opt/netbox && \
-sudo git fetch && \
+sudo git fetch --tags && \
sudo git checkout v4.2.7
```
Binary file not shown. (Before: 15 KiB; After: 24 KiB)
@@ -24,6 +24,14 @@ Jinja2 template code, if being defined locally rather than replicated from a dat

A dictionary of any additional parameters to pass when instantiating the [Jinja2 environment](https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment). Jinja2 supports various optional parameters which can be used to modify its default behavior.

+The `undefined` and `finalize` Jinja environment parameters, which must reference a Python class or function, can define a dotted path to the desired resource. For example:
+
+```json
+{
+  "undefined": "jinja2.StrictUndefined"
+}
+```
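The following is a minimal sketch, not necessarily NetBox's internal implementation, of how a dotted path such as `jinja2.StrictUndefined` can be resolved and applied when building a Jinja2 environment:

```python
# Sketch only: resolve dotted-path values from an environment_params mapping
# and pass them to a Jinja2 Environment.
from django.utils.module_loading import import_string
from jinja2 import Environment

environment_params = {"undefined": "jinja2.StrictUndefined"}

kwargs = {}
for key, value in environment_params.items():
    # "undefined" and "finalize" must reference a Python class/function,
    # so dotted-path strings are imported before use.
    kwargs[key] = import_string(value) if key in ("undefined", "finalize") else value

env = Environment(**kwargs)
template = env.from_string("Hostname: {{ hostname }}")
# With StrictUndefined, rendering without "hostname" raises UndefinedError
# instead of silently emitting an empty string.
```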
### MIME Type

!!! info "This field was introduced in NetBox v4.3."
@@ -26,6 +26,14 @@ Jinja2 template code for rendering the exported data.

A dictionary of any additional parameters to pass when instantiating the [Jinja2 environment](https://jinja.palletsprojects.com/en/3.1.x/api/#jinja2.Environment). Jinja2 supports various optional parameters which can be used to modify its default behavior.

+The `undefined` and `finalize` Jinja environment parameters, which must reference a Python class or function, can define a dotted path to the desired resource. For example:
+
+```json
+{
+  "undefined": "jinja2.StrictUndefined"
+}
+```
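A small illustration of what `jinja2.StrictUndefined` changes when a template references a variable that is not defined; the template text and data below are invented for the example.

```python
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError

env = Environment(undefined=StrictUndefined)
template = env.from_string("{% for d in queryset %}{{ d.name }},{{ d.serial }}\n{% endfor %}")

devices = [{"name": "switch1"}]  # note: no "serial" key
try:
    print(template.render(queryset=devices))
except UndefinedError as e:
    # StrictUndefined surfaces the missing attribute instead of rendering ""
    print(f"Undefined variable in template: {e}")
```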
### MIME Type

The MIME type to indicate in the response when rendering the export template (optional). Defaults to `text/plain`.
@@ -15,7 +15,6 @@ A background job implements a basic [Job](../../models/core/job.md) executor for

```python title="jobs.py"
from netbox.jobs import JobRunner


class MyTestJob(JobRunner):
    class Meta:
        name = "My Test Job"

@@ -25,6 +24,8 @@ class MyTestJob(JobRunner):
        # your logic goes here
```

+Completed jobs will have their status updated to "completed" by default, or "errored" if an unhandled exception was raised by the `run()` method. To intentionally mark a job as failed, raise the `core.exceptions.JobFailed` exception. (Note that "failed" differs from "errored" in that a failure may be expected under certain conditions, whereas an error is not.)
+
You can schedule the background job from within your code (e.g. from a model's `save()` method or a view) by calling `MyTestJob.enqueue()`. This method passes through all arguments to `Job.enqueue()`. However, no `name` argument must be passed, as the background job name will be used instead.
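Building on the example above, the sketch below shows a job that raises `JobFailed` and is enqueued from a view. The failure condition and the view function are illustrative assumptions, not part of the documented example.

```python
from core.exceptions import JobFailed
from netbox.jobs import JobRunner


class MyTestJob(JobRunner):
    class Meta:
        name = "My Test Job"

    def run(self, *args, **kwargs):
        obj = self.job.object
        if obj is None:
            # An expected condition: mark the job "failed" rather than "errored"
            raise JobFailed("No object is associated with this job.")
        # ... your logic goes here ...


def sync_view(request, device):
    # Enqueue from a view (or a model's save()); no `name` argument is passed,
    # because the job's Meta.name is used instead.
    MyTestJob.enqueue(instance=device, user=request.user)
```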
!!! tip
@@ -80,18 +80,20 @@ GET /api/ipam/vlans/?vid__gt=900

String based (char) fields (Name, Address, etc) support these lookup expressions:

| Filter   | Description                            |
|----------|----------------------------------------|
| `n`      | Not equal to                           |
| `ic`     | Contains (case-insensitive)            |
| `nic`    | Does not contain (case-insensitive)    |
| `isw`    | Starts with (case-insensitive)         |
| `nisw`   | Does not start with (case-insensitive) |
| `iew`    | Ends with (case-insensitive)           |
| `niew`   | Does not end with (case-insensitive)   |
| `ie`     | Exact match (case-insensitive)         |
| `nie`    | Inverse exact match (case-insensitive) |
| `empty`  | Is empty/null (boolean)                |
+| `regex`  | Regexp matching                        |
+| `iregex` | Regexp matching (case-insensitive)     |

Here is an example of a lookup expression on a string field that will return all devices with `switch` in the name:
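As a hypothetical client-side illustration of the new `regex`/`iregex` lookups, the host, token, and pattern below are assumptions for the example:

```python
import requests

resp = requests.get(
    "https://netbox.example.com/api/dcim/devices/",
    params={"name__iregex": r"^(core|edge)-sw\d+$"},  # case-insensitive regex on the name field
    headers={"Authorization": "Token 0123456789abcdef"},
)
resp.raise_for_status()
for device in resp.json()["results"]:
    print(device["name"])
```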
@@ -1,5 +1,43 @@
# NetBox v4.3

## v4.3.5 (2025-07-29)

### Enhancements

* [#18797](https://github.com/netbox-community/netbox/issues/18797) - Added jinja2.StrictUndefined option for config template rendering to catch undefined variables
* [#18936](https://github.com/netbox-community/netbox/issues/18936) - Cable imports now accept color names (e.g. "red", "blue") in addition to hex color codes
* [#19840](https://github.com/netbox-community/netbox/issues/19840) - Cable imports now support specifying site information for better organization
* [#19902](https://github.com/netbox-community/netbox/issues/19902) - Device names in rack elevation SVG exports are automatically truncated to prevent overflow beyond rack unit boundaries
* [#19903](https://github.com/netbox-community/netbox/issues/19903) - String field filters now support `regex` and `iregex` lookups for advanced pattern matching
* [#19910](https://github.com/netbox-community/netbox/issues/19910) - Internet-dependent links are no longer visible when running in air-gapped environments

### Bug Fixes

* [#18900](https://github.com/netbox-community/netbox/issues/18900) - REST API paginator now raises proper exceptions when attempting to paginate unordered querysets
* [#19916](https://github.com/netbox-community/netbox/issues/19916) - Rack elevation image/label dropdown functionality restored
* [#19934](https://github.com/netbox-community/netbox/issues/19934) - Added missing description field to tenant bulk edit form
* [#19956](https://github.com/netbox-community/netbox/issues/19956) - Prevent duplicate deletion records in changelog from cascading deletions

## v4.3.4 (2025-07-15)

### Enhancements

* [#18811](https://github.com/netbox-community/netbox/issues/18811) - Match expanded form IPv6 addresses in global search
* [#19550](https://github.com/netbox-community/netbox/issues/19550) - Enable lazy loading for rack elevations
* [#19571](https://github.com/netbox-community/netbox/issues/19571) - Add a default module type profile for expansion cards
* [#19793](https://github.com/netbox-community/netbox/issues/19793) - Support custom dynamic navigation menu links
* [#19828](https://github.com/netbox-community/netbox/issues/19828) - Expose L2VPN termination in interface GraphQL response

### Bug Fixes

* [#19413](https://github.com/netbox-community/netbox/issues/19413) - Custom fields should be grouped in filter forms
* [#19633](https://github.com/netbox-community/netbox/issues/19633) - Introduce InvalidCondition exception and log all evaluations of invalid event rule conditions
* [#19800](https://github.com/netbox-community/netbox/issues/19800) - Module type bulk import should support profile assignment
* [#19806](https://github.com/netbox-community/netbox/issues/19806) - Introduce JobFailed exception to allow marking background jobs as failed
* [#19827](https://github.com/netbox-community/netbox/issues/19827) - Enforce uniqueness for device role names & slugs
* [#19839](https://github.com/netbox-community/netbox/issues/19839) - Enable export of parent assignment for recursively nested objects
* [#19876](https://github.com/netbox-community/netbox/issues/19876) - Remove Markdown rendering from CustomFieldChoiceSet description field

---

## v4.3.3 (2025-06-26)

### Enhancements
@@ -1,29 +1,28 @@
from django.http import Http404, HttpResponse
from django.shortcuts import get_object_or_404
from django.utils.translation import gettext_lazy as _
from django_rq.queues import get_redis_connection
from django_rq.settings import QUEUES_LIST
from django_rq.utils import get_statistics
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.viewsets import ReadOnlyModelViewSet
from rq.job import Job as RQ_Job
from rq.worker import Worker

from core import filtersets
from core.choices import DataSourceStatusChoices
from core.jobs import SyncDataSourceJob
from core.models import *
from core.utils import delete_rq_job, enqueue_rq_job, get_rq_jobs, requeue_rq_job, stop_rq_job
-from django_rq.queues import get_redis_connection
-from django_rq.utils import get_statistics
-from django_rq.settings import QUEUES_LIST
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.pagination import LimitOffsetListPagination
from netbox.api.viewsets import NetBoxModelViewSet, NetBoxReadOnlyModelViewSet
-from rest_framework import viewsets
-from rest_framework.permissions import IsAdminUser
-from rq.job import Job as RQ_Job
-from rq.worker import Worker
from . import serializers
@@ -50,10 +49,8 @@ class DataSourceViewSet(NetBoxModelViewSet):
        if not request.user.has_perm('core.sync_datasource', obj=datasource):
            raise PermissionDenied(_("This user does not have permission to synchronize this data source."))

-        # Enqueue the sync job & update the DataSource's status
+        # Enqueue the sync job
        SyncDataSourceJob.enqueue(instance=datasource, user=request.user)
-        datasource.status = DataSourceStatusChoices.QUEUED
-        DataSource.objects.filter(pk=datasource.pk).update(status=datasource.status)

        serializer = serializers.DataSourceSerializer(datasource, context={'request': request})
@@ -1,9 +1,19 @@
from django.core.exceptions import ImproperlyConfigured


-class SyncError(Exception):
-    pass
+__all__ = (
+    'IncompatiblePluginError',
+    'JobFailed',
+    'SyncError',
+)


class IncompatiblePluginError(ImproperlyConfigured):
    pass


+class JobFailed(Exception):
+    pass
+
+
class SyncError(Exception):
    pass
@@ -21,6 +21,17 @@ class SyncDataSourceJob(JobRunner):
    class Meta:
        name = 'Synchronization'

+    @classmethod
+    def enqueue(cls, *args, **kwargs):
+        job = super().enqueue(*args, **kwargs)
+
+        # Update the DataSource's synchronization status to queued
+        if datasource := job.object:
+            datasource.status = DataSourceStatusChoices.QUEUED
+            DataSource.objects.filter(pk=datasource.pk).update(status=datasource.status)
+
+        return job
+
    def run(self, *args, **kwargs):
        datasource = DataSource.objects.get(pk=self.job.object_id)
@@ -187,15 +187,14 @@ class Job(models.Model):
        """
        Mark the job as completed, optionally specifying a particular termination status.
        """
-        valid_statuses = JobStatusChoices.TERMINAL_STATE_CHOICES
-        if status not in valid_statuses:
+        if status not in JobStatusChoices.TERMINAL_STATE_CHOICES:
            raise ValueError(
                _("Invalid status for job termination. Choices are: {choices}").format(
-                    choices=', '.join(valid_statuses)
+                    choices=', '.join(JobStatusChoices.TERMINAL_STATE_CHOICES)
                )
            )

-        # Mark the job as completed
+        # Set the job's status and completion time
        self.status = status
        if error:
            self.error = error
@@ -1,10 +1,12 @@
import logging
from threading import local

from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db.models.fields.reverse_related import ManyToManyRel, ManyToOneRel
from django.db.models.signals import m2m_changed, post_save, pre_delete
from django.dispatch import receiver, Signal
from django.core.signals import request_finished
from django.utils.translation import gettext_lazy as _
from django_prometheus.models import model_deletes, model_inserts, model_updates
@@ -42,6 +44,10 @@ clear_events = Signal()

# Change logging & event handling
#

+# Used to track received signals per object
+_signals_received = local()
+

@receiver((post_save, m2m_changed))
def handle_changed_object(sender, instance, **kwargs):
    """
@@ -130,6 +136,16 @@ def handle_deleted_object(sender, instance, **kwargs):
    if request is None:
        return

+    # Check whether we've already processed a pre_delete signal for this object. (This can
+    # happen e.g. when both a parent object and its child are deleted simultaneously, due
+    # to cascading deletion.)
+    if not hasattr(_signals_received, 'pre_delete'):
+        _signals_received.pre_delete = set()
+    signature = (ContentType.objects.get_for_model(instance), instance.pk)
+    if signature in _signals_received.pre_delete:
+        return
+    _signals_received.pre_delete.add(signature)
+
    # Record an ObjectChange if applicable
    if hasattr(instance, 'to_objectchange'):
        if hasattr(instance, 'snapshot') and not getattr(instance, '_prechange_snapshot', None):
@@ -179,6 +195,14 @@ def handle_deleted_object(sender, instance, **kwargs):
    model_deletes.labels(instance._meta.model_name).inc()


+@receiver(request_finished)
+def clear_signal_history(sender, **kwargs):
+    """
+    Clear out the signals history once the request is finished.
+    """
+    _signals_received.pre_delete = set()
+
+
@receiver(clear_events)
def clear_events_queue(sender, **kwargs):
    """
@@ -346,6 +346,38 @@ class ChangeLogViewTest(ModelViewTestCase):
        self.assertEqual(changes[1].changed_object_type, ContentType.objects.get_for_model(Interface))
        self.assertEqual(changes[2].changed_object_type, ContentType.objects.get_for_model(Device))

+    def test_duplicate_deletions(self):
+        """
+        Check that a cascading deletion event does not generate multiple "deleted" ObjectChange records for
+        the same object.
+        """
+        role1 = DeviceRole(name='Role 1', slug='role-1')
+        role1.save()
+        role2 = DeviceRole(name='Role 2', slug='role-2', parent=role1)
+        role2.save()
+        pk_list = [role1.pk, role2.pk]
+
+        # Delete both objects simultaneously
+        form_data = {
+            'pk': pk_list,
+            'confirm': True,
+            '_confirm': True,
+        }
+        request = {
+            'path': reverse('dcim:devicerole_bulk_delete'),
+            'data': post_data(form_data),
+        }
+        self.add_permissions('dcim.delete_devicerole')
+        self.assertHttpStatus(self.client.post(**request), 302)
+
+        # This should result in exactly one change record per object
+        objectchanges = ObjectChange.objects.filter(
+            changed_object_type=ContentType.objects.get_for_model(DeviceRole),
+            changed_object_id__in=pk_list,
+            action=ObjectChangeActionChoices.ACTION_DELETE
+        )
+        self.assertEqual(objectchanges.count(), 2)
+

class ChangeLogAPITest(APITestCase):
@@ -33,7 +33,6 @@ from utilities.json import ConfigJSONEncoder
from utilities.query import count_related
from utilities.views import ContentTypePermissionRequiredMixin, GetRelatedModelsMixin, register_model_view
from . import filtersets, forms, tables
-from .choices import DataSourceStatusChoices
from .jobs import SyncDataSourceJob
from .models import *
from .plugins import get_catalog_plugins, get_local_plugins

@@ -78,12 +77,8 @@ class DataSourceSyncView(BaseObjectView):

    def post(self, request, pk):
        datasource = get_object_or_404(self.queryset, pk=pk)

-        # Enqueue the sync job & update the DataSource's status
+        # Enqueue the sync job
        job = SyncDataSourceJob.enqueue(instance=datasource, user=request.user)
-        datasource.status = DataSourceStatusChoices.QUEUED
-        DataSource.objects.filter(pk=datasource.pk).update(status=datasource.status)

        messages.success(
            request,
            _("Queued job #{id} to sync {datasource}").format(id=job.pk, datasource=datasource)
@@ -1515,34 +1515,34 @@ class DeviceComponentFilterSet(django_filters.FilterSet):
        label=_('Site group (slug)'),
    )
    site_id = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__site',
+        field_name='_site',
        queryset=Site.objects.all(),
        label=_('Site (ID)'),
    )
    site = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__site__slug',
+        field_name='_site__slug',
        queryset=Site.objects.all(),
        to_field_name='slug',
        label=_('Site name (slug)'),
    )
    location_id = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__location',
+        field_name='_location',
        queryset=Location.objects.all(),
        label=_('Location (ID)'),
    )
    location = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__location__slug',
+        field_name='_location__slug',
        queryset=Location.objects.all(),
        to_field_name='slug',
        label=_('Location (slug)'),
    )
    rack_id = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__rack',
+        field_name='_rack',
        queryset=Rack.objects.all(),
        label=_('Rack (ID)'),
    )
    rack = django_filters.ModelMultipleChoiceFilter(
-        field_name='device__rack__name',
+        field_name='_rack__name',
        queryset=Rack.objects.all(),
        to_field_name='name',
        label=_('Rack (name)'),
@@ -470,8 +470,8 @@ class ModuleTypeImportForm(NetBoxModelImportForm):
    class Meta:
        model = ModuleType
        fields = [
-            'manufacturer', 'model', 'part_number', 'description', 'airflow', 'weight', 'weight_unit', 'comments',
-            'tags',
+            'manufacturer', 'model', 'part_number', 'description', 'airflow', 'weight', 'weight_unit', 'profile',
+            'comments', 'tags'
        ]
@@ -1335,6 +1335,13 @@ class MACAddressImportForm(NetBoxModelImportForm):

class CableImportForm(NetBoxModelImportForm):
    # Termination A
+    side_a_site = CSVModelChoiceField(
+        label=_('Side A site'),
+        queryset=Site.objects.all(),
+        required=False,
+        to_field_name='name',
+        help_text=_('Site of parent device A (if any)'),
+    )
    side_a_device = CSVModelChoiceField(
        label=_('Side A device'),
        queryset=Device.objects.all(),

@@ -1353,6 +1360,13 @@ class CableImportForm(NetBoxModelImportForm):
    )

    # Termination B
+    side_b_site = CSVModelChoiceField(
+        label=_('Side B site'),
+        queryset=Site.objects.all(),
+        required=False,
+        to_field_name='name',
+        help_text=_('Site of parent device B (if any)'),
+    )
    side_b_device = CSVModelChoiceField(
        label=_('Side B device'),
        queryset=Device.objects.all(),

@@ -1396,14 +1410,39 @@ class CableImportForm(NetBoxModelImportForm):
        required=False,
        help_text=_('Length unit')
    )
+    color = forms.CharField(
+        label=_('Color'),
+        required=False,
+        max_length=16,
+        help_text=_('Color name (e.g. "Red") or hex code (e.g. "f44336")')
+    )

    class Meta:
        model = Cable
        fields = [
-            'side_a_device', 'side_a_type', 'side_a_name', 'side_b_device', 'side_b_type', 'side_b_name', 'type',
-            'status', 'tenant', 'label', 'color', 'length', 'length_unit', 'description', 'comments', 'tags',
+            'side_a_site', 'side_a_device', 'side_a_type', 'side_a_name', 'side_b_site', 'side_b_device', 'side_b_type',
+            'side_b_name', 'type', 'status', 'tenant', 'label', 'color', 'length', 'length_unit', 'description',
+            'comments', 'tags',
        ]

+    def __init__(self, data=None, *args, **kwargs):
+        super().__init__(data, *args, **kwargs)
+
+        if data:
+            # Limit choices for side_a_device to the assigned side_a_site
+            if side_a_site := data.get('side_a_site'):
+                side_a_device_params = {f'site__{self.fields["side_a_site"].to_field_name}': side_a_site}
+                self.fields['side_a_device'].queryset = self.fields['side_a_device'].queryset.filter(
+                    **side_a_device_params
+                )
+
+            # Limit choices for side_b_device to the assigned side_b_site
+            if side_b_site := data.get('side_b_site'):
+                side_b_device_params = {f'site__{self.fields["side_b_site"].to_field_name}': side_b_site}
+                self.fields['side_b_device'].queryset = self.fields['side_b_device'].queryset.filter(
+                    **side_b_device_params
+                )
+
    def _clean_side(self, side):
        """
        Derive a Cable's A/B termination objects.

@@ -1440,6 +1479,24 @@ class CableImportForm(NetBoxModelImportForm):
        setattr(self.instance, f'{side}_terminations', [termination_object])
        return termination_object

+    def _clean_color(self, color):
+        """
+        Derive a colors hex code
+
+        :param color: color as hex or color name
+        """
+        color_parsed = color.strip().lower()
+
+        for hex_code, label in ColorChoices.CHOICES:
+            if color.lower() == label.lower():
+                color_parsed = hex_code
+
+        if len(color_parsed) > 6:
+            raise forms.ValidationError(
+                _(f"{color} did not match any used color name and was longer than six characters: invalid hex.")
+            )
+        return color_parsed
+
    def clean_side_a_name(self):
        return self._clean_side('a')

@@ -1451,11 +1508,14 @@ class CableImportForm(NetBoxModelImportForm):
        length_unit = self.cleaned_data.get('length_unit', None)
        return length_unit if length_unit is not None else ''

+    def clean_color(self):
+        color = self.cleaned_data.get('color', None)
+        return self._clean_color(color) if color is not None else ''

#
# Virtual chassis
#


class VirtualChassisImportForm(NetBoxModelImportForm):
    master = CSVModelChoiceField(
        label=_('Master'),
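As a sketch of how the new `side_a_site`/`side_b_site` columns and color-name support might be exercised in an import file, the snippet below writes a single hypothetical CSV row; all object names and the termination type value are illustrative assumptions.

```python
import csv
import sys

fieldnames = [
    "side_a_site", "side_a_device", "side_a_type", "side_a_name",
    "side_b_site", "side_b_device", "side_b_type", "side_b_name",
    "type", "status", "color",
]
writer = csv.DictWriter(sys.stdout, fieldnames=fieldnames)
writer.writeheader()
writer.writerow({
    "side_a_site": "DC1", "side_a_device": "sw1", "side_a_type": "dcim.interface", "side_a_name": "Ethernet1",
    "side_b_site": "DC1", "side_b_device": "sw2", "side_b_type": "dcim.interface", "side_b_name": "Ethernet1",
    "type": "cat6", "status": "connected",
    "color": "red",  # a color name is accepted in place of a hex code such as f44336
})
```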
@@ -33,6 +33,7 @@ if TYPE_CHECKING:
    from tenancy.graphql.types import TenantType
    from users.graphql.types import UserType
    from virtualization.graphql.types import ClusterType, VMInterfaceType, VirtualMachineType
+    from vpn.graphql.types import L2VPNTerminationType
    from wireless.graphql.types import WirelessLANType, WirelessLinkType

__all__ = (

@@ -440,6 +441,7 @@ class InterfaceType(IPAddressesMixin, ModularComponentType, CabledObjectMixin, P
    primary_mac_address: Annotated["MACAddressType", strawberry.lazy('dcim.graphql.types')] | None
    qinq_svlan: Annotated["VLANType", strawberry.lazy('ipam.graphql.types')] | None
    vlan_translation_policy: Annotated["VLANTranslationPolicyType", strawberry.lazy('ipam.graphql.types')] | None
+    l2vpn_termination: Annotated["L2VPNTerminationType", strawberry.lazy('vpn.graphql.types')] | None

    vdcs: List[Annotated["VirtualDeviceContextType", strawberry.lazy('dcim.graphql.types')]]
    tagged_vlans: List[Annotated["VLANType", strawberry.lazy('ipam.graphql.types')]]
@@ -19,7 +19,8 @@ def load_initial_data(apps, schema_editor):
        'gpu',
        'hard_disk',
        'memory',
-        'power_supply'
+        'power_supply',
+        'expansion_card'
    )

    for name in initial_profiles:
netbox/dcim/migrations/0208_devicerole_uniqueness.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0207_remove_redundant_indexes'),
        ('extras', '0129_fix_script_paths'),
    ]

    operations = [
        migrations.AddConstraint(
            model_name='devicerole',
            constraint=models.UniqueConstraint(
                fields=('parent', 'name'),
                name='dcim_devicerole_parent_name'
            ),
        ),
        migrations.AddConstraint(
            model_name='devicerole',
            constraint=models.UniqueConstraint(
                condition=models.Q(('parent__isnull', True)),
                fields=('name',),
                name='dcim_devicerole_name',
                violation_error_message='A top-level device role with this name already exists.'
            ),
        ),
        migrations.AddConstraint(
            model_name='devicerole',
            constraint=models.UniqueConstraint(
                fields=('parent', 'slug'),
                name='dcim_devicerole_parent_slug'
            ),
        ),
        migrations.AddConstraint(
            model_name='devicerole',
            constraint=models.UniqueConstraint(
                condition=models.Q(('parent__isnull', True)),
                fields=('slug',),
                name='dcim_devicerole_slug',
                violation_error_message='A top-level device role with this slug already exists.'
            ),
        ),
    ]
@@ -0,0 +1,287 @@
|
||||
import django.db.models.deletion
|
||||
from django.db import migrations, models
|
||||
from django.db.models import OuterRef, Subquery
|
||||
|
||||
|
||||
def populate_denormalized_data(apps, schema_editor):
|
||||
Device = apps.get_model('dcim', 'Device')
|
||||
component_models = (
|
||||
apps.get_model('dcim', 'ConsolePort'),
|
||||
apps.get_model('dcim', 'ConsoleServerPort'),
|
||||
apps.get_model('dcim', 'PowerPort'),
|
||||
apps.get_model('dcim', 'PowerOutlet'),
|
||||
apps.get_model('dcim', 'Interface'),
|
||||
apps.get_model('dcim', 'FrontPort'),
|
||||
apps.get_model('dcim', 'RearPort'),
|
||||
apps.get_model('dcim', 'DeviceBay'),
|
||||
apps.get_model('dcim', 'ModuleBay'),
|
||||
apps.get_model('dcim', 'InventoryItem'),
|
||||
)
|
||||
|
||||
for model in component_models:
|
||||
subquery = Device.objects.filter(pk=OuterRef('device_id'))
|
||||
model.objects.update(
|
||||
_site=Subquery(subquery.values('site_id')[:1]),
|
||||
_location=Subquery(subquery.values('location_id')[:1]),
|
||||
_rack=Subquery(subquery.values('rack_id')[:1]),
|
||||
)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('dcim', '0208_devicerole_uniqueness'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.AddField(
|
||||
model_name='consoleport',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='consoleport',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='consoleport',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='consoleserverport',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='consoleserverport',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='consoleserverport',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='devicebay',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='devicebay',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='devicebay',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='frontport',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='frontport',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='frontport',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='interface',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='interface',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='interface',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryitem',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryitem',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='inventoryitem',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='modulebay',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='modulebay',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='modulebay',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='poweroutlet',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='poweroutlet',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='poweroutlet',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='powerport',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='powerport',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='powerport',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rearport',
|
||||
name='_location',
|
||||
field=models.ForeignKey(
|
||||
blank=True,
|
||||
null=True,
|
||||
on_delete=django.db.models.deletion.SET_NULL,
|
||||
related_name='+',
|
||||
to='dcim.location',
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rearport',
|
||||
name='_rack',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.rack'
|
||||
),
|
||||
),
|
||||
migrations.AddField(
|
||||
model_name='rearport',
|
||||
name='_site',
|
||||
field=models.ForeignKey(
|
||||
blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='dcim.site'
|
||||
),
|
||||
),
|
||||
migrations.RunPython(populate_denormalized_data),
|
||||
]
|
||||
@@ -0,0 +1,15 @@
{
    "name": "Expansion card",
    "schema": {
        "properties": {
            "connector_type": {
                "type": "string",
                "description": "Connector type e.g. PCIe x4"
            },
            "bandwidth": {
                "type": "integer",
                "description": "Total Bandwidth for this module"
            }
        }
    }
}
@@ -11,6 +11,7 @@ from dcim.choices import *
from dcim.constants import *
from dcim.fields import PathField
from dcim.utils import decompile_path_node, object_to_path_node
+from netbox.choices import ColorChoices
from netbox.models import ChangeLoggedModel, PrimaryModel
from utilities.conversion import to_meters
from utilities.exceptions import AbortRequest

@@ -155,6 +156,15 @@ class Cable(PrimaryModel):
        self._terminations_modified = True
        self._b_terminations = value

+    @property
+    def color_name(self):
+        color_name = ""
+        for hex_code, label in ColorChoices.CHOICES:
+            if hex_code.lower() == self.color.lower():
+                color_name = str(label)
+
+        return color_name
+
    def clean(self):
        super().clean()
@@ -65,6 +65,29 @@ class ComponentModel(NetBoxModel):
        blank=True
    )

+    # Denormalized references replicated from the parent Device
+    _site = models.ForeignKey(
+        to='dcim.Site',
+        on_delete=models.SET_NULL,
+        related_name='+',
+        blank=True,
+        null=True,
+    )
+    _location = models.ForeignKey(
+        to='dcim.Location',
+        on_delete=models.SET_NULL,
+        related_name='+',
+        blank=True,
+        null=True,
+    )
+    _rack = models.ForeignKey(
+        to='dcim.Rack',
+        on_delete=models.SET_NULL,
+        related_name='+',
+        blank=True,
+        null=True,
+    )
+
    class Meta:
        abstract = True
        ordering = ('device', 'name')

@@ -100,6 +123,14 @@ class ComponentModel(NetBoxModel):
                "device": _("Components cannot be moved to a different device.")
            })

+    def save(self, *args, **kwargs):
+        # Save denormalized references
+        self._site = self.device.site
+        self._location = self.device.location
+        self._rack = self.device.rack
+
+        super().save(*args, **kwargs)
+
    @property
    def parent_object(self):
        return self.device
@@ -8,7 +8,7 @@ from django.core.exceptions import ValidationError
from django.core.files.storage import default_storage
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
-from django.db.models import F, ProtectedError
+from django.db.models import F, ProtectedError, prefetch_related_objects
from django.db.models.functions import Lower
from django.db.models.signals import post_save
from django.urls import reverse

@@ -28,6 +28,7 @@ from netbox.models import NestedGroupModel, OrganizationalModel, PrimaryModel
from netbox.models.mixins import WeightMixin
from netbox.models.features import ContactsMixin, ImageAttachmentsMixin
from utilities.fields import ColorField, CounterCacheField
+from utilities.prefetch import get_prefetchable_fields
from utilities.tracking import TrackingModelMixin
from .device_components import *
from .mixins import RenderConfigMixin

@@ -398,6 +399,28 @@ class DeviceRole(NestedGroupModel):

    class Meta:
        ordering = ('name',)
+        constraints = (
+            models.UniqueConstraint(
+                fields=('parent', 'name'),
+                name='%(app_label)s_%(class)s_parent_name'
+            ),
+            models.UniqueConstraint(
+                fields=('name',),
+                name='%(app_label)s_%(class)s_name',
+                condition=Q(parent__isnull=True),
+                violation_error_message=_("A top-level device role with this name already exists.")
+            ),
+            models.UniqueConstraint(
+                fields=('parent', 'slug'),
+                name='%(app_label)s_%(class)s_parent_slug'
+            ),
+            models.UniqueConstraint(
+                fields=('slug',),
+                name='%(app_label)s_%(class)s_slug',
+                condition=Q(parent__isnull=True),
+                violation_error_message=_("A top-level device role with this slug already exists.")
+            ),
+        )
        verbose_name = _('device role')
        verbose_name_plural = _('device roles')

@@ -902,7 +925,10 @@ class Device(
        if cf_defaults := CustomField.objects.get_defaults_for_model(model):
            for component in components:
                component.custom_field_data = cf_defaults
-        model.objects.bulk_create(components)
+        components = model.objects.bulk_create(components)
+        # Prefetch related objects to minimize queries needed during post_save
+        prefetch_fields = get_prefetchable_fields(model)
+        prefetch_related_objects(components, *prefetch_fields)
        # Manually send the post_save signal for each of the newly created components
        for component in components:
            post_save.send(
@@ -3,13 +3,28 @@ import logging
from django.db.models.signals import post_save, post_delete, pre_delete
from django.dispatch import receiver

-from .choices import CableEndChoices, LinkStatusChoices
+from dcim.choices import CableEndChoices, LinkStatusChoices
from .models import (
-    Cable, CablePath, CableTermination, Device, FrontPort, PathEndpoint, PowerPanel, Rack, Location, VirtualChassis,
+    Cable, CablePath, CableTermination, ConsolePort, ConsoleServerPort, Device, DeviceBay, FrontPort, Interface,
+    InventoryItem, ModuleBay, PathEndpoint, PowerOutlet, PowerPanel, PowerPort, Rack, RearPort, Location,
+    VirtualChassis,
)
from .models.cables import trace_paths
from .utils import create_cablepath, rebuild_paths

+COMPONENT_MODELS = (
+    ConsolePort,
+    ConsoleServerPort,
+    DeviceBay,
+    FrontPort,
+    Interface,
+    InventoryItem,
+    ModuleBay,
+    PowerOutlet,
+    PowerPort,
+    RearPort,
+)
+

#
# Location/rack/device assignment
#

@@ -39,6 +54,20 @@ def handle_rack_site_change(instance, created, **kwargs):
    Device.objects.filter(rack=instance).update(site=instance.site, location=instance.location)


+@receiver(post_save, sender=Device)
+def handle_device_site_change(instance, created, **kwargs):
+    """
+    Update child components to update the parent Site, Location, and Rack when a Device is saved.
+    """
+    if not created:
+        for model in COMPONENT_MODELS:
+            model.objects.filter(device=instance).update(
+                _site=instance.site,
+                _location=instance.location,
+                _rack=instance.rack,
+            )
+
+
#
# Virtual chassis
#
@@ -3,6 +3,7 @@ import svgwrite
from svgwrite.container import Hyperlink
from svgwrite.image import Image
from svgwrite.gradients import LinearGradient
+from svgwrite.masking import ClipPath
from svgwrite.shapes import Rect
from svgwrite.text import Text

@@ -67,6 +68,20 @@ def get_device_description(device):
    return description


+def truncate_text(text, width, font_size=15):
+    """
+    Truncate text to fit within the width of a rectangle.
+
+    :param text: The text to truncate
+    :param width: Width of rectangle
+    :param font_size: Font size (default is 15, ~0.875rem)
+    """
+    char_width = font_size * 0.6  # 0.6 is an approximation of the average character width in pixels
+    max_char = int(width / char_width)
+
+    return text if len(text) <= max_char else text[:max_char] + '...'
+
+
class RackElevationSVG:
    """
    Use this class to render a rack elevation as an SVG image.

@@ -177,12 +192,26 @@ class RackElevationSVG:
        link = Hyperlink(href=f'{self.base_url}{device.get_absolute_url()}', target="_parent")
        link.set_desc(description)

+        # Create clipPath element
+        # This is necessary as fallback because the truncate_text method is an approximation
+        clip_id = f"clip-{device.id}"
+        clip_path = ClipPath(id=clip_id)
+        clip_path.add(Rect(coords, size))
+
+        self.drawing.defs.add(clip_path)
+
+        # Name to display
+        display_name = truncate_text(name, size[0])
+
        # Add rect element to hyperlink
        if color:
            link.add(Rect(coords, size, style=f'fill: #{color}', class_=f'slot{css_extra}'))
        else:
            link.add(Rect(coords, size, class_=f'slot blocked{css_extra}'))
-        link.add(Text(name, insert=text_coords, fill=text_color, class_=f'label{css_extra}'))
+        link.add(
+            Text(display_name, insert=text_coords, fill=text_color, clip_path=f"url(#{clip_id})",
+                 class_=f'label{css_extra}')
+        )

        # Embed device type image if provided
        if self.include_images and image:
@@ -113,6 +113,10 @@ class CableTable(TenancyColumnsMixin, NetBoxTable):
        order_by=('_abs_length')
    )
    color = columns.ColorColumn()
+    color_name = tables.Column(
+        verbose_name=_('Color Name'),
+        orderable=False
+    )
    comments = columns.MarkdownColumn()
    tags = columns.TagColumn(
        url_name='dcim:cable_list'

@@ -123,7 +127,7 @@ class CableTable(TenancyColumnsMixin, NetBoxTable):
        fields = (
            'pk', 'id', 'label', 'a_terminations', 'b_terminations', 'device_a', 'device_b', 'rack_a', 'rack_b',
            'location_a', 'location_b', 'site_a', 'site_b', 'status', 'type', 'tenant', 'tenant_group', 'color',
-            'length', 'description', 'comments', 'tags', 'created', 'last_updated',
+            'color_name', 'length', 'description', 'comments', 'tags', 'created', 'last_updated',
        )
        default_columns = (
            'pk', 'id', 'label', 'a_terminations', 'b_terminations', 'status', 'type',
@@ -63,6 +63,10 @@ class DeviceRoleTable(NetBoxTable):
        verbose_name=_('Name'),
        linkify=True
    )
+    parent = tables.Column(
+        verbose_name=_('Parent'),
+        linkify=True,
+    )
    device_count = columns.LinkedCountColumn(
        viewname='dcim:device_list',
        url_params={'role_id': 'pk'},

@@ -88,8 +92,8 @@ class DeviceRoleTable(NetBoxTable):
    class Meta(NetBoxTable.Meta):
        model = models.DeviceRole
        fields = (
-            'pk', 'id', 'name', 'device_count', 'vm_count', 'color', 'vm_role', 'config_template', 'description',
-            'slug', 'tags', 'actions', 'created', 'last_updated',
+            'pk', 'id', 'name', 'parent', 'device_count', 'vm_count', 'color', 'vm_role', 'config_template',
+            'description', 'slug', 'tags', 'actions', 'created', 'last_updated',
        )
        default_columns = ('pk', 'name', 'device_count', 'vm_count', 'color', 'vm_role', 'description')
@@ -24,6 +24,10 @@ class RegionTable(ContactsColumnMixin, NetBoxTable):
verbose_name=_('Name'),
linkify=True
)
parent = tables.Column(
verbose_name=_('Parent'),
linkify=True,
)
site_count = columns.LinkedCountColumn(
viewname='dcim:site_list',
url_params={'region_id': 'pk'},
@@ -39,7 +43,7 @@ class RegionTable(ContactsColumnMixin, NetBoxTable):
class Meta(NetBoxTable.Meta):
model = Region
fields = (
'pk', 'id', 'name', 'slug', 'site_count', 'description', 'comments', 'contacts', 'tags',
'pk', 'id', 'name', 'parent', 'slug', 'site_count', 'description', 'comments', 'contacts', 'tags',
'created', 'last_updated', 'actions',
)
default_columns = ('pk', 'name', 'site_count', 'description')
@@ -54,6 +58,10 @@ class SiteGroupTable(ContactsColumnMixin, NetBoxTable):
verbose_name=_('Name'),
linkify=True
)
parent = tables.Column(
verbose_name=_('Parent'),
linkify=True,
)
site_count = columns.LinkedCountColumn(
viewname='dcim:site_list',
url_params={'group_id': 'pk'},
@@ -69,7 +77,7 @@ class SiteGroupTable(ContactsColumnMixin, NetBoxTable):
class Meta(NetBoxTable.Meta):
model = SiteGroup
fields = (
'pk', 'id', 'name', 'slug', 'site_count', 'description', 'comments', 'contacts', 'tags',
'pk', 'id', 'name', 'parent', 'slug', 'site_count', 'description', 'comments', 'contacts', 'tags',
'created', 'last_updated', 'actions',
)
default_columns = ('pk', 'name', 'site_count', 'description')
@@ -135,6 +143,10 @@ class LocationTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
verbose_name=_('Name'),
linkify=True
)
parent = tables.Column(
verbose_name=_('Parent'),
linkify=True,
)
site = tables.Column(
verbose_name=_('Site'),
linkify=True
@@ -170,8 +182,8 @@ class LocationTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
class Meta(NetBoxTable.Meta):
model = Location
fields = (
'pk', 'id', 'name', 'site', 'status', 'facility', 'tenant', 'tenant_group', 'rack_count', 'device_count',
'description', 'slug', 'comments', 'contacts', 'tags', 'actions', 'created', 'last_updated',
'pk', 'id', 'name', 'parent', 'site', 'status', 'facility', 'tenant', 'tenant_group', 'rack_count',
'device_count', 'description', 'slug', 'comments', 'contacts', 'tags', 'actions', 'created', 'last_updated',
'vlangroup_count',
)
default_columns = (
@@ -3367,9 +3367,36 @@ class ConsolePortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedF
ConsoleServerPort.objects.bulk_create(console_server_ports)

console_ports = (
ConsolePort(device=devices[0], module=modules[0], name='Console Port 1', label='A', description='First'),
ConsolePort(device=devices[1], module=modules[1], name='Console Port 2', label='B', description='Second'),
ConsolePort(device=devices[2], module=modules[2], name='Console Port 3', label='C', description='Third'),
ConsolePort(
device=devices[0],
module=modules[0],
name='Console Port 1',
label='A',
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
ConsolePort(
device=devices[1],
module=modules[1],
name='Console Port 2',
label='B',
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
ConsolePort(
device=devices[2],
module=modules[2],
name='Console Port 3',
label='C',
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
)
ConsolePort.objects.bulk_create(console_ports)

@@ -3581,13 +3608,34 @@ class ConsoleServerPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeL

console_server_ports = (
ConsoleServerPort(
device=devices[0], module=modules[0], name='Console Server Port 1', label='A', description='First'
device=devices[0],
module=modules[0],
name='Console Server Port 1',
label='A',
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
ConsoleServerPort(
device=devices[1], module=modules[1], name='Console Server Port 2', label='B', description='Second'
device=devices[1],
module=modules[1],
name='Console Server Port 2',
label='B',
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
ConsoleServerPort(
device=devices[2], module=modules[2], name='Console Server Port 3', label='C', description='Third'
device=devices[2],
module=modules[2],
name='Console Server Port 3',
label='C',
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
)
ConsoleServerPort.objects.bulk_create(console_server_ports)
@@ -3807,6 +3855,9 @@ class PowerPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
maximum_draw=100,
allocated_draw=50,
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
PowerPort(
device=devices[1],
@@ -3816,6 +3867,9 @@ class PowerPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
maximum_draw=200,
allocated_draw=100,
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
PowerPort(
device=devices[2],
@@ -3825,6 +3879,9 @@ class PowerPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
maximum_draw=300,
allocated_draw=150,
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
)
PowerPort.objects.bulk_create(power_ports)
@@ -4053,6 +4110,9 @@ class PowerOutletTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedF
description='First',
color='ff0000',
status=PowerOutletStatusChoices.STATUS_ENABLED,
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
PowerOutlet(
device=devices[1],
@@ -4063,6 +4123,9 @@ class PowerOutletTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedF
description='Second',
color='00ff00',
status=PowerOutletStatusChoices.STATUS_DISABLED,
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
PowerOutlet(
device=devices[2],
@@ -4073,6 +4136,9 @@ class PowerOutletTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedF
description='Third',
color='0000ff',
status=PowerOutletStatusChoices.STATUS_FAULTY,
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
)
PowerOutlet.objects.bulk_create(power_outlets)
@@ -4381,13 +4447,19 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
poe_mode=InterfacePoEModeChoices.MODE_PSE,
poe_type=InterfacePoETypeChoices.TYPE_1_8023AF,
vlan_translation_policy=vlan_translation_policies[0],
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
Interface(
device=devices[1],
module=modules[1],
name='VC Chassis Interface',
type=InterfaceTypeChoices.TYPE_1GE_SFP,
enabled=True
enabled=True,
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
Interface(
device=devices[2],
@@ -4406,6 +4478,9 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
poe_mode=InterfacePoEModeChoices.MODE_PD,
poe_type=InterfacePoETypeChoices.TYPE_1_8023AF,
vlan_translation_policy=vlan_translation_policies[0],
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
Interface(
device=devices[3],
@@ -4424,6 +4499,9 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
poe_mode=InterfacePoEModeChoices.MODE_PSE,
poe_type=InterfacePoETypeChoices.TYPE_2_8023AT,
vlan_translation_policy=vlan_translation_policies[1],
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
Interface(
device=devices[4],
@@ -4440,6 +4518,9 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
mode=InterfaceModeChoices.MODE_Q_IN_Q,
qinq_svlan=vlans[0],
vlan_translation_policy=vlan_translation_policies[1],
_site=devices[4].site,
_location=devices[4].location,
_rack=devices[4].rack,
),
Interface(
device=devices[4],
@@ -4450,7 +4531,10 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
mgmt_only=True,
tx_power=40,
mode=InterfaceModeChoices.MODE_Q_IN_Q,
qinq_svlan=vlans[1]
qinq_svlan=vlans[1],
_site=devices[4].site,
_location=devices[4].location,
_rack=devices[4].rack,
),
Interface(
device=devices[4],
@@ -4461,7 +4545,10 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
mgmt_only=False,
tx_power=40,
mode=InterfaceModeChoices.MODE_Q_IN_Q,
qinq_svlan=vlans[2]
qinq_svlan=vlans[2],
_site=devices[4].site,
_location=devices[4].location,
_rack=devices[4].rack,
),
Interface(
device=devices[4],
@@ -4470,7 +4557,10 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
rf_role=WirelessRoleChoices.ROLE_AP,
rf_channel=WirelessChannelChoices.CHANNEL_24G_1,
rf_channel_frequency=2412,
rf_channel_width=22
rf_channel_width=22,
_site=devices[4].site,
_location=devices[4].location,
_rack=devices[4].rack,
),
Interface(
device=devices[4],
@@ -4479,7 +4569,10 @@ class InterfaceTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
rf_role=WirelessRoleChoices.ROLE_STATION,
rf_channel=WirelessChannelChoices.CHANNEL_5G_32,
rf_channel_frequency=5160,
rf_channel_width=20
rf_channel_width=20,
_site=devices[4].site,
_location=devices[4].location,
_rack=devices[4].rack,
),
)
Interface.objects.bulk_create(interfaces)
@@ -4906,6 +4999,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
rear_port=rear_ports[0],
rear_port_position=1,
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
FrontPort(
device=devices[1],
@@ -4917,6 +5013,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
rear_port=rear_ports[1],
rear_port_position=2,
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
FrontPort(
device=devices[2],
@@ -4928,6 +5027,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
rear_port=rear_ports[2],
rear_port_position=3,
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
FrontPort(
device=devices[3],
@@ -4936,6 +5038,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
type=PortTypeChoices.TYPE_FC,
rear_port=rear_ports[3],
rear_port_position=1,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
FrontPort(
device=devices[3],
@@ -4944,6 +5049,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
type=PortTypeChoices.TYPE_FC,
rear_port=rear_ports[4],
rear_port_position=1,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
FrontPort(
device=devices[3],
@@ -4952,6 +5060,9 @@ class FrontPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
type=PortTypeChoices.TYPE_FC,
rear_port=rear_ports[5],
rear_port_position=1,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
)
FrontPort.objects.bulk_create(front_ports)
@@ -5168,6 +5279,9 @@ class RearPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFilt
color=ColorChoices.COLOR_RED,
positions=1,
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
RearPort(
device=devices[1],
@@ -5178,6 +5292,9 @@ class RearPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFilt
color=ColorChoices.COLOR_GREEN,
positions=2,
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
RearPort(
device=devices[2],
@@ -5188,10 +5305,40 @@ class RearPortTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFilt
color=ColorChoices.COLOR_BLUE,
positions=3,
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
RearPort(
device=devices[3],
name='Rear Port 4',
label='D',
type=PortTypeChoices.TYPE_FC,
positions=4,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
RearPort(
device=devices[3],
name='Rear Port 5',
label='E',
type=PortTypeChoices.TYPE_FC,
positions=5,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
RearPort(
device=devices[3],
name='Rear Port 6',
label='F',
type=PortTypeChoices.TYPE_FC,
positions=6,
_site=devices[3].site,
_location=devices[3].location,
_rack=devices[3].rack,
),
RearPort(device=devices[3], name='Rear Port 4', label='D', type=PortTypeChoices.TYPE_FC, positions=4),
RearPort(device=devices[3], name='Rear Port 5', label='E', type=PortTypeChoices.TYPE_FC, positions=5),
RearPort(device=devices[3], name='Rear Port 6', label='F', type=PortTypeChoices.TYPE_FC, positions=6),
)
RearPort.objects.bulk_create(rear_ports)

@@ -5550,9 +5697,33 @@ class DeviceBayTestCase(TestCase, DeviceComponentFilterSetTests, ChangeLoggedFil
Device.objects.bulk_create(devices)

device_bays = (
DeviceBay(device=devices[0], name='Device Bay 1', label='A', description='First'),
DeviceBay(device=devices[1], name='Device Bay 2', label='B', description='Second'),
DeviceBay(device=devices[2], name='Device Bay 3', label='C', description='Third'),
DeviceBay(
device=devices[0],
name='Device Bay 1',
label='A',
description='First',
_site=devices[0].site,
_location=devices[0].location,
_rack=devices[0].rack,
),
DeviceBay(
device=devices[1],
name='Device Bay 2',
label='B',
description='Second',
_site=devices[1].site,
_location=devices[1].location,
_rack=devices[1].rack,
),
DeviceBay(
device=devices[2],
name='Device Bay 3',
label='C',
description='Third',
_site=devices[2].site,
_location=devices[2].location,
_rack=devices[2].rack,
),
)
DeviceBay.objects.bulk_create(device_bays)

@@ -3,7 +3,7 @@ from decimal import Decimal
from zoneinfo import ZoneInfo

import yaml
from django.test import override_settings
from django.test import override_settings, tag
from django.urls import reverse
from netaddr import EUI

@@ -1000,18 +1000,7 @@ inventory-items:
self.assertEqual(response.get('Content-Type'), 'text/csv; charset=utf-8')


# TODO: Change base class to PrimaryObjectViewTestCase
# Blocked by absence of bulk import view for ModuleTypes
class ModuleTypeTestCase(
ViewTestCases.GetObjectViewTestCase,
ViewTestCases.GetObjectChangelogViewTestCase,
ViewTestCases.CreateObjectViewTestCase,
ViewTestCases.EditObjectViewTestCase,
ViewTestCases.DeleteObjectViewTestCase,
ViewTestCases.ListObjectsViewTestCase,
ViewTestCases.BulkEditObjectsViewTestCase,
ViewTestCases.BulkDeleteObjectsViewTestCase
):
class ModuleTypeTestCase(ViewTestCases.PrimaryObjectViewTestCase):
model = ModuleType

@classmethod
@@ -1023,7 +1012,7 @@ class ModuleTypeTestCase(
)
Manufacturer.objects.bulk_create(manufacturers)

ModuleType.objects.bulk_create([
module_types = ModuleType.objects.bulk_create([
ModuleType(model='Module Type 1', manufacturer=manufacturers[0]),
ModuleType(model='Module Type 2', manufacturer=manufacturers[0]),
ModuleType(model='Module Type 3', manufacturer=manufacturers[0]),
@@ -1031,6 +1020,8 @@ class ModuleTypeTestCase(

tags = create_tags('Alpha', 'Bravo', 'Charlie')

fan_module_type_profile = ModuleTypeProfile.objects.get(name='Fan')

cls.form_data = {
'manufacturer': manufacturers[1].pk,
'model': 'Device Type X',
@@ -1044,6 +1035,70 @@ class ModuleTypeTestCase(
'part_number': '456DEF',
}

cls.csv_data = (
"manufacturer,model,part_number,comments,profile",
f"Manufacturer 1,fan0,generic-fan,,{fan_module_type_profile.name}"
)

cls.csv_update_data = (
"id,model",
f"{module_types[0].id},test model",
)

@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
def test_bulk_update_objects_with_permission(self):
self.add_permissions(
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
)

# run base test
super().test_bulk_update_objects_with_permission()

@tag('regression')
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
def test_bulk_import_objects_with_permission(self):
self.add_permissions(
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
)

# run base test
super().test_bulk_import_objects_with_permission()

# TODO: remove extra regression asserts once parent test supports testing all import fields
fan_module_type = ModuleType.objects.get(part_number='generic-fan')
fan_module_type_profile = ModuleTypeProfile.objects.get(name='Fan')

assert fan_module_type.profile == fan_module_type_profile

@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'], EXEMPT_EXCLUDE_MODELS=[])
def test_bulk_import_objects_with_constrained_permission(self):
self.add_permissions(
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
)

super().test_bulk_import_objects_with_constrained_permission()

@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
def test_moduletype_consoleports(self):
moduletype = ModuleType.objects.first()
@@ -1804,9 +1859,9 @@ class DeviceRoleTestCase(ViewTestCases.OrganizationalObjectViewTestCase):

cls.csv_data = (
"name,slug,color",
"Device Role 4,device-role-4,ff0000",
"Device Role 5,device-role-5,00ff00",
"Device Role 6,device-role-6,0000ff",
"Device Role 6,device-role-6,ff0000",
"Device Role 7,device-role-7,00ff00",
"Device Role 8,device-role-8,0000ff",
)

cls.csv_update_data = (
@@ -3211,17 +3266,27 @@ class CableTestCase(
@classmethod
def setUpTestData(cls):

site = Site.objects.create(name='Site 1', slug='site-1')
sites = (
Site(name='Site 1', slug='site-1'),
Site(name='Site 2', slug='site-2'),
)
Site.objects.bulk_create(sites)
manufacturer = Manufacturer.objects.create(name='Manufacturer 1', slug='manufacturer-1')
devicetype = DeviceType.objects.create(model='Device Type 1', manufacturer=manufacturer)
role = DeviceRole.objects.create(name='Device Role 1', slug='device-role-1')
vc = VirtualChassis.objects.create(name='Virtual Chassis')

# NOTE: By design, NetBox now allows for the creation of devices with the same name if they belong to
# different sites.
# The CSV test below demonstrates that devices with identical names on different sites can be created
# and referenced successfully.
devices = (
Device(name='Device 1', site=site, device_type=devicetype, role=role),
Device(name='Device 2', site=site, device_type=devicetype, role=role),
Device(name='Device 3', site=site, device_type=devicetype, role=role),
Device(name='Device 4', site=site, device_type=devicetype, role=role),
# Create 'Device 1' assigned to 'Site 1'
Device(name='Device 1', site=sites[0], device_type=devicetype, role=role),
Device(name='Device 2', site=sites[0], device_type=devicetype, role=role),
Device(name='Device 3', site=sites[0], device_type=devicetype, role=role),
# Create 'Device 1' assigned to 'Site 2' (allowed since the site is different)
Device(name='Device 1', site=sites[1], device_type=devicetype, role=role),
)
Device.objects.bulk_create(devices)

@@ -3272,13 +3337,15 @@ class CableTestCase(
'tags': [t.pk for t in tags],
}

# Ensure that CSV bulk import supports assigning terminations from parent devices that share
# the same device name, provided those devices belong to different sites.
cls.csv_data = (
"side_a_device,side_a_type,side_a_name,side_b_device,side_b_type,side_b_name",
"Device 3,dcim.interface,Interface 1,Device 4,dcim.interface,Interface 1",
"Device 3,dcim.interface,Interface 2,Device 4,dcim.interface,Interface 2",
"Device 3,dcim.interface,Interface 3,Device 4,dcim.interface,Interface 3",
"Device 1,dcim.interface,Device 2 Interface,Device 4,dcim.interface,Interface 4",
"Device 1,dcim.interface,Device 3 Interface,Device 4,dcim.interface,Interface 5",
"side_a_site,side_a_device,side_a_type,side_a_name,side_b_site,side_b_device,side_b_type,side_b_name",
"Site 1,Device 3,dcim.interface,Interface 1,Site 2,Device 1,dcim.interface,Interface 1",
"Site 1,Device 3,dcim.interface,Interface 2,Site 2,Device 1,dcim.interface,Interface 2",
"Site 1,Device 3,dcim.interface,Interface 3,Site 2,Device 1,dcim.interface,Interface 3",
"Site 1,Device 1,dcim.interface,Device 2 Interface,Site 2,Device 1,dcim.interface,Interface 4",
"Site 1,Device 1,dcim.interface,Device 3 Interface,Site 2,Device 1,dcim.interface,Interface 5",
)

cls.csv_update_data = (

@@ -185,7 +185,9 @@ class TagViewSet(NetBoxModelViewSet):


class TaggedItemViewSet(RetrieveModelMixin, ListModelMixin, BaseViewSet):
queryset = TaggedItem.objects.prefetch_related('content_type', 'content_object', 'tag')
queryset = TaggedItem.objects.prefetch_related(
'content_type', 'content_object', 'tag'
).order_by('tag__weight', 'tag__name')
serializer_class = serializers.TaggedItemSerializer
filterset_class = filtersets.TaggedItemFilterSet

@@ -1,13 +1,14 @@
import functools
import operator
import re
from django.utils.translation import gettext as _

__all__ = (
'Condition',
'ConditionSet',
'InvalidCondition',
)


AND = 'and'
OR = 'or'

@@ -19,6 +20,10 @@ def is_ruleset(data):
return type(data) is dict and len(data) == 1 and list(data.keys())[0] in (AND, OR)


class InvalidCondition(Exception):
pass


class Condition:
"""
An individual conditional rule that evaluates a single attribute and its value.
@@ -61,6 +66,7 @@ class Condition:

self.attr = attr
self.value = value
self.op = op
self.eval_func = getattr(self, f'eval_{op}')
self.negate = negate

@@ -70,16 +76,17 @@ class Condition:
"""
def _get(obj, key):
if isinstance(obj, list):
return [dict.get(i, key) for i in obj]

return dict.get(obj, key)
return [operator.getitem(item or {}, key) for item in obj]
return operator.getitem(obj or {}, key)

try:
value = functools.reduce(_get, self.attr.split('.'), data)
except TypeError:
# Invalid key path
value = None
result = self.eval_func(value)
except KeyError:
raise InvalidCondition(f"Invalid key path: {self.attr}")
try:
result = self.eval_func(value)
except TypeError as e:
raise InvalidCondition(f"Invalid data type at '{self.attr}' for '{self.op}' evaluation: {e}")

if self.negate:
return not result

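A minimal sketch of the revised behaviour (the data values below are made up): a missing key or an operand of the wrong type now surfaces as InvalidCondition instead of quietly evaluating to False.

```python
from extras.conditions import Condition, InvalidCondition

c = Condition('x.y', 1, 'eq')
print(c.eval({'x': {'y': 1}}))   # True

try:
    c.eval({'x': {}})            # 'y' is absent from the data
except InvalidCondition as e:
    print(e)                     # Invalid key path: x.y
```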
@@ -21,6 +21,12 @@ WEBHOOK_EVENT_TYPES = {
JOB_ERRORED: 'job_ended',
}

# Jinja environment parameters which support path imports
JINJA_ENV_PARAMS_WITH_PATH_IMPORT = (
'undefined',
'finalize',
)

# Dashboard
DEFAULT_DASHBOARD = [
{

@@ -192,5 +192,5 @@ def flush_events(events):
try:
func = import_string(name)
func(events)
except Exception as e:
except ImportError as e:
logger.error(_("Cannot import events pipeline {name} error: {error}").format(name=name, error=e))

@@ -18,9 +18,22 @@ class Empty(Lookup):
return f"CAST(LENGTH({sql}) AS BOOLEAN) IS TRUE", params


class NetHost(Lookup):
"""
Similar to ipam.lookups.NetHost, but casts the field to INET.
"""
lookup_name = 'net_host'

def as_sql(self, qn, connection):
lhs, lhs_params = self.process_lhs(qn, connection)
rhs, rhs_params = self.process_rhs(qn, connection)
params = lhs_params + rhs_params
return 'HOST(CAST(%s AS INET)) = HOST(%s)' % (lhs, rhs), params


class NetContainsOrEquals(Lookup):
"""
This lookup has the same functionality as the one from the ipam app except lhs is cast to inet
Similar to ipam.lookups.NetContainsOrEquals, but casts the field to INET.
"""
lookup_name = 'net_contains_or_equals'

@@ -32,4 +45,5 @@ class NetContainsOrEquals(Lookup):


CharField.register_lookup(Empty)
CachedValueField.register_lookup(NetHost)
CachedValueField.register_lookup(NetContainsOrEquals)

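Assuming the lookup is registered on the field that stores cached search values as above, a filter along these lines should translate to the HOST(CAST(... AS INET)) comparison; the CachedValue import location and the sample address are illustrative, not confirmed by this diff.

```python
from extras.models import CachedValue  # assumed home of the cached search values

# Match a host regardless of the stored mask length (e.g. .../24 vs .../32):
CachedValue.objects.filter(value__net_host='192.0.2.1/32')
```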
@@ -2,16 +2,17 @@ import importlib.abc
import importlib.util
import os
import sys

from django.core.files.storage import storages
from django.db import models
from django.utils.translation import gettext_lazy as _
from django.http import HttpResponse
from django.utils.module_loading import import_string
from django.utils.translation import gettext_lazy as _

from extras.constants import DEFAULT_MIME_TYPE
from extras.constants import DEFAULT_MIME_TYPE, JINJA_ENV_PARAMS_WITH_PATH_IMPORT
from extras.utils import filename_from_model, filename_from_object
from utilities.jinja2 import render_jinja2


__all__ = (
'PythonModuleMixin',
'RenderTemplateMixin',
@@ -125,12 +126,22 @@ class RenderTemplateMixin(models.Model):
class_name=self.__class__
))

def get_environment_params(self):
"""
Pre-processing of any defined Jinja environment parameters (e.g. to support path resolution).
"""
params = self.environment_params or {}
for name, value in params.items():
if name in JINJA_ENV_PARAMS_WITH_PATH_IMPORT and type(value) is str:
params[name] = import_string(value)
return params

def render(self, context=None, queryset=None):
"""
Render the template with the provided context. The context is passed to the Jinja2 environment as a dictionary.
"""
context = self.get_context(context=context, queryset=queryset)
env_params = self.environment_params or {}
env_params = self.get_environment_params()
output = render_jinja2(self.template_code, context, env_params, getattr(self, 'data_file', None))

# Replace CRLF-style line terminators

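For instance, environment_params stored as JSON on a template can now carry a dotted path for the whitelisted keys; get_environment_params() resolves it via import_string() before the Jinja environment is built. The StrictUndefined choice below is only an example of the kind of import this enables.

```python
environment_params = {"undefined": "jinja2.StrictUndefined"}
# At render time this becomes {"undefined": jinja2.StrictUndefined}, because
# 'undefined' is listed in JINJA_ENV_PARAMS_WITH_PATH_IMPORT.
```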
@@ -13,7 +13,7 @@ from rest_framework.utils.encoders import JSONEncoder

from core.models import ObjectType
from extras.choices import *
from extras.conditions import ConditionSet
from extras.conditions import ConditionSet, InvalidCondition
from extras.constants import *
from extras.utils import image_upload
from extras.models.mixins import RenderTemplateMixin
@@ -142,7 +142,15 @@ class EventRule(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLogged
if not self.conditions:
return True

return ConditionSet(self.conditions).eval(data)
logger = logging.getLogger('netbox.event_rules')

try:
result = ConditionSet(self.conditions).eval(data)
logger.debug(f'{self.name}: Evaluated as {result}')
return result
except InvalidCondition as e:
logger.error(f"{self.name}: Evaluation failed. {e}")
return False


class Webhook(CustomFieldsMixin, ExportTemplatesMixin, TagsMixin, ChangeLoggedModel):

@@ -83,3 +83,6 @@ class TaggedItem(GenericTaggedItemBase):
indexes = [models.Index(fields=["content_type", "object_id"])]
verbose_name = _('tagged item')
verbose_name_plural = _('tagged items')
# Note: while there is no ordering applied here (because it would basically be done on fields
# of the related `tag`), there is an ordering applied to extras.api.views.TaggedItemViewSet
# to allow for proper pagination.

@@ -4,7 +4,7 @@ from django.test import TestCase
from core.events import *
from dcim.choices import SiteStatusChoices
from dcim.models import Site
from extras.conditions import Condition, ConditionSet
from extras.conditions import Condition, ConditionSet, InvalidCondition
from extras.events import serialize_for_event
from extras.forms import EventRuleForm
from extras.models import EventRule, Webhook
@@ -12,16 +12,11 @@ from extras.models import EventRule, Webhook

class ConditionTestCase(TestCase):

def test_dotted_path_access(self):
c = Condition('a.b.c', 1, 'eq')
self.assertTrue(c.eval({'a': {'b': {'c': 1}}}))
self.assertFalse(c.eval({'a': {'b': {'c': 2}}}))
self.assertFalse(c.eval({'a': {'b': {'x': 1}}}))

def test_undefined_attr(self):
c = Condition('x', 1, 'eq')
self.assertFalse(c.eval({}))
self.assertTrue(c.eval({'x': 1}))
with self.assertRaises(InvalidCondition):
c.eval({})

#
# Validation tests
@@ -37,10 +32,13 @@ class ConditionTestCase(TestCase):
# dict type is unsupported
Condition('x', 1, dict())

def test_invalid_op_type(self):
def test_invalid_op_types(self):
with self.assertRaises(ValueError):
# 'gt' supports only numeric values
Condition('x', 'foo', 'gt')
with self.assertRaises(ValueError):
# 'in' supports only iterable values
Condition('x', 123, 'in')

#
# Nested attrs tests
@@ -50,7 +48,10 @@ class ConditionTestCase(TestCase):
c = Condition('x.y.z', 1)
self.assertTrue(c.eval({'x': {'y': {'z': 1}}}))
self.assertFalse(c.eval({'x': {'y': {'z': 2}}}))
self.assertFalse(c.eval({'a': {'b': {'c': 1}}}))
with self.assertRaises(InvalidCondition):
c.eval({'x': {'y': None}})
with self.assertRaises(InvalidCondition):
c.eval({'x': {'y': {'a': 1}}})

#
# Operator tests
@@ -74,23 +75,31 @@ class ConditionTestCase(TestCase):
c = Condition('x', 1, 'gt')
self.assertTrue(c.eval({'x': 2}))
self.assertFalse(c.eval({'x': 1}))
with self.assertRaises(InvalidCondition):
c.eval({'x': 'foo'}) # Invalid type

def test_gte(self):
c = Condition('x', 1, 'gte')
self.assertTrue(c.eval({'x': 2}))
self.assertTrue(c.eval({'x': 1}))
self.assertFalse(c.eval({'x': 0}))
with self.assertRaises(InvalidCondition):
c.eval({'x': 'foo'}) # Invalid type

def test_lt(self):
c = Condition('x', 2, 'lt')
self.assertTrue(c.eval({'x': 1}))
self.assertFalse(c.eval({'x': 2}))
with self.assertRaises(InvalidCondition):
c.eval({'x': 'foo'}) # Invalid type

def test_lte(self):
c = Condition('x', 2, 'lte')
self.assertTrue(c.eval({'x': 1}))
self.assertTrue(c.eval({'x': 2}))
self.assertFalse(c.eval({'x': 3}))
with self.assertRaises(InvalidCondition):
c.eval({'x': 'foo'}) # Invalid type

def test_in(self):
c = Condition('x', [1, 2, 3], 'in')
@@ -106,6 +115,8 @@ class ConditionTestCase(TestCase):
c = Condition('x', 1, 'contains')
self.assertTrue(c.eval({'x': [1, 2, 3]}))
self.assertFalse(c.eval({'x': [2, 3, 4]}))
with self.assertRaises(InvalidCondition):
c.eval({'x': 123}) # Invalid type

def test_contains_negated(self):
c = Condition('x', 1, 'contains', negate=True)

@@ -162,6 +162,11 @@ class Aggregate(ContactsMixin, GetAvailablePrefixesMixin, PrimaryModel):
return self.prefix.version
return None

@property
def ipv6_full(self):
if self.prefix and self.prefix.version == 6:
return netaddr.IPAddress(self.prefix).format(netaddr.ipv6_full)

def get_child_prefixes(self):
"""
Return all Prefixes within this Aggregate
@@ -330,6 +335,11 @@ class Prefix(ContactsMixin, GetAvailablePrefixesMixin, CachedScopeMixin, Primary
def mask_length(self):
return self.prefix.prefixlen if self.prefix else None

@property
def ipv6_full(self):
if self.prefix and self.prefix.version == 6:
return netaddr.IPAddress(self.prefix).format(netaddr.ipv6_full)

@property
def depth(self):
return self._depth
@@ -808,6 +818,11 @@ class IPAddress(ContactsMixin, PrimaryModel):
self._original_assigned_object_id = self.__dict__.get('assigned_object_id')
self._original_assigned_object_type_id = self.__dict__.get('assigned_object_type_id')

@property
def ipv6_full(self):
if self.address and self.address.version == 6:
return netaddr.IPAddress(self.address).format(netaddr.ipv6_full)

def get_duplicates(self):
return IPAddress.objects.filter(
vrf=self.vrf,

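The new ipv6_full property simply expands a v6 value to its fully written-out form via netaddr; a standalone equivalent (sample address made up):

```python
import netaddr

print(netaddr.IPAddress('2001:db8::1').format(netaddr.ipv6_full))
# 2001:0db8:0000:0000:0000:0000:0000:0001
```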
@@ -12,3 +12,7 @@ class SerializerNotFound(Exception):

class GraphQLTypeNotFound(Exception):
pass


class QuerySetNotOrdered(Exception):
pass

@@ -1,6 +1,7 @@
from django.db.models import QuerySet
from rest_framework.pagination import LimitOffsetPagination

from netbox.api.exceptions import QuerySetNotOrdered
from netbox.config import get_config


@@ -15,6 +16,12 @@ class OptionalLimitOffsetPagination(LimitOffsetPagination):

def paginate_queryset(self, queryset, request, view=None):

if isinstance(queryset, QuerySet) and not queryset.ordered:
raise QuerySetNotOrdered(
"Paginating over an unordered queryset is unreliable. Ensure that a minimal "
"ordering has been applied to the queryset for this API endpoint."
)

if isinstance(queryset, QuerySet):
self.count = self.get_queryset_count(queryset)
else:

@@ -8,6 +8,7 @@ from django_pglocks import advisory_lock
from rq.timeouts import JobTimeoutException

from core.choices import JobStatusChoices
from core.exceptions import JobFailed
from core.models import Job, ObjectType
from netbox.constants import ADVISORY_LOCK_KEYS
from netbox.registry import registry
@@ -73,15 +74,21 @@ class JobRunner(ABC):
This method is called by the Job Scheduler to handle the execution of all job commands. It will maintain the
job's metadata and handle errors. For periodic jobs, a new job is automatically scheduled using its `interval`.
"""
logger = logging.getLogger('netbox.jobs')

try:
job.start()
cls(job).run(*args, **kwargs)
job.terminate()

except JobFailed:
logger.warning(f"Job {job} failed")
job.terminate(status=JobStatusChoices.STATUS_FAILED)

except Exception as e:
job.terminate(status=JobStatusChoices.STATUS_ERRORED, error=repr(e))
if type(e) is JobTimeoutException:
logging.error(e)
logger.error(e)

# If the executed job is a periodic job, schedule its next execution at the specified interval.
finally:

@@ -1,6 +1,8 @@
from dataclasses import dataclass
from typing import Sequence, Optional

from django.urls import reverse_lazy


__all__ = (
'get_model_item',
@@ -22,20 +24,46 @@ class MenuItemButton:

link: str
title: str
icon_class: str
_url: Optional[str] = None
permissions: Optional[Sequence[str]] = ()
color: Optional[str] = None

def __post_init__(self):
if self.link:
self._url = reverse_lazy(self.link)

@property
def url(self):
return self._url

@url.setter
def url(self, value):
self._url = value


@dataclass
class MenuItem:

link: str
link_text: str
_url: Optional[str] = None
permissions: Optional[Sequence[str]] = ()
auth_required: Optional[bool] = False
staff_only: Optional[bool] = False
buttons: Optional[Sequence[MenuItemButton]] = ()

def __post_init__(self):
if self.link:
self._url = reverse_lazy(self.link)

@property
def url(self):
return self._url

@url.setter
def url(self, value):
self._url = value


@dataclass
class MenuGroup:

@@ -1,3 +1,4 @@
from django.urls import reverse_lazy
from django.utils.text import slugify
from django.utils.translation import gettext as _

@@ -32,17 +33,23 @@ class PluginMenuItem:
This class represents a navigation menu item. This constitutes primary link and its text, but also allows for
specifying additional link buttons that appear to the right of the item in the van menu.

Links are specified as Django reverse URL strings.
Links are specified as Django reverse URL strings suitable for rendering via {% url item.link %}.
Alternatively, a pre-generated url can be set on the object which will be rendered literally.
Buttons are each specified as a list of PluginMenuButton instances.
"""
permissions = []
buttons = []
_url = None

def __init__(self, link, link_text, auth_required=False, staff_only=False, permissions=None, buttons=None):
def __init__(
self, link, link_text, auth_required=False, staff_only=False, permissions=None, buttons=None
):
self.link = link
self.link_text = link_text
self.auth_required = auth_required
self.staff_only = staff_only
if link:
self._url = reverse_lazy(link)
if permissions is not None:
if type(permissions) not in (list, tuple):
raise TypeError(_("Permissions must be passed as a tuple or list."))
@@ -52,6 +59,14 @@ class PluginMenuItem:
raise TypeError(_("Buttons must be passed as a tuple or list."))
self.buttons = buttons

@property
def url(self):
return self._url

@url.setter
def url(self, value):
self._url = value


class PluginMenuButton:
"""
@@ -60,11 +75,14 @@ class PluginMenuButton:
"""
color = ButtonColorChoices.DEFAULT
permissions = []
_url = None

def __init__(self, link, title, icon_class, color=None, permissions=None):
self.link = link
self.title = title
self.icon_class = icon_class
if link:
self._url = reverse_lazy(link)
if permissions is not None:
if type(permissions) not in (list, tuple):
raise TypeError(_("Permissions must be passed as a tuple or list."))
@@ -73,3 +91,11 @@ class PluginMenuButton:
if color not in ButtonColorChoices.values():
raise ValueError(_("Button color must be a choice within ButtonColorChoices."))
self.color = color

@property
def url(self):
return self._url

@url.setter
def url(self, value):
self._url = value

@@ -115,11 +115,13 @@ class CachedValueSearchBackend(SearchBackend):
if lookup in (LookupTypes.STARTSWITH, LookupTypes.ENDSWITH):
# "Starts/ends with" matches are valid only on string values
query_filter &= Q(type=FieldTypes.STRING)
elif lookup == LookupTypes.PARTIAL:
elif lookup in (LookupTypes.PARTIAL, LookupTypes.EXACT):
try:
# If the value looks like an IP address, add an extra match for CIDR values
# If the value looks like an IP address, add extra filters for CIDR/INET values
address = str(netaddr.IPNetwork(value.strip()).cidr)
query_filter |= Q(type=FieldTypes.CIDR) & Q(value__net_contains_or_equals=address)
query_filter |= Q(type=FieldTypes.INET) & Q(value__net_host=address)
if lookup == LookupTypes.PARTIAL:
query_filter |= Q(type=FieldTypes.CIDR) & Q(value__net_contains_or_equals=address)
except (AddrFormatError, ValueError):
pass

@@ -1,8 +1,13 @@
import uuid

from django.test import RequestFactory, TestCase
from django.urls import reverse
from rest_framework.request import Request

from netbox.api.exceptions import QuerySetNotOrdered
from netbox.api.pagination import OptionalLimitOffsetPagination
from utilities.testing import APITestCase
from users.models import Token


class AppTest(APITestCase):
@@ -26,3 +31,40 @@ class AppTest(APITestCase):
response = self.client.get(f'{url}?format=api', **self.header)

self.assertEqual(response.status_code, 200)


class OptionalLimitOffsetPaginationTest(TestCase):

def setUp(self):
self.paginator = OptionalLimitOffsetPagination()
self.factory = RequestFactory()

def _make_drf_request(self, path='/', query_params=None):
"""Helper to create a proper DRF Request object"""
return Request(self.factory.get(path, query_params or {}))

def test_raises_exception_for_unordered_queryset(self):
"""Should raise QuerySetNotOrdered for unordered QuerySet"""
queryset = Token.objects.all().order_by()
request = self._make_drf_request()

with self.assertRaises(QuerySetNotOrdered) as cm:
self.paginator.paginate_queryset(queryset, request)

error_msg = str(cm.exception)
self.assertIn("Paginating over an unordered queryset is unreliable", error_msg)
self.assertIn("Ensure that a minimal ordering has been applied", error_msg)

def test_allows_ordered_queryset(self):
"""Should not raise exception for ordered QuerySet"""
queryset = Token.objects.all().order_by('created')
request = self._make_drf_request()

self.paginator.paginate_queryset(queryset, request) # Should not raise exception

def test_allows_non_queryset_iterables(self):
"""Should not raise exception for non-QuerySet iterables"""
iterable = [1, 2, 3, 4, 5]
request = self._make_drf_request()

self.paginator.paginate_queryset(iterable, request) # Should not raise exception

@@ -7,11 +7,15 @@ from django_rq import get_queue
from ..jobs import *
from core.models import DataSource, Job
from core.choices import JobStatusChoices
from core.exceptions import JobFailed
from utilities.testing import disable_warnings


class TestJobRunner(JobRunner):

def run(self, *args, **kwargs):
pass
if kwargs.get('make_fail', False):
raise JobFailed()


class JobRunnerTestCase(TestCase):
@@ -49,6 +53,12 @@ class JobRunnerTest(JobRunnerTestCase):

self.assertEqual(job.status, JobStatusChoices.STATUS_COMPLETED)

def test_handle_failed(self):
with disable_warnings('netbox.jobs'):
job = TestJobRunner.enqueue(immediate=True, make_fail=True)

self.assertEqual(job.status, JobStatusChoices.STATUS_FAILED)

def test_handle_errored(self):
class ErroredJobRunner(TestJobRunner):
EXP = Exception('Test error')

netbox/project-static/dist/netbox.css (vendored): diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js (vendored): diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js.map (vendored): diff suppressed because one or more lines are too long
@@ -23,13 +23,13 @@
},
"dependencies": {
"@mdi/font": "7.4.47",
"@tabler/core": "1.3.2",
"@tabler/core": "1.4.0",
"bootstrap": "5.3.7",
"clipboard": "2.0.11",
"flatpickr": "4.6.13",
"gridstack": "12.2.1",
"htmx.org": "2.0.5",
"query-string": "9.2.1",
"gridstack": "12.2.2",
"htmx.org": "2.0.6",
"query-string": "9.2.2",
"sass": "1.89.2",
"tom-select": "2.4.3",
"typeface-inter": "3.18.1",
@@ -39,15 +39,15 @@
"@types/bootstrap": "5.2.10",
"@types/cookie": "^0.6.0",
"@types/node": "^22.3.0",
"@typescript-eslint/eslint-plugin": "^8.1.0",
"@typescript-eslint/parser": "^8.1.0",
"esbuild": "^0.25.3",
"@typescript-eslint/eslint-plugin": "^8.37.0",
"@typescript-eslint/parser": "^8.37.0",
"esbuild": "^0.25.6",
"esbuild-sass-plugin": "^3.3.1",
"eslint": "<9.0",
"eslint-config-prettier": "^9.1.0",
"eslint-import-resolver-typescript": "^3.6.3",
"eslint-plugin-import": "^2.30.0",
"eslint-plugin-prettier": "^5.2.1",
"eslint-plugin-import": "^2.32.0",
"eslint-plugin-prettier": "^5.5.1",
"prettier": "^3.3.3",
"typescript": "<5.5"
},

@@ -1,5 +1,20 @@
import { getElements } from '../util';

/**
* Move selected options from one select element to another.
*
* @param source Select Element
* @param target Select Element
*/
function moveOption(source: HTMLSelectElement, target: HTMLSelectElement): void {
for (const option of Array.from(source.options)) {
if (option.selected) {
target.appendChild(option.cloneNode(true));
option.remove();
}
}
}

/**
* Move selected options of a select element up in order.
*
@@ -39,23 +54,35 @@ function moveOptionDown(element: HTMLSelectElement): void {
}

/**
* Initialize move up/down buttons.
* Initialize select/move buttons.
*/
export function initMoveButtons(): void {
for (const button of getElements<HTMLButtonElement>('#move-option-up')) {
// Move selected option(s) between lists
for (const button of getElements<HTMLButtonElement>('.move-option')) {
const source = button.getAttribute('data-source');
const target = button.getAttribute('data-target');
if (target !== null) {
for (const select of getElements<HTMLSelectElement>(`#${target}`)) {
button.addEventListener('click', () => moveOptionUp(select));
}
const source_select = document.getElementById(`id_${source}`) as HTMLSelectElement;
const target_select = document.getElementById(`id_${target}`) as HTMLSelectElement;
if (source_select !== null && target_select !== null) {
button.addEventListener('click', () => moveOption(source_select, target_select));
}
}
for (const button of getElements<HTMLButtonElement>('#move-option-down')) {

// Move selected option(s) up in current list
for (const button of getElements<HTMLButtonElement>('.move-option-up')) {
const target = button.getAttribute('data-target');
if (target !== null) {
for (const select of getElements<HTMLSelectElement>(`#${target}`)) {
button.addEventListener('click', () => moveOptionDown(select));
}
const target_select = document.getElementById(`id_${target}`) as HTMLSelectElement;
if (target_select !== null) {
button.addEventListener('click', () => moveOptionUp(target_select));
}
}

// Move selected option(s) down in current list
for (const button of getElements<HTMLButtonElement>('.move-option-down')) {
const target = button.getAttribute('data-target');
const target_select = document.getElementById(`id_${target}`) as HTMLSelectElement;
if (target_select !== null) {
button.addEventListener('click', () => moveOptionDown(target_select));
}
}
}

@@ -35,7 +35,7 @@ function showRackElements(
selector: string,
elevation: HTMLObjectElement,
): void {
const elements = elevation.contentDocument?.querySelectorAll(selector) ?? [];
const elements = elevation.querySelectorAll(selector) ?? [];
for (const element of elements) {
element.classList.remove('hidden');
}
@@ -45,7 +45,7 @@ function hideRackElements(
selector: string,
elevation: HTMLObjectElement,
): void {
const elements = elevation.contentDocument?.querySelectorAll(selector) ?? [];
const elements = elevation.querySelectorAll(selector) ?? [];
for (const element of elements) {
element.classList.add('hidden');
}

netbox/project-static/styles/custom/racks.scss (new file, 4 lines)
@@ -0,0 +1,4 @@
.rack-loading-container {
min-height: 200px;
margin-left: 30px;
}
@@ -27,3 +27,4 @@
@import 'custom/markdown';
@import 'custom/misc';
@import 'custom/notifications';
@import 'custom/racks';

@@ -1,3 +1,3 @@
version: "4.3.3"
version: "4.3.5"
edition: "Community"
published: "2025-06-26"
published: "2025-07-29"

@@ -55,7 +55,7 @@ Blocks:
{# Release info #}
<div class="text-muted text-center fs-5 my-3">
{{ settings.RELEASE.name }}
{% if not settings.RELEASE.features.commercial %}
{% if not settings.RELEASE.features.commercial and not settings.ISOLATED_DEPLOYMENT %}
<div>
<a href="https://netboxlabs.com/netbox-cloud/" class="text-muted">{% trans "Get" %} Cloud</a> |
<a href="https://netboxlabs.com/netbox-enterprise/" class="text-muted">{% trans "Get" %} Enterprise</a>
@@ -184,7 +184,7 @@ Blocks:
{% endif %}

{# Commercial links #}
{% if settings.RELEASE.features.commercial %}
{% if settings.RELEASE.features.commercial and not settings.ISOLATED_DEPLOYMENT %}
{# LinkedIn #}
<li class="list-inline-item">
<a href="https://www.linkedin.com/company/netboxlabs/" target="_blank" class="link-secondary" rel="noopener" aria-label="LinkedIn">
@@ -199,7 +199,7 @@ Blocks:
</li>

{# Community links #}
{% else %}
{% elif not settings.ISOLATED_DEPLOYMENT %}
{# GitHub #}
<li class="list-inline-item">
<a href="https://github.com/netbox-community/netbox" target="_blank" class="link-secondary" rel="noopener" aria-label="{% trans "Source Code" %}">

@@ -1,6 +1,17 @@
 {% load i18n %}
-<div style="margin-left: -30px">
-  <object data="{% url 'dcim-api:rack-elevation' pk=object.pk %}?face={{face}}&render=svg{% if extra_params %}&{{ extra_params }}{% endif %}" class="rack_elevation" aria-label="{% trans "Rack elevation" %}"></object>
+<div style="margin-left: -30px" class="rack_elevation">
+  <div
+    hx-get="{% url 'dcim-api:rack-elevation' pk=object.pk %}?face={{ face }}&render=svg{% if extra_params %}&{{ extra_params }}{% endif %}"
+    hx-trigger="intersect"
+    hx-swap="outerHTML"
+    aria-label="{% trans "Rack elevation" %}"
+  >
+    <div class="d-flex justify-content-center align-items-center rack-loading-container">
+      <div class="spinner-border" role="status">
+        <span class="visually-hidden">{% trans "Loading..." %}</span>
+      </div>
+    </div>
+  </div>
 </div>
 <div class="text-center mt-3">
   <a class="btn btn-outline-primary" href="{% url 'dcim-api:rack-elevation' pk=object.pk %}?face={{face}}&render=svg{% if extra_params %}&{{ extra_params }}{% endif %}" hx-boost="false">
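With this change the elevation SVG is no longer embedded via an `<object>` tag; instead htmx issues a GET to the rack elevation endpoint the first time the placeholder scrolls into view (`hx-trigger="intersect"`) and swaps the returned markup over the spinner. The same endpoint can be called directly; a rough sketch using the `requests` library, where the hostname, rack ID, and token are placeholders:

```python
import requests

NETBOX_URL = "https://netbox.example.com"   # placeholder
RACK_ID = 123                               # placeholder
TOKEN = "0123456789abcdef0123456789abcdef01234567"  # placeholder

# 'dcim-api:rack-elevation' in the template resolves to this detail endpoint;
# face and render=svg mirror the query string the template builds above.
response = requests.get(
    f"{NETBOX_URL}/api/dcim/racks/{RACK_ID}/elevation/",
    params={"face": "front", "render": "svg"},
    headers={"Authorization": f"Token {TOKEN}"},
)
response.raise_for_status()
svg_markup = response.text  # the SVG fragment that htmx swaps into the page
```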
@@ -14,7 +14,7 @@
   </tr>
   <tr>
     <th scope="row">Description</th>
-    <td>{{ object.description|markdown|placeholder }}</td>
+    <td>{{ object.description|placeholder }}</td>
   </tr>
   <tr>
     <th scope="row">Base Choices</th>
@@ -36,10 +36,10 @@
   <div class="col-5 text-center">
     <label class="form-label">{{ form.columns.label }}</label>
     {{ form.columns }}
-    <a tabindex="0" class="btn btn-primary btn-sm mt-2" id="move-option-up" data-target="id_columns">
+    <a tabindex="0" class="btn btn-primary btn-sm mt-2 move-option-up" data-target="columns">
       <i class="mdi mdi-arrow-up-bold"></i> {% trans "Move Up" %}
     </a>
-    <a tabindex="0" class="btn btn-primary btn-sm mt-2" id="move-option-down" data-target="id_columns">
+    <a tabindex="0" class="btn btn-primary btn-sm mt-2 move-option-down" data-target="columns">
       <i class="mdi mdi-arrow-down-bold"></i> {% trans "Move Down" %}
     </a>
   </div>
@@ -29,11 +29,7 @@
       <div class="hr-text">
         <span>{% trans "Custom Fields" %}</span>
       </div>
-      {% for name in filter_form.custom_fields %}
-        {% with field=filter_form|get_item:name %}
-          {% render_field field %}
-        {% endwith %}
-      {% endfor %}
+      {% render_custom_fields filter_form %}
     </div>
   {% endif %}
 </div>
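The per-field loop (which pulled each bound field out of the filter form by name) is collapsed into a single `{% render_custom_fields %}` call. Conceptually, a tag of that kind only needs the form: it looks up the form's custom-field names and hands the bound fields to a field-rendering template. A rough sketch of such an inclusion tag, with a hypothetical name and template path rather than NetBox's actual implementation:

```python
from django import template

register = template.Library()


@register.inclusion_tag('form_helpers/custom_fields_sketch.html')  # hypothetical template
def render_custom_fields_sketch(form):
    """Hand every custom field declared on the form to the rendering template (sketch)."""
    return {
        # form.custom_fields holds the names of the dynamically added cf_* fields;
        # indexing the form by name yields the bound field to render.
        'fields': [form[name] for name in getattr(form, 'custom_fields', [])],
    }
```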
@@ -45,12 +45,17 @@ class TenantBulkEditForm(NetBoxModelBulkEditForm):
         queryset=TenantGroup.objects.all(),
         required=False
     )
+    description = forms.CharField(
+        label=_('Description'),
+        max_length=200,
+        required=False
+    )

     model = Tenant
     fieldsets = (
-        FieldSet('group'),
+        FieldSet('group', 'description'),
     )
-    nullable_fields = ('group',)
+    nullable_fields = ('group', 'description')


 #
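Read together with the surrounding context lines, the bulk-edit change follows the usual NetBox pattern: declare the form field, add it to a `FieldSet` so it appears in the form layout, and list it in `nullable_fields` if a bulk edit should be able to clear it. A condensed sketch of the class as it reads after this change; the imports and the `group` field header are reconstructed here for illustration:

```python
from django import forms
from django.utils.translation import gettext_lazy as _

from netbox.forms import NetBoxModelBulkEditForm
from tenancy.models import Tenant, TenantGroup
from utilities.forms.fields import DynamicModelChoiceField
from utilities.forms.rendering import FieldSet


class TenantBulkEditForm(NetBoxModelBulkEditForm):
    group = DynamicModelChoiceField(          # field header reconstructed for illustration
        label=_('Group'),
        queryset=TenantGroup.objects.all(),
        required=False
    )
    description = forms.CharField(
        label=_('Description'),
        max_length=200,
        required=False
    )

    model = Tenant
    fieldsets = (
        FieldSet('group', 'description'),
    )
    # Fields listed here can be cleared ("nullified") during a bulk edit.
    nullable_fields = ('group', 'description')
```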
@@ -19,6 +19,10 @@ class ContactGroupTable(NetBoxTable):
         verbose_name=_('Name'),
         linkify=True
     )
+    parent = tables.Column(
+        verbose_name=_('Parent'),
+        linkify=True,
+    )
     contact_count = columns.LinkedCountColumn(
         viewname='tenancy:contact_list',
         url_params={'group_id': 'pk'},

@@ -34,7 +38,7 @@ class ContactGroupTable(NetBoxTable):
     class Meta(NetBoxTable.Meta):
         model = ContactGroup
         fields = (
-            'pk', 'name', 'contact_count', 'description', 'comments', 'slug', 'tags', 'created',
+            'pk', 'name', 'parent', 'contact_count', 'description', 'comments', 'slug', 'tags', 'created',
             'last_updated', 'actions',
         )
         default_columns = ('pk', 'name', 'contact_count', 'description')
@@ -16,6 +16,10 @@ class TenantGroupTable(NetBoxTable):
         verbose_name=_('Name'),
         linkify=True
     )
+    parent = tables.Column(
+        verbose_name=_('Parent'),
+        linkify=True,
+    )
     tenant_count = columns.LinkedCountColumn(
         viewname='tenancy:tenant_list',
         url_params={'group_id': 'pk'},

@@ -31,7 +35,7 @@ class TenantGroupTable(NetBoxTable):
     class Meta(NetBoxTable.Meta):
         model = TenantGroup
         fields = (
-            'pk', 'id', 'name', 'tenant_count', 'description', 'comments', 'slug', 'tags', 'created',
+            'pk', 'id', 'name', 'parent', 'tenant_count', 'description', 'comments', 'slug', 'tags', 'created',
             'last_updated', 'actions',
         )
         default_columns = ('pk', 'name', 'tenant_count', 'description')
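Both group tables gain the same optional `parent` column: a plain `tables.Column` pointed at the model's parent relation, linkified, and registered in `Meta.fields` but not in `default_columns`, so it stays opt-in until a user enables it in their table configuration. A trimmed sketch of the recurring pattern (only a subset of columns is shown):

```python
import django_tables2 as tables
from django.utils.translation import gettext_lazy as _

from netbox.tables import NetBoxTable
from tenancy.models import TenantGroup


class TenantGroupTableSketch(NetBoxTable):
    """Trimmed illustration of how the 'parent' column is wired in."""
    name = tables.Column(
        verbose_name=_('Name'),
        linkify=True
    )
    # Renders the group's parent as a link to that group's detail view.
    parent = tables.Column(
        verbose_name=_('Parent'),
        linkify=True,
    )

    class Meta(NetBoxTable.Meta):
        model = TenantGroup
        fields = ('pk', 'name', 'parent', 'description')
        default_columns = ('pk', 'name', 'description')  # 'parent' remains opt-in
```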
@@ -98,6 +98,7 @@ class TenantTestCase(ViewTestCases.PrimaryObjectViewTestCase):

         cls.bulk_edit_data = {
             'group': tenant_groups[1].pk,
+            'description': 'Bulk edit description',
         }

(Remaining file diffs omitted: several binary files are not shown and several large diffs were suppressed. Some files were not shown because too many files changed in this diff.)