Mirror of https://github.com/netbox-community/netbox.git (synced 2025-12-31 01:27:45 -06:00)

Compare commits: 20660-scri ... v4.4.7 (107 commits)
Commits in this comparison (SHA1):

8b3f7ce507, adad3745ae, 8055fae253, aac3a51431, 3e0ad2176f, 4e8edfb3d6, 651557a82b, c3d66dc42e, a50e570f22, a44a79ec79,
b919868521, d9aab6bbe2, 82171fce7a, 020eb64eab, ec7afccd55, 76fd63823c, 6c373decd6, 222b26e060, 066b787777, 90b2732068,
bfba0ccaae, d5718357f1, d61737396b, c6248f1142, 05f254a768, 0cb10f806a, 8ac7f6f8de, cd8087ab43, da5ae21150, fbb948d30e,
975e0ff398, d7877b7627, b685df7c9c, 9dcf9475cc, e1bf27e4db, 9b89af75e4, 9e13d89baa, 4961b0d334, ab06edd9f5, e787a71c1d,
cd8878df30, b5a9cb1762, 9723a2f0ad, 327d08f4c2, 4be476eb49, 8005b56ab4, 3f1654c9ba, 95f8fe788d, 588c069ff1, 5b3ff3c0e9,
730d73042d, 6c2a6d0e90, e6a6ff7aec, 87ff83ef1f, 3cdc6251be, 0e1705b870, 8522c03b71, 20af97ce24, 264b40a269, cbf9b62f12,
c429cc3638, 032ed4f11c, 7ca4342c15, 70bc1c226a, 6a21459ccc, 635de4af2e, df96f7dd0f, 90712fa865, fbe76ac98a, 0b61d69e05,
1245a9f99d, 78223cea03, 8452222761, 8a59fc733c, df688ce064, 1a1ab2a19d, 80f03daad6, d04c41d0f6, 1fc849eb40, bbf1f6181d,
729b0365e0, 43cb476223, d6f756d315, afc62b6ffd, 3d4841f17f, 2aefb3af73, 4eff4d6a4a, 9381564cab, 3d143d635b, 77307b3c91,
aa4571b61f, 56d9146323, e192f64dd2, d433a28524, dbfdf318ad, 639bc4462b, 1c59d411f7, ac7a4ec4a3, 0cf58e62b2, fb8d41b527,
ae5d7911f9, 3bd0186870, 09ce8a808d, 8eaff9dce7, cb3308a166, 5fbae8407e, 2fdd46f64c
@@ -2,7 +2,7 @@
name: ✨ Feature Request
type: Feature
description: Propose a new NetBox feature or enhancement
-labels: ["type: feature", "status: needs triage"]
+labels: ["netbox", "type: feature", "status: needs triage"]
body:
  - type: markdown
    attributes:
@@ -15,7 +15,7 @@ body:
    attributes:
      label: NetBox version
      description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.7
    validations:
      required: true
  - type: dropdown
.github/ISSUE_TEMPLATE/02-bug_report.yaml (vendored, 4 changed lines)
@@ -2,7 +2,7 @@
name: 🐛 Bug Report
type: Bug
description: Report a reproducible bug in the current release of NetBox
-labels: ["type: bug", "status: needs triage"]
+labels: ["netbox", "type: bug", "status: needs triage"]
body:
  - type: markdown
    attributes:
@@ -27,7 +27,7 @@ body:
    attributes:
      label: NetBox Version
      description: What version of NetBox are you currently running?
-      placeholder: v4.4.4
+      placeholder: v4.4.7
    validations:
      required: true
  - type: dropdown
@@ -2,7 +2,7 @@
name: 📖 Documentation Change
type: Documentation
description: Suggest an addition or modification to the NetBox documentation
-labels: ["type: documentation", "status: needs triage"]
+labels: ["netbox", "type: documentation", "status: needs triage"]
body:
  - type: dropdown
    attributes:
.github/ISSUE_TEMPLATE/04-translation.yaml (vendored, 2 changed lines)
@@ -2,7 +2,7 @@
name: 🌍 Translation
type: Translation
description: Request support for a new language in the user interface
-labels: ["type: translation"]
+labels: ["netbox", "type: translation"]
body:
  - type: markdown
    attributes:
.github/ISSUE_TEMPLATE/05-housekeeping.yaml (vendored, 2 changed lines)
@@ -2,7 +2,7 @@
name: 🏡 Housekeeping
type: Housekeeping
description: A change pertaining to the codebase itself (developers only)
-labels: ["type: housekeeping"]
+labels: ["netbox", "type: housekeeping"]
body:
  - type: markdown
    attributes:
.github/ISSUE_TEMPLATE/06-deprecation.yaml (vendored, 2 changed lines)
@@ -2,7 +2,7 @@
name: 🗑️ Deprecation
type: Deprecation
description: The removal of an existing feature or resource
-labels: ["type: deprecation"]
+labels: ["netbox", "type: deprecation"]
body:
  - type: textarea
    attributes:
@@ -21,14 +21,6 @@ repos:
        language: system
        pass_filenames: false
        types: [python]
-      - id: openapi-check
-        name: "Validate OpenAPI schema"
-        description: "Check for any unexpected changes to the OpenAPI schema"
-        files: api/.*\.py$
-        entry: scripts/verify-openapi.sh
-        language: system
-        pass_filenames: false
-        types: [python]
      - id: mkdocs-build
        name: "Build documentation"
        description: "Build the documentation with mkdocs"
@@ -186,6 +186,7 @@
        "usb-3-micro-b",
        "molex-micro-fit-1x2",
        "molex-micro-fit-2x2",
+       "molex-micro-fit-2x3",
        "molex-micro-fit-2x4",
        "dc-terminal",
        "saf-d-grid",
@@ -293,6 +294,7 @@
        "usb-c",
        "molex-micro-fit-1x2",
        "molex-micro-fit-2x2",
+       "molex-micro-fit-2x3",
        "molex-micro-fit-2x4",
        "dc-terminal",
        "eaton-c39",
contrib/openapi.json (6521 changed lines): file diff suppressed because one or more lines are too long
@@ -35,6 +35,7 @@ Some configuration parameters are primarily controlled via NetBox's admin interf
* [`POWERFEED_DEFAULT_MAX_UTILIZATION`](./default-values.md#powerfeed_default_max_utilization)
* [`POWERFEED_DEFAULT_VOLTAGE`](./default-values.md#powerfeed_default_voltage)
* [`PREFER_IPV4`](./miscellaneous.md#prefer_ipv4)
* [`PROTECTION_RULES`](./data-validation.md#protection_rules)
* [`RACK_ELEVATION_DEFAULT_UNIT_HEIGHT`](./default-values.md#rack_elevation_default_unit_height)
* [`RACK_ELEVATION_DEFAULT_UNIT_WIDTH`](./default-values.md#rack_elevation_default_unit_width)
@@ -53,6 +53,16 @@ Sets content for the top banner in the user interface.

---

## COPILOT_ENABLED

!!! tip "Dynamic Configuration Parameter"

Default: `True`

Enables or disables the [NetBox Copilot](https://netboxlabs.com/docs/copilot/) agent globally. When enabled, users can opt to toggle the agent individually.

---

## CENSUS_REPORTING_ENABLED

Default: `True`
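Purely for illustration, a minimal sketch of setting this parameter statically (an assumption; as a dynamic configuration parameter it is normally managed through the admin UI):

```python
# configuration.py (hypothetical excerpt)
# COPILOT_ENABLED is a dynamic configuration parameter; defining it here is
# only one option and is assumed for illustration.
COPILOT_ENABLED = False  # globally disable the NetBox Copilot agent
```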
@@ -92,7 +92,7 @@ If `True`, the cookie employed for cross-site request forgery (CSRF) protection

Default: `[]`

-Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://).
+Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://`).

```python
CSRF_TRUSTED_ORIGINS = (
@@ -232,6 +232,9 @@ STORAGES = {
    },
    "scripts": {
        "BACKEND": "extras.storage.ScriptFileSystemStorage",
+        "OPTIONS": {
+            "allow_overwrite": True,
+        },
    },
}
```
@@ -247,6 +250,7 @@ STORAGES = {
        "OPTIONS": {
            'access_key': 'access key',
            'secret_key': 'secret key',
+            "allow_overwrite": True,
        }
    },
}
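Read together, the two hunks amount to the following minimal local-filesystem configuration for script storage (a sketch assembled from the lines above; the surrounding keys are assumed):

```python
# configuration.py (assumed minimal form)
STORAGES = {
    "scripts": {
        "BACKEND": "extras.storage.ScriptFileSystemStorage",
        "OPTIONS": {
            # Allow a re-uploaded script file to overwrite the existing copy
            "allow_overwrite": True,
        },
    },
}
```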
@@ -95,7 +95,7 @@ An example fieldset definition is provided below:

```python
class MyScript(Script):
-    class Meta:
+    class Meta(Script.Meta):
        fieldsets = (
            ('First group', ('field1', 'field2', 'field3')),
            ('Second group', ('field4', 'field5')),
@@ -510,7 +510,7 @@ from extras.scripts import *

class NewBranchScript(Script):

-    class Meta:
+    class Meta(Script.Meta):
        name = "New Branch"
        description = "Provision a new branch site"
        field_order = ['site_name', 'switch_count', 'switch_model']
@@ -6,10 +6,14 @@ For end‑user guidance on resetting saved table layouts, see [Features > User P

## Available Preferences

-| Name | Description |
-|--------------------------|---------------------------------------------------------------|
-| data_format | Preferred format when rendering raw data (JSON or YAML) |
-| pagination.per_page | The number of items to display per page of a paginated table |
-| pagination.placement | Where to display the paginator controls relative to the table |
-| tables.${table}.columns | The ordered list of columns to display when viewing the table |
-| tables.${table}.ordering | A list of column names by which the table should be ordered |
+| Name | Description |
+|----------------------------|---------------------------------------------------------------|
+| `csv_delimiter` | The delimiting character used when exporting CSV data |
+| `data_format` | Preferred format when rendering raw data (JSON or YAML) |
+| `locale.language` | The language selected for UI translation |
+| `pagination.per_page` | The number of items to display per page of a paginated table |
+| `pagination.placement` | Where to display the paginator controls relative to the table |
+| `tables.${table}.columns` | The ordered list of columns to display when viewing the table |
+| `tables.${table}.ordering` | A list of column names by which the table should be ordered |
+| `ui.copilot_enabled` | Toggles the NetBox Copilot AI agent |
+| `ui.tables.striping` | Toggles visual striping of tables in the UI |
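For context, these preferences can also be seeded for new users via the `DEFAULT_USER_PREFERENCES` configuration parameter; a hedged sketch follows (the mapping of dotted names onto nested dictionaries is an assumption):

```python
# configuration.py (hypothetical defaults; keys mirror the preference names
# in the table above, with dotted names assumed to nest as dictionaries)
DEFAULT_USER_PREFERENCES = {
    "data_format": "yaml",
    "pagination": {
        "per_page": 100,
    },
    "ui": {
        "tables": {
            "striping": True,
        },
    },
}
```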
@@ -60,6 +60,13 @@ Four of the standard Python logging levels are supported:

Log entries recorded using the runner's logger will be saved in the job's log in the database in addition to being processed by other [system logging handlers](../../configuration/system.md#logging).

### Jobs running for Model instances

A Job can be executed for a specific instance of a model. To enable this functionality, the model must include the `JobsMixin`.

When enqueuing a Job, you can associate it with a particular instance by passing that instance to the `instance` parameter.

### Scheduled Jobs

As described above, jobs can be scheduled for immediate execution or at any later time using the `enqueue()` method. For management purposes, the `enqueue_once()` method additionally allows a job to be scheduled exactly once, avoiding duplicates: if a job is already scheduled for a particular instance, a second one won't be created, and the check is thread-safe. An example use case is a periodic task that is bound to an instance in general, but not to any particular event of that instance (such as updates). The parameters of `enqueue_once()` are identical to those of `enqueue()`.
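A brief sketch of the difference (assuming `MyTestJob` is the job class imported in the example below and `obj` is an instance of a model that includes `JobsMixin`):

```python
from core.choices import JobIntervalChoices

# Run the job once, immediately, bound to a specific object
MyTestJob.enqueue(instance=obj)

# Schedule the job hourly for the same object; if one is already scheduled
# for this instance, enqueue_once() will not create a duplicate
MyTestJob.enqueue_once(instance=obj, interval=JobIntervalChoices.INTERVAL_HOURLY)
```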
@@ -73,9 +80,10 @@ As described above, jobs can be scheduled for immediate execution or at any late
from django.db import models
from core.choices import JobIntervalChoices
from netbox.models import NetBoxModel
+from netbox.models.features import JobsMixin
from .jobs import MyTestJob

-class MyModel(NetBoxModel):
+class MyModel(JobsMixin, NetBoxModel):
    foo = models.CharField()

    def save(self, *args, **kwargs):
@@ -55,6 +55,27 @@ class MyModelViewSet(...):
    filterset_class = filtersets.MyModelFilterSet
```

### Implementing Quick Search

The `ObjectListView` exposes a Quick Search field. For Quick Search to work, the corresponding FilterSet must override the `search` method implemented in `NetBoxModelFilterSet`. This method receives a queryset, may perform arbitrary operations on it, and returns it. A common use case is to match the search value against multiple fields:

```python
from django.db.models import Q
from netbox.filtersets import NetBoxModelFilterSet


class MyFilterSet(NetBoxModelFilterSet):
    ...
    def search(self, queryset, name, value):
        if not value.strip():
            return queryset
        return queryset.filter(
            Q(name__icontains=value) |
            Q(description__icontains=value)
        )
```

The `search` method is also used by the `q` filter in `NetBoxModelFilterSet`, which in turn backs the Search field in the filters tab.
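Because the same `search()` backs both entry points, a hypothetical direct invocation looks the same either way (names taken from the example above; `MyModel` is assumed):

```python
# Hypothetical usage: the Quick Search box and the API's ?q= parameter both
# funnel through MyFilterSet.search()
filterset = MyFilterSet({'q': 'edge-router'}, queryset=MyModel.objects.all())
matching = filterset.qs  # objects whose name or description contains "edge-router"
```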
## Filter Classes

### TagFilter
@@ -1,5 +1,94 @@
# NetBox v4.4

## v4.4.7 (2025-11-25)

### Enhancements

* [#20371](https://github.com/netbox-community/netbox/issues/20371) - Add Molex Micro-Fit 2x3 for power ports & power outlets
* [#20731](https://github.com/netbox-community/netbox/issues/20731) - Enable specifying `data_source` & `data_file` when bulk importing config templates
* [#20820](https://github.com/netbox-community/netbox/issues/20820) - Enable filtering of custom fields by object type
* [#20823](https://github.com/netbox-community/netbox/issues/20823) - Disallow creation of API tokens with an expiration date in the past
* [#20841](https://github.com/netbox-community/netbox/issues/20841) - Support advanced filtering for available rack types when creating/editing a rack

### Bug Fixes

* [#20134](https://github.com/netbox-community/netbox/issues/20134) - Prevent out-of-band HTMX content swaps in embedded tables
* [#20432](https://github.com/netbox-community/netbox/issues/20432) - Fix tracing of cables across multiple circuits in parallel
* [#20465](https://github.com/netbox-community/netbox/issues/20465) - Ensure that scripts are updated immediately when a new file is uploaded
* [#20638](https://github.com/netbox-community/netbox/issues/20638) - Correct OpenAPI schema for bulk create operations
* [#20649](https://github.com/netbox-community/netbox/issues/20649) - Enforce view permissions on REST API endpoint for custom scripts
* [#20740](https://github.com/netbox-community/netbox/issues/20740) - Ensure permissions constraints are enforced when executing custom scripts via the REST API
* [#20743](https://github.com/netbox-community/netbox/issues/20743) - Pass request context to custom script when triggered by an event rule
* [#20766](https://github.com/netbox-community/netbox/issues/20766) - Fix inadvertent translations on server error page
* [#20775](https://github.com/netbox-community/netbox/issues/20775) - Fix `TypeError` exception when bulk renaming unnamed devices
* [#20822](https://github.com/netbox-community/netbox/issues/20822) - Add missing `auto_sync_enabled` field in bulk edit forms
* [#20827](https://github.com/netbox-community/netbox/issues/20827) - Fix UI styling issue when toggling between light and dark mode
* [#20839](https://github.com/netbox-community/netbox/issues/20839) - Fix filtering by object type in UI for custom links and saved filters
* [#20840](https://github.com/netbox-community/netbox/issues/20840) - Remove extraneous references to airflow for RackType model
* [#20844](https://github.com/netbox-community/netbox/issues/20844) - Fix object type filter for L2VPN terminations
* [#20859](https://github.com/netbox-community/netbox/issues/20859) - Prevent dashboard crash due to exception raised by a widget
* [#20865](https://github.com/netbox-community/netbox/issues/20865) - Enforce proper min/max values for latitude & longitude fields

---
## v4.4.6 (2025-11-11)

### Enhancements

* [#14171](https://github.com/netbox-community/netbox/issues/14171) - Support VLAN assignment for device & VM interfaces being bulk imported
* [#20297](https://github.com/netbox-community/netbox/issues/20297) - Introduce additional coaxial cable types

### Bug Fixes

* [#20378](https://github.com/netbox-community/netbox/issues/20378) - Prevent exception when attempting to delete a data source utilized by a custom script
* [#20645](https://github.com/netbox-community/netbox/issues/20645) - CSVChoiceField should defer to model field's default value when CSV field is empty
* [#20647](https://github.com/netbox-community/netbox/issues/20647) - Improve handling of empty strings during bulk imports
* [#20653](https://github.com/netbox-community/netbox/issues/20653) - Fix filtering of jobs by object type ID
* [#20660](https://github.com/netbox-community/netbox/issues/20660) - Optimize loading of custom script modules from remote storage
* [#20670](https://github.com/netbox-community/netbox/issues/20670) - Improve validation of related objects during bulk import
* [#20688](https://github.com/netbox-community/netbox/issues/20688) - Suppress non-harmful "No active configuration revision found" warning message
* [#20697](https://github.com/netbox-community/netbox/issues/20697) - Prevent duplication of signals which increment/decrement related object counts
* [#20699](https://github.com/netbox-community/netbox/issues/20699) - Ensure proper ordering of changelog entries resulting from cascading deletions
* [#20713](https://github.com/netbox-community/netbox/issues/20713) - Ensure a pre-change snapshot is recorded on virtual chassis members being added/removed
* [#20721](https://github.com/netbox-community/netbox/issues/20721) - Fix breadcrumb navigation links in UI for background tasks
* [#20738](https://github.com/netbox-community/netbox/issues/20738) - Deleting a virtual chassis should nullify the `vc_position` of all former members
* [#20750](https://github.com/netbox-community/netbox/issues/20750) - Fix cloning of permissions when only one action is enabled
* [#20755](https://github.com/netbox-community/netbox/issues/20755) - Prevent duplicate results under certain conditions when filtering providers
* [#20771](https://github.com/netbox-community/netbox/issues/20771) - Comments are required when creating a new journal entry
* [#20774](https://github.com/netbox-community/netbox/issues/20774) - Bulk action button labels should be translated

---
## v4.4.5 (2025-10-28)

### Enhancements

* [#19751](https://github.com/netbox-community/netbox/issues/19751) - Disable occupied module bays in form dropdowns when installing a new module
* [#20301](https://github.com/netbox-community/netbox/issues/20301) - Add a "dismiss all" option to the notifications dropdown
* [#20399](https://github.com/netbox-community/netbox/issues/20399) - Add `assigned` and `primary` boolean filters for MAC addresses
* [#20567](https://github.com/netbox-community/netbox/issues/20567) - Add contacts column to services table
* [#20675](https://github.com/netbox-community/netbox/issues/20675) - Enable [NetBox Copilot](https://netboxlabs.com/products/netbox-copilot/) integration
* [#20692](https://github.com/netbox-community/netbox/issues/20692) - Add contacts column to IP addresses table
* [#20700](https://github.com/netbox-community/netbox/issues/20700) - Add contacts table column for various additional models

### Bug Fixes

* [#19872](https://github.com/netbox-community/netbox/issues/19872) - Ensure custom script validation failures display error messages
* [#20389](https://github.com/netbox-community/netbox/issues/20389) - Fix "select all" behavior for bulk rename views
* [#20422](https://github.com/netbox-community/netbox/issues/20422) - Enable filtering of aggregates and prefixes by family in GraphQL API
* [#20459](https://github.com/netbox-community/netbox/issues/20459) - Fix validation of `is_oob` & `is_primary` fields under IP address bulk import
* [#20466](https://github.com/netbox-community/netbox/issues/20466) - Fix querying of devices with a primary IP assigned in GraphQL API
* [#20498](https://github.com/netbox-community/netbox/issues/20498) - Enforce the validation regex (if set) for custom URL fields
* [#20524](https://github.com/netbox-community/netbox/issues/20524) - Raise a validation error when attempting to schedule a custom script for a past date/time
* [#20541](https://github.com/netbox-community/netbox/issues/20541) - Fix resolution of GraphQL object fields which rely on custom filters
* [#20551](https://github.com/netbox-community/netbox/issues/20551) - Fix automatic slug generation in quick-add UI form
* [#20606](https://github.com/netbox-community/netbox/issues/20606) - Enable copying of values from table columns rendered as badges
* [#20641](https://github.com/netbox-community/netbox/issues/20641) - Fix `AttributeError` exception raised by the object changes REST API endpoint
* [#20646](https://github.com/netbox-community/netbox/issues/20646) - Prevent cables from connecting to objects marked as connected
* [#20655](https://github.com/netbox-community/netbox/issues/20655) - Fix `FieldError` exception when attempting to sort permissions list by actions

---

## v4.4.4 (2025-10-15)

### Bug Fixes
@@ -89,8 +89,6 @@ class ProviderFilterSet(NetBoxModelFilterSet, ContactModelFilterSet):
        return queryset.filter(
            Q(name__icontains=value) |
            Q(description__icontains=value) |
            Q(accounts__account__icontains=value) |
            Q(accounts__name__icontains=value) |
            Q(comments__icontains=value)
        )
@@ -83,6 +83,7 @@ class ProviderBulkEditView(generic.BulkEditView):
@register_model_view(Provider, 'bulk_rename', path='rename', detail=False)
class ProviderBulkRenameView(generic.BulkRenameView):
    queryset = Provider.objects.all()
+    filterset = filtersets.ProviderFilterSet


@register_model_view(Provider, 'bulk_delete', path='delete', detail=False)
@@ -150,6 +151,7 @@ class ProviderAccountBulkEditView(generic.BulkEditView):
@register_model_view(ProviderAccount, 'bulk_rename', path='rename', detail=False)
class ProviderAccountBulkRenameView(generic.BulkRenameView):
    queryset = ProviderAccount.objects.all()
+    filterset = filtersets.ProviderAccountFilterSet


@register_model_view(ProviderAccount, 'bulk_delete', path='delete', detail=False)
@@ -226,6 +228,7 @@ class ProviderNetworkBulkEditView(generic.BulkEditView):
@register_model_view(ProviderNetwork, 'bulk_rename', path='rename', detail=False)
class ProviderNetworkBulkRenameView(generic.BulkRenameView):
    queryset = ProviderNetwork.objects.all()
+    filterset = filtersets.ProviderNetworkFilterSet


@register_model_view(ProviderNetwork, 'bulk_delete', path='delete', detail=False)
@@ -290,6 +293,7 @@ class CircuitTypeBulkEditView(generic.BulkEditView):
@register_model_view(CircuitType, 'bulk_rename', path='rename', detail=False)
class CircuitTypeBulkRenameView(generic.BulkRenameView):
    queryset = CircuitType.objects.all()
+    filterset = filtersets.CircuitTypeFilterSet


@register_model_view(CircuitType, 'bulk_delete', path='delete', detail=False)
@@ -362,6 +366,7 @@ class CircuitBulkEditView(generic.BulkEditView):
class CircuitBulkRenameView(generic.BulkRenameView):
    queryset = Circuit.objects.all()
    field_name = 'cid'
+    filterset = filtersets.CircuitFilterSet


@register_model_view(Circuit, 'bulk_delete', path='delete', detail=False)
@@ -557,6 +562,7 @@ class CircuitGroupBulkEditView(generic.BulkEditView):
@register_model_view(CircuitGroup, 'bulk_rename', path='rename', detail=False)
class CircuitGroupBulkRenameView(generic.BulkRenameView):
    queryset = CircuitGroup.objects.all()
+    filterset = filtersets.CircuitGroupFilterSet


@register_model_view(CircuitGroup, 'bulk_delete', path='delete', detail=False)
@@ -672,6 +678,7 @@ class VirtualCircuitTypeBulkEditView(generic.BulkEditView):
@register_model_view(VirtualCircuitType, 'bulk_rename', path='rename', detail=False)
class VirtualCircuitTypeBulkRenameView(generic.BulkRenameView):
    queryset = VirtualCircuitType.objects.all()
+    filterset = filtersets.VirtualCircuitTypeFilterSet


@register_model_view(VirtualCircuitType, 'bulk_delete', path='delete', detail=False)
@@ -744,6 +751,7 @@ class VirtualCircuitBulkEditView(generic.BulkEditView):
class VirtualCircuitBulkRenameView(generic.BulkRenameView):
    queryset = VirtualCircuit.objects.all()
    field_name = 'cid'
+    filterset = filtersets.VirtualCircuitFilterSet


@register_model_view(VirtualCircuit, 'bulk_delete', path='delete', detail=False)
@@ -12,6 +12,7 @@ from drf_spectacular.utils import Direction

from netbox.api.fields import ChoiceField
from netbox.api.serializers import WritableNestedSerializer
+from netbox.api.viewsets import NetBoxModelViewSet

# see netbox.api.routers.NetBoxRouter
BULK_ACTIONS = ("bulk_destroy", "bulk_partial_update", "bulk_update")
@@ -49,6 +50,11 @@ class ChoiceFieldFix(OpenApiSerializerFieldExtension):
        )


+def viewset_handles_bulk_create(view):
+    """Check if view automatically provides list-based bulk create"""
+    return isinstance(view, NetBoxModelViewSet)


class NetBoxAutoSchema(AutoSchema):
    """
    Overrides to drf_spectacular.openapi.AutoSchema to fix following issues:
@@ -128,6 +134,36 @@ class NetBoxAutoSchema(AutoSchema):

        return response_serializers

    def _get_request_for_media_type(self, serializer, direction='request'):
        """
        Override to generate oneOf schema for serializers that support both
        single object and array input (NetBoxModelViewSet POST operations).

        Refs: #20638
        """
        # Get the standard schema first
        schema, required = super()._get_request_for_media_type(serializer, direction)

        # If this serializer supports arrays (marked in get_request_serializer),
        # wrap the schema in oneOf to allow single object OR array
        if (
            direction == 'request' and
            schema is not None and
            getattr(self.view, 'action', None) == 'create' and
            viewset_handles_bulk_create(self.view)
        ):
            return {
                'oneOf': [
                    schema,  # Single object
                    {
                        'type': 'array',
                        'items': schema,  # Array of objects
                    }
                ]
            }, required

        return schema, required

    def _get_serializer_name(self, serializer, direction, bypass_extensions=False) -> str:
        name = super()._get_serializer_name(serializer, direction, bypass_extensions)
@@ -1,8 +1,13 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers

from core.choices import *
from core.models import Job
from netbox.api.exceptions import SerializerNotFound
from netbox.api.fields import ChoiceField, ContentTypeField
from netbox.api.serializers import BaseModelSerializer
from users.api.serializers_.users import UserSerializer
from utilities.api import get_serializer_for_model

__all__ = (
    'JobSerializer',
@@ -18,11 +23,28 @@ class JobSerializer(BaseModelSerializer):
    object_type = ContentTypeField(
        read_only=True
    )
    object = serializers.SerializerMethodField(
        read_only=True
    )

    class Meta:
        model = Job
        fields = [
-            'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'name', 'status', 'created', 'scheduled',
-            'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
+            'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'object', 'name', 'status', 'created',
+            'scheduled', 'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
        ]
        brief_fields = ('url', 'created', 'completed', 'user', 'status')

    @extend_schema_field(serializers.JSONField(allow_null=True))
    def get_object(self, obj):
        """
        Serialize a nested representation of the object.
        """
        if obj.object is None:
            return None
        try:
            serializer = get_serializer_for_model(obj.object)
        except SerializerNotFound:
            return obj.object_repr
        context = {'request': self.context['request']}
        return serializer(obj.object, nested=True, context=context).data
@@ -80,6 +80,10 @@ class JobFilterSet(BaseFilterSet):
        method='search',
        label=_('Search'),
    )
+    object_type_id = django_filters.ModelMultipleChoiceFilter(
+        queryset=ObjectType.objects.with_feature('jobs'),
+        field_name='object_type_id',
+    )
    object_type = ContentTypeFilter()
    created = django_filters.DateTimeFilter()
    created__before = django_filters.DateTimeFilter(
@@ -124,7 +128,7 @@ class JobFilterSet(BaseFilterSet):

    class Meta:
        model = Job
-        fields = ('id', 'object_type', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')
+        fields = ('id', 'object_type', 'object_type_id', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')

    def search(self, queryset, name, value):
        if not value.strip():
@@ -70,13 +70,13 @@ class JobFilterForm(SavedFiltersMixin, FilterForm):
    model = Job
    fieldsets = (
        FieldSet('q', 'filter_id'),
-        FieldSet('object_type', 'status', name=_('Attributes')),
+        FieldSet('object_type_id', 'status', name=_('Attributes')),
        FieldSet(
            'created__before', 'created__after', 'scheduled__before', 'scheduled__after', 'started__before',
            'started__after', 'completed__before', 'completed__after', 'user', name=_('Creation')
        ),
    )
-    object_type = ContentTypeChoiceField(
+    object_type_id = ContentTypeChoiceField(
        label=_('Object Type'),
        queryset=ObjectType.objects.with_feature('jobs'),
        required=False,
@@ -166,8 +166,8 @@ class ConfigRevisionForm(forms.ModelForm, metaclass=ConfigFormMetaclass):
        FieldSet('CUSTOM_VALIDATORS', 'PROTECTION_RULES', name=_('Validation')),
        FieldSet('DEFAULT_USER_PREFERENCES', name=_('User Preferences')),
        FieldSet(
-            'MAINTENANCE_MODE', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION', 'MAPS_URL',
-            name=_('Miscellaneous')
+            'MAINTENANCE_MODE', 'COPILOT_ENABLED', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION',
+            'MAPS_URL', name=_('Miscellaneous'),
        ),
        FieldSet('comment', name=_('Config Revision'))
    )
@@ -6,7 +6,6 @@ from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.core.files.storage import storages
from django.urls import reverse
from django.utils.translation import gettext as _

from ..choices import ManagedFileRootPathChoices
@@ -64,9 +63,6 @@ class ManagedFile(SyncedDataMixin, models.Model):
    def __str__(self):
        return self.name

-    def get_absolute_url(self):
-        return reverse('core:managedfile', args=[self.pk])
-
    @property
    def name(self):
        return self.file_path
@@ -1,4 +1,4 @@
-from django.utils.translation import gettext as _
+from django.utils.translation import gettext_lazy as _

from netbox.object_actions import ObjectAction
@@ -3,6 +3,7 @@ from threading import local

from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
+from django.db.models import CASCADE
from django.db.models.fields.reverse_related import ManyToManyRel, ManyToOneRel
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from django.dispatch import receiver, Signal
@@ -220,14 +221,8 @@ def handle_deleted_object(sender, instance, **kwargs):
            obj.snapshot()  # Ensure the change record includes the "before" state
            if type(relation) is ManyToManyRel:
                getattr(obj, related_field_name).remove(instance)
-            elif type(relation) is ManyToOneRel and relation.field.null is True:
+            elif type(relation) is ManyToOneRel and relation.null and relation.on_delete is not CASCADE:
                setattr(obj, related_field_name, None)
-            # make sure the object hasn't been deleted - in case of
-            # deletion chaining of related objects
-            try:
-                obj.refresh_from_db()
-            except DoesNotExist:
-                continue
            obj.save()

    # Enqueue the object for event processing
@@ -5,14 +5,16 @@ from rest_framework import status

from core.choices import ObjectChangeActionChoices
from core.models import ObjectChange, ObjectType
-from dcim.choices import SiteStatusChoices
-from dcim.models import Site, CableTermination, Device, DeviceType, DeviceRole, Interface, Cable
+from dcim.choices import InterfaceTypeChoices, ModuleStatusChoices, SiteStatusChoices
+from dcim.models import (
+    Cable, CableTermination, Device, DeviceRole, DeviceType, Manufacturer, Module, ModuleBay, ModuleType, Interface,
+    Site,
+)
from extras.choices import *
from extras.models import CustomField, CustomFieldChoiceSet, Tag
from utilities.testing import APITestCase
-from utilities.testing.utils import create_tags, post_data
+from utilities.testing.utils import create_tags, create_test_device, post_data
from utilities.testing.views import ModelViewTestCase
-from dcim.models import Manufacturer


class ChangeLogViewTest(ModelViewTestCase):
@@ -622,3 +624,64 @@ class ChangeLogAPITest(APITestCase):
        self.assertEqual(objectchange.prechange_data['name'], 'Site 1')
        self.assertEqual(objectchange.prechange_data['slug'], 'site-1')
        self.assertEqual(objectchange.postchange_data, None)

    def test_deletion_ordering(self):
        """
        Check that the cascading deletion of dependent objects is recorded in the correct order.
        """
        device = create_test_device('device1')
        module_bay = ModuleBay.objects.create(device=device, name='Module Bay 1')
        module_type = ModuleType.objects.create(manufacturer=Manufacturer.objects.first(), model='Module Type 1')
        self.add_permissions('dcim.add_module', 'dcim.add_interface', 'dcim.delete_module')
        self.assertEqual(ObjectChange.objects.count(), 0)  # Sanity check

        # Create a new Module
        data = {
            'device': device.pk,
            'module_bay': module_bay.pk,
            'module_type': module_type.pk,
            'status': ModuleStatusChoices.STATUS_ACTIVE,
        }
        url = reverse('dcim-api:module-list')
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        module = device.modules.first()

        # Create an Interface on the Module
        data = {
            'device': device.pk,
            'module': module.pk,
            'name': 'Interface 1',
            'type': InterfaceTypeChoices.TYPE_1GE_FIXED,
        }
        url = reverse('dcim-api:interface-list')
        response = self.client.post(url, data, format='json', **self.header)
        self.assertHttpStatus(response, status.HTTP_201_CREATED)
        interface = device.interfaces.first()

        # Delete the Module
        url = reverse('dcim-api:module-detail', kwargs={'pk': module.pk})
        response = self.client.delete(url, **self.header)
        self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Module.objects.count(), 0)
        self.assertEqual(Interface.objects.count(), 0)

        # Verify the creation of the expected ObjectChange records. We should see four total records, in this order:
        # 1. Module created
        # 2. Interface created
        # 3. Interface deleted
        # 4. Module deleted
        changes = ObjectChange.objects.order_by('time')
        self.assertEqual(len(changes), 4)
        self.assertEqual(changes[0].changed_object_type, ContentType.objects.get_for_model(Module))
        self.assertEqual(changes[0].changed_object_id, module.pk)
        self.assertEqual(changes[0].action, ObjectChangeActionChoices.ACTION_CREATE)
        self.assertEqual(changes[1].changed_object_type, ContentType.objects.get_for_model(Interface))
        self.assertEqual(changes[1].changed_object_id, interface.pk)
        self.assertEqual(changes[1].action, ObjectChangeActionChoices.ACTION_CREATE)
        self.assertEqual(changes[2].changed_object_type, ContentType.objects.get_for_model(Interface))
        self.assertEqual(changes[2].changed_object_id, interface.pk)
        self.assertEqual(changes[2].action, ObjectChangeActionChoices.ACTION_DELETE)
        self.assertEqual(changes[3].changed_object_type, ContentType.objects.get_for_model(Module))
        self.assertEqual(changes[3].changed_object_id, module.pk)
        self.assertEqual(changes[3].action, ObjectChangeActionChoices.ACTION_DELETE)
netbox/core/tests/test_openapi_schema.py (new file, 108 lines)
@@ -0,0 +1,108 @@
"""
Unit tests for OpenAPI schema generation.

Refs: #20638
"""
import json
from django.test import TestCase


class OpenAPISchemaTestCase(TestCase):
    """Tests for OpenAPI schema generation."""

    def setUp(self):
        """Fetch schema via API endpoint."""
        response = self.client.get('/api/schema/', {'format': 'json'})
        self.assertEqual(response.status_code, 200)
        self.schema = json.loads(response.content)

    def test_post_operation_documents_single_or_array(self):
        """
        POST operations on NetBoxModelViewSet endpoints should document
        support for both single objects and arrays via oneOf.

        Refs: #20638
        """
        # Test representative endpoints across different apps
        test_paths = [
            '/api/core/data-sources/',
            '/api/dcim/sites/',
            '/api/users/users/',
            '/api/ipam/ip-addresses/',
        ]

        for path in test_paths:
            with self.subTest(path=path):
                operation = self.schema['paths'][path]['post']

                # Get the request body schema
                request_schema = operation['requestBody']['content']['application/json']['schema']

                # Should have oneOf with two options
                self.assertIn('oneOf', request_schema, f"POST {path} should have oneOf schema")
                self.assertEqual(
                    len(request_schema['oneOf']), 2,
                    f"POST {path} oneOf should have exactly 2 options"
                )

                # First option: single object (has $ref or properties)
                single_schema = request_schema['oneOf'][0]
                self.assertTrue(
                    '$ref' in single_schema or 'properties' in single_schema,
                    f"POST {path} first oneOf option should be single object"
                )

                # Second option: array of objects
                array_schema = request_schema['oneOf'][1]
                self.assertEqual(
                    array_schema['type'], 'array',
                    f"POST {path} second oneOf option should be array"
                )
                self.assertIn('items', array_schema, f"POST {path} array should have items")

    def test_bulk_update_operations_require_array_only(self):
        """
        Bulk update/patch operations should require arrays only, not oneOf.
        They don't support single object input.

        Refs: #20638
        """
        test_paths = [
            '/api/dcim/sites/',
            '/api/users/users/',
        ]

        for path in test_paths:
            for method in ['put', 'patch']:
                with self.subTest(path=path, method=method):
                    operation = self.schema['paths'][path][method]
                    request_schema = operation['requestBody']['content']['application/json']['schema']

                    # Should be array-only, not oneOf
                    self.assertNotIn(
                        'oneOf', request_schema,
                        f"{method.upper()} {path} should NOT have oneOf (array-only)"
                    )
                    self.assertEqual(
                        request_schema['type'], 'array',
                        f"{method.upper()} {path} should require array"
                    )
                    self.assertIn(
                        'items', request_schema,
                        f"{method.upper()} {path} array should have items"
                    )

    def test_bulk_delete_requires_array(self):
        """
        Bulk delete operations should require arrays.

        Refs: #20638
        """
        path = '/api/dcim/sites/'
        operation = self.schema['paths'][path]['delete']
        request_schema = operation['requestBody']['content']['application/json']['schema']

        # Should be array-only
        self.assertNotIn('oneOf', request_schema, "DELETE should NOT have oneOf")
        self.assertEqual(request_schema['type'], 'array', "DELETE should require array")
        self.assertIn('items', request_schema, "DELETE array should have items")
@@ -125,6 +125,7 @@ class DataSourceBulkEditView(generic.BulkEditView):
@register_model_view(DataSource, 'bulk_rename', path='rename', detail=False)
class DataSourceBulkRenameView(generic.BulkRenameView):
    queryset = DataSource.objects.all()
+    filterset = filtersets.DataSourceFilterSet


@register_model_view(DataSource, 'bulk_delete', path='delete', detail=False)
@@ -461,6 +461,7 @@ class PowerPortTypeChoices(ChoiceSet):
    # Molex
    TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
    TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
+    TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
    TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
    # Direct current (DC)
    TYPE_DC = 'dc-terminal'
@@ -588,6 +589,7 @@ class PowerPortTypeChoices(ChoiceSet):
        ('Molex', (
            (TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
            (TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
+            (TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
            (TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
        )),
        ('DC', (
@@ -710,6 +712,7 @@ class PowerOutletTypeChoices(ChoiceSet):
    # Molex
    TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
    TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
+    TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
    TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
    # Direct current (DC)
    TYPE_DC = 'dc-terminal'
@@ -831,6 +834,7 @@ class PowerOutletTypeChoices(ChoiceSet):
        ('Molex', (
            (TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
            (TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
+            (TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
            (TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
        )),
        ('DC', (
@@ -1736,6 +1740,15 @@ class CableTypeChoices(ChoiceSet):

    # Copper - Coaxial
    TYPE_COAXIAL = 'coaxial'
+    TYPE_RG_6 = 'rg-6'
+    TYPE_RG_8 = 'rg-8'
+    TYPE_RG_11 = 'rg-11'
+    TYPE_RG_59 = 'rg-59'
+    TYPE_RG_62 = 'rg-62'
+    TYPE_RG_213 = 'rg-213'
+    TYPE_LMR_100 = 'lmr-100'
+    TYPE_LMR_200 = 'lmr-200'
+    TYPE_LMR_400 = 'lmr-400'

    # Fiber Optic - Multimode
    TYPE_MMF = 'mmf'
@@ -1785,6 +1798,15 @@
            _('Copper - Coaxial'),
            (
                (TYPE_COAXIAL, 'Coaxial'),
+                (TYPE_RG_6, 'RG-6'),
+                (TYPE_RG_8, 'RG-8'),
+                (TYPE_RG_11, 'RG-11'),
+                (TYPE_RG_59, 'RG-59'),
+                (TYPE_RG_62, 'RG-62'),
+                (TYPE_RG_213, 'RG-213'),
+                (TYPE_LMR_100, 'LMR-100'),
+                (TYPE_LMR_200, 'LMR-200'),
+                (TYPE_LMR_400, 'LMR-400'),
            ),
        ),
        (
@@ -1288,7 +1288,6 @@ class DeviceFilterSet(
            Q(name__icontains=value) |
            Q(virtual_chassis__name__icontains=value) |
            Q(serial__icontains=value.strip()) |
            Q(inventoryitems__serial__icontains=value.strip()) |
            Q(asset_tag__icontains=value.strip()) |
            Q(description__icontains=value.strip()) |
            Q(comments__icontains=value) |
@@ -9,7 +9,8 @@ from dcim.choices import *
from dcim.constants import *
from dcim.models import *
from extras.models import ConfigTemplate
-from ipam.models import VRF, IPAddress
+from ipam.choices import VLANQinQRoleChoices
+from ipam.models import VLAN, VRF, IPAddress, VLANGroup
from netbox.choices import *
from netbox.forms import NetBoxModelImportForm
from tenancy.models import Tenant
@@ -17,7 +18,7 @@ from utilities.forms.fields import (
    CSVChoiceField, CSVContentTypeField, CSVModelChoiceField, CSVModelMultipleChoiceField, CSVTypedChoiceField,
    SlugField,
)
-from virtualization.models import Cluster, VMInterface, VirtualMachine
+from virtualization.models import Cluster, VirtualMachine, VMInterface
from wireless.choices import WirelessRoleChoices
from .common import ModuleCommonForm

@@ -938,7 +939,7 @@ class InterfaceImportForm(NetBoxModelImportForm):
        required=False,
        to_field_name='name',
        help_text=mark_safe(
-            _('VDC names separated by commas, encased with double quotes. Example:') + ' <code>vdc1,vdc2,vdc3</code>'
+            _('VDC names separated by commas, encased with double quotes. Example:') + ' <code>"vdc1,vdc2,vdc3"</code>'
        )
    )
    type = CSVChoiceField(
@@ -967,7 +968,41 @@ class InterfaceImportForm(NetBoxModelImportForm):
        label=_('Mode'),
        choices=InterfaceModeChoices,
        required=False,
-        help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)')
+        help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)'),
    )
    vlan_group = CSVModelChoiceField(
        label=_('VLAN group'),
        queryset=VLANGroup.objects.all(),
        required=False,
        to_field_name='name',
        help_text=_('Filter VLANs available for assignment by group'),
    )
    untagged_vlan = CSVModelChoiceField(
        label=_('Untagged VLAN'),
        queryset=VLAN.objects.all(),
        required=False,
        to_field_name='vid',
        help_text=_('Assigned untagged VLAN ID (filtered by VLAN group)'),
    )
    tagged_vlans = CSVModelMultipleChoiceField(
        label=_('Tagged VLANs'),
        queryset=VLAN.objects.all(),
        required=False,
        to_field_name='vid',
        help_text=mark_safe(
            _(
                'Assigned tagged VLAN IDs separated by commas, encased with double quotes '
                '(filtered by VLAN group). Example:'
            )
            + ' <code>"100,200,300"</code>'
        ),
    )
    qinq_svlan = CSVModelChoiceField(
        label=_('Q-in-Q Service VLAN'),
        queryset=VLAN.objects.filter(qinq_role=VLANQinQRoleChoices.ROLE_SERVICE),
        required=False,
        to_field_name='vid',
        help_text=_('Assigned Q-in-Q Service VLAN ID (filtered by VLAN group)'),
    )
    vrf = CSVModelChoiceField(
        label=_('VRF'),
@@ -988,7 +1023,8 @@ class InterfaceImportForm(NetBoxModelImportForm):
        fields = (
            'device', 'name', 'label', 'parent', 'bridge', 'lag', 'type', 'speed', 'duplex', 'enabled',
            'mark_connected', 'wwn', 'vdcs', 'mtu', 'mgmt_only', 'description', 'poe_mode', 'poe_type', 'mode',
-            'vrf', 'rf_role', 'rf_channel', 'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
+            'vlan_group', 'untagged_vlan', 'tagged_vlans', 'qinq_svlan', 'vrf', 'rf_role', 'rf_channel',
+            'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
        )

    def __init__(self, data=None, *args, **kwargs):
@@ -1005,6 +1041,13 @@ class InterfaceImportForm(NetBoxModelImportForm):
            self.fields['lag'].queryset = self.fields['lag'].queryset.filter(**params)
            self.fields['vdcs'].queryset = self.fields['vdcs'].queryset.filter(**params)

            # Limit choices for VLANs to the assigned VLAN group
            if vlan_group := data.get('vlan_group'):
                params = {f"group__{self.fields['vlan_group'].to_field_name}": vlan_group}
                self.fields['untagged_vlan'].queryset = self.fields['untagged_vlan'].queryset.filter(**params)
                self.fields['tagged_vlans'].queryset = self.fields['tagged_vlans'].queryset.filter(**params)
                self.fields['qinq_svlan'].queryset = self.fields['qinq_svlan'].queryset.filter(**params)

    def clean_enabled(self):
        # Make sure enabled is True when it's not included in the uploaded data
        if 'enabled' not in self.data:
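To make the new import columns concrete, a hypothetical CSV fragment for this form (column names follow the fields above; the device, group, and VID values are invented):

```
device,name,type,mode,vlan_group,untagged_vlan,tagged_vlans
switch1,GigabitEthernet1/0/1,1000base-t,tagged,Campus VLANs,100,"200,300"
```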
@@ -278,11 +278,6 @@ class RackBaseFilterForm(NetBoxModelFilterSetForm):
            choices=BOOLEAN_WITH_BLANK_CHOICES
        )
    )
-    airflow = forms.MultipleChoiceField(
-        label=_('Airflow'),
-        choices=add_blank_choice(RackAirflowChoices),
-        required=False
-    )
    weight = forms.DecimalField(
        label=_('Weight'),
        required=False,
@@ -381,6 +376,11 @@ class RackFilterForm(TenancyFilterForm, ContactModelFilterForm, RackBaseFilterFo
        },
        label=_('Rack type')
    )
+    airflow = forms.MultipleChoiceField(
+        label=_('Airflow'),
+        choices=add_blank_choice(RackAirflowChoices),
+        required=False
+    )
    serial = forms.CharField(
        label=_('Serial'),
        required=False
@@ -269,7 +269,8 @@ class RackForm(TenancyForm, NetBoxModelForm):
        label=_('Rack Type'),
        queryset=RackType.objects.all(),
        required=False,
-        help_text=_("Select a pre-defined rack type, or set physical characteristics below.")
+        selector=True,
+        help_text=_("Select a pre-defined rack type, or set physical characteristics below."),
    )
    comments = CommentField()
@@ -453,6 +453,7 @@ class VirtualChassisCreateForm(NetBoxModelForm):
        if instance.pk and self.cleaned_data['members']:
            initial_position = self.cleaned_data.get('initial_position', 1)
            for i, member in enumerate(self.cleaned_data['members'], start=initial_position):
+                member.snapshot()
                member.virtual_chassis = instance
                member.vc_position = i
                member.save()
netbox/dcim/migrations/0216_latitude_longitude_validators.py (new file, 67 lines)
@@ -0,0 +1,67 @@
import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):
    dependencies = [
        ('dcim', '0215_rackreservation_status'),
    ]

    operations = [
        migrations.AlterField(
            model_name='device',
            name='latitude',
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                max_digits=8,
                null=True,
                validators=[
                    django.core.validators.MinValueValidator(-90.0),
                    django.core.validators.MaxValueValidator(90.0),
                ],
            ),
        ),
        migrations.AlterField(
            model_name='device',
            name='longitude',
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                max_digits=9,
                null=True,
                validators=[
                    django.core.validators.MinValueValidator(-180.0),
                    django.core.validators.MaxValueValidator(180.0),
                ],
            ),
        ),
        migrations.AlterField(
            model_name='site',
            name='latitude',
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                max_digits=8,
                null=True,
                validators=[
                    django.core.validators.MinValueValidator(-90.0),
                    django.core.validators.MaxValueValidator(90.0),
                ],
            ),
        ),
        migrations.AlterField(
            model_name='site',
            name='longitude',
            field=models.DecimalField(
                blank=True,
                decimal_places=6,
                max_digits=9,
                null=True,
                validators=[
                    django.core.validators.MinValueValidator(-180.0),
                    django.core.validators.MaxValueValidator(180.0),
                ],
            ),
        ),
    ]
@@ -10,6 +10,7 @@ from django.utils.translation import gettext_lazy as _
from core.models import ObjectType
from dcim.choices import *
from dcim.constants import *
+from dcim.exceptions import UnsupportedCablePath
from dcim.fields import PathField
from dcim.utils import decompile_path_node, object_to_path_node
from netbox.choices import ColorChoices
@@ -28,8 +29,6 @@ __all__ = (
    'CableTermination',
)

-from ..exceptions import UnsupportedCablePath
-
trace_paths = Signal()
@@ -393,6 +392,17 @@ class CableTermination(ChangeLoggedModel):
    def clean(self):
        super().clean()

+        # Disallow connecting a cable to any termination object that is
+        # explicitly flagged as "mark connected".
+        termination = getattr(self, 'termination', None)
+        if termination is not None and getattr(termination, "mark_connected", False):
+            raise ValidationError(
+                _("Cannot connect a cable to {obj_parent} > {obj} because it is marked as connected.").format(
+                    obj_parent=termination.parent_object,
+                    obj=termination,
+                )
+            )
+
        # Check for existing termination
        qs = CableTermination.objects.filter(
            termination_type=self.termination_type,
@@ -404,14 +414,14 @@ class CableTermination(ChangeLoggedModel):
        existing_termination = qs.first()
        if existing_termination is not None:
            raise ValidationError(
-                _("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}".format(
+                _("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}").format(
                    app_label=self.termination_type.app_label,
                    model=self.termination_type.model,
                    termination_id=self.termination_id,
                    cable_pk=existing_termination.cable.pk
-                ))
+                )
            )
-        # Validate interface type (if applicable)
+        # Validate the interface type (if applicable)
        if self.termination_type.model == 'interface' and self.termination.type in NONCONNECTABLE_IFACE_TYPES:
            raise ValidationError(
                _("Cables cannot be terminated to {type_display} interfaces").format(
@@ -604,7 +614,7 @@ class CablePath(models.Model):
        Cable or WirelessLink connects (interfaces, console ports, circuit termination, etc.). All terminations must be
        of the same type and must belong to the same parent object.
        """
-        from circuits.models import CircuitTermination
+        from circuits.models import CircuitTermination, Circuit

        if not terminations:
            return None
@@ -626,8 +636,11 @@ class CablePath(models.Model):
            raise UnsupportedCablePath(_("All mid-span terminations must have the same termination type"))

        # All mid-span terminations must all be attached to the same device
-        if (not isinstance(terminations[0], PathEndpoint) and not
-                all(t.parent_object == terminations[0].parent_object for t in terminations[1:])):
+        if (
+            not isinstance(terminations[0], PathEndpoint) and
+            not isinstance(terminations[0].parent_object, Circuit) and
+            not all(t.parent_object == terminations[0].parent_object for t in terminations[1:])
+        ):
            raise UnsupportedCablePath(_("All mid-span terminations must have the same parent object"))

        # Check for a split path (e.g. rear port fanning out to multiple front ports with
@@ -771,32 +784,39 @@ class CablePath(models.Model):

            elif isinstance(remote_terminations[0], CircuitTermination):
                # Follow a CircuitTermination to its corresponding CircuitTermination (A to Z or vice versa)
                if len(remote_terminations) > 1:
                    is_split = True
                qs = Q()
                for remote_termination in remote_terminations:
                    qs |= Q(
                        circuit=remote_termination.circuit,
                        term_side='Z' if remote_termination.term_side == 'A' else 'A'
                    )

                # Get all circuit terminations
                circuit_terminations = CircuitTermination.objects.filter(qs)

                if not circuit_terminations.exists():
                    break
                circuit_termination = CircuitTermination.objects.filter(
                    circuit=remote_terminations[0].circuit,
                    term_side='Z' if remote_terminations[0].term_side == 'A' else 'A'
                ).first()
                if circuit_termination is None:
                    break
                elif circuit_termination._provider_network:
                elif all([ct._provider_network for ct in circuit_terminations]):
                    # Circuit terminates to a ProviderNetwork
                    path.extend([
                        [object_to_path_node(circuit_termination)],
                        [object_to_path_node(circuit_termination._provider_network)],
                        [object_to_path_node(ct) for ct in circuit_terminations],
                        [object_to_path_node(ct._provider_network) for ct in circuit_terminations],
                    ])
                    is_complete = True
                    break
                elif circuit_termination.termination and not circuit_termination.cable:
                elif all([ct.termination and not ct.cable for ct in circuit_terminations]):
                    # Circuit terminates to a Region/Site/etc.
                    path.extend([
                        [object_to_path_node(circuit_termination)],
                        [object_to_path_node(circuit_termination.termination)],
                        [object_to_path_node(ct) for ct in circuit_terminations],
                        [object_to_path_node(ct.termination) for ct in circuit_terminations],
                    ])
                    break
                elif any([ct.cable in links for ct in circuit_terminations]):
                    # No valid path
                    is_split = True
                    break

                terminations = [circuit_termination]
                terminations = circuit_terminations

            else:
                # Check for non-symmetric path
@@ -646,6 +646,7 @@ class Device(
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)],
|
||||
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
|
||||
)
|
||||
longitude = models.DecimalField(
|
||||
@@ -654,6 +655,7 @@ class Device(
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-180.0), MaxValueValidator(180.0)],
|
||||
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
|
||||
)
|
||||
services = GenericRelation(
|
||||
@@ -1154,7 +1156,6 @@ class VirtualChassis(PrimaryModel):
})

def delete(self, *args, **kwargs):

# Check for LAG interfaces split across member chassis
interfaces = Interface.objects.filter(
device__in=self.members.all(),
@@ -1168,6 +1169,13 @@ class VirtualChassis(PrimaryModel):
"interfaces."
).format(self=self, interfaces=InterfaceSpeedChoices))

# Clear vc_position and vc_priority on member devices BEFORE calling super().delete()
# This must be done here because on_delete=SET_NULL executes before pre_delete signal
for device in self.members.all():
device.vc_position = None
device.vc_priority = None
device.save()

return super().delete(*args, **kwargs)

@@ -1,5 +1,6 @@
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
from django.core.exceptions import ValidationError
|
||||
from django.core.validators import MaxValueValidator, MinValueValidator
|
||||
from django.db import models
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from timezone_field import TimeZoneField
|
||||
@@ -210,6 +211,7 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-90.0), MaxValueValidator(90.0)],
|
||||
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
|
||||
)
|
||||
longitude = models.DecimalField(
|
||||
@@ -218,6 +220,7 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
|
||||
decimal_places=6,
|
||||
blank=True,
|
||||
null=True,
|
||||
validators=[MinValueValidator(-180.0), MaxValueValidator(180.0)],
|
||||
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
|
||||
)
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy as _

from netbox.object_actions import ObjectAction

@@ -1,6 +1,6 @@
import logging

from django.db.models.signals import post_save, post_delete, pre_delete
from django.db.models.signals import post_save, post_delete
from django.dispatch import receiver

from dcim.choices import CableEndChoices, LinkStatusChoices
@@ -85,18 +85,6 @@ def assign_virtualchassis_master(instance, created, **kwargs):
master.save()


@receiver(pre_delete, sender=VirtualChassis)
def clear_virtualchassis_members(instance, **kwargs):
"""
When a VirtualChassis is deleted, nullify the vc_position and vc_priority fields of its prior members.
"""
devices = Device.objects.filter(virtual_chassis=instance.pk)
for device in devices:
device.vc_position = None
device.vc_priority = None
device.save()


#
# Cables
#

@@ -100,7 +100,7 @@ class RackTypeTable(NetBoxTable):
model = RackType
fields = (
'pk', 'id', 'model', 'manufacturer', 'form_factor', 'u_height', 'starting_unit', 'width', 'outer_width',
'outer_height', 'outer_depth', 'mounting_depth', 'airflow', 'weight', 'max_weight', 'description',
'outer_height', 'outer_depth', 'mounting_depth', 'weight', 'max_weight', 'description',
'comments', 'instance_count', 'tags', 'created', 'last_updated',
)
default_columns = (

@@ -2270,6 +2270,80 @@ class CablePathTestCase(TestCase):
|
||||
CableTraceSVG(interface1).render()
|
||||
CableTraceSVG(interface2).render()
|
||||
|
||||
def test_223_interface_to_interface_via_multiple_circuit_terminations(self):
|
||||
provider = Provider.objects.first()
|
||||
circuit_type = CircuitType.objects.first()
|
||||
circuit1 = self.circuit
|
||||
circuit2 = Circuit.objects.create(provider=provider, type=circuit_type, cid='Circuit 2')
|
||||
interface1 = Interface.objects.create(device=self.device, name='Interface 1')
|
||||
interface2 = Interface.objects.create(device=self.device, name='Interface 2')
|
||||
circuittermination1_A = CircuitTermination.objects.create(
|
||||
circuit=circuit1,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination1_Z = CircuitTermination.objects.create(
|
||||
circuit=circuit1,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
circuittermination2_A = CircuitTermination.objects.create(
|
||||
circuit=circuit2,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination2_Z = CircuitTermination.objects.create(
|
||||
circuit=circuit2,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
|
||||
# Create cables
|
||||
cable1 = Cable(
|
||||
a_terminations=[interface1],
|
||||
b_terminations=[circuittermination1_A, circuittermination2_A]
|
||||
)
|
||||
cable2 = Cable(
|
||||
a_terminations=[interface2],
|
||||
b_terminations=[circuittermination1_Z, circuittermination2_Z]
|
||||
)
|
||||
cable1.save()
|
||||
cable2.save()
|
||||
|
||||
self.assertEqual(CablePath.objects.count(), 2)
|
||||
|
||||
path1 = self.assertPathExists(
|
||||
(
|
||||
interface1,
|
||||
cable1,
|
||||
(circuittermination1_A, circuittermination2_A),
|
||||
(circuittermination1_Z, circuittermination2_Z),
|
||||
cable2,
|
||||
interface2
|
||||
|
||||
),
|
||||
is_active=True,
|
||||
is_complete=True,
|
||||
)
|
||||
interface1.refresh_from_db()
|
||||
self.assertPathIsSet(interface1, path1)
|
||||
|
||||
path2 = self.assertPathExists(
|
||||
(
|
||||
interface2,
|
||||
cable2,
|
||||
(circuittermination1_Z, circuittermination2_Z),
|
||||
(circuittermination1_A, circuittermination2_A),
|
||||
cable1,
|
||||
interface1
|
||||
|
||||
),
|
||||
is_active=True,
|
||||
is_complete=True,
|
||||
)
|
||||
interface2.refresh_from_db()
|
||||
self.assertPathIsSet(interface2, path2)
|
||||
|
||||
def test_301_create_path_via_existing_cable(self):
|
||||
"""
|
||||
[IF1] --C1-- [FP1] [RP1] --C2-- [RP2] [FP2] --C3-- [IF2]
|
||||
@@ -2510,3 +2584,33 @@ class CablePathTestCase(TestCase):
|
||||
is_active=True
|
||||
)
|
||||
self.assertEqual(CablePath.objects.count(), 0)
|
||||
|
||||
def test_402_exclude_circuit_loopback(self):
|
||||
interface = Interface.objects.create(device=self.device, name='Interface 1')
|
||||
circuittermination1 = CircuitTermination.objects.create(
|
||||
circuit=self.circuit,
|
||||
termination=self.site,
|
||||
term_side='A'
|
||||
)
|
||||
circuittermination2 = CircuitTermination.objects.create(
|
||||
circuit=self.circuit,
|
||||
termination=self.site,
|
||||
term_side='Z'
|
||||
)
|
||||
|
||||
# Create cables
|
||||
cable = Cable(
|
||||
a_terminations=[interface],
|
||||
b_terminations=[circuittermination1, circuittermination2]
|
||||
)
|
||||
cable.save()
|
||||
|
||||
path = self.assertPathExists(
|
||||
(interface, cable, (circuittermination1, circuittermination2)),
|
||||
is_active=True,
|
||||
is_complete=False,
|
||||
is_split=True
|
||||
)
|
||||
self.assertEqual(CablePath.objects.count(), 1)
|
||||
interface.refresh_from_db()
|
||||
self.assertPathIsSet(interface, path)
|
||||
|
||||
@@ -967,6 +967,18 @@ class CableTestCase(TestCase):
|
||||
with self.assertRaises(ValidationError):
|
||||
cable.clean()
|
||||
|
||||
def test_cannot_cable_to_mark_connected(self):
|
||||
"""
|
||||
Test that a cable cannot be connected to an interface marked as connected.
|
||||
"""
|
||||
device1 = Device.objects.get(name='TestDevice1')
|
||||
interface1 = Interface.objects.get(device__name='TestDevice2', name='eth1')
|
||||
|
||||
mark_connected_interface = Interface(device=device1, name='mark_connected1', mark_connected=True)
|
||||
cable = Cable(a_terminations=[mark_connected_interface], b_terminations=[interface1])
|
||||
with self.assertRaises(ValidationError):
|
||||
cable.clean()
|
||||
|
||||
|
||||
class VirtualDeviceContextTestCase(TestCase):
|
||||
|
||||
@@ -1019,3 +1031,92 @@ class VirtualDeviceContextTestCase(TestCase):
|
||||
vdc2 = VirtualDeviceContext(device=device, name="VDC 2", identifier=1, status='active')
|
||||
with self.assertRaises(ValidationError):
|
||||
vdc2.full_clean()
|
||||
|
||||
|
||||
class VirtualChassisTestCase(TestCase):
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
site = Site.objects.create(name='Test Site 1', slug='test-site-1')
|
||||
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
|
||||
devicetype = DeviceType.objects.create(
|
||||
manufacturer=manufacturer, model='Test Device Type 1', slug='test-device-type-1'
|
||||
)
|
||||
role = DeviceRole.objects.create(
|
||||
name='Test Device Role 1', slug='test-device-role-1', color='ff0000'
|
||||
)
|
||||
Device.objects.create(
|
||||
device_type=devicetype, role=role, name='TestDevice1', site=site
|
||||
)
|
||||
Device.objects.create(
|
||||
device_type=devicetype, role=role, name='TestDevice2', site=site
|
||||
)
|
||||
|
||||
def test_virtualchassis_deletion_clears_vc_position(self):
|
||||
"""
|
||||
Test that when a VirtualChassis is deleted, member devices have their
|
||||
vc_position and vc_priority fields set to None.
|
||||
"""
|
||||
devices = Device.objects.all()
|
||||
device1 = devices[0]
|
||||
device2 = devices[1]
|
||||
|
||||
# Create a VirtualChassis with two member devices
|
||||
vc = VirtualChassis.objects.create(name='Test VC', master=device1)
|
||||
|
||||
device1.virtual_chassis = vc
|
||||
device1.vc_position = 1
|
||||
device1.vc_priority = 10
|
||||
device1.save()
|
||||
|
||||
device2.virtual_chassis = vc
|
||||
device2.vc_position = 2
|
||||
device2.vc_priority = 20
|
||||
device2.save()
|
||||
|
||||
# Verify devices are members of the VC with positions set
|
||||
device1.refresh_from_db()
|
||||
device2.refresh_from_db()
|
||||
self.assertEqual(device1.virtual_chassis, vc)
|
||||
self.assertEqual(device1.vc_position, 1)
|
||||
self.assertEqual(device1.vc_priority, 10)
|
||||
self.assertEqual(device2.virtual_chassis, vc)
|
||||
self.assertEqual(device2.vc_position, 2)
|
||||
self.assertEqual(device2.vc_priority, 20)
|
||||
|
||||
# Delete the VirtualChassis
|
||||
vc.delete()
|
||||
|
||||
# Verify devices have vc_position and vc_priority set to None
|
||||
device1.refresh_from_db()
|
||||
device2.refresh_from_db()
|
||||
self.assertIsNone(device1.virtual_chassis)
|
||||
self.assertIsNone(device1.vc_position)
|
||||
self.assertIsNone(device1.vc_priority)
|
||||
self.assertIsNone(device2.virtual_chassis)
|
||||
self.assertIsNone(device2.vc_position)
|
||||
self.assertIsNone(device2.vc_priority)
|
||||
|
||||
def test_virtualchassis_duplicate_vc_position(self):
|
||||
"""
|
||||
Test that two devices cannot be assigned to the same vc_position
|
||||
within the same VirtualChassis.
|
||||
"""
|
||||
devices = Device.objects.all()
|
||||
device1 = devices[0]
|
||||
device2 = devices[1]
|
||||
|
||||
# Create a VirtualChassis
|
||||
vc = VirtualChassis.objects.create(name='Test VC')
|
||||
|
||||
# Assign first device to vc_position 1
|
||||
device1.virtual_chassis = vc
|
||||
device1.vc_position = 1
|
||||
device1.full_clean()
|
||||
device1.save()
|
||||
|
||||
# Try to assign second device to the same vc_position
|
||||
device2.virtual_chassis = vc
|
||||
device2.vc_position = 1
|
||||
with self.assertRaises(ValidationError):
|
||||
device2.full_clean()
|
||||
|
||||
@@ -986,6 +986,131 @@ inventory-items:
|
||||
ii1 = InventoryItemTemplate.objects.first()
|
||||
self.assertEqual(ii1.name, 'Inventory Item 1')
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_error_numbering(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
import_data = '''
|
||||
---
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-2001
|
||||
slug: test-2001
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 1-1
|
||||
- name: Module Bay 1-2
|
||||
---
|
||||
- manufacturer: Manufacturer 1
|
||||
model: TEST-2002
|
||||
slug: test-2002
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 2-1
|
||||
- name: Module Bay 2-2
|
||||
- not_name: Module Bay 2-3
|
||||
- manufacturer: Manufacturer 1
|
||||
model: TEST-2003
|
||||
slug: test-2003
|
||||
u_height: 1
|
||||
module-bays:
|
||||
- name: Module Bay 3-1
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 2 module-bays[3].name: This field is required.")
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_nolist(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
for value in ('', 'null', '3', '"My console port"', '{name: "My other console port"}'):
|
||||
with self.subTest(value=value):
|
||||
import_data = f'''
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-3000
|
||||
slug: test-3000
|
||||
u_height: 1
|
||||
console-ports: {value}
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 1 console-ports: Must be a list.")
|
||||
|
||||
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
|
||||
def test_import_nodict(self):
|
||||
# Add all required permissions to the test user
|
||||
self.add_permissions(
|
||||
'dcim.view_devicetype',
|
||||
'dcim.add_devicetype',
|
||||
'dcim.add_consoleporttemplate',
|
||||
'dcim.add_consoleserverporttemplate',
|
||||
'dcim.add_powerporttemplate',
|
||||
'dcim.add_poweroutlettemplate',
|
||||
'dcim.add_interfacetemplate',
|
||||
'dcim.add_frontporttemplate',
|
||||
'dcim.add_rearporttemplate',
|
||||
'dcim.add_modulebaytemplate',
|
||||
'dcim.add_devicebaytemplate',
|
||||
'dcim.add_inventoryitemtemplate',
|
||||
)
|
||||
|
||||
for value in ('', 'null', '3', '"My console port"', '["My other console port"]'):
|
||||
with self.subTest(value=value):
|
||||
import_data = f'''
|
||||
manufacturer: Manufacturer 1
|
||||
model: TEST-4000
|
||||
slug: test-4000
|
||||
u_height: 1
|
||||
console-ports:
|
||||
- {value}
|
||||
'''
|
||||
form_data = {
|
||||
'data': import_data,
|
||||
'format': 'yaml'
|
||||
}
|
||||
|
||||
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertContains(response, "Record 1 console-ports[1]: Must be a dictionary.")
|
||||
|
||||
def test_export_objects(self):
|
||||
url = reverse('dcim:devicetype_list')
|
||||
self.add_permissions('dcim.view_devicetype')
|
||||
@@ -2834,10 +2959,19 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
}
|
||||
|
||||
cls.csv_data = (
|
||||
"device,name,type,vrf.pk,poe_mode,poe_type",
|
||||
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
|
||||
"device,name,type,vrf.pk,poe_mode,poe_type,mode,untagged_vlan,tagged_vlans",
|
||||
(
|
||||
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
(
|
||||
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
(
|
||||
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
|
||||
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
|
||||
),
|
||||
)
|
||||
|
||||
cls.csv_update_data = (
|
||||
@@ -2885,6 +3019,43 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
self.client.post(self._get_url('bulk_delete'), data)
|
||||
self.assertEqual(device.interfaces.count(), 4) # Child & parent were both deleted
|
||||
|
||||
def test_rename_select_all_spans_pages(self):
|
||||
"""
|
||||
Tests the bulk rename functionality for interfaces spanning multiple pages in the UI.
|
||||
"""
|
||||
device_name = 'DeviceRename'
|
||||
device = create_test_device(device_name)
|
||||
# Create > default page size (25) so selection spans multiple pages
|
||||
for i in range(37):
|
||||
Interface.objects.create(device=device, name=f'eth{i}')
|
||||
|
||||
self.add_permissions('dcim.change_interface')
|
||||
|
||||
# Filter to this device's interfaces to simulate a real list filter
|
||||
get_qs = {'device_id': Device.objects.get(name=device_name).pk}
|
||||
post_url = f'{self._get_url("bulk_rename")}?device_id={get_qs["device_id"]}'
|
||||
|
||||
# Preview step: ensure 37 selected (not just one page)
|
||||
data = {'_preview': '1', '_all': '1', 'find': 'eth', 'replace': 'xe'}
|
||||
response = self.client.post(post_url, data=data)
|
||||
self.assertHttpStatus(response, 200)
|
||||
self.assertEqual(len(response.context['selected_objects']), 37)
|
||||
|
||||
# Extract pk[] just like the browser would submit on Apply
|
||||
# (either from the form's initial, or from selected_objects)
|
||||
pk_list = response.context['form'].initial.get('pk')
|
||||
if not pk_list:
|
||||
pk_list = [obj.pk for obj in response.context['selected_objects']]
|
||||
pk_list = [str(pk) for pk in pk_list]
|
||||
|
||||
# Apply step: include pk[] in the POST
|
||||
apply_data = {'_apply': '1', '_all': '1', 'find': 'eth', 'replace': 'xe', 'pk': pk_list}
|
||||
response = self.client.post(post_url, data=apply_data)
|
||||
|
||||
# On success the view redirects back to the return URL
|
||||
self.assertHttpStatus(response, 302)
|
||||
self.assertEqual(Interface.objects.filter(device=device, name__startswith='xe').count(), 37)
|
||||
|
||||
|
||||
class FrontPortTestCase(ViewTestCases.DeviceComponentViewTestCase):
|
||||
model = FrontPort
|
||||
|
||||
@@ -295,6 +295,7 @@ class RegionBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Region, 'bulk_rename', path='rename', detail=False)
|
||||
class RegionBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Region.objects.all()
|
||||
filterset = filtersets.RegionFilterSet
|
||||
|
||||
|
||||
@register_model_view(Region, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -426,6 +427,7 @@ class SiteGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(SiteGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SiteGroup.objects.all()
|
||||
filterset = filtersets.SiteGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(SiteGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -516,6 +518,7 @@ class SiteBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Site, 'bulk_rename', path='rename', detail=False)
|
||||
class SiteBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Site.objects.all()
|
||||
filterset = filtersets.SiteFilterSet
|
||||
|
||||
|
||||
@register_model_view(Site, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -625,6 +628,7 @@ class LocationBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Location, 'bulk_rename', path='rename', detail=False)
|
||||
class LocationBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Location.objects.all()
|
||||
filterset = filtersets.LocationFilterSet
|
||||
|
||||
|
||||
@register_model_view(Location, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -695,6 +699,7 @@ class RackRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RackRole, 'bulk_rename', path='rename', detail=False)
|
||||
class RackRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackRole.objects.all()
|
||||
filterset = filtersets.RackRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -760,6 +765,7 @@ class RackTypeBulkEditView(generic.BulkEditView):
|
||||
class RackTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RackType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.RackTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(RackType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -944,6 +950,7 @@ class RackBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Rack, 'bulk_rename', path='rename', detail=False)
|
||||
class RackBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Rack.objects.all()
|
||||
filterset = filtersets.RackFilterSet
|
||||
|
||||
|
||||
@register_model_view(Rack, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1083,6 +1090,7 @@ class ManufacturerBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Manufacturer, 'bulk_rename', path='rename', detail=False)
|
||||
class ManufacturerBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Manufacturer.objects.all()
|
||||
filterset = filtersets.ManufacturerFilterSet
|
||||
|
||||
|
||||
@register_model_view(Manufacturer, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1336,6 +1344,7 @@ class DeviceTypeBulkEditView(generic.BulkEditView):
|
||||
class DeviceTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceType.objects.all()
|
||||
field_name = 'model'
|
||||
filterset = filtersets.DeviceTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1397,6 +1406,7 @@ class ModuleTypeProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleTypeProfile.objects.all()
|
||||
filterset = filtersets.ModuleTypeProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleTypeProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1612,6 +1622,7 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleType, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleTypeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleType.objects.all()
|
||||
filterset = filtersets.ModuleTypeFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2100,6 +2111,7 @@ class DeviceRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceRole, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceRole.objects.all()
|
||||
filterset = filtersets.DeviceRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2175,6 +2187,7 @@ class PlatformBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Platform, 'bulk_rename', path='rename', detail=False)
|
||||
class PlatformBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Platform.objects.all()
|
||||
filterset = filtersets.PlatformFilterSet
|
||||
|
||||
|
||||
@register_model_view(Platform, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -2582,6 +2595,7 @@ class ConsolePortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsolePort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsolePortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsolePort.objects.all()
|
||||
filterset = filtersets.ConsolePortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsolePort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2652,6 +2666,7 @@ class ConsoleServerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConsoleServerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class ConsoleServerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConsoleServerPort.objects.all()
|
||||
filterset = filtersets.ConsoleServerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConsoleServerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2722,6 +2737,7 @@ class PowerPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPort, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPort.objects.all()
|
||||
filterset = filtersets.PowerPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2792,6 +2808,7 @@ class PowerOutletBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerOutlet, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerOutletBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerOutlet.objects.all()
|
||||
filterset = filtersets.PowerOutletFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerOutlet, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -2934,6 +2951,7 @@ class InterfaceBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Interface, 'bulk_rename', path='rename', detail=False)
|
||||
class InterfaceBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Interface.objects.all()
|
||||
filterset = filtersets.InterfaceFilterSet
|
||||
|
||||
|
||||
@register_model_view(Interface, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3005,6 +3023,7 @@ class FrontPortBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(FrontPort, 'bulk_rename', path='rename', detail=False)
|
||||
class FrontPortBulkRenameView(generic.BulkRenameView):
|
||||
queryset = FrontPort.objects.all()
|
||||
filterset = filtersets.FrontPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(FrontPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -3080,6 +3099,7 @@ class RearPortBulkRenameView(generic.BulkRenameView):
|
||||
@register_model_view(RearPort, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
class RearPortBulkDisconnectView(BulkDisconnectView):
|
||||
queryset = RearPort.objects.all()
|
||||
filterset = filtersets.RearPortFilterSet
|
||||
|
||||
|
||||
@register_model_view(RearPort, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3145,6 +3165,7 @@ class ModuleBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ModuleBay, 'bulk_rename', path='rename', detail=False)
|
||||
class ModuleBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ModuleBay.objects.all()
|
||||
filterset = filtersets.ModuleBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(ModuleBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3287,6 +3308,7 @@ class DeviceBayBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(DeviceBay, 'bulk_rename', path='rename', detail=False)
|
||||
class DeviceBayBulkRenameView(generic.BulkRenameView):
|
||||
queryset = DeviceBay.objects.all()
|
||||
filterset = filtersets.DeviceBayFilterSet
|
||||
|
||||
|
||||
@register_model_view(DeviceBay, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3348,6 +3370,7 @@ class InventoryItemBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItem, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItem.objects.all()
|
||||
filterset = filtersets.InventoryItemFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItem, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3431,6 +3454,7 @@ class InventoryItemRoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(InventoryItemRole, 'bulk_rename', path='rename', detail=False)
|
||||
class InventoryItemRoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = InventoryItemRole.objects.all()
|
||||
filterset = filtersets.InventoryItemRoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(InventoryItemRole, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3634,6 +3658,7 @@ class CableBulkEditView(generic.BulkEditView):
|
||||
class CableBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Cable.objects.all()
|
||||
field_name = 'label'
|
||||
filterset = filtersets.CableFilterSet
|
||||
|
||||
|
||||
@register_model_view(Cable, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3754,6 +3779,7 @@ class VirtualChassisEditView(ObjectPermissionRequiredMixin, GetReturnURLMixin, V
|
||||
def post(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
virtual_chassis.snapshot()
|
||||
VCMemberFormSet = modelformset_factory(
|
||||
model=Device,
|
||||
form=forms.DeviceVCMembershipForm,
|
||||
@@ -3806,9 +3832,7 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
return 'dcim.change_virtualchassis'
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
|
||||
initial_data = {k: request.GET[k] for k in request.GET}
|
||||
member_select_form = forms.VCMemberSelectForm(initial=initial_data)
|
||||
membership_form = forms.DeviceVCMembershipForm(initial=initial_data)
|
||||
@@ -3821,20 +3845,20 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
})
|
||||
|
||||
def post(self, request, pk):
|
||||
|
||||
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
|
||||
|
||||
member_select_form = forms.VCMemberSelectForm(request.POST)
|
||||
|
||||
if member_select_form.is_valid():
|
||||
|
||||
device = member_select_form.cleaned_data['device']
|
||||
device.snapshot()
|
||||
device.virtual_chassis = virtual_chassis
|
||||
data = {k: request.POST[k] for k in ['vc_position', 'vc_priority']}
|
||||
data = {
|
||||
'vc_position': request.POST['vc_position'],
|
||||
'vc_priority': request.POST['vc_priority'],
|
||||
}
|
||||
membership_form = forms.DeviceVCMembershipForm(data=data, validate_vc_position=True, instance=device)
|
||||
|
||||
if membership_form.is_valid():
|
||||
|
||||
membership_form.save()
|
||||
messages.success(request, mark_safe(
|
||||
_('Added member <a href="{url}">{device}</a>').format(
|
||||
@@ -3844,11 +3868,9 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
|
||||
|
||||
if '_addanother' in request.POST and safe_for_redirect(request.get_full_path()):
|
||||
return redirect(request.get_full_path())
|
||||
|
||||
return redirect(self.get_return_url(request, device))
|
||||
|
||||
else:
|
||||
|
||||
membership_form = forms.DeviceVCMembershipForm(data=request.POST)
|
||||
|
||||
return render(request, 'dcim/virtualchassis_add_member.html', {
|
||||
@@ -3866,7 +3888,6 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
return 'dcim.change_device'
|
||||
|
||||
def get(self, request, pk):
|
||||
|
||||
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
|
||||
form = ConfirmationForm(initial=request.GET)
|
||||
|
||||
@@ -3877,7 +3898,6 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
})
|
||||
|
||||
def post(self, request, pk):
|
||||
|
||||
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
|
||||
form = ConfirmationForm(request.POST)
|
||||
|
||||
@@ -3891,13 +3911,11 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
|
||||
return redirect(device.get_absolute_url())
|
||||
|
||||
if form.is_valid():
|
||||
|
||||
devices = Device.objects.filter(pk=device.pk)
|
||||
for device in devices:
|
||||
device.virtual_chassis = None
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
device.snapshot()
|
||||
device.virtual_chassis = None
|
||||
device.vc_position = None
|
||||
device.vc_priority = None
|
||||
device.save()
|
||||
|
||||
msg = _('Removed {device} from virtual chassis {chassis}').format(
|
||||
device=device,
|
||||
@@ -3931,6 +3949,7 @@ class VirtualChassisBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualChassis, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualChassisBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualChassis.objects.all()
|
||||
filterset = filtersets.VirtualChassisFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualChassis, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -3993,6 +4012,7 @@ class PowerPanelBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerPanel, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerPanelBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerPanel.objects.all()
|
||||
filterset = filtersets.PowerPanelFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerPanel, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -4050,6 +4070,7 @@ class PowerFeedBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(PowerFeed, 'bulk_rename', path='rename', detail=False)
|
||||
class PowerFeedBulkRenameView(generic.BulkRenameView):
|
||||
queryset = PowerFeed.objects.all()
|
||||
filterset = filtersets.PowerFeedFilterSet
|
||||
|
||||
|
||||
@register_model_view(PowerFeed, 'bulk_disconnect', path='disconnect', detail=False)
|
||||
@@ -4128,6 +4149,7 @@ class VirtualDeviceContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_rename', path='rename', detail=False)
|
||||
class VirtualDeviceContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VirtualDeviceContext.objects.all()
|
||||
filterset = filtersets.VirtualDeviceContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(VirtualDeviceContext, 'bulk_delete', path='delete', detail=False)
|
||||
|
||||
@@ -23,6 +23,6 @@ class ConfigTemplateSerializer(ChangeLogMessageSerializer, TaggableModelSerializ
fields = [
'id', 'url', 'display_url', 'display', 'name', 'description', 'environment_params', 'template_code',
'mime_type', 'file_name', 'file_extension', 'as_attachment', 'data_source', 'data_path', 'data_file',
'data_synced', 'tags', 'created', 'last_updated',
'auto_sync_enabled', 'data_synced', 'tags', 'created', 'last_updated',
]
brief_fields = ('id', 'url', 'display', 'name', 'description')

@@ -267,6 +267,14 @@ class ScriptViewSet(ModelViewSet):
|
||||
_ignore_model_permissions = True
|
||||
lookup_value_regex = '[^/]+' # Allow dots
|
||||
|
||||
def initial(self, request, *args, **kwargs):
|
||||
super().initial(request, *args, **kwargs)
|
||||
|
||||
# Restrict the view's QuerySet to allow only the permitted objects
|
||||
if request.user.is_authenticated:
|
||||
action = 'run' if request.method == 'POST' else 'view'
|
||||
self.queryset = self.queryset.restrict(request.user, action)
|
||||
|
||||
def _get_script(self, pk):
|
||||
# If pk is numeric, retrieve script by ID
|
||||
if pk.isnumeric():
|
||||
@@ -290,10 +298,12 @@ class ScriptViewSet(ModelViewSet):
|
||||
"""
|
||||
Run a Script identified by its numeric PK or module & name and return the pending Job as the result
|
||||
"""
|
||||
if not request.user.has_perm('extras.run_script'):
|
||||
raise PermissionDenied("This user does not have permission to run scripts.")
|
||||
|
||||
script = self._get_script(pk)
|
||||
|
||||
if not request.user.has_perm('extras.run_script', obj=script):
|
||||
raise PermissionDenied("This user does not have permission to run this script.")
|
||||
|
||||
input_serializer = serializers.ScriptInputSerializer(
|
||||
data=request.data,
|
||||
context={'script': script}
|
||||
|
||||
@@ -209,7 +209,10 @@ class ObjectCountsWidget(DashboardWidget):
url = get_action_url(model, action='list')
except NoReverseMatch:
url = None
qs = model.objects.restrict(request.user, 'view')
try:
qs = model.objects.restrict(request.user, 'view')
except AttributeError:
qs = model.objects.all()
# Apply any specified filters
if url and (filters := self.config.get('filters')):
params = dict_to_querydict(filters)

@@ -134,11 +134,18 @@ def process_event_rules(event_rules, object_type, event_type, data, username=Non
|
||||
|
||||
# Enqueue a Job to record the script's execution
|
||||
from extras.jobs import ScriptJob
|
||||
params = {
|
||||
"instance": event_rule.action_object,
|
||||
"name": script.name,
|
||||
"user": user,
|
||||
"data": event_data
|
||||
}
|
||||
if snapshots:
|
||||
params["snapshots"] = snapshots
|
||||
if request:
|
||||
params["request"] = copy_safe_request(request)
|
||||
ScriptJob.enqueue(
|
||||
instance=event_rule.action_object,
|
||||
name=script.name,
|
||||
user=user,
|
||||
data=event_data
|
||||
**params
|
||||
)
|
||||
|
||||
# Notification groups
|
||||
|
||||
@@ -398,8 +398,12 @@ class ConfigTemplateBulkEditForm(ChangelogMessageMixin, BulkEditForm):
|
||||
required=False,
|
||||
widget=BulkEditNullBooleanSelect()
|
||||
)
|
||||
|
||||
nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension')
|
||||
auto_sync_enabled = forms.NullBooleanField(
|
||||
label=_('Auto sync enabled'),
|
||||
required=False,
|
||||
widget=BulkEditNullBooleanSelect()
|
||||
)
|
||||
nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension', 'auto_sync_enabled',)
|
||||
|
||||
|
||||
class ImageAttachmentBulkEditForm(ChangelogMessageMixin, BulkEditForm):
|
||||
|
||||
@@ -5,7 +5,7 @@ from django.contrib.postgres.forms import SimpleArrayField
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from core.models import ObjectType
|
||||
from core.models import DataFile, DataSource, ObjectType
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from netbox.events import get_event_type_choices
|
||||
@@ -160,14 +160,41 @@ class ConfigContextProfileImportForm(NetBoxModelImportForm):
|
||||
|
||||
|
||||
class ConfigTemplateImportForm(CSVModelForm):
|
||||
data_source = CSVModelChoiceField(
|
||||
label=_('Data source'),
|
||||
queryset=DataSource.objects.all(),
|
||||
required=False,
|
||||
to_field_name='name',
|
||||
help_text=_('Data source which provides the data file')
|
||||
)
|
||||
data_file = CSVModelChoiceField(
|
||||
label=_('Data file'),
|
||||
queryset=DataFile.objects.all(),
|
||||
required=False,
|
||||
to_field_name='path',
|
||||
help_text=_('Data file containing the template code')
|
||||
)
|
||||
auto_sync_enabled = forms.BooleanField(
|
||||
required=False,
|
||||
label=_('Auto sync enabled'),
|
||||
help_text=_("Enable automatic synchronization of template content when the data file is updated")
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = ConfigTemplate
|
||||
fields = (
|
||||
'name', 'description', 'template_code', 'environment_params', 'mime_type', 'file_name', 'file_extension',
|
||||
'as_attachment', 'tags',
|
||||
'name', 'description', 'template_code', 'data_source', 'data_file', 'auto_sync_enabled',
|
||||
'environment_params', 'mime_type', 'file_name', 'file_extension', 'as_attachment', 'tags',
|
||||
)
|
||||
|
||||
def clean(self):
super().clean()

# Ensure that either local template code or a data file has been provided
if not self.data.get('template_code') and not self.data.get('data_file'):
raise forms.ValidationError(_("Must specify either local content or a data file"))
return self.cleaned_data['template_code']


class SavedFilterImportForm(CSVModelForm):
|
||||
object_types = CSVMultipleContentTypeField(
|
||||
@@ -272,6 +299,10 @@ class JournalEntryImportForm(NetBoxModelImportForm):
|
||||
choices=JournalEntryKindChoices,
|
||||
help_text=_('The classification of entry')
|
||||
)
|
||||
comments = forms.CharField(
|
||||
label=_('Comments'),
|
||||
required=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = JournalEntry
|
||||
|
||||
@@ -42,17 +42,20 @@ class CustomFieldFilterForm(SavedFiltersMixin, FilterForm):
|
||||
model = CustomField
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id'),
|
||||
FieldSet(
|
||||
'type', 'related_object_type_id', 'group_name', 'weight', 'required', 'unique', 'choice_set_id',
|
||||
name=_('Attributes')
|
||||
),
|
||||
FieldSet('object_type_id', 'type', 'group_name', 'weight', 'required', 'unique', name=_('Attributes')),
|
||||
FieldSet('choice_set_id', 'related_object_type_id', name=_('Type Options')),
|
||||
FieldSet('ui_visible', 'ui_editable', 'is_cloneable', name=_('Behavior')),
|
||||
FieldSet('validation_minimum', 'validation_maximum', 'validation_regex', name=_('Validation')),
|
||||
)
|
||||
related_object_type_id = ContentTypeMultipleChoiceField(
|
||||
object_type_id = ContentTypeMultipleChoiceField(
|
||||
queryset=ObjectType.objects.with_feature('custom_fields'),
|
||||
required=False,
|
||||
label=_('Related object type')
|
||||
label=_('Object types'),
|
||||
)
|
||||
related_object_type_id = ContentTypeMultipleChoiceField(
|
||||
queryset=ObjectType.objects.public(),
|
||||
required=False,
|
||||
label=_('Related object type'),
|
||||
)
|
||||
type = forms.MultipleChoiceField(
|
||||
choices=CustomFieldTypeChoices,
|
||||
@@ -136,12 +139,12 @@ class CustomLinkFilterForm(SavedFiltersMixin, FilterForm):
|
||||
model = CustomLink
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id'),
|
||||
FieldSet('object_type', 'enabled', 'new_window', 'weight', name=_('Attributes')),
|
||||
FieldSet('object_type_id', 'enabled', 'new_window', 'weight', name=_('Attributes')),
|
||||
)
|
||||
object_type = ContentTypeMultipleChoiceField(
|
||||
object_type_id = ContentTypeMultipleChoiceField(
|
||||
label=_('Object types'),
|
||||
queryset=ObjectType.objects.with_feature('custom_links'),
|
||||
required=False
|
||||
required=False,
|
||||
)
|
||||
enabled = forms.NullBooleanField(
|
||||
label=_('Enabled'),
|
||||
@@ -230,12 +233,12 @@ class SavedFilterFilterForm(SavedFiltersMixin, FilterForm):
|
||||
model = SavedFilter
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id'),
|
||||
FieldSet('object_type', 'enabled', 'shared', 'weight', name=_('Attributes')),
|
||||
FieldSet('object_type_id', 'enabled', 'shared', 'weight', name=_('Attributes')),
|
||||
)
|
||||
object_type = ContentTypeMultipleChoiceField(
|
||||
object_type_id = ContentTypeMultipleChoiceField(
|
||||
label=_('Object types'),
|
||||
queryset=ObjectType.objects.public(),
|
||||
required=False
|
||||
required=False,
|
||||
)
|
||||
enabled = forms.NullBooleanField(
|
||||
label=_('Enabled'),
|
||||
@@ -476,7 +479,7 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
|
||||
model = ConfigTemplate
|
||||
fieldsets = (
|
||||
FieldSet('q', 'filter_id', 'tag'),
|
||||
FieldSet('data_source_id', 'data_file_id', name=_('Data')),
|
||||
FieldSet('data_source_id', 'data_file_id', 'auto_sync_enabled', name=_('Data')),
|
||||
FieldSet('mime_type', 'file_name', 'file_extension', 'as_attachment', name=_('Rendering'))
|
||||
)
|
||||
data_source_id = DynamicModelMultipleChoiceField(
|
||||
@@ -492,6 +495,13 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
|
||||
'source_id': '$data_source_id'
|
||||
}
|
||||
)
|
||||
auto_sync_enabled = forms.NullBooleanField(
|
||||
label=_('Auto sync enabled'),
|
||||
required=False,
|
||||
widget=forms.Select(
|
||||
choices=BOOLEAN_WITH_BLANK_CHOICES
|
||||
)
|
||||
)
|
||||
tag = TagFilterField(ConfigTemplate)
|
||||
mime_type = forms.CharField(
|
||||
required=False,
|
||||
|
||||
@@ -793,7 +793,7 @@ class JournalEntryForm(NetBoxModelForm):
label=_('Kind'),
choices=JournalEntryKindChoices
)
comments = CommentField()
comments = CommentField(required=True)

class Meta:
model = JournalEntry

@@ -3,7 +3,6 @@ import importlib.util
|
||||
import os
|
||||
import sys
|
||||
|
||||
from django.core.cache import cache
|
||||
from django.core.files.storage import storages
|
||||
from django.db import models
|
||||
from django.http import HttpResponse
|
||||
@@ -31,15 +30,7 @@ class CustomStoragesLoader(importlib.abc.Loader):
|
||||
return None # Use default module creation
|
||||
|
||||
def exec_module(self, module):
|
||||
# Cache storage for 5 minutes (300 seconds)
|
||||
cache_key = "storage_scripts"
|
||||
storage = cache.get(cache_key)
|
||||
|
||||
if storage is None:
|
||||
storage = storages['scripts']
|
||||
cache.set(cache_key, storage, timeout=300) # 5 minutes
|
||||
|
||||
with storage.open(self.filename, 'rb') as f:
|
||||
with storages["scripts"].open(self.filename, 'rb') as f:
|
||||
code = f.read()
|
||||
exec(code, module.__dict__)
|
||||
|
||||
|
||||
@@ -126,7 +126,7 @@ class ScriptModule(PythonModuleMixin, JobsMixin, ManagedFile):
ordered.extend(script_objects.values())
return ordered

@property
@cached_property
def module_scripts(self):

def _get_name(cls):

@@ -632,6 +632,10 @@ class ConfigTemplateTable(NetBoxTable):
|
||||
orderable=False,
|
||||
verbose_name=_('Synced')
|
||||
)
|
||||
auto_sync_enabled = columns.BooleanColumn(
|
||||
verbose_name=_('Auto Sync Enabled'),
|
||||
orderable=False,
|
||||
)
|
||||
mime_type = tables.Column(
|
||||
verbose_name=_('MIME Type')
|
||||
)
|
||||
|
||||
@@ -1,4 +1,6 @@
|
||||
from django import template
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext as _
|
||||
|
||||
|
||||
register = template.Library()
|
||||
@@ -8,4 +10,16 @@ register = template.Library()
|
||||
def render_widget(context, widget):
|
||||
request = context['request']
|
||||
|
||||
return widget.render(request)
|
||||
try:
|
||||
return widget.render(request)
|
||||
except Exception as e:
|
||||
message1 = _('An error was encountered when attempting to render this widget:')
|
||||
message2 = _('Please try reconfiguring the widget, or remove it from your dashboard.')
|
||||
return mark_safe(f"""
|
||||
<p>
|
||||
<span class="text-danger"><i class="mdi mdi-alert"></i></span>
|
||||
{message1}
|
||||
</p>
|
||||
<p class="font-monospace ps-3">{e}</p>
|
||||
<p>{message2}</p>
|
||||
""")
|
||||
|
||||
@@ -894,18 +894,13 @@ class ScriptTest(APITestCase):
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
self.add_permissions('extras.view_script')
|
||||
|
||||
# Monkey-patch the Script model to return our TestScriptClass above
|
||||
Script.python_class = self.python_class
|
||||
|
||||
def test_get_script(self):
|
||||
module = ScriptModule.objects.get(
|
||||
file_root=ManagedFileRootPathChoices.SCRIPTS,
|
||||
file_path='script.py',
|
||||
)
|
||||
script = module.scripts.all().first()
|
||||
url = reverse('extras-api:script-detail', kwargs={'pk': script.pk})
|
||||
response = self.client.get(url, **self.header)
|
||||
response = self.client.get(self.url, **self.header)
|
||||
|
||||
self.assertEqual(response.data['name'], self.TestScriptClass.Meta.name)
|
||||
self.assertEqual(response.data['vars']['var1'], 'StringVar')
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.urls import reverse
|
||||
from django.test import tag
|
||||
|
||||
from core.choices import ManagedFileRootPathChoices
|
||||
from core.events import *
|
||||
from core.models import ObjectType
|
||||
from dcim.models import DeviceType, Manufacturer, Site
|
||||
from extras.choices import *
|
||||
from extras.models import *
|
||||
from extras.scripts import Script as PythonClass, IntegerVar, BooleanVar
|
||||
from users.models import Group, User
|
||||
from utilities.testing import ViewTestCases, TestCase
|
||||
|
||||
@@ -897,3 +900,70 @@ class ScriptListViewTest(TestCase):
|
||||
response = self.client.get(url, {'embedded': 'true'})
|
||||
self.assertEqual(response.status_code, 200)
|
||||
self.assertTemplateUsed(response, 'extras/inc/script_list_content.html')
|
||||
|
||||
|
||||
class ScriptValidationErrorTest(TestCase):
|
||||
user_permissions = ['extras.view_script', 'extras.run_script']
|
||||
|
||||
class TestScriptMixin:
|
||||
bar = IntegerVar(min_value=0, max_value=30, default=30)
|
||||
|
||||
class TestScriptClass(TestScriptMixin, PythonClass):
|
||||
class Meta:
|
||||
name = 'Test script'
|
||||
commit_default = False
|
||||
fieldsets = (("Logging", ("debug_mode",)),)
|
||||
|
||||
debug_mode = BooleanVar(default=False)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
module = ScriptModule.objects.create(file_root=ManagedFileRootPathChoices.SCRIPTS, file_path='test_script.py')
|
||||
cls.script = Script.objects.create(module=module, name='Test script', is_executable=True)
|
||||
|
||||
def setUp(self):
|
||||
super().setUp()
|
||||
Script.python_class = property(lambda self: ScriptValidationErrorTest.TestScriptClass)
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_displays_message(self):
|
||||
from unittest.mock import patch
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'debug_mode': 'true', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 1)
|
||||
self.assertEqual(str(messages[0]), "bar: This field is required.")
|
||||
|
||||
@tag('regression')
|
||||
def test_script_validation_error_no_toast_for_fieldset_fields(self):
|
||||
from unittest.mock import patch, PropertyMock
|
||||
|
||||
class FieldsetScript(PythonClass):
|
||||
class Meta:
|
||||
name = 'Fieldset test'
|
||||
commit_default = False
|
||||
fieldsets = (("Fields", ("required_field",)),)
|
||||
|
||||
required_field = IntegerVar(min_value=10)
|
||||
|
||||
def run(self, data, commit):
|
||||
return "Complete"
|
||||
|
||||
url = reverse('extras:script', kwargs={'pk': self.script.pk})
|
||||
|
||||
with patch.object(Script, 'python_class', new_callable=PropertyMock) as mock_python_class:
|
||||
mock_python_class.return_value = FieldsetScript
|
||||
with patch('extras.views.get_workers_for_queue', return_value=['worker']):
|
||||
response = self.client.post(url, {'required_field': '5', '_commit': 'true'})
|
||||
|
||||
self.assertEqual(response.status_code, 200)
|
||||
messages = list(response.context['messages'])
|
||||
self.assertEqual(len(messages), 0)
|
||||
|
||||
@@ -4,7 +4,7 @@ from django.contrib.auth.mixins import LoginRequiredMixin
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.core.paginator import EmptyPage
|
||||
from django.db.models import Count, Q
|
||||
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse
|
||||
from django.http import HttpResponseBadRequest, HttpResponseForbidden, HttpResponse, Http404
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.urls import reverse
|
||||
from django.utils import timezone
|
||||
@@ -25,7 +25,7 @@ from netbox.object_actions import *
|
||||
from netbox.views import generic
|
||||
from netbox.views.generic.mixins import TableMixin
|
||||
from utilities.forms import ConfirmationForm, get_field_value
|
||||
from utilities.htmx import htmx_partial
|
||||
from utilities.htmx import htmx_partial, htmx_maybe_redirect_current_page
|
||||
from utilities.paginator import EnhancedPaginator, get_paginate_count
|
||||
from utilities.query import count_related
|
||||
from utilities.querydict import normalize_querydict
|
||||
@@ -101,6 +101,7 @@ class CustomFieldBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomField, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomField.objects.all()
|
||||
filterset = filtersets.CustomFieldFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomField, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -175,6 +176,7 @@ class CustomFieldChoiceSetBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomFieldChoiceSetBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomFieldChoiceSet.objects.all()
|
||||
filterset = filtersets.CustomFieldChoiceSetFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomFieldChoiceSet, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -230,6 +232,7 @@ class CustomLinkBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(CustomLink, 'bulk_rename', path='rename', detail=False)
|
||||
class CustomLinkBulkRenameView(generic.BulkRenameView):
|
||||
queryset = CustomLink.objects.all()
|
||||
filterset = filtersets.CustomLinkFilterSet
|
||||
|
||||
|
||||
@register_model_view(CustomLink, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -286,6 +289,7 @@ class ExportTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ExportTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ExportTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ExportTemplate.objects.all()
|
||||
filterset = filtersets.ExportTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ExportTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -351,6 +355,7 @@ class SavedFilterBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(SavedFilter, 'bulk_rename', path='rename', detail=False)
|
||||
class SavedFilterBulkRenameView(generic.BulkRenameView):
|
||||
queryset = SavedFilter.objects.all()
|
||||
filterset = filtersets.SavedFilterFilterSet
|
||||
|
||||
|
||||
@register_model_view(SavedFilter, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -413,6 +418,7 @@ class TableConfigBulkEditView(SharedObjectViewMixin, generic.BulkEditView):
|
||||
@register_model_view(TableConfig, 'bulk_rename', path='rename', detail=False)
|
||||
class TableConfigBulkRenameView(generic.BulkRenameView):
|
||||
queryset = TableConfig.objects.all()
|
||||
filterset = filtersets.TableConfigFilterSet
|
||||
|
||||
|
||||
@register_model_view(TableConfig, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -499,6 +505,7 @@ class NotificationGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(NotificationGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class NotificationGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = NotificationGroup.objects.all()
|
||||
filterset = filtersets.NotificationGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(NotificationGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -518,8 +525,9 @@ class NotificationsView(LoginRequiredMixin, View):
|
||||
"""
|
||||
def get(self, request):
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread(),
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
|
||||
|
||||
@@ -528,6 +536,7 @@ class NotificationReadView(LoginRequiredMixin, View):
|
||||
"""
|
||||
Mark the Notification read and redirect the user to its attached object.
|
||||
"""
|
||||
|
||||
def get(self, request, pk):
|
||||
# Mark the Notification as read
|
||||
notification = get_object_or_404(request.user.notifications, pk=pk)
|
||||
@@ -541,18 +550,48 @@ class NotificationReadView(LoginRequiredMixin, View):
|
||||
return redirect('account:notifications')
|
||||
|
||||
|
||||
@register_model_view(Notification, name='dismiss_all', path='dismiss-all', detail=False)
|
||||
class NotificationDismissAllView(LoginRequiredMixin, View):
|
||||
"""
|
||||
Convenience view to clear all *unread* notifications for the current user.
|
||||
"""
|
||||
|
||||
def get(self, request):
|
||||
request.user.notifications.unread().delete()
|
||||
if htmx_partial(request):
|
||||
# If a user is currently on the notification page, redirect there (full repaint)
|
||||
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
|
||||
if redirect_resp:
|
||||
return redirect_resp
|
||||
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
return redirect('account:notifications')
|
||||
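# Sketch of the assumed contract for the new helper (not its implementation):
# htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
# is expected to return a redirect-style response only when the HTMX request
# originated from the notifications page itself, and None otherwise, letting
# callers fall through to re-rendering the dropdown partial.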
|
||||
|
||||
@register_model_view(Notification, 'dismiss')
|
||||
class NotificationDismissView(LoginRequiredMixin, View):
|
||||
"""
|
||||
A convenience view that allows dismissing a single notification with one click.
|
||||
"""
|
||||
|
||||
def get(self, request, pk):
|
||||
notification = get_object_or_404(request.user.notifications, pk=pk)
|
||||
notification.delete()
|
||||
|
||||
if htmx_partial(request):
|
||||
# If a user is currently on the notification page, redirect there (full repaint)
|
||||
redirect_resp = htmx_maybe_redirect_current_page(request, 'account:notifications', preserve_query=True)
|
||||
if redirect_resp:
|
||||
return redirect_resp
|
||||
|
||||
return render(request, 'htmx/notifications.html', {
|
||||
'notifications': request.user.notifications.unread()[:10],
|
||||
'total_count': request.user.notifications.count(),
|
||||
'unread_count': request.user.notifications.unread().count(),
|
||||
})
|
||||
|
||||
return redirect('account:notifications')
|
||||
@@ -650,6 +689,7 @@ class WebhookBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Webhook, 'bulk_rename', path='rename', detail=False)
|
||||
class WebhookBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Webhook.objects.all()
|
||||
filterset = filtersets.WebhookFilterSet
|
||||
|
||||
|
||||
@register_model_view(Webhook, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -705,6 +745,7 @@ class EventRuleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(EventRule, 'bulk_rename', path='rename', detail=False)
|
||||
class EventRuleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = EventRule.objects.all()
|
||||
filterset = filtersets.EventRuleFilterSet
|
||||
|
||||
|
||||
@register_model_view(EventRule, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -841,6 +882,7 @@ class ConfigContextProfileBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContextProfile, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextProfileBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContextProfile.objects.all()
|
||||
filterset = filtersets.ConfigContextProfileFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContextProfile, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -929,6 +971,7 @@ class ConfigContextBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigContext, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigContextBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigContext.objects.all()
|
||||
filterset = filtersets.ConfigContextFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigContext, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1020,6 +1063,7 @@ class ConfigTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ConfigTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ConfigTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ConfigTemplate.objects.all()
|
||||
filterset = filtersets.ConfigTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ConfigTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1143,6 +1187,7 @@ class ImageAttachmentBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ImageAttachment, 'bulk_rename', path='rename', detail=False)
|
||||
class ImageAttachmentBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ImageAttachment.objects.all()
|
||||
filterset = filtersets.ImageAttachmentFilterSet
|
||||
|
||||
|
||||
@register_model_view(ImageAttachment, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1485,6 +1530,15 @@ class ScriptView(BaseScriptView):
|
||||
)
|
||||
|
||||
return redirect('extras:script_result', job_pk=job.pk)
|
||||
else:
|
||||
fieldset_fields = {field for _, fields in script_class.get_fieldsets() for field in fields}
|
||||
hidden_errors = {
|
||||
field: errors for field, errors in form.errors.items()
|
||||
if field not in fieldset_fields
|
||||
}
|
||||
if hidden_errors:
|
||||
error_msg = '; '.join(f"{field}: {', '.join(errors)}" for field, errors in hidden_errors.items())
|
||||
messages.error(request, error_msg)
|
||||
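# Illustrative outcome (sketch, tied to the regression tests above): a script
# variable missing from every fieldset (e.g. `bar`) surfaces as a toast via
# messages.error reading "bar: This field is required.", while a failing field
# that does appear in a fieldset (e.g. `required_field`) is rendered inline
# with the form and produces no toast.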
|
||||
return render(request, 'extras/script.html', {
|
||||
'object': script,
|
||||
|
||||
@@ -369,6 +369,20 @@ class IPAddressImportForm(NetBoxModelImportForm):
|
||||
**{f"virtual_machine__{self.fields['virtual_machine'].to_field_name}": data['virtual_machine']}
|
||||
)
|
||||
|
||||
def clean_is_primary(self):
|
||||
# Make sure is_primary is None when it's not included in the uploaded data
|
||||
if 'is_primary' not in self.data:
|
||||
return None
|
||||
else:
|
||||
return self.cleaned_data['is_primary']
|
||||
|
||||
def clean_is_oob(self):
|
||||
# Make sure is_oob is None when it's not included in the uploaded data
|
||||
if 'is_oob' not in self.data:
|
||||
return None
|
||||
else:
|
||||
return self.cleaned_data['is_oob']
|
||||
|
||||
def clean(self):
|
||||
super().clean()
|
||||
|
||||
@@ -411,18 +425,18 @@ class IPAddressImportForm(NetBoxModelImportForm):
|
||||
ipaddress = super().save(*args, **kwargs)
|
||||
|
||||
# Set as primary for device/VM
|
||||
if self.cleaned_data.get('is_primary'):
|
||||
if self.cleaned_data.get('is_primary') is not None:
|
||||
parent = self.cleaned_data.get('device') or self.cleaned_data.get('virtual_machine')
|
||||
if self.instance.address.version == 4:
|
||||
parent.primary_ip4 = ipaddress
|
||||
parent.primary_ip4 = ipaddress if self.cleaned_data.get('is_primary') else None
|
||||
elif self.instance.address.version == 6:
|
||||
parent.primary_ip6 = ipaddress
|
||||
parent.primary_ip6 = ipaddress if self.cleaned_data.get('is_primary') else None
|
||||
parent.save()
|
||||
|
||||
# Set as OOB for device
|
||||
if self.cleaned_data.get('is_oob'):
|
||||
if self.cleaned_data.get('is_oob') is not None:
|
||||
parent = self.cleaned_data.get('device')
|
||||
parent.oob_ip = ipaddress
|
||||
parent.oob_ip = ipaddress if self.cleaned_data.get('is_oob') else None
|
||||
parent.save()
|
||||
|
||||
return ipaddress
|
||||
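# Worked example (illustrative only, not part of the form): the tri-state
# behaviour this enables for a CSV import row with a Device parent:
#   column omitted          -> cleaned value is None  -> primary_ip4 left untouched
#   is_primary = "true"     -> primary_ip4 set to the imported address
#   is_primary = "false"    -> primary_ip4 cleared (set to None)
# The same pattern applies to is_oob and the device's oob_ip field.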
|
||||
@@ -79,12 +79,36 @@ class ASNRangeFilter(TenancyFilterMixin, OrganizationalModelFilterMixin):
|
||||
|
||||
@strawberry_django.filter_type(models.Aggregate, lookups=True)
|
||||
class AggregateFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMixin):
|
||||
prefix: Annotated['PrefixFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
|
||||
prefix_id: ID | None = strawberry_django.filter_field()
|
||||
prefix: FilterLookup[str] | None = strawberry_django.filter_field()
|
||||
rir: Annotated['RIRFilter', strawberry.lazy('ipam.graphql.filters')] | None = strawberry_django.filter_field()
|
||||
rir_id: ID | None = strawberry_django.filter_field()
|
||||
date_added: DateFilterLookup[date] | None = strawberry_django.filter_field()
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def contains(self, value: list[str], prefix) -> Q:
|
||||
"""
|
||||
Return aggregates whose `prefix` contains any of the supplied networks.
|
||||
Mirrors PrefixFilter.contains but operates on the Aggregate.prefix field itself.
|
||||
"""
|
||||
if not value:
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(**{f"{prefix}prefix__net_contains": query})
|
||||
return q
|
||||
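# Illustrative use (sketch): contains(["100.64.16.0/24"]) builds
# Q(prefix__net_contains="100.64.16.0/24"), so a GraphQL query such as
#   aggregate_list(filters: { contains: ["100.64.16.0/24"] }) { prefix }
# matches an aggregate like 100.64.0.0/10, while malformed entries are
# skipped rather than raising (see the regression tests further below).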
|
||||
@strawberry_django.filter_field()
|
||||
def family(
|
||||
self,
|
||||
value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
|
||||
prefix,
|
||||
) -> Q:
|
||||
return Q(**{f"{prefix}prefix__family": value.value})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.FHRPGroup, lookups=True)
|
||||
class FHRPGroupFilter(PrimaryModelFilterMixin):
|
||||
@@ -119,28 +143,28 @@ class FHRPGroupAssignmentFilter(BaseObjectTypeFilterMixin, ChangeLogFilterMixin)
|
||||
)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def device_id(self, queryset, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('id', value)
|
||||
def device_id(self, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('id', value, prefix)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def device(self, value: list[str], prefix) -> Q:
|
||||
return self.filter_device('name', value)
|
||||
return self.filter_device('name', value, prefix)
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def virtual_machine_id(self, value: list[str], prefix) -> Q:
|
||||
return Q(interface_id__in=VMInterface.objects.filter(virtual_machine_id__in=value))
|
||||
return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine_id__in=value)})
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def virtual_machine(self, value: list[str], prefix) -> Q:
|
||||
return Q(interface_id__in=VMInterface.objects.filter(virtual_machine__name__in=value))
|
||||
return Q(**{f"{prefix}interface_id__in": VMInterface.objects.filter(virtual_machine__name__in=value)})
|
||||
|
||||
def filter_device(self, field, value) -> Q:
|
||||
def filter_device(self, field, value, prefix) -> Q:
|
||||
"""Helper to standardize logic for device and device_id filters"""
|
||||
devices = Device.objects.filter(**{f'{field}__in': value})
|
||||
interface_ids = []
|
||||
for device in devices:
|
||||
interface_ids.extend(device.vc_interfaces().values_list('id', flat=True))
|
||||
return Q(interface_id__in=interface_ids)
|
||||
return Q(**{f"{prefix}interface_id__in": interface_ids})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.IPAddress, lookups=True)
|
||||
@@ -180,9 +204,9 @@ class IPAddressFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilter
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(address__net_host_contained=query)
|
||||
except (AddrFormatError, ValueError):
|
||||
return Q()
|
||||
continue
|
||||
q |= Q(**{f"{prefix}address__net_host_contained": query})
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
@@ -217,9 +241,14 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
|
||||
for subnet in value:
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(start_address__net_host_contained=query, end_address__net_host_contained=query)
|
||||
except (AddrFormatError, ValueError):
|
||||
return Q()
|
||||
continue
|
||||
q |= Q(
|
||||
**{
|
||||
f"{prefix}start_address__net_host_contained": query,
|
||||
f"{prefix}end_address__net_host_contained": query,
|
||||
}
|
||||
)
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
@@ -228,10 +257,17 @@ class IPRangeFilter(ContactFilterMixin, TenancyFilterMixin, PrimaryModelFilterMi
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
net = netaddr.IPNetwork(subnet.strip())
|
||||
try:
|
||||
net = netaddr.IPNetwork(subnet.strip())
|
||||
query_start = str(netaddr.IPAddress(net.first))
|
||||
query_end = str(netaddr.IPAddress(net.last))
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(
|
||||
start_address__host__inet__lte=str(netaddr.IPAddress(net.first)),
|
||||
end_address__host__inet__gte=str(netaddr.IPAddress(net.last)),
|
||||
**{
|
||||
f"{prefix}start_address__host__inet__lte": query_start,
|
||||
f"{prefix}end_address__host__inet__gte": query_end,
|
||||
}
|
||||
)
|
||||
return q
|
||||
|
||||
@@ -257,10 +293,21 @@ class PrefixFilter(ContactFilterMixin, ScopedFilterMixin, TenancyFilterMixin, Pr
|
||||
return Q()
|
||||
q = Q()
|
||||
for subnet in value:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
q |= Q(prefix__net_contains=query)
|
||||
try:
|
||||
query = str(netaddr.IPNetwork(subnet.strip()).cidr)
|
||||
except (AddrFormatError, ValueError):
|
||||
continue
|
||||
q |= Q(**{f"{prefix}prefix__net_contains": query})
|
||||
return q
|
||||
|
||||
@strawberry_django.filter_field()
|
||||
def family(
|
||||
self,
|
||||
value: Annotated['IPAddressFamilyEnum', strawberry.lazy('ipam.graphql.enums')],
|
||||
prefix,
|
||||
) -> Q:
|
||||
return Q(**{f"{prefix}prefix__family": value.value})
|
||||
|
||||
|
||||
@strawberry_django.filter_type(models.RIR, lookups=True)
|
||||
class RIRFilter(OrganizationalModelFilterMixin):
|
||||
|
||||
@@ -0,0 +1,27 @@
|
||||
from django.db import migrations
|
||||
|
||||
|
||||
def populate_vlangroup_total_vlan_ids(apps, schema_editor):
|
||||
VLANGroup = apps.get_model('ipam', 'VLANGroup')
|
||||
db_alias = schema_editor.connection.alias
|
||||
|
||||
vlan_groups = VLANGroup.objects.using(db_alias).only('id', 'vid_ranges')
|
||||
for group in vlan_groups:
|
||||
total_vlan_ids = 0
|
||||
if group.vid_ranges:
|
||||
for r in group.vid_ranges:
|
||||
# Half-open [lo, hi): length is (hi - lo).
|
||||
if r is not None and r.lower is not None and r.upper is not None:
|
||||
total_vlan_ids += r.upper - r.lower
|
||||
group._total_vlan_ids = total_vlan_ids
|
||||
VLANGroup.objects.using(db_alias).bulk_update(vlan_groups, ['_total_vlan_ids'], batch_size=100)
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
dependencies = [
|
||||
('ipam', '0082_add_prefix_network_containment_indexes'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.RunPython(populate_vlangroup_total_vlan_ids, migrations.RunPython.noop),
|
||||
]
|
||||
@@ -132,7 +132,8 @@ class VLANGroup(OrganizationalModel):
|
||||
def save(self, *args, **kwargs):
|
||||
self._total_vlan_ids = 0
|
||||
for vid_range in self.vid_ranges:
|
||||
self._total_vlan_ids += vid_range.upper - vid_range.lower + 1
|
||||
# VID ranges are stored inclusive of the lower bound and exclusive of the upper bound
|
||||
self._total_vlan_ids += vid_range.upper - vid_range.lower
|
||||
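# Worked example (illustrative only): a VID range entered as 1-100 is stored
# as the half-open range [1, 101), so counting IDs as (upper - lower) gives
#   101 - 1 == 100
# which matches the value asserted by test_total_vlan_ids further below.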
|
||||
super().save(*args, **kwargs)
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from ipam.models import *
|
||||
from netbox.tables import NetBoxTable, columns
|
||||
from tenancy.tables import TenancyColumnsMixin
|
||||
from tenancy.tables import ContactsColumnMixin, TenancyColumnsMixin
|
||||
|
||||
__all__ = (
|
||||
'ASNTable',
|
||||
@@ -36,7 +36,7 @@ class ASNRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
default_columns = ('pk', 'name', 'rir', 'start', 'end', 'tenant', 'asn_count', 'description')
|
||||
|
||||
|
||||
class ASNTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class ASNTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
asn = tables.Column(
|
||||
verbose_name=_('ASN'),
|
||||
linkify=True
|
||||
@@ -76,7 +76,7 @@ class ASNTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = ASN
|
||||
fields = (
|
||||
'pk', 'asn', 'asn_asdot', 'rir', 'site_count', 'provider_count', 'tenant', 'tenant_group', 'description',
|
||||
'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
|
||||
'contacts', 'comments', 'sites', 'tags', 'created', 'last_updated', 'actions',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'asn', 'rir', 'site_count', 'provider_count', 'sites', 'description', 'tenant',
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
import django_tables2 as tables
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django_tables2.utils import Accessor
|
||||
|
||||
from ipam.models import *
|
||||
from netbox.tables import NetBoxTable, columns
|
||||
from tenancy.tables import TenancyColumnsMixin, TenantColumn
|
||||
from tenancy.tables import ContactsColumnMixin, TenancyColumnsMixin, TenantColumn
|
||||
from .template_code import *
|
||||
|
||||
__all__ = (
|
||||
@@ -58,7 +58,7 @@ class RIRTable(NetBoxTable):
|
||||
# Aggregates
|
||||
#
|
||||
|
||||
class AggregateTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class AggregateTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
prefix = tables.Column(
|
||||
linkify=True,
|
||||
verbose_name=_('Aggregate'),
|
||||
@@ -93,7 +93,7 @@ class AggregateTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = Aggregate
|
||||
fields = (
|
||||
'pk', 'id', 'prefix', 'rir', 'tenant', 'tenant_group', 'child_count', 'utilization', 'date_added',
|
||||
'description', 'comments', 'tags', 'created', 'last_updated',
|
||||
'description', 'contacts', 'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = ('pk', 'prefix', 'rir', 'tenant', 'child_count', 'utilization', 'date_added', 'description')
|
||||
|
||||
@@ -154,7 +154,7 @@ class PrefixUtilizationColumn(columns.UtilizationColumn):
|
||||
"""
|
||||
|
||||
|
||||
class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class PrefixTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
prefix = columns.TemplateColumn(
|
||||
verbose_name=_('Prefix'),
|
||||
template_code=PREFIX_LINK_WITH_DEPTH,
|
||||
@@ -237,8 +237,8 @@ class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = Prefix
|
||||
fields = (
|
||||
'pk', 'id', 'prefix', 'prefix_flat', 'status', 'children', 'vrf', 'utilization', 'tenant', 'tenant_group',
|
||||
'scope', 'scope_type', 'vlan_group', 'vlan', 'role', 'is_pool', 'mark_utilized', 'description', 'comments',
|
||||
'tags', 'created', 'last_updated',
|
||||
'scope', 'scope_type', 'vlan_group', 'vlan', 'role', 'is_pool', 'mark_utilized', 'description', 'contacts',
|
||||
'comments', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'prefix', 'status', 'children', 'vrf', 'utilization', 'tenant', 'scope', 'vlan', 'role',
|
||||
@@ -252,7 +252,7 @@ class PrefixTable(TenancyColumnsMixin, NetBoxTable):
|
||||
#
|
||||
# IP ranges
|
||||
#
|
||||
class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class IPRangeTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
start_address = tables.Column(
|
||||
verbose_name=_('Start address'),
|
||||
linkify=True
|
||||
@@ -293,8 +293,8 @@ class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = IPRange
|
||||
fields = (
|
||||
'pk', 'id', 'start_address', 'end_address', 'size', 'vrf', 'status', 'role', 'tenant', 'tenant_group',
|
||||
'mark_populated', 'mark_utilized', 'utilization', 'description', 'comments', 'tags', 'created',
|
||||
'last_updated',
|
||||
'mark_populated', 'mark_utilized', 'utilization', 'description', 'contacts', 'comments', 'tags',
|
||||
'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'start_address', 'end_address', 'size', 'vrf', 'status', 'role', 'tenant', 'description',
|
||||
@@ -308,7 +308,7 @@ class IPRangeTable(TenancyColumnsMixin, NetBoxTable):
|
||||
# IPAddresses
|
||||
#
|
||||
|
||||
class IPAddressTable(TenancyColumnsMixin, NetBoxTable):
|
||||
class IPAddressTable(TenancyColumnsMixin, ContactsColumnMixin, NetBoxTable):
|
||||
address = tables.TemplateColumn(
|
||||
template_code=IPADDRESS_LINK,
|
||||
verbose_name=_('IP Address')
|
||||
@@ -365,7 +365,7 @@ class IPAddressTable(TenancyColumnsMixin, NetBoxTable):
|
||||
model = IPAddress
|
||||
fields = (
|
||||
'pk', 'id', 'address', 'vrf', 'status', 'role', 'tenant', 'tenant_group', 'nat_inside', 'nat_outside',
|
||||
'assigned', 'dns_name', 'description', 'comments', 'tags', 'created', 'last_updated',
|
||||
'assigned', 'dns_name', 'description', 'comments', 'contacts', 'tags', 'created', 'last_updated',
|
||||
)
|
||||
default_columns = (
|
||||
'pk', 'address', 'vrf', 'status', 'role', 'tenant', 'assigned', 'dns_name', 'description',
|
||||
|
||||
@@ -323,6 +323,55 @@ class AggregateTest(APIViewTestCases.APIViewTestCase):
|
||||
},
|
||||
]
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_aggregate_prefix_exact(self):
|
||||
"""
|
||||
Test case to verify Aggregate prefix equality via the `exact` field lookup in the GraphQL API.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_aggregate', 'ipam.view_rir')
|
||||
|
||||
rir = RIR.objects.create(name='RFC6598', slug='rfc6598', is_private=True)
|
||||
aggregate1 = Aggregate.objects.create(prefix='100.64.0.0/10', rir=rir)
|
||||
Aggregate.objects.create(prefix='203.0.113.0/24', rir=rir)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
aggregate_list(filters: { prefix: { exact: "100.64.0.0/10" } }) { prefix }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
prefixes = {row['prefix'] for row in data['data']['aggregate_list']}
|
||||
self.assertIn(str(aggregate1.prefix), prefixes)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_aggregate_contains_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Aggregate `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_aggregate', 'ipam.view_rir')
|
||||
|
||||
rir = RIR.objects.create(name='RIR 3', slug='rir-3', is_private=False)
|
||||
aggregate1 = Aggregate.objects.create(prefix='100.64.0.0/10', rir=rir)
|
||||
Aggregate.objects.create(prefix='203.0.113.0/24', rir=rir)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
aggregate_list(filters: { contains: ["100.64.16.0/24", "not-a-cidr", ""] }) { prefix }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
prefixes = {row['prefix'] for row in data['data']['aggregate_list']}
|
||||
self.assertIn(str(aggregate1.prefix), prefixes)
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
|
||||
class RoleTest(APIViewTestCases.APIViewTestCase):
|
||||
model = Role
|
||||
@@ -546,6 +595,30 @@ class PrefixTest(APIViewTestCases.APIViewTestCase):
|
||||
self.assertHttpStatus(response, status.HTTP_201_CREATED)
|
||||
self.assertEqual(len(response.data), 8)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_prefixes_contains_nested_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Tenant nested Prefix `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('ipam.view_prefix', 'ipam.view_vrf', 'tenancy.view_tenant')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 1', slug='tenant-1')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:1')
|
||||
Prefix.objects.create(prefix='10.20.0.0/16', vrf=vrf, tenant=tenant)
|
||||
Prefix.objects.create(prefix='198.51.100.0/24', vrf=vrf) # non-tenant
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: { prefixes: { contains: ["10.20.1.0/24", "not-a-cidr"] } }) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
|
||||
|
||||
class IPRangeTest(APIViewTestCases.APIViewTestCase):
|
||||
model = IPRange
|
||||
@@ -645,6 +718,65 @@ class IPRangeTest(APIViewTestCases.APIViewTestCase):
|
||||
self.assertHttpStatus(response, status.HTTP_201_CREATED)
|
||||
self.assertEqual(len(response.data), 8)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_ip_ranges_parent_nested_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Tenant nested IP Range `parent` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('tenancy.view_tenant', 'ipam.view_iprange', 'ipam.view_vrf')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 1', slug='tenant-1')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:1')
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.30.0.1/24'), end_address=IPNetwork('10.30.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.31.0.1/24'), end_address=IPNetwork('10.31.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: {
|
||||
name: { exact: "Tenant 1" }
|
||||
ip_ranges: { parent: ["10.30.0.0/24", "bogus"] }
|
||||
}) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_tenant_ip_ranges_contains_nested_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Tenant nested IP Range `contains` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('tenancy.view_tenant', 'ipam.view_iprange', 'ipam.view_vrf')
|
||||
|
||||
tenant = Tenant.objects.create(name='Tenant 2', slug='tenant-2')
|
||||
vrf = VRF.objects.create(name='Test VRF 1', rd='64512:2')
|
||||
IPRange.objects.create(
|
||||
start_address=IPNetwork('10.40.0.1/24'), end_address=IPNetwork('10.40.0.255/24'), vrf=vrf, tenant=tenant
|
||||
)
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
tenant_list(filters: {
|
||||
name: { exact: "Tenant 2" }
|
||||
ip_ranges: { contains: ["10.40.0.128/25", "###"] }
|
||||
}) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
self.assertTrue(data['data']['tenant_list']) # tenant returned
|
||||
# No exception occurred; invalid entries were ignored
|
||||
|
||||
|
||||
class IPAddressTest(APIViewTestCases.APIViewTestCase):
|
||||
model = IPAddress
|
||||
@@ -731,6 +863,75 @@ class IPAddressTest(APIViewTestCases.APIViewTestCase):
|
||||
response = self.client.patch(url, data, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_device_primary_ip4_assigned_nested(self):
|
||||
"""
|
||||
Test the GraphQL API Device nested IP Address `primary_ip4` filter.
|
||||
"""
|
||||
|
||||
self.add_permissions('dcim.view_device', 'dcim.view_interface', 'ipam.view_ipaddress')
|
||||
|
||||
site = Site.objects.create(name='Site 1')
|
||||
manufacturer = Manufacturer.objects.create(name='Manufacturer 1')
|
||||
device_type = DeviceType.objects.create(model='Device Type 1', manufacturer=manufacturer)
|
||||
role = DeviceRole.objects.create(name='Switch')
|
||||
|
||||
device1 = Device.objects.create(name='Device 1', site=site, device_type=device_type, role=role, status='active')
|
||||
interface1 = Interface.objects.create(name='Interface 1', device=device1, type='1000baset')
|
||||
ip1 = IPAddress.objects.create(address='10.0.0.1/24')
|
||||
ip1.assigned_object = interface1
|
||||
ip1.save()
|
||||
device1.primary_ip4 = ip1
|
||||
device1.save()
|
||||
|
||||
device2 = Device.objects.create(name='Device 2', site=site, device_type=device_type, role=role, status='active')
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
device_list(filters: { primary_ip4: { assigned: true } }) { id name }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
ids = {row['id'] for row in data['data']['device_list']}
|
||||
self.assertIn(str(device1.pk), ids)
|
||||
self.assertNotIn(str(device2.pk), ids)
|
||||
|
||||
@tag('regression')
|
||||
def test_graphql_device_primary_ip4_parent_nested_skips_invalid(self):
|
||||
"""
|
||||
Test that the GraphQL API Device nested IP Address `parent` filter skips invalid input.
|
||||
"""
|
||||
|
||||
self.add_permissions('dcim.view_device', 'dcim.view_interface', 'ipam.view_ipaddress')
|
||||
|
||||
site = Site.objects.create(name='Site 1')
|
||||
manufacturer = Manufacturer.objects.create(name='Manufacturer 1')
|
||||
device_type = DeviceType.objects.create(model='Device Type 1', manufacturer=manufacturer)
|
||||
role = DeviceRole.objects.create(name='Switch')
|
||||
|
||||
device1 = Device.objects.create(name='Device 1', site=site, device_type=device_type, role=role, status='active')
|
||||
interface1 = Interface.objects.create(name='Interface 1', device=device1, type='1000baset')
|
||||
ip1 = IPAddress.objects.create(address='192.0.2.10/24')
|
||||
ip1.assigned_object = interface1
|
||||
ip1.save()
|
||||
device1.primary_ip4 = ip1
|
||||
device1.save()
|
||||
|
||||
url = reverse('graphql')
|
||||
query = """{
|
||||
device_list(filters: { primary_ip4: { parent: ["192.0.2.0/24", "bad-cidr"] } }) { id }
|
||||
}"""
|
||||
response = self.client.post(url, data={'query': query}, format='json', **self.header)
|
||||
self.assertHttpStatus(response, status.HTTP_200_OK)
|
||||
data = response.json()
|
||||
self.assertNotIn('errors', data)
|
||||
|
||||
ids = {row['id'] for row in data['data']['device_list']}
|
||||
self.assertIn(str(device1.pk), ids)
|
||||
|
||||
|
||||
class FHRPGroupTest(APIViewTestCases.APIViewTestCase):
|
||||
model = FHRPGroup
|
||||
|
||||
@@ -661,6 +661,10 @@ class TestVLANGroup(TestCase):
|
||||
vlangroup.full_clean()
|
||||
vlangroup.save()
|
||||
|
||||
def test_total_vlan_ids(self):
|
||||
vlangroup = VLANGroup.objects.first()
|
||||
self.assertEqual(vlangroup._total_vlan_ids, 100)
|
||||
|
||||
|
||||
class TestVLAN(TestCase):
|
||||
|
||||
|
||||
@@ -108,6 +108,7 @@ class VRFBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VRF, 'bulk_rename', path='rename', detail=False)
|
||||
class VRFBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VRF.objects.all()
|
||||
filterset = filtersets.VRFFilterSet
|
||||
|
||||
|
||||
@register_model_view(VRF, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -163,6 +164,7 @@ class RouteTargetBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RouteTarget, 'bulk_rename', path='rename', detail=False)
|
||||
class RouteTargetBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RouteTarget.objects.all()
|
||||
filterset = filtersets.RouteTargetFilterSet
|
||||
|
||||
|
||||
@register_model_view(RouteTarget, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -227,6 +229,7 @@ class RIRBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(RIR, 'bulk_rename', path='rename', detail=False)
|
||||
class RIRBulkRenameView(generic.BulkRenameView):
|
||||
queryset = RIR.objects.all()
|
||||
filterset = filtersets.RIRFilterSet
|
||||
|
||||
|
||||
@register_model_view(RIR, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -305,6 +308,7 @@ class ASNRangeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ASNRange, 'bulk_rename', path='rename', detail=False)
|
||||
class ASNRangeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ASNRange.objects.all()
|
||||
filterset = filtersets.ASNRangeFilterSet
|
||||
|
||||
|
||||
@register_model_view(ASNRange, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -377,6 +381,7 @@ class ASNBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ASN, 'bulk_rename', path='rename', detail=False)
|
||||
class ASNBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ASN.objects.all()
|
||||
filterset = filtersets.ASNFilterSet
|
||||
|
||||
|
||||
@register_model_view(ASN, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -536,6 +541,7 @@ class RoleBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Role, 'bulk_rename', path='rename', detail=False)
|
||||
class RoleBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Role.objects.all()
|
||||
filterset = filtersets.RoleFilterSet
|
||||
|
||||
|
||||
@register_model_view(Role, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -820,6 +826,7 @@ class IPRangeBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(IPRange, 'bulk_rename', path='rename', detail=False)
|
||||
class IPRangeBulkRenameView(generic.BulkRenameView):
|
||||
queryset = IPRange.objects.all()
|
||||
filterset = filtersets.IPRangeFilterSet
|
||||
|
||||
|
||||
@register_model_view(IPRange, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1066,6 +1073,7 @@ class VLANGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLANGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLANGroup.objects.all()
|
||||
filterset = filtersets.VLANGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLANGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1160,6 +1168,7 @@ class VLANTranslationPolicyBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLANTranslationPolicy, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANTranslationPolicyBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLANTranslationPolicy.objects.all()
|
||||
filterset = filtersets.VLANTranslationPolicyFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLANTranslationPolicy, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1315,6 +1324,7 @@ class FHRPGroupBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(FHRPGroup, 'bulk_rename', path='rename', detail=False)
|
||||
class FHRPGroupBulkRenameView(generic.BulkRenameView):
|
||||
queryset = FHRPGroup.objects.all()
|
||||
filterset = filtersets.FHRPGroupFilterSet
|
||||
|
||||
|
||||
@register_model_view(FHRPGroup, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1447,6 +1457,7 @@ class VLANBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(VLAN, 'bulk_rename', path='rename', detail=False)
|
||||
class VLANBulkRenameView(generic.BulkRenameView):
|
||||
queryset = VLAN.objects.all()
|
||||
filterset = filtersets.VLANFilterSet
|
||||
|
||||
|
||||
@register_model_view(VLAN, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1502,6 +1513,7 @@ class ServiceTemplateBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(ServiceTemplate, 'bulk_rename', path='rename', detail=False)
|
||||
class ServiceTemplateBulkRenameView(generic.BulkRenameView):
|
||||
queryset = ServiceTemplate.objects.all()
|
||||
filterset = filtersets.ServiceTemplateFilterSet
|
||||
|
||||
|
||||
@register_model_view(ServiceTemplate, 'bulk_delete', path='delete', detail=False)
|
||||
@@ -1574,6 +1586,7 @@ class ServiceBulkEditView(generic.BulkEditView):
|
||||
@register_model_view(Service, 'bulk_rename', path='rename', detail=False)
|
||||
class ServiceBulkRenameView(generic.BulkRenameView):
|
||||
queryset = Service.objects.all()
|
||||
filterset = filtersets.ServiceFilterSet
|
||||
|
||||
|
||||
@register_model_view(Service, 'bulk_delete', path='delete', detail=False)
|
||||
|
||||
@@ -82,7 +82,7 @@ class Config:
|
||||
revision = ConfigRevision.objects.get(active=True)
|
||||
logger.debug(f"Loaded active configuration revision #{revision.pk}")
|
||||
except (ConfigRevision.DoesNotExist, ConfigRevision.MultipleObjectsReturned):
|
||||
logger.warning("No active configuration revision found - falling back to most recent")
|
||||
logger.debug("No active configuration revision found - falling back to most recent")
|
||||
revision = ConfigRevision.objects.order_by('-created').first()
|
||||
if revision is None:
|
||||
logger.debug("No previous configuration found in database; proceeding with default values")
|
||||
|
||||
@@ -183,6 +183,15 @@ PARAMS = (
|
||||
description=_("Enable maintenance mode"),
|
||||
field=forms.BooleanField
|
||||
),
|
||||
ConfigParam(
|
||||
name='COPILOT_ENABLED',
|
||||
label=_('NetBox Copilot enabled'),
|
||||
default=True,
|
||||
description=_(
|
||||
"Enable the NetBox Copilot AI agent globally. If enabled, users can toggle the agent individually."
|
||||
),
|
||||
field=forms.BooleanField
|
||||
),
|
||||
ConfigParam(
|
||||
name='GRAPHQL_ENABLED',
|
||||
label=_('GraphQL enabled'),
|
||||
|
||||
@@ -243,6 +243,9 @@ SESSION_FILE_PATH = None
|
||||
# },
|
||||
# "scripts": {
|
||||
# "BACKEND": "extras.storage.ScriptFileSystemStorage",
|
||||
# "OPTIONS": {
|
||||
# "allow_overwrite": True,
|
||||
# },
|
||||
# },
|
||||
# }
|
||||
|
||||
|
||||
@@ -25,10 +25,15 @@ def preferences(request):
|
||||
Adds preferences for the current user (if authenticated) to the template context.
|
||||
Example: {{ preferences|get_key:"pagination.placement" }}
|
||||
"""
|
||||
config = get_config()
|
||||
user_preferences = request.user.config if request.user.is_authenticated else {}
|
||||
return {
|
||||
'preferences': user_preferences,
|
||||
'htmx_navigation': user_preferences.get('ui.htmx_navigation', False) == 'true'
|
||||
'copilot_enabled': (
|
||||
config.COPILOT_ENABLED and not django_settings.ISOLATED_DEPLOYMENT and
|
||||
user_preferences.get('ui.copilot_enabled', False) == 'true'
|
||||
),
|
||||
'htmx_navigation': user_preferences.get('ui.htmx_navigation', False) == 'true',
|
||||
}
|
||||
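# Usage sketch (assumption: a template gates the Copilot loader on this flag):
#   {% if copilot_enabled %} ... load NETBOX_COPILOT_URL ... {% endif %}
# The flag is True only when the COPILOT_ENABLED config parameter is on, the
# deployment is not ISOLATED_DEPLOYMENT, and the user preference
# ui.copilot_enabled is set to 'true'.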
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import json
|
||||
|
||||
from django import forms
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
from django.db import models
|
||||
from django.db.models import Q
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
@@ -99,6 +100,35 @@ class NetBoxModelImportForm(CSVModelForm, NetBoxModelForm):
|
||||
def _get_form_field(self, customfield):
|
||||
return customfield.to_form_field(for_csv_import=True)
|
||||
|
||||
def clean(self):
|
||||
"""
|
||||
Clean imported data, ensuring proper handling of model fields defined with `null=True`.
Extends the parent form's `clean()` method to coerce empty-string values to None for
nullable fields on the associated model, leaving all other cleaned values untouched.
|
||||
"""
|
||||
super().clean()
|
||||
cleaned = self.cleaned_data
|
||||
|
||||
model = getattr(self._meta, "model", None)
|
||||
if not model:
|
||||
return cleaned
|
||||
|
||||
for f in model._meta.get_fields():
|
||||
# Only forward, DB-backed fields (skip M2M & reverse relations)
|
||||
if not isinstance(f, models.Field) or not f.concrete or f.many_to_many:
|
||||
continue
|
||||
|
||||
if getattr(f, "null", False):
|
||||
name = f.name
|
||||
if name not in cleaned:
|
||||
continue
|
||||
val = cleaned[name]
|
||||
# Only coerce empty strings; leave other types alone
|
||||
if isinstance(val, str) and val.strip() == "":
|
||||
cleaned[name] = None
|
||||
|
||||
return cleaned
|
||||
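# Minimal usage sketch (illustrative; field names taken from the
# InterfaceImportForm tests in netbox/netbox/tests/test_forms.py below):
#   form = InterfaceImportForm(data={
#       'device': device, 'name': 'eth0', 'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
#       'speed': '',        # model field with null=True  -> cleaned to None
#       'description': '',  # model field with blank=True -> stays ''
#   })
#   form.is_valid()
#   form.cleaned_data['speed']        # None
#   form.cleaned_data['description']  # ''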
|
||||
|
||||
class NetBoxModelBulkEditForm(ChangelogMessageMixin, CustomFieldsMixin, BulkEditForm):
|
||||
"""
|
||||
|
||||
@@ -2,14 +2,14 @@ import logging
|
||||
|
||||
from django.contrib.contenttypes.fields import GenericRelation
|
||||
from django.db import router
|
||||
from django.db.models.deletion import Collector
|
||||
from django.db.models.deletion import CASCADE, Collector
|
||||
|
||||
logger = logging.getLogger("netbox.models.deletion")
|
||||
|
||||
|
||||
class CustomCollector(Collector):
|
||||
"""
|
||||
Custom collector that handles GenericRelations correctly.
|
||||
Override Django's stock Collector to handle GenericRelations and ensure proper ordering of cascading deletions.
|
||||
"""
|
||||
|
||||
def collect(
|
||||
@@ -23,11 +23,15 @@ class CustomCollector(Collector):
|
||||
keep_parents=False,
|
||||
fail_on_restricted=True,
|
||||
):
|
||||
"""
|
||||
Override collect to first collect standard dependencies,
|
||||
then add GenericRelations to the dependency graph.
|
||||
"""
|
||||
# Call parent collect first to get all standard dependencies
|
||||
# By default, Django will force the deletion of dependent objects before the parent only if the ForeignKey field
|
||||
# is not nullable. We want to ensure proper ordering regardless, so if the ForeignKey has `on_delete=CASCADE`
|
||||
# applied, we set `nullable` to False when calling `collect()`.
|
||||
if objs and source and source_attr:
|
||||
model = objs[0].__class__
|
||||
field = model._meta.get_field(source_attr)
|
||||
if field.remote_field.on_delete == CASCADE:
|
||||
nullable = False
|
||||
|
||||
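# Illustrative case (sketch, not NetBox code): a nullable FK that still
# cascades, e.g.
#   class Child(models.Model):
#       parent = models.ForeignKey(Parent, null=True, on_delete=models.CASCADE)
# would normally not constrain deletion order because the relation is
# nullable; forcing nullable=False here makes the collector delete Child
# rows before their Parent, as described in the comment above.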
super().collect(
|
||||
objs,
|
||||
source=source,
|
||||
@@ -39,10 +43,8 @@ class CustomCollector(Collector):
|
||||
fail_on_restricted=fail_on_restricted,
|
||||
)
|
||||
|
||||
# Track which GenericRelations we've already processed to prevent infinite recursion
|
||||
# Add GenericRelations to the dependency graph
|
||||
processed_relations = set()
|
||||
|
||||
# Now add GenericRelations to the dependency graph
|
||||
for _, instances in list(self.data.items()):
|
||||
for instance in instances:
|
||||
# Get all GenericRelations for this model
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
from django.template import loader
|
||||
from django.urls.exceptions import NoReverseMatch
|
||||
from django.utils.translation import gettext as _
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
|
||||
from core.models import ObjectType
|
||||
from extras.models import ExportTemplate
|
||||
|
||||
@@ -49,6 +49,15 @@ PREFERENCES = {
|
||||
else ''
|
||||
)
|
||||
),
|
||||
'ui.copilot_enabled': UserPreference(
|
||||
label=_('NetBox Copilot'),
|
||||
choices=(
|
||||
('', _('Disabled')),
|
||||
('true', _('Enabled')),
|
||||
),
|
||||
description=_('Enable the NetBox Copilot AI agent'),
|
||||
default=False,
|
||||
),
|
||||
'pagination.per_page': UserPreference(
|
||||
label=_('Page length'),
|
||||
choices=get_page_lengths(),
|
||||
|
||||
@@ -291,6 +291,9 @@ DEFAULT_STORAGES = {
|
||||
},
|
||||
"scripts": {
|
||||
"BACKEND": "extras.storage.ScriptFileSystemStorage",
|
||||
"OPTIONS": {
|
||||
"allow_overwrite": True,
|
||||
},
|
||||
},
|
||||
}
|
||||
STORAGES = DEFAULT_STORAGES | STORAGES
|
||||
@@ -653,6 +656,13 @@ DEPLOYMENT_ID = hashlib.sha256(SECRET_KEY.encode('utf-8')).hexdigest()[:16]
|
||||
CENSUS_URL = 'https://census.netbox.oss.netboxlabs.com/api/v1/'
|
||||
|
||||
|
||||
#
|
||||
# NetBox Copilot
|
||||
#
|
||||
|
||||
NETBOX_COPILOT_URL = 'https://static.copilot.netboxlabs.ai/load.js'
|
||||
|
||||
|
||||
#
|
||||
# Django social auth
|
||||
#
|
||||
|
||||
netbox/netbox/tests/test_forms.py (new file, 303 lines)
@@ -0,0 +1,303 @@
|
||||
from django.test import TestCase
|
||||
|
||||
from dcim.choices import InterfaceTypeChoices
|
||||
from dcim.forms import InterfaceImportForm
|
||||
from dcim.models import Device, DeviceRole, DeviceType, Interface, Manufacturer, Site
|
||||
|
||||
|
||||
class NetBoxModelImportFormCleanTest(TestCase):
|
||||
"""
|
||||
Test the clean() method of NetBoxModelImportForm to ensure it properly converts
|
||||
empty strings to None for nullable fields during CSV import.
|
||||
Uses InterfaceImportForm as the concrete implementation to test.
|
||||
"""
|
||||
|
||||
@classmethod
|
||||
def setUpTestData(cls):
|
||||
# Create minimal test fixtures for Interface
|
||||
cls.site = Site.objects.create(name='Test Site', slug='test-site')
|
||||
cls.manufacturer = Manufacturer.objects.create(name='Test Manufacturer', slug='test-manufacturer')
|
||||
cls.device_type = DeviceType.objects.create(
|
||||
manufacturer=cls.manufacturer, model='Test Device Type', slug='test-device-type'
|
||||
)
|
||||
cls.device_role = DeviceRole.objects.create(name='Test Role', slug='test-role', color='ff0000')
|
||||
cls.device = Device.objects.create(
|
||||
name='Test Device', device_type=cls.device_type, role=cls.device_role, site=cls.site
|
||||
)
|
||||
# Create parent interfaces for ForeignKey testing
|
||||
cls.parent_interface = Interface.objects.create(
|
||||
device=cls.device, name='Parent Interface', type=InterfaceTypeChoices.TYPE_1GE_GBIC
|
||||
)
|
||||
cls.lag_interface = Interface.objects.create(
|
||||
device=cls.device, name='LAG Interface', type=InterfaceTypeChoices.TYPE_LAG
|
||||
)
|
||||
|
||||
def test_empty_string_to_none_nullable_charfield(self):
|
||||
"""Empty strings should convert to None for nullable CharField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 1',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'duplex': '', # nullable CharField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['duplex'])
|
||||
|
||||
def test_empty_string_to_none_nullable_integerfield(self):
|
||||
"""Empty strings should convert to None for nullable PositiveIntegerField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 2',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': '', # nullable PositiveIntegerField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['speed'])
|
||||
|
||||
def test_empty_string_to_none_nullable_smallintegerfield(self):
|
||||
"""Empty strings should convert to None for nullable SmallIntegerField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 3',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'tx_power': '', # nullable SmallIntegerField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['tx_power'])
|
||||
|
||||
def test_empty_string_to_none_nullable_decimalfield(self):
|
||||
"""Empty strings should convert to None for nullable DecimalField"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 4',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'rf_channel_frequency': '', # nullable DecimalField
|
||||
'rf_channel_width': '', # nullable DecimalField
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_frequency'])
|
||||
self.assertIsNone(form.cleaned_data['rf_channel_width'])
|
||||
|
||||
def test_empty_string_to_none_nullable_foreignkey(self):
|
||||
"""Empty strings should convert to None for nullable ForeignKey"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 5',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'lag': '', # nullable ForeignKey
|
||||
'parent': '', # nullable ForeignKey
|
||||
'bridge': '', # nullable ForeignKey
|
||||
'vrf': '', # nullable ForeignKey
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
self.assertIsNone(form.cleaned_data['lag'])
|
||||
self.assertIsNone(form.cleaned_data['parent'])
|
||||
self.assertIsNone(form.cleaned_data['bridge'])
|
||||
self.assertIsNone(form.cleaned_data['vrf'])
|
||||
|
||||
def test_empty_string_preserved_non_nullable_charfield(self):
|
||||
"""Empty strings should be preserved for non-nullable CharField (blank=True only)"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 6',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'label': '', # CharField with blank=True (not null=True)
|
||||
'description': '', # CharField with blank=True (not null=True)
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
# label and description are NOT nullable in the model, so empty string remains
|
||||
self.assertEqual(form.cleaned_data['label'], '')
|
||||
self.assertEqual(form.cleaned_data['description'], '')
|
||||
|
||||
def test_empty_string_not_converted_for_required_fields(self):
|
||||
"""Empty strings should NOT be converted for required fields"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': '', # required field, empty string should remain and cause error
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
}
|
||||
)
|
||||
# Form should be invalid because name is required
|
||||
self.assertFalse(form.is_valid())
|
||||
if form.errors:
|
||||
self.assertIn('name', form.errors)
|
||||
|
||||
def test_non_string_none_value_preserved(self):
|
||||
"""None values should be preserved (not modified)"""
|
||||
form = InterfaceImportForm(
|
||||
data={
|
||||
'device': self.device,
|
||||
'name': 'Interface 7',
|
||||
'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
|
||||
'speed': None, # Already None
|
||||
'tx_power': None, # Already None
|
||||
}
|
||||
)
|
||||
self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
|
||||
        self.assertIsNone(form.cleaned_data['speed'])
        self.assertIsNone(form.cleaned_data['tx_power'])

    def test_non_string_numeric_values_preserved(self):
        """Numeric values (including 0) should not be modified"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 8',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'speed': 0,  # nullable PositiveIntegerField with value 0
                'tx_power': 0,  # nullable SmallIntegerField with value 0
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        self.assertEqual(form.cleaned_data['speed'], 0)
        self.assertEqual(form.cleaned_data['tx_power'], 0)

    def test_manytomany_fields_skipped(self):
        """ManyToMany fields should be skipped and not cause errors"""
        # Interface has 'vdcs' and 'wireless_lans' as M2M fields
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 9',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                # vdcs and wireless_lans fields are M2M, handled by parent class
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')

    def test_fields_not_in_cleaned_data_skipped(self):
        """Fields not present in cleaned_data should be skipped gracefully"""
        # Create minimal form data - some nullable fields won't be in cleaned_data
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 10',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                # lag, parent, bridge, vrf, speed, etc. not provided
            }
        )
        # Should not raise KeyError when checking fields not in form data
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')

    def test_valid_string_values_preserved(self):
        """Non-empty string values should be properly converted to their target types"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 11',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'speed': '1000000',  # Valid speed value (string will be converted to int)
                'mtu': '1500',  # Valid mtu value (string will be converted to int)
                'description': 'Test description',
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        # speed and mtu are converted to int
        self.assertEqual(form.cleaned_data['speed'], 1000000)
        self.assertEqual(form.cleaned_data['mtu'], 1500)
        self.assertEqual(form.cleaned_data['description'], 'Test description')

    def test_multiple_nullable_fields_with_empty_strings(self):
        """Multiple nullable fields with empty strings should all convert to None"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 12',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'speed': '',  # nullable
                'duplex': '',  # nullable
                'tx_power': '',  # nullable
                'vrf': '',  # nullable ForeignKey
                'poe_mode': '',  # nullable
                'poe_type': '',  # nullable
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        # All nullable fields should convert to None
        self.assertIsNone(form.cleaned_data['speed'])
        self.assertIsNone(form.cleaned_data['duplex'])
        self.assertIsNone(form.cleaned_data['tx_power'])
        self.assertIsNone(form.cleaned_data['vrf'])
        self.assertIsNone(form.cleaned_data['poe_mode'])
        self.assertIsNone(form.cleaned_data['poe_type'])

    def test_mixed_nullable_and_non_nullable_empty_strings(self):
        """Combination of nullable and non-nullable fields with empty strings"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 13',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'speed': '',  # nullable, should become None
                'label': '',  # NOT nullable (blank=True only), should remain empty string
                'duplex': '',  # nullable, should become None
                'description': '',  # NOT nullable (blank=True only), should remain empty string
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        # Nullable fields convert to None
        self.assertIsNone(form.cleaned_data['speed'])
        self.assertIsNone(form.cleaned_data['duplex'])
        # Non-nullable fields remain empty strings
        self.assertEqual(form.cleaned_data['label'], '')
        self.assertEqual(form.cleaned_data['description'], '')

    def test_wireless_fields_nullable(self):
        """Wireless-specific nullable fields should convert empty strings to None"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 14',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'rf_role': '',  # nullable CharField
                'rf_channel': '',  # nullable CharField
                'rf_channel_frequency': '',  # nullable DecimalField
                'rf_channel_width': '',  # nullable DecimalField
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        self.assertIsNone(form.cleaned_data['rf_role'])
        self.assertIsNone(form.cleaned_data['rf_channel'])
        self.assertIsNone(form.cleaned_data['rf_channel_frequency'])
        self.assertIsNone(form.cleaned_data['rf_channel_width'])

    def test_poe_fields_nullable(self):
        """PoE-specific nullable fields should convert empty strings to None"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 15',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'poe_mode': '',  # nullable CharField
                'poe_type': '',  # nullable CharField
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        self.assertIsNone(form.cleaned_data['poe_mode'])
        self.assertIsNone(form.cleaned_data['poe_type'])

    def test_wwn_field_nullable(self):
        """WWN field (special field type) should convert empty string to None"""
        form = InterfaceImportForm(
            data={
                'device': self.device,
                'name': 'Interface 16',
                'type': InterfaceTypeChoices.TYPE_1GE_GBIC,
                'wwn': '',  # nullable WWNField
            }
        )
        self.assertTrue(form.is_valid(), f'Form errors: {form.errors}')
        self.assertIsNone(form.cleaned_data['wwn'])
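The tests above pin down a single contract: empty strings submitted for nullable model fields become None, blank-but-not-null fields keep the empty string, numeric zeros survive untouched, and ManyToMany or absent fields are skipped. As a rough illustration of that contract only (this is not NetBox's actual import-form code; the mixin and helper names below are invented), a form could normalize values in clean() roughly like this:

# Hypothetical sketch of the normalization behavior exercised by the tests above.
# NOT NetBox's actual implementation; class name and structure are illustrative.
from django import forms


class NullableEmptyStringFormMixin(forms.ModelForm):
    def clean(self):
        cleaned_data = super().clean()
        # Only concrete local fields carry a null flag; M2M and reverse relations
        # are intentionally left alone (see test_manytomany_fields_skipped).
        for field in self._meta.model._meta.fields:
            name = field.name
            if name not in cleaned_data:
                continue  # field was never submitted (test_fields_not_in_cleaned_data_skipped)
            if field.null and cleaned_data[name] == '':
                cleaned_data[name] = None  # '' on a nullable field becomes None
        return cleaned_data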
@@ -323,7 +323,7 @@ class BulkCreateView(GetReturnURLMixin, BaseMultiObjectView):
 
 class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
     """
-    Import objects in bulk (CSV format).
+    Import objects in bulk (CSV/JSON/YAML format).
 
     Attributes:
         model_form: The form used to create each imported object
@@ -368,7 +368,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 error_messages.append(f"Record {index} {prefix}{field_name}: {err}")
         return error_messages
 
-    def _save_object(self, model_form, request):
+    def _save_object(self, model_form, request, parent_idx):
         _action = 'Updated' if model_form.instance.pk else 'Created'
 
         # Save the primary object
@@ -381,8 +381,25 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
         # Iterate through the related object forms (if any), validating and saving each instance.
         for field_name, related_object_form in self.related_object_forms.items():
 
+            related_objects = model_form.data.get(field_name, list())
+            if not isinstance(related_objects, list):
+                raise ValidationError(
+                    self._compile_form_errors(
+                        {field_name: [_("Must be a list.")]},
+                        index=parent_idx
+                    )
+                )
+
             related_obj_pks = []
-            for i, rel_obj_data in enumerate(model_form.data.get(field_name, list())):
+            for i, rel_obj_data in enumerate(related_objects, start=1):
+                if not isinstance(rel_obj_data, dict):
+                    raise ValidationError(
+                        self._compile_form_errors(
+                            {f'{field_name}[{i}]': [_("Must be a dictionary.")]},
+                            index=parent_idx,
+                        )
+                    )
+
                 rel_obj_data = self.prep_related_object_data(obj, rel_obj_data)
                 f = related_object_form(rel_obj_data)
 
@@ -396,7 +413,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 else:
                     # Replicate errors on the related object form to the import form for display and abort
                     raise ValidationError(
-                        self._compile_form_errors(f.errors, index=i, prefix=f'{field_name}[{i}]')
+                        self._compile_form_errors(f.errors, index=parent_idx, prefix=f'{field_name}[{i}]')
                     )
 
             # Enforce object-level permissions on related objects
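These type checks guard the nested related-object records that JSON/YAML imports may carry. For orientation only (the field names below are illustrative, not a guaranteed NetBox import schema), a record that passes both checks looks like this once parsed into Python:

# Illustrative shape of a parsed import record containing related objects.
# Field names are examples only; consult the actual import form for real schemas.
record = {
    'name': 'switch01',
    'interfaces': [                              # must be a list, else "Must be a list."
        {'name': 'eth0', 'type': '1000base-t'},  # each entry must be a dict,
        {'name': 'eth1', 'type': '1000base-t'},  #   else "Must be a dictionary."
    ],
}

# With parent_idx identifying the parent record, a validation failure inside the
# second entry would now be reported against that record, roughly in the form
# produced by _compile_form_errors above: "Record 3 interfaces[2]type: <error>".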
@@ -439,8 +456,12 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 try:
                     instance = prefetched_objects[object_id]
                 except KeyError:
-                    form.add_error('data', _("Row {i}: Object with ID {id} does not exist").format(i=i, id=object_id))
-                    raise ValidationError('')
+                    raise ValidationError(
+                        self._compile_form_errors(
+                            {'id': [_("Object with ID {id} does not exist").format(id=object_id)]},
+                            index=i
+                        )
+                    )
 
                 # Take a snapshot for change logging
                 if instance.pk and hasattr(instance, 'snapshot'):
@@ -481,7 +502,7 @@ class BulkImportView(GetReturnURLMixin, BaseMultiObjectView):
                 restrict_form_fields(model_form, request.user)
 
                 if model_form.is_valid():
-                    obj = self._save_object(model_form, request)
+                    obj = self._save_object(model_form, request, i)
                     saved_objects.append(obj)
                 else:
                     # Raise model form errors
@@ -799,6 +820,9 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
     """
     field_name = 'name'
     template_name = 'generic/bulk_rename.html'
+    # Match BulkEditView/BulkDeleteView behavior: allow passing a FilterSet
+    # so "Select all N matching query" can expand across the full queryset.
+    filterset = None
 
     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
@@ -827,12 +851,12 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
             replace = form.cleaned_data['replace']
             if form.cleaned_data['use_regex']:
                 try:
-                    obj.new_name = re.sub(find, replace, getattr(obj, self.field_name, ''))
+                    obj.new_name = re.sub(find, replace, getattr(obj, self.field_name, '') or '')
                 # Catch regex group reference errors
                 except re.error:
                     obj.new_name = getattr(obj, self.field_name)
             else:
-                obj.new_name = getattr(obj, self.field_name, '').replace(find, replace)
+                obj.new_name = (getattr(obj, self.field_name, '') or '').replace(find, replace)
             renamed_pks.append(obj.pk)
 
         return renamed_pks
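The added `or ''` guards matter because the renamed attribute can legitimately be None (for example, an unset optional name), and neither re.sub() nor str.replace() accepts None. A minimal standalone illustration:

import re

value = None                       # e.g. an object whose name attribute is unset
# re.sub('a', 'b', value)          -> TypeError: expected string or bytes-like object
# value.replace('a', 'b')          -> AttributeError: 'NoneType' object has no attribute 'replace'
safe = value or ''                 # normalize None to an empty string first
print(re.sub('a', 'b', safe))      # prints an empty line instead of raising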
@@ -840,9 +864,16 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
     def post(self, request):
         logger = logging.getLogger('netbox.views.BulkRenameView')
 
+        # If we are editing *all* objects in the queryset, replace the PK list with all matched objects.
+        if request.POST.get('_all') and self.filterset is not None:
+            pk_list = self.filterset(request.GET, self.queryset.values_list('pk', flat=True), request=request).qs
+        else:
+            pk_list = request.POST.getlist('pk')
+
+        selected_objects = self.queryset.filter(pk__in=pk_list)
+
         if '_preview' in request.POST or '_apply' in request.POST:
-            form = self.form(request.POST, initial={'pk': request.POST.getlist('pk')})
-            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
+            form = self.form(request.POST, initial={'pk': pk_list})
 
             if form.is_valid():
                 try:
@@ -877,8 +908,7 @@ class BulkRenameView(GetReturnURLMixin, BaseMultiObjectView):
                 clear_events.send(sender=self)
 
         else:
-            form = self.form(initial={'pk': request.POST.getlist('pk')})
-            selected_objects = self.queryset.filter(pk__in=form.initial['pk'])
+            form = self.form(initial={'pk': pk_list})
 
         return render(request, self.template_name, {
             'field_name': self.field_name,
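With `filterset` now exposed on BulkRenameView, a concrete view can let the new `_all` handling expand a "Select all N matching query" submission across the full filtered queryset. A minimal sketch follows (illustrative attribute set, not a verbatim NetBox view definition):

# Illustrative only: a concrete rename view opting in to queryset-wide selection.
# Other attributes a real view would define (tables, templates, etc.) are omitted.
from dcim.filtersets import DeviceFilterSet
from dcim.models import Device
from netbox.views.generic import BulkRenameView


class DeviceBulkRenameView(BulkRenameView):
    queryset = Device.objects.all()
    filterset = DeviceFilterSet   # enables "Select all N matching query" via _all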
netbox/project-static/dist/netbox.css (vendored, 2 changes): File diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js (vendored, 2 changes): File diff suppressed because one or more lines are too long
netbox/project-static/dist/netbox.js.map (vendored, 4 changes): File diff suppressed because one or more lines are too long
@@ -28,9 +28,9 @@
     "clipboard": "2.0.11",
     "flatpickr": "4.6.13",
     "gridstack": "12.3.3",
-    "htmx.org": "2.0.7",
+    "htmx.org": "2.0.8",
     "query-string": "9.3.1",
-    "sass": "1.93.2",
+    "sass": "1.94.2",
     "tom-select": "2.4.3",
     "typeface-inter": "3.18.1",
     "typeface-roboto-mono": "1.1.13"
@@ -162,3 +162,18 @@ pre code {
   vertical-align: .05em;
   height: auto;
 }
+
+// Theme-based visibility utilities
+// Tabler's .hide-theme-* utilities expect data-bs-theme on :root, but NetBox applies
+// it to body. These overrides use higher specificity selectors to ensure theme-based
+// visibility works correctly. The :root:not(.dummy) pattern provides the additional
+// specificity needed to override Tabler's :root:not() rules.
+:root:not(.dummy) body[data-bs-theme='light'] .hide-theme-light,
+:root:not(.dummy) body[data-bs-theme='dark'] .hide-theme-dark {
+  display: none !important;
+}
+
+:root:not(.dummy) body[data-bs-theme='dark'] .hide-theme-light,
+:root:not(.dummy) body[data-bs-theme='light'] .hide-theme-dark {
+  display: inline-flex !important;
+}
@@ -2241,10 +2241,10 @@ hey-listen@^1.0.8:
   resolved "https://registry.yarnpkg.com/hey-listen/-/hey-listen-1.0.8.tgz#8e59561ff724908de1aa924ed6ecc84a56a9aa68"
   integrity sha512-COpmrF2NOg4TBWUJ5UVyaCU2A88wEMkUPK4hNqyCkqHbxT92BbvfjoSozkAIIm6XhicGlJHhFdullInrdhwU8Q==
 
-htmx.org@2.0.7:
-  version "2.0.7"
-  resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.7.tgz#991571e009a2ea4cb60e7af8bb4c1c8c0de32ecd"
-  integrity sha512-YiJqF3U5KyO28VC5mPfehKJPF+n1Gni+cupK+D69TF0nm7wY6AXn3a4mPWIikfAXtl1u1F1+ZhSCS7KT8pVmqA==
+htmx.org@2.0.8:
+  version "2.0.8"
+  resolved "https://registry.yarnpkg.com/htmx.org/-/htmx.org-2.0.8.tgz#8ac8ba87c141b7bfda7576117476062eeb4aceda"
+  integrity sha512-fm297iru0iWsNJlBrjvtN7V9zjaxd+69Oqjh4F/Vq9Wwi2kFisLcrLCiv5oBX0KLfOX/zG8AUo9ROMU5XUB44Q==
 
 ignore@^5.2.0:
   version "5.3.2"
@@ -3190,10 +3190,10 @@ safe-regex-test@^1.1.0:
     es-errors "^1.3.0"
     is-regex "^1.2.1"
 
-sass@1.93.2:
-  version "1.93.2"
-  resolved "https://registry.yarnpkg.com/sass/-/sass-1.93.2.tgz#e97d225d60f59a3b3dbb6d2ae3c1b955fd1f2cd1"
-  integrity sha512-t+YPtOQHpGW1QWsh1CHQ5cPIr9lbbGZLZnbihP/D/qZj/yuV68m8qarcV17nvkOX81BCrvzAlq2klCQFZghyTg==
+sass@1.94.2:
+  version "1.94.2"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.94.2.tgz#198511fc6fdd2fc0a71b8d1261735c12608d4ef3"
+  integrity sha512-N+7WK20/wOr7CzA2snJcUSSNTCzeCGUTFY3OgeQP3mZ1aj9NMQ0mSTXwlrnd89j33zzQJGqIN52GIOmYrfq46A==
   dependencies:
     chokidar "^4.0.0"
     immutable "^5.0.2"
@@ -1,3 +1,3 @@
-version: "4.4.4"
+version: "4.4.7"
 edition: "Community"
-published: "2025-10-15"
+published: "2025-11-25"
@@ -69,6 +69,9 @@
     {% block layout %}{% endblock %}
 
     {# Additional Javascript #}
+    {% if copilot_enabled and request.user.is_authenticated %}
+      <script src="{{ settings.NETBOX_COPILOT_URL }}" defer></script>
+    {% endif %}
     {% block javascript %}{% endblock %}
 
     {# User messages #}
@@ -129,6 +129,10 @@
             <th scope="row" class="ps-3">{% trans "Maintenance mode" %}</th>
             <td>{% checkmark config.MAINTENANCE_MODE %}</td>
           </tr>
+          <tr>
+            <th scope="row" class="ps-3">{% trans "NetBox Copilot enabled" %}</th>
+            <td>{% checkmark config.COPILOT_ENABLED %}</td>
+          </tr>
           <tr>
             <th scope="row" class="ps-3">{% trans "GraphQL enabled" %}</th>
             <td>{% checkmark config.GRAPHQL_ENABLED %}</td>
@@ -6,7 +6,7 @@
 
 {% block breadcrumbs %}
   <li class="breadcrumb-item"><a href="{% url 'core:background_queue_list' %}">{% trans 'Background Tasks' %}</a></li>
-  <li class="breadcrumb-item"><a href="{% url 'core:background_task_list' queue_index=queue_index status=job.get_status %}">{{ queue.name }}</a></li>
+  <li class="breadcrumb-item"><a href="{% url 'core:background_task_list' queue_index=queue_index status=job.get_status.value %}">{{ queue.name }}</a></li>
 {% endblock breadcrumbs %}
 
 {% block title %}{% trans "Job" %} {{ job.id }}{% endblock %}
@@ -24,10 +24,6 @@
         <th scope="row">{% trans "Description" %}</th>
         <td>{{ object.description|placeholder }}</td>
       </tr>
-      <tr>
-        <th scope="row">{% trans "Airflow" %}</th>
-        <td>{{ object.get_airflow_display|placeholder }}</td>
-      </tr>
     </table>
   </div>
   {% include 'dcim/inc/panels/racktype_dimensions.html' %}
@@ -8,10 +8,10 @@
   <p>
     <i class="mdi mdi-alert"></i>
     <strong>{% trans "Missing required packages" %}.</strong>
-    {% blocktrans trimmed %}
+    {% blocktrans trimmed with req_file="requirements.txt" local_req_file="local_requirements.txt" pip_cmd="pip freeze" %}
       This installation of NetBox might be missing one or more required Python packages. These packages are listed in
-      <code>requirements.txt</code> and <code>local_requirements.txt</code>, and are normally installed as part of the
-      installation or upgrade process. To verify installed packages, run <code>pip freeze</code> from the console and
+      <code>{{ req_file }}</code> and <code>{{ local_req_file }}</code>, and are normally installed as part of the
+      installation or upgrade process. To verify installed packages, run <code>{{ pip_cmd }}</code> from the console and
       compare the output to the list of required packages.
     {% endblocktrans %}
   </p>
@@ -8,17 +8,17 @@
   <p>
     <i class="mdi mdi-alert"></i>
     <strong>{% trans "Database migrations missing" %}.</strong>
-    {% blocktrans trimmed %}
+    {% blocktrans trimmed with command="python3 manage.py migrate" %}
      When upgrading to a new NetBox release, the upgrade script must be run to apply any new database migrations. You
-      can run migrations manually by executing <code>python3 manage.py migrate</code> from the command line.
+      can run migrations manually by executing <code>{{ command }}</code> from the command line.
     {% endblocktrans %}
   </p>
   <p>
     <i class="mdi mdi-alert"></i>
     <strong>{% trans "Unsupported PostgreSQL version" %}.</strong>
-    {% blocktrans trimmed %}
+    {% blocktrans trimmed with sql_query="SELECT VERSION()" %}
      Ensure that PostgreSQL version 14 or later is in use. You can check this by connecting to the database using
-      NetBox's credentials and issuing a query for <code>SELECT VERSION()</code>.
+      NetBox's credentials and issuing a query for <code>{{ sql_query }}</code>.
     {% endblocktrans %}
   </p>
 {% endblock message %}
@@ -62,6 +62,10 @@
         <th scope="row">{% trans "Data Synced" %}</th>
         <td>{{ object.data_synced|placeholder }}</td>
       </tr>
+      <tr>
+        <th scope="row">{% trans "Auto Sync Enabled" %}</th>
+        <td>{% checkmark object.auto_sync_enabled %}</td>
+      </tr>
     </table>
   </div>
   {% include 'inc/panels/tags.html' %}
@@ -1,4 +1,15 @@
 {% load i18n %}
+<div class="card-header px-2 py-1">
+  <h3 class="card-title flex-fill">Notifications</h3>
+  {% if notifications %}
+    <a href="#" hx-get="{% url 'extras:notification_dismiss_all' %}" hx-target="closest .notifications"
+       hx-confirm="{% blocktrans trimmed count count=unread_count %}Dismiss {{ count }} unread notification?{% plural %}Dismiss {{ count }} unread notifications?{% endblocktrans %}"
+       class="btn btn-2 text-danger" title="{% trans 'Dismiss all unread notifications' %}">
+      <i class="icon mdi mdi-delete-sweep-outline"></i>
+      {% trans "Dismiss all" %}
+    </a>
+  {% endif %}
+</div>
 <div class="list-group list-group-flush list-group-hoverable" style="min-width: 300px">
   {% for notification in notifications %}
     <div class="list-group-item p-2">
@@ -17,15 +17,17 @@
 
 {% if request.htmx %}
   {# Include the updated object count for display elsewhere on the page #}
-  <div hx-swap-oob="innerHTML:.total-object-count">{{ table.rows|length }}</div>
+  {% if not table.embedded %}
+    <div hx-swap-oob="innerHTML:.total-object-count">{{ table.rows|length }}</div>
+  {% endif %}
 
   {# Include the updated "save" link for the table configuration #}
-  {% if table.config_params %}
+  {% if table.config_params and not table.embedded %}
     <a class="dropdown-item" hx-swap-oob="outerHTML:#table_save_link" href="{% url 'extras:tableconfig_add' %}?{{ table.config_params }}&return_url={{ request.path }}" id="table_save_link">Save</a>
   {% endif %}
 
   {# Update the bulk action buttons with new query parameters #}
-  {% if actions %}
+  {% if actions and not table.embedded %}
     <div class="bulk-action-buttons" hx-swap-oob="outerHTML:.bulk-action-buttons">
       {% action_buttons actions model multi=True %}
     </div>
@@ -26,8 +26,8 @@
   <p>{% trans "Check the following" %}:</p>
   <ul>
     <li class="tip">
-      {% blocktrans trimmed %}
-        <code>manage.py collectstatic</code> was run during the most recent upgrade. This installs the most
+      {% blocktrans trimmed with command="manage.py collectstatic" %}
+        <code>{{ command }}</code> was run during the most recent upgrade. This installs the most
        recent iteration of each static file into the static root path.
      {% endblocktrans %}
    </li>
Some files were not shown because too many files have changed in this diff.