Compare commits

..

58 Commits

Author SHA1 Message Date
Arthur
76caae12fa 19724 change from old to V1 2025-10-22 08:57:36 -07:00
Arthur
26c91f01c6 19724 update docs 2025-10-21 10:27:10 -07:00
Arthur
af55da008b 19724 add the v2 to graphql testing 2025-10-21 10:16:16 -07:00
Arthur
810d1c2418 19724 add the v2 to graphql testing 2025-10-21 10:01:00 -07:00
Arthur
91b2d61ea4 19724 Use v2 API for new pagination queries 2025-10-20 16:52:46 -07:00
Arthur
b7b7b00885 Merge branch 'feature' into 19724-graphql 2025-10-20 16:29:32 -07:00
bctiemann
87505e0bb9 Merge pull request #20632 from netbox-community/20603-graphql-api-v2 (#20603: Split GraphQL API into v1 & v2) 2025-10-20 13:00:54 -04:00
Jeremy Stretch
7d82493052 #20603: Split GraphQL API into v1 & v2 2025-10-20 11:00:23 -04:00
Jeremy Stretch
77c08b7bf9 Closes #20617: Introduce BaseModel 2025-10-20 08:35:08 -04:00
Jeremy Stretch
adad7c2209 Merge branch 'main' into feature 2025-10-16 14:31:52 -04:00
Arthur
595b343cd0 19724 add doc note 2025-10-14 14:38:16 -07:00
Arthur
730aee9b26 19724 fix doc query 2025-10-14 14:15:16 -07:00
Arthur
8aa1e2802b 19724 fix tests 2025-10-14 14:06:15 -07:00
Arthur
c2d19119cb 19724 update documentation 2025-10-14 13:54:58 -07:00
Arthur
0c4d0fa2e8 19724 pagingate graphql queries 2025-10-14 13:46:30 -07:00
bctiemann
5ad6bd88f6 Merge pull request #20577 from netbox-community/20492-disable-token-plaintext-retrieval (Closes #20492: Disable API token plaintext retrieval) 2025-10-14 15:30:47 -04:00
Jeremy Stretch
2bebfccf9b Closes #20492: Disable API token plaintext retrieval 2025-10-14 14:57:37 -04:00
Jeremy Stretch
b7cc4c418b Fixes #20476: Prohibit changing a token's owner (#20576) 2025-10-14 13:12:15 -05:00
Jeremy Stretch
37a9d03348 Merge branch 'main' into feature 2025-10-14 13:54:47 -04:00
bctiemann
a91af996d5 Merge pull request #20537 from netbox-community/17571-remove-htmx-navigation (#17571 - Remove HTMX navigation) 2025-10-09 17:49:35 -04:00
Brian Tiemann
bb290dc792 Remove from docs 2025-10-08 18:24:12 -04:00
Brian Tiemann
fcdb7ff6c8 Remove HTMX navigation 2025-10-08 14:33:23 -04:00
bctiemann
18a308ae3a Merge pull request #20477 from netbox-community/20210-new-token-auth (Closes #20210: Implement new version of API token) 2025-10-07 11:21:02 -04:00
Jeremy Stretch
c63e60a62b Add a token prefix 2025-10-06 17:04:10 -04:00
Jeremy Stretch
82db8a9c02 Update documentation 2025-10-03 14:24:21 -04:00
Jeremy Stretch
bb75bceec5 Clean up tests 2025-10-03 13:55:48 -04:00
Jeremy Stretch
9a68cde95f Merge branch 'feature' into 20210-new-token-auth 2025-10-03 12:09:21 -04:00
Jeremy Stretch
6c723dfb1a Merge branch 'main' into feature 2025-10-03 12:09:03 -04:00
Jeremy Stretch
9b85d92ad0 Clean up auth backend 2025-10-03 12:08:24 -04:00
Jeremy Stretch
917a2c2618 Validate peppers on init 2025-10-03 11:41:04 -04:00
Jeremy Stretch
6388705e57 Clean up TokenForm 2025-10-03 10:45:54 -04:00
Jeremy Stretch
ac335c3d87 Clean up filterset tests 2025-10-03 10:26:13 -04:00
Jeremy Stretch
a54c508da2 Misc cleanup 2025-10-03 09:58:20 -04:00
Jeremy Stretch
d69042f26e Clean up token tables 2025-10-03 09:53:44 -04:00
Jeremy Stretch
f6290dd7af Toggle plaintext display for v1 tokens 2025-10-03 09:16:50 -04:00
Jeremy Stretch
adce67a7cf Standardize on the use of v2 tokens in tests 2025-10-02 16:37:28 -04:00
Jeremy Stretch
f82f084c02 Misc cleanup 2025-10-02 16:33:04 -04:00
Jeremy Stretch
43fc7fb58a Add constraints to enforce v1/v2-dependent fields 2025-10-02 16:05:09 -04:00
Jeremy Stretch
11099b01bb Rename pepper field to pepper_id for clarity 2025-10-02 15:38:17 -04:00
Jeremy Stretch
5dc48f3a88 Enforce a fixed key length for v2 tokens 2025-10-02 15:26:22 -04:00
Jeremy Stretch
1ee23ba6fa Initial work on #20210 2025-10-02 15:04:29 -04:00
Jeremy Stretch
23d7515b41 Merge branch 'main' into feature 2025-10-01 08:03:43 -04:00
Jeremy Stretch
12818f1786 Closes #20295: Make cable terminations REST API endpoint read-only (#20394) 2025-09-19 10:54:51 -07:00
Jeremy Stretch
f0ae0da1c7 Update OpenAPI schema 2025-09-18 15:09:07 -04:00
Jeremy Stretch
c30e4813b7 Merge branch 'main' into feature 2025-09-18 14:42:24 -04:00
Jeremy Stretch
57a7afd548 Merge branch 'main' into feature 2025-09-16 12:00:48 -04:00
Martin Hauser
b4eaeead13 Fixes #20342: Override create_superuser to drop is_staff (#20351) 2025-09-15 14:36:51 -04:00
* fix(users): Override create_superuser to drop is_staff

Override `UserManager.create_superuser()` to strip `is_staff` from
`extra_fields` and enforce `is_superuser=True`, fixing the `TypeError`
during `createsuperuser` with the custom `User` model.

Fixes #20342

* Set alters_data=True on manager methods

Co-authored-by: Jeremy Stretch <jstretch@netboxlabs.com>
bctiemann
24fff6bd74 Merge pull request #20326 from netbox-community/20096-remove-legacy-script-methods (Closes #20096: Remove legacy `load_yaml()` & `load_json()` methods from BaseScript) 2025-09-15 08:53:57 -04:00
Jeremy Stretch
b9567208d4 Closes #20088: Remove model from webhook context (replaced by object_type) (#20325) 2025-09-12 09:54:54 -07:00
Jeremy Stretch
cfcea7c941 Closes #19898: Remove legacy /api/extras/object-types/ endpoint (#20324) 2025-09-11 15:09:49 -05:00
Jeremy Stretch
21ba27fb39 Closes #20096: Remove legacy load_yaml() & load_json() methods from BaseScript 2025-09-11 11:30:15 -04:00
Jeremy Stretch
c0e4d1c1e3 Closes #16137: Remove is_staff boolean from User model (#20306) 2025-09-10 16:51:59 -04:00
* Closes #16137: Remove is_staff boolean from User model
* Remove default is_staff value from UserManager.create_user()
* Restore staff_only on MenuItem
* Introduce IsSuperuser API permission to replace IsAdminUser
* Update and improve RQ task API view tests
* Remove is_staff attribute assignment from RemoteUserBackend
bctiemann
d95eaa7ba2 Merge pull request #20299 from netbox-community/19095-support-new-pythons (Closes #19095: Introduce support for Python 3.13 & 3.14) 2025-09-08 16:19:54 -04:00
Jeremy Stretch
5506901867 Omit Python 3.14 from the testing matrix temporarily 2025-09-08 15:41:06 -04:00
Jeremy Stretch
ec9da88134 Closes #19095: Introduce support for Python 3.13 & 3.14 2025-09-08 15:36:12 -04:00
bctiemann
e221f1fffa Merge pull request #20231 from netbox-community/19889-drop-old-pythons (Closes #19889: Drop support for Python 3.10 & 3.11) 2025-09-05 11:21:10 -04:00
Jeremy Stretch
530dad279a Closes #20095: Remove obsolete module core.models.contenttypes (#20250) 2025-09-05 07:49:59 -05:00
Jeremy Stretch
b1439dc298 Closes #19889: Drop support for Python 3.10 & 3.11 2025-09-02 15:38:32 -04:00
252 changed files with 24746 additions and 31067 deletions

View File

@@ -2,7 +2,7 @@
name: ✨ Feature Request
type: Feature
description: Propose a new NetBox feature or enhancement
labels: ["netbox", "type: feature", "status: needs triage"]
labels: ["type: feature", "status: needs triage"]
body:
- type: markdown
attributes:
@@ -15,7 +15,7 @@ body:
attributes:
label: NetBox version
description: What version of NetBox are you currently running?
placeholder: v4.4.7
placeholder: v4.4.4
validations:
required: true
- type: dropdown

View File

@@ -2,7 +2,7 @@
name: 🐛 Bug Report
type: Bug
description: Report a reproducible bug in the current release of NetBox
labels: ["netbox", "type: bug", "status: needs triage"]
labels: ["type: bug", "status: needs triage"]
body:
- type: markdown
attributes:
@@ -27,7 +27,7 @@ body:
attributes:
label: NetBox Version
description: What version of NetBox are you currently running?
placeholder: v4.4.7
placeholder: v4.4.4
validations:
required: true
- type: dropdown
@@ -35,9 +35,9 @@ body:
label: Python Version
description: What version of Python are you currently running?
options:
- "3.10"
- "3.11"
- "3.12"
- "3.13"
- "3.14"
validations:
required: true
- type: textarea

View File

@@ -2,7 +2,7 @@
name: 📖 Documentation Change
type: Documentation
description: Suggest an addition or modification to the NetBox documentation
labels: ["netbox", "type: documentation", "status: needs triage"]
labels: ["type: documentation", "status: needs triage"]
body:
- type: dropdown
attributes:

View File

@@ -2,7 +2,7 @@
name: 🌍 Translation
type: Translation
description: Request support for a new language in the user interface
labels: ["netbox", "type: translation"]
labels: ["type: translation"]
body:
- type: markdown
attributes:

View File

@@ -2,7 +2,7 @@
name: 🏡 Housekeeping
type: Housekeeping
description: A change pertaining to the codebase itself (developers only)
labels: ["netbox", "type: housekeeping"]
labels: ["type: housekeeping"]
body:
- type: markdown
attributes:

View File

@@ -2,7 +2,7 @@
name: 🗑️ Deprecation
type: Deprecation
description: The removal of an existing feature or resource
labels: ["netbox", "type: deprecation"]
labels: ["type: deprecation"]
body:
- type: textarea
attributes:

View File

@@ -31,7 +31,7 @@ jobs:
NETBOX_CONFIGURATION: netbox.configuration_testing
strategy:
matrix:
python-version: ['3.10', '3.11', '3.12']
python-version: ['3.12', '3.13']
node-version: ['20.x']
services:
redis:

View File

@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.1
rev: v0.6.9
hooks:
- id: ruff
name: "Ruff linter"
@@ -21,6 +21,14 @@ repos:
language: system
pass_filenames: false
types: [python]
- id: openapi-check
name: "Validate OpenAPI schema"
description: "Check for any unexpected changes to the OpenAPI schema"
files: api/.*\.py$
entry: scripts/verify-openapi.sh
language: system
pass_filenames: false
types: [python]
- id: mkdocs-build
name: "Build documentation"
description: "Build the documentation with mkdocs"

View File

@@ -186,7 +186,6 @@
"usb-3-micro-b",
"molex-micro-fit-1x2",
"molex-micro-fit-2x2",
"molex-micro-fit-2x3",
"molex-micro-fit-2x4",
"dc-terminal",
"saf-d-grid",
@@ -294,7 +293,6 @@
"usb-c",
"molex-micro-fit-1x2",
"molex-micro-fit-2x2",
"molex-micro-fit-2x3",
"molex-micro-fit-2x4",
"dc-terminal",
"eaton-c39",

File diff suppressed because one or more lines are too long

View File

@@ -2,7 +2,7 @@
## Local Authentication
Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu. This section is available only to users with the "staff" permission enabled.
Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu.
At a minimum, each user account must have a username and password set. User accounts may also denote a first name, last name, and email address. [Permissions](../permissions.md) may also be assigned to individual users and/or groups as needed.

View File

@@ -1,5 +1,15 @@
# GraphQL API Parameters
## GRAPHQL_DEFAULT_VERSION
!!! note "This parameter was introduced in NetBox v4.5."
Default: `1`
Designates the default version of the GraphQL API served by `/graphql/`. To access a specific version, append the version number to the URL, e.g. `/graphql/v2/`.
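A minimal `configuration.py` sketch, assuming a NetBox release that supports this parameter, would set:
```python
# configuration.py (sketch): serve GraphQL API v2 at /graphql/ by default.
# A specific version can still be requested explicitly, e.g. /graphql/v2/.
GRAPHQL_DEFAULT_VERSION = 2
```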
---
## GRAPHQL_ENABLED
!!! tip "Dynamic Configuration Parameter"

View File

@@ -35,7 +35,6 @@ Some configuration parameters are primarily controlled via NetBox's admin interf
* [`POWERFEED_DEFAULT_MAX_UTILIZATION`](./default-values.md#powerfeed_default_max_utilization)
* [`POWERFEED_DEFAULT_VOLTAGE`](./default-values.md#powerfeed_default_voltage)
* [`PREFER_IPV4`](./miscellaneous.md#prefer_ipv4)
* [`PROTECTION_RULES`](./data-validation.md#protection_rules)
* [`RACK_ELEVATION_DEFAULT_UNIT_HEIGHT`](./default-values.md#rack_elevation_default_unit_height)
* [`RACK_ELEVATION_DEFAULT_UNIT_WIDTH`](./default-values.md#rack_elevation_default_unit_width)

View File

@@ -53,16 +53,6 @@ Sets content for the top banner in the user interface.
---
## COPILOT_ENABLED
!!! tip "Dynamic Configuration Parameter"
Default: `True`
Enables or disables the [NetBox Copilot](https://netboxlabs.com/docs/copilot/) agent globally. When enabled, users can opt to toggle the agent individually.
---
## CENSUS_REPORTING_ENABLED
Default: `True`

View File

@@ -127,19 +127,3 @@ The list of groups that promote a remote User to Superuser on Login. If group i
Default: `[]` (Empty list)
The list of users that get promoted to Superuser on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
---
## REMOTE_AUTH_STAFF_GROUPS
Default: `[]` (Empty list)
The list of groups that promote a remote User to Staff on Login. If group isn't present on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )
---
## REMOTE_AUTH_STAFF_USERS
Default: `[]` (Empty list)
The list of users that get promoted to Staff on Login. If user isn't present in list on next Login, the Role gets revoked. (Requires `REMOTE_AUTH_ENABLED` and `REMOTE_AUTH_GROUP_SYNC_ENABLED` )

View File

@@ -23,6 +23,31 @@ ALLOWED_HOSTS = ['*']
---
## API_TOKEN_PEPPERS
!!! info "This parameter was introduced in NetBox v4.5."
[Cryptographic peppers](https://en.wikipedia.org/wiki/Pepper_(cryptography)) are employed to generate hashes of sensitive values on the server. This parameter defines the peppers used to hash v2 API tokens in NetBox. You must define at least one pepper before creating a v2 API token. See the [API documentation](../integrations/rest-api.md#authentication) for further information about how peppers are used.
```python
API_TOKEN_PEPPERS = {
# DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
}
```
!!! warning "Peppers are sensitive"
Treat pepper values as extremely sensitive. Consider populating peppers from environment variables at initialization time rather than defining them in the configuration file, if feasible.
Peppers must be at least 50 characters in length and should comprise a random string with a diverse character set. Consider using the Python script at `$INSTALL_ROOT/netbox/generate_secret_key.py` to generate a pepper value.
It is recommended to start with a pepper ID of `1`. Additional peppers can be introduced later as needed to begin rotating token hashes.
!!! tip
Although NetBox will run without `API_TOKEN_PEPPERS` defined, the use of v2 API tokens will be unavailable.
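A minimal sketch of the environment-variable approach suggested above; the variable name `NETBOX_TOKEN_PEPPER_1` is an assumption, not a name NetBox defines:
```python
# configuration.py (sketch): load the pepper from the environment instead of
# hard-coding it. NETBOX_TOKEN_PEPPER_1 is an assumed variable name.
import os

API_TOKEN_PEPPERS = {
    1: os.environ["NETBOX_TOKEN_PEPPER_1"],
}
```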
---
## DATABASE
!!! warning "Legacy Configuration Parameter"

View File

@@ -1,16 +1,5 @@
# Security & Authentication Parameters
## ALLOW_TOKEN_RETRIEVAL
Default: `False`
!!! note
The default value of this parameter changed from `True` to `False` in NetBox v4.3.0.
If disabled, the values of API tokens will not be displayed after each token's initial creation. A user **must** record the value of a token prior to its creation, or it will be lost. Note that this affects _all_ users, regardless of assigned permissions.
---
## ALLOWED_URL_SCHEMES
!!! tip "Dynamic Configuration Parameter"
@@ -92,7 +81,7 @@ If `True`, the cookie employed for cross-site request forgery (CSRF) protection
Default: `[]`
Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://`).
Defines a list of trusted origins for unsafe (e.g. `POST`) requests. This is a pass-through to Django's [`CSRF_TRUSTED_ORIGINS`](https://docs.djangoproject.com/en/stable/ref/settings/#csrf-trusted-origins) setting. Note that each host listed must specify a scheme (e.g. `http://` or `https://).
```python
CSRF_TRUSTED_ORIGINS = (

View File

@@ -232,9 +232,6 @@ STORAGES = {
},
"scripts": {
"BACKEND": "extras.storage.ScriptFileSystemStorage",
"OPTIONS": {
"allow_overwrite": True,
},
},
}
```
@@ -250,7 +247,6 @@ STORAGES = {
"OPTIONS": {
'access_key': 'access key',
'secret_key': 'secret key',
"allow_overwrite": True,
}
},
}

View File

@@ -95,7 +95,7 @@ An example fieldset definition is provided below:
```python
class MyScript(Script):
class Meta(Script.Meta):
class Meta:
fieldsets = (
('First group', ('field1', 'field2', 'field3')),
('Second group', ('field4', 'field5')),
@@ -131,17 +131,6 @@ self.log_info(f"Running as user {username} (IP: {ip_address})...")
For a complete list of available request parameters, please see the [Django documentation](https://docs.djangoproject.com/en/stable/ref/request-response/).
## Reading Data from Files
The Script class provides two convenience methods for reading data from files:
* `load_yaml`
* `load_json`
These two methods will load data in YAML or JSON format, respectively, from files within the local path (i.e. `SCRIPTS_ROOT`).
**Note:** These convenience methods are deprecated and will be removed in NetBox v4.4. These only work if running scripts within the local path, they will not work if using a storage other than ScriptFileSystemStorage.
## Logging
The Script object provides a set of convenient functions for recording messages at different severity levels:
@@ -404,61 +393,6 @@ A complete date & time. Returns a `datetime.datetime` object.
Custom scripts can be run via the web UI by navigating to the script, completing any required form data, and clicking the "run script" button. It is possible to schedule a script to be executed at specified time in the future. A scheduled script can be canceled by deleting the associated job result object.
#### Prefilling variables via URL parameters
Script form fields can be prefilled by appending query parameters to the script URL. Each parameter name must match the variable name defined on the script class. Prefilled values are treated as initial values and can be edited before execution. Multiple values can be supplied by repeating the same parameter. Query values must be percent-encoded where required (for example, spaces as `%20`).
Examples:
For string and integer variables, when a script defines:
```python
from extras.scripts import Script, StringVar, IntegerVar
class MyScript(Script):
name = StringVar()
count = IntegerVar()
```
the following URL prefills the `name` and `count` fields:
```
https://<netbox>/extras/scripts/<script_id>/?name=Branch42&count=3
```
For object variables (`ObjectVar`), supply the object's primary key (PK):
```
https://<netbox>/extras/scripts/<script_id>/?device=1
```
If an object ID cannot be resolved or the object is not visible to the requesting user, the field remains unpopulated.
Supported variable types:
| Variable class | Expected input | Example query string |
|--------------------------|---------------------------------|---------------------------------------------|
| `StringVar` | string (percent-encoded) | `?name=Branch42` |
| `TextVar` | string (percent-encoded) | `?notes=Initial%20value` |
| `IntegerVar` | integer | `?count=3` |
| `DecimalVar` | decimal number | `?ratio=0.75` |
| `BooleanVar` | value → `True`; empty → `False` | `?enabled=true` (True), `?enabled=` (False) |
| `ChoiceVar` | choice value (not label) | `?role=edge` |
| `MultiChoiceVar` | choice values (repeat) | `?roles=edge&roles=core` |
| `ObjectVar(Device)` | PK (integer) | `?device=1` |
| `MultiObjectVar(Device)` | PKs (repeat) | `?devices=1&devices=2` |
| `IPAddressVar` | IP address | `?ip=198.51.100.10` |
| `IPAddressWithMaskVar` | IP address with mask | `?addr=192.0.2.1/24` |
| `IPNetworkVar` | IP network prefix | `?network=2001:db8::/64` |
| `DateVar` | date `YYYY-MM-DD` | `?date=2025-01-05` |
| `DateTimeVar` | ISO datetime | `?when=2025-01-05T14:30:00` |
| `FileVar` | — (not supported) | — |
!!! note
- The parameter names above are examples; use the actual variable attribute names defined by the script.
- For `BooleanVar`, only an empty value (`?enabled=`) unchecks the box; any other value including `false` or `0` checks it.
- File uploads (`FileVar`) cannot be prefilled via URL parameters.
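A small sketch of building such a prefill URL with the Python standard library; the script URL shown is a placeholder:
```python
# Hypothetical helper: assemble a prefill URL for the MyScript example above.
from urllib.parse import quote, urlencode

base = "https://netbox.example.com/extras/scripts/123/"  # placeholder script URL
params = urlencode({"name": "Branch 42", "count": 3}, quote_via=quote)  # spaces become %20
print(f"{base}?{params}")  # .../extras/scripts/123/?name=Branch%2042&count=3
```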
### Via the API
To run a script via the REST API, issue a POST request to the script's endpoint specifying the form data and commitment. For example, to run a script named `example.MyReport`, we would make a request such as the following:
@@ -510,7 +444,7 @@ from extras.scripts import *
class NewBranchScript(Script):
class Meta(Script.Meta):
class Meta:
name = "New Branch"
description = "Provision a new branch site"
field_order = ['site_name', 'switch_count', 'switch_model']

View File

@@ -7,7 +7,7 @@ Getting started with NetBox development is pretty straightforward, and should fe
* A Linux system or compatible environment
* A PostgreSQL server, which can be installed locally [per the documentation](../installation/1-postgresql.md)
* A Redis server, which can also be [installed locally](../installation/2-redis.md)
* Python 3.10 or later
* Python 3.12 or later
### 1. Fork the Repo

View File

@@ -12,7 +12,7 @@ Depending on its classification, each NetBox model may support various features
| Feature | Feature Mixin | Registry Key | Description |
|------------------------------------------------------------|-------------------------|---------------------|-----------------------------------------------------------------------------------------|
| [Bookmarks](../features/user-preferences.md#bookmarks) | `BookmarksMixin` | `bookmarks` | These models can be bookmarked natively in the user interface |
| [Bookmarks](../features/customization.md#bookmarks) | `BookmarksMixin` | `bookmarks` | These models can be bookmarked natively in the user interface |
| [Change logging](../features/change-logging.md) | `ChangeLoggingMixin` | `change_logging` | Changes to these objects are automatically recorded in the change log |
| Cloning | `CloningMixin` | `cloning` | Provides the `clone()` method to prepare a copy |
| [Contacts](../features/contacts.md) | `ContactsMixin` | `contacts` | Contacts can be associated with these models |

View File

@@ -6,14 +6,10 @@ For end-user guidance on resetting saved table layouts, see [Features > User P
## Available Preferences
| Name | Description |
|----------------------------|---------------------------------------------------------------|
| `csv_delimiter` | The delimiting character used when exporting CSV data |
| `data_format` | Preferred format when rendering raw data (JSON or YAML) |
| `locale.language` | The language selected for UI translation |
| `pagination.per_page` | The number of items to display per page of a paginated table |
| `pagination.placement` | Where to display the paginator controls relative to the table |
| `tables.${table}.columns` | The ordered list of columns to display when viewing the table |
| `tables.${table}.ordering` | A list of column names by which the table should be ordered |
| `ui.copilot_enabled` | Toggles the NetBox Copilot AI agent |
| `ui.tables.striping` | Toggles visual striping of tables in the UI |
| Name | Description |
|--------------------------|---------------------------------------------------------------|
| data_format | Preferred format when rendering raw data (JSON or YAML) |
| pagination.per_page | The number of items to display per page of a paginated table |
| pagination.placement | Where to display the paginator controls relative to the table |
| tables.${table}.columns | The ordered list of columns to display when viewing the table |
| tables.${table}.ordering | A list of column names by which the table should be ordered |

View File

@@ -8,7 +8,7 @@ NetBox's REST API, powered by the [Django REST Framework](https://www.django-res
```no-highlight
curl -s -X POST \
-H "Authorization: Token $TOKEN" \
-H "Authorization: Bearer $TOKEN" \
-H "Content-Type: application/json" \
http://netbox/api/ipam/prefixes/ \
--data '{"prefix": "192.0.2.0/24", "site": {"name": "Branch 12"}}'

View File

@@ -34,9 +34,6 @@ Sets the default number of rows displayed on paginated tables.
### Paginator placement
Controls where pagination controls are rendered relative to a table.
### HTMX navigation (experimental)
Enables partial-page navigation for supported views. Disable this preference if unexpected behavior is observed.
### Striped table rows
Toggles alternating row backgrounds on tables.

View File

@@ -6,8 +6,8 @@ This section of the documentation discusses installing and configuring the NetBo
Begin by installing all system packages required by NetBox and its dependencies.
!!! warning "Python 3.10 or later required"
NetBox supports Python 3.10, 3.11, and 3.12.
!!! warning "Python 3.12 or later required"
NetBox supports only Python 3.12 or later.
```no-highlight
sudo apt install -y python3 python3-pip python3-venv python3-dev \
@@ -15,7 +15,7 @@ build-essential libxml2-dev libxslt1-dev libffi-dev libpq-dev \
libssl-dev zlib1g-dev
```
Before continuing, check that your installed Python version is at least 3.10:
Before continuing, check that your installed Python version is at least 3.12:
```no-highlight
python3 -V
@@ -120,6 +120,23 @@ If you are not yet sure what the domain name and/or IP address of the NetBox ins
ALLOWED_HOSTS = ['*']
```
### API_TOKEN_PEPPERS
Define at least one random cryptographic pepper, identified by a numeric ID starting at 1. This will be used to generate SHA256 checksums for API tokens.
```python
API_TOKEN_PEPPERS = {
# DO NOT USE THIS EXAMPLE PEPPER IN PRODUCTION
1: 'kp7ht*76fiQAhUi5dHfASLlYUE_S^gI^(7J^K5M!LfoH@vl&b_',
}
```
!!! tip
As with [`SECRET_KEY`](#secret_key) below, you can use the `generate_secret_key.py` script to generate a random pepper:
```no-highlight
python3 ../generate_secret_key.py
```
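A standard-library alternative for generating a suitably long pepper, offered as a sketch rather than an official method:
```python
# Sketch: generate a 50+ character random pepper using only the standard library.
import secrets
import string

alphabet = string.ascii_letters + string.digits + "!@#$%^&*(-_=+)"
print("".join(secrets.choice(alphabet) for _ in range(50)))
```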
### DATABASES
This parameter holds the PostgreSQL database configuration details. The default database must be defined; additional databases may be defined as needed e.g. by plugins.
@@ -235,10 +252,10 @@ Once NetBox has been configured, we're ready to proceed with the actual installa
sudo /opt/netbox/upgrade.sh
```
Note that **Python 3.10 or later is required** for NetBox v4.0 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)
Note that **Python 3.12 or later is required** for NetBox v4.5 and later releases. If the default Python installation on your server is set to a lesser version, pass the path to the supported installation as an environment variable named `PYTHON`. (Note that the environment variable must be passed _after_ the `sudo` command.)
```no-highlight
sudo PYTHON=/usr/bin/python3.10 /opt/netbox/upgrade.sh
sudo PYTHON=/usr/bin/python3.12 /opt/netbox/upgrade.sh
```
!!! note

View File

@@ -60,6 +60,3 @@ You should see output similar to the following:
If the NetBox service fails to start, issue the command `journalctl -eu netbox` to check for log messages that may indicate the problem.
Once you've verified that the WSGI workers are up and running, move on to HTTP server setup.
!!! note
There is a bug in the current stable release of gunicorn (v21.2.0) where automatic restarts of the worker processes can result in 502 errors under heavy load. (See [gunicorn bug #3038](https://github.com/benoitc/gunicorn/issues/3038) for more detail.) Users who encounter this issue may opt to downgrade to an earlier, unaffected release of gunicorn (`pip install gunicorn==20.1.0`). Note, however, that this earlier release does not officially support Python 3.11.

View File

@@ -121,7 +121,6 @@ AUTH_LDAP_MIRROR_GROUPS = True
# Define special user types using groups. Exercise great caution when assigning superuser status.
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
"is_active": "cn=active,ou=groups,dc=example,dc=com",
"is_staff": "cn=staff,ou=groups,dc=example,dc=com",
"is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
}
@@ -134,7 +133,6 @@ AUTH_LDAP_CACHE_TIMEOUT = 3600
```
* `is_active` - All users must be mapped to at least this group to enable authentication. Without this, users cannot log in.
* `is_staff` - Users mapped to this group are enabled for access to the administration tools; this is the equivalent of checking the "staff status" box on a manually created user. This doesn't grant any specific permissions.
* `is_superuser` - Users mapped to this group will be granted superuser status. Superusers are implicitly granted all permissions.
!!! warning
@@ -248,7 +246,6 @@ AUTH_LDAP_MIRROR_GROUPS = True
# Define special user types using groups. Exercise great caution when assigning superuser status.
AUTH_LDAP_USER_FLAGS_BY_GROUP = {
"is_active": "cn=active,ou=groups,dc=example,dc=com",
"is_staff": "cn=staff,ou=groups,dc=example,dc=com",
"is_superuser": "cn=superuser,ou=groups,dc=example,dc=com"
}

View File

@@ -27,7 +27,7 @@ The following sections detail how to set up a new instance of NetBox:
| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| Python | 3.12, 3.13, 3.14 |
| PostgreSQL | 14+ |
| Redis | 4.0+ |

View File

@@ -19,7 +19,7 @@ NetBox requires the following dependencies:
| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| Python | 3.12, 3.13, 3.14 |
| PostgreSQL | 14+ |
| Redis | 4.0+ |
@@ -27,6 +27,7 @@ NetBox requires the following dependencies:
| NetBox Version | Python min | Python max | PostgreSQL min | Redis min | Documentation |
|:--------------:|:----------:|:----------:|:--------------:|:---------:|:-----------------------------------------------------------------------------------------:|
| 4.5 | 3.12 | 3.14 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.5.0/docs/installation/index.md) |
| 4.4 | 3.10 | 3.12 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.4.0/docs/installation/index.md) |
| 4.3 | 3.10 | 3.12 | 14 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.3.0/docs/installation/index.md) |
| 4.2 | 3.10 | 3.12 | 13 | 4.0 | [Link](https://github.com/netbox-community/netbox/blob/v4.2.0/docs/installation/index.md) |
@@ -130,7 +131,7 @@ sudo ./upgrade.sh
If the default version of Python is not at least 3.10, you'll need to pass the path to a supported Python version as an environment variable when calling the upgrade script. For example:
```no-highlight
sudo PYTHON=/usr/bin/python3.10 ./upgrade.sh
sudo PYTHON=/usr/bin/python3.12 ./upgrade.sh
```
!!! note

View File

@@ -11,7 +11,7 @@ curl -H "Authorization: Token $TOKEN" \
-H "Content-Type: application/json" \
-H "Accept: application/json" \
http://netbox/graphql/ \
--data '{"query": "query {circuit_list(filters:{status: STATUS_ACTIVE}) {cid provider {name}}}"}'
--data '{"query": "query {circuit_list(filters:{status: STATUS_ACTIVE}) {results {cid provider {name}}}}"}'
```
The response will include the requested data formatted as JSON:
@@ -36,6 +36,30 @@ The response will include the requested data formatted as JSON:
}
}
```
If using the GraphQL API v2, the format will be:
```json
{
"data": {
"circuit_list": {
"results": [
{
"cid": "1002840283",
"provider": {
"name": "CenturyLink"
}
},
{
"cid": "1002840457",
"provider": {
"name": "CenturyLink"
}
}
]
}
}
}
```
!!! note
It's recommended to pass the return data through a JSON parser such as `jq` for better readability.
@@ -47,12 +71,15 @@ NetBox provides both a singular and plural query field for each object type:
For example, query `device(id:123)` to fetch a specific device (identified by its unique ID), and query `device_list` (with an optional set of filters) to fetch all devices.
!!! note "Changed in NetBox v4.5"
If using the GraphQL API v2, list queries now return paginated results. The actual objects are contained within the `results` field of the response, along with `total_count` and `page_info` fields for pagination metadata. Prior to v4.5, list queries returned objects directly as an array.
For more detail on constructing GraphQL queries, see the [GraphQL queries documentation](https://graphql.org/learn/queries/). For filtering and lookup syntax, please refer to the [Strawberry Django documentation](https://strawberry.rocks/docs/django/guide/filters).
## Filtering
!!! note "Changed in NetBox v4.3"
The filtering syntax fo the GraphQL API has changed substantially in NetBox v4.3.
The filtering syntax for the GraphQL API has changed substantially in NetBox v4.3.
Filters can be specified as key-value pairs within parentheses immediately following the query name. For example, the following will return only active sites:
@@ -67,6 +94,21 @@ query {
}
}
```
If using the GraphQL API v2, the format will be:
```
query {
site_list(
filters: {
status: STATUS_ACTIVE
}
) {
results {
name
}
}
}
```
Filters can be combined with logical operators, such as `OR` and `NOT`. For example, the following will return every site that is planned _or_ assigned to a tenant named Foo:
@@ -88,6 +130,28 @@ query {
}
}
```
If using the GraphQL API v2, the format will be:
```
query {
site_list(
filters: {
status: STATUS_PLANNED,
OR: {
tenant: {
name: {
exact: "Foo"
}
}
}
}
) {
results {
name
}
}
}
```
Filtering can also be applied to related objects. For example, the following query will return only enabled interfaces for each device:
@@ -102,6 +166,21 @@ query {
}
}
```
If using the GraphQL API v2, the format will be:
```
query {
device_list {
results {
id
name
interfaces(filters: {enabled: {exact: true}}) {
name
}
}
}
}
```
## Multiple Return Types
@@ -128,6 +207,31 @@ Certain queries can return multiple types of objects, for example cable terminat
}
}
```
If using the GraphQL API v2, the format will be:
```
{
cable_list {
results {
id
a_terminations {
... on CircuitTerminationType {
id
class_type
}
... on ConsolePortType {
id
class_type
}
... on ConsoleServerPortType {
id
class_type
}
}
}
}
}
```
The field "class_type" is an easy way to distinguish what type of object it is when viewing the returned data, or when filtering. It contains the class name, for example "CircuitTermination" or "ConsoleServerPort".
@@ -142,6 +246,47 @@ query {
}
}
```
### Pagination in GraphQL API V2
All list queries return paginated results using the `OffsetPaginated` type, which includes:
- `results`: The list of objects matching the query
- `total_count`: The total number of objects matching the filters (without pagination)
- `page_info`: Pagination metadata including `offset` and `limit`
By default, queries return up to 100 results. You can control pagination by specifying the `pagination` parameter with `offset` and `limit` values:
```
query {
device_list(pagination: { offset: 0, limit: 20 }) {
total_count
page_info {
offset
limit
}
results {
id
name
}
}
}
```
If you don't need pagination metadata, you can simply query the `results`:
```
query {
device_list {
results {
id
name
}
}
}
```
!!! note
When not specifying the `pagination` parameter, avoid querying `page_info.limit` as it may return an undefined value. Either provide explicit pagination parameters or only query the `results` and `total_count` fields.
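A hedged Python sketch of walking `device_list` page by page against the v2 endpoint; it assumes the `requests` library, placeholder URL and token values, and the `Token` authorization header form used in the curl example earlier in this document:
```python
# Sketch: iterate over all devices via GraphQL API v2 offset/limit pagination.
# NETBOX_URL and TOKEN are placeholders.
import requests

NETBOX_URL = "https://netbox.example.com"
TOKEN = "abc123"

def fetch_all_devices(limit=100):
    devices, offset = [], 0
    while True:
        query = f"""
        query {{
          device_list(pagination: {{offset: {offset}, limit: {limit}}}) {{
            total_count
            results {{ id name }}
          }}
        }}
        """
        resp = requests.post(
            f"{NETBOX_URL}/graphql/v2/",
            headers={"Authorization": f"Token {TOKEN}", "Accept": "application/json"},
            json={"query": query},
        )
        resp.raise_for_status()
        data = resp.json()["data"]["device_list"]
        devices.extend(data["results"])
        offset += limit
        if offset >= data["total_count"]:
            return devices
```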
## Authentication

View File

@@ -80,7 +80,7 @@ Likewise, the site, rack, and device objects are located under the "DCIM" applic
The full hierarchy of available endpoints can be viewed by navigating to the API root in a web browser.
Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete an single existing object. All objects are referenced by their numeric primary key (`id`).
Each model generally has two views associated with it: a list view and a detail view. The list view is used to retrieve a list of multiple objects and to create new objects. The detail view is used to retrieve, update, or delete a single existing object. All objects are referenced by their numeric primary key (`id`).
* `/api/dcim/devices/` - List existing devices or create a new device
* `/api/dcim/devices/123/` - Retrieve, update, or delete the device with ID 123
@@ -653,18 +653,22 @@ The NetBox REST API primarily employs token-based authentication. For convenienc
### Tokens
A token is a unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile.
A token is a secret, unique identifier mapped to a NetBox user account. Each user may have one or more tokens which he or she can use for authentication when making REST API requests. To create a token, navigate to the API tokens page under your user profile. When creating a token, NetBox will automatically populate a randomly-generated token value.
!!! note "Tokens cannot be retrieved once created"
Once a token has been created, its plaintext value cannot be retrieved. For this reason, you must take care to securely record the token locally immediately upon its creation. If a token plaintext is lost, it cannot be recovered: A new token must be created.
By default, all users can create and manage their own REST API tokens under the user control panel in the UI or via the REST API. This ability can be disabled by overriding the [`DEFAULT_PERMISSIONS`](../configuration/security.md#default_permissions) configuration parameter.
Each token contains a 160-bit key represented as 40 hexadecimal characters. When creating a token, you'll typically leave the key field blank so that a random key will be automatically generated. However, NetBox allows you to specify a key in case you need to restore a previously deleted token to operation.
Additionally, a token can be set to expire at a specific time. This can be useful if an external client needs to be granted temporary access to NetBox.
!!! info "Restricting Token Retrieval"
The ability to retrieve the key value of a previously-created API token can be restricted by disabling the [`ALLOW_TOKEN_RETRIEVAL`](../configuration/security.md#allow_token_retrieval) configuration parameter.
#### v1 and v2 Tokens
### Restricting Write Operations
Beginning with NetBox v4.5, two versions of API token are supported, denoted as v1 and v2. Users are strongly encouraged to create only v2 tokens and to discontinue the use of v1 tokens. Support for v1 tokens will be removed in a future NetBox release.
v2 API tokens offer much stronger security. The token plaintext given at creation time is hashed together with a configured [cryptographic pepper](../configuration/required-parameters.md#api_token_peppers) to generate a unique checksum. This checksum is irreversible; the token plaintext is never stored on the server and thus cannot be retrieved even with database-level access.
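A conceptual sketch of the peppered, one-way digest described above; this is illustrative only and is not NetBox's actual hashing implementation:
```python
# Conceptual illustration (NOT NetBox's real algorithm): a peppered, one-way
# digest of the token plaintext. Only the digest and the pepper ID would be stored.
import hashlib
import hmac

def token_digest(plaintext: str, pepper: str) -> str:
    return hmac.new(pepper.encode(), plaintext.encode(), hashlib.sha256).hexdigest()
```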
#### Restricting Write Operations
By default, a token can be used to perform all actions via the API that a user would be permitted to do via the web UI. Deselecting the "write enabled" option will restrict API requests made with the token to read operations (e.g. GET) only.
@@ -681,10 +685,22 @@ It is possible to provision authentication tokens for other users via the REST A
### Authenticating to the API
An authentication token is attached to a request by setting the `Authorization` header to the string `Token` followed by a space and the user's token:
An authentication token is included with a request in its `Authorization` header. The format of the header value depends on the version of token in use. v2 tokens use the following form, concatenating the token's prefix (`nbt_`) and key with its plaintext value, separated by a period:
```
$ curl -H "Authorization: Token $TOKEN" \
Authorization: Bearer nbt_<key>.<token>
```
Legacy v1 tokens use the prefix `Token` rather than `Bearer`, and include only the token plaintext. (v1 tokens do not have a key.)
```
Authorization: Token <token>
```
Below is an example REST API request utilizing a v2 token.
```
$ curl -H "Authorization: Bearer nbt_4F9DAouzURLb.zjebxBPzICiPbWz0Wtx0fTL7bCKXKGTYhNzkgC2S" \
-H "Accept: application/json; indent=4" \
https://netbox/api/dcim/sites/
{

View File

@@ -60,13 +60,6 @@ Four of the standard Python logging levels are supported:
Log entries recorded using the runner's logger will be saved in the job's log in the database in addition to being processed by other [system logging handlers](../../configuration/system.md#logging).
### Jobs running for Model instances
A Job can be executed for a specific instance of a Model.
To enable this functionality, the model must include the `JobsMixin`.
When enqueuing a Job, you can associate it with a particular instance by passing that instance to the `instance` parameter.
### Scheduled Jobs
As described above, jobs can be scheduled for immediate execution or at any later time using the `enqueue()` method. However, for management purposes, the `enqueue_once()` method allows a job to be scheduled exactly once avoiding duplicates. If a job is already scheduled for a particular instance, a second one won't be scheduled, respecting thread safety. An example use case would be to schedule a periodic task that is bound to an instance in general, but not to any event of that instance (such as updates). The parameters of the `enqueue_once()` method are identical to those of `enqueue()`.
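A hedged usage sketch of `enqueue_once()`, assuming `MyTestJob` and the module layout from the surrounding example, and that `JobIntervalChoices.INTERVAL_HOURLY` is available from `core.choices`:
```python
# Usage sketch only: enqueue_once() schedules the job unless an identical
# job is already scheduled for the given instance.
from core.choices import JobIntervalChoices

from .jobs import MyTestJob  # assumed module layout, as in the example below


def schedule_hourly_job(instance):
    """Schedule MyTestJob hourly for this object, avoiding duplicates."""
    MyTestJob.enqueue_once(instance=instance, interval=JobIntervalChoices.INTERVAL_HOURLY)
```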
@@ -80,10 +73,9 @@ As described above, jobs can be scheduled for immediate execution or at any late
from django.db import models
from core.choices import JobIntervalChoices
from netbox.models import NetBoxModel
from netbox.models.features import JobsMixin
from .jobs import MyTestJob
class MyModel(JobsMixin, NetBoxModel):
class MyModel(NetBoxModel):
foo = models.CharField()
def save(self, *args, **kwargs):

View File

@@ -55,27 +55,6 @@ class MyModelViewSet(...):
filterset_class = filtersets.MyModelFilterSet
```
### Implementing Quick Search
The `ObjectListView` has a field called Quick Search. For Quick Search to work the corresponding FilterSet has to override the `search` method that is implemented in `NetBoxModelFilterSet`. This function takes a queryset and can perform arbitrary operations on it and return it. A common use-case is to search for the given search value in multiple fields:
```python
from django.db.models import Q
from netbox.filtersets import NetBoxModelFilterSet
class MyFilterSet(NetBoxModelFilterSet):
...
def search(self, queryset, name, value):
if not value.strip():
return queryset
return queryset.filter(
Q(name__icontains=value) |
Q(description__icontains=value)
)
```
The `search` method is also used by the `q` filter in `NetBoxModelFilterSet` which in turn is used by the Search field in the filters tab.
## Filter Classes
### TagFilter

View File

@@ -173,12 +173,12 @@ classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
"Programming Language :: Python :: 3 :: Only",
'Programming Language :: Python :: 3.10',
'Programming Language :: Python :: 3.11',
'Programming Language :: Python :: 3.12',
'Programming Language :: Python :: 3.13',
'Programming Language :: Python :: 3.14',
]
requires-python = ">=3.10.0"
requires-python = ">=3.12.0"
```
@@ -195,7 +195,7 @@ python3 -m venv ~/.virtualenvs/my_plugin
You can make NetBox available within this environment by creating a path file pointing to its location. This will add NetBox to the Python path upon activation. (Be sure to adjust the command below to specify your actual virtual environment path, Python version, and NetBox installation.)
```shell
echo /opt/netbox/netbox > $VENV/lib/python3.10/site-packages/netbox.pth
echo /opt/netbox/netbox > $VENV/lib/python3.12/site-packages/netbox.pth
```
## Development Installation

View File

@@ -64,14 +64,17 @@ item1 = PluginMenuItem(
A `PluginMenuItem` has the following attributes:
| Attribute | Required | Description |
|-----------------|----------|----------------------------------------------------------------------------------------------------------|
| `link` | Yes | Name of the URL path to which this menu item links |
| `link_text` | Yes | The text presented to the user |
| `permissions` | - | A list of permissions required to display this link |
| `auth_required` | - | Display only for authenticated users |
| `staff_only` | - | Display only for users who have `is_staff` set to true (any specified permissions will also be required) |
| `buttons` | - | An iterable of PluginMenuButton instances to include |
| Attribute | Required | Description |
|-----------------|----------|------------------------------------------------------|
| `link` | Yes | Name of the URL path to which this menu item links |
| `link_text` | Yes | The text presented to the user |
| `permissions` | - | A list of permissions required to display this link |
| `auth_required` | - | Display only for authenticated users |
| `staff_only` | - | Display only for superusers |
| `buttons` | - | An iterable of PluginMenuButton instances to include |
!!! note "Changed in NetBox v4.5"
In releases prior to NetBox v4.5, `staff_only` restricted display of a menu item to only users with `is_staff` set to True. In NetBox v4.5, the `is_staff` flag was removed from the user model. Menu items with `staff_only` set to True are now displayed only for superusers.
## Menu Buttons

View File

@@ -1,94 +1,5 @@
# NetBox v4.4
## v4.4.7 (2025-11-25)
### Enhancements
* [#20371](https://github.com/netbox-community/netbox/issues/20371) - Add Molex Micro-Fit 2x3 for power ports & power outlets
* [#20731](https://github.com/netbox-community/netbox/issues/20731) - Enable specifying `data_source` & `data_file` when bulk import config templates
* [#20820](https://github.com/netbox-community/netbox/issues/20820) - Enable filtering of custom fields by object type
* [#20823](https://github.com/netbox-community/netbox/issues/20823) - Disallow creation of API tokens with an expiration date in the past
* [#20841](https://github.com/netbox-community/netbox/issues/20841) - Support advanced filtering for available rack types when creating/editing a rack
### Bug Fixes
* [#20134](https://github.com/netbox-community/netbox/issues/20134) - Prevent out-of-band HTMX content swaps in embedded tables
* [#20432](https://github.com/netbox-community/netbox/issues/20432) - Fix tracing of cables across multiple circuits in parallel
* [#20465](https://github.com/netbox-community/netbox/issues/20465) - Ensure that scripts are updated immediately when a new file is uploaded
* [#20638](https://github.com/netbox-community/netbox/issues/20638) - Correct OpenAPI schema for bulk create operations
* [#20649](https://github.com/netbox-community/netbox/issues/20649) - Enforce view permissions on REST API endpoint for custom scripts
* [#20740](https://github.com/netbox-community/netbox/issues/20740) - Ensure permissions constraints are enforced when executing custom scripts via the REST API
* [#20743](https://github.com/netbox-community/netbox/issues/20743) - Pass request context to custom script when triggered by an event rule
* [#20766](https://github.com/netbox-community/netbox/issues/20766) - Fix inadvertent translations on server error page
* [#20775](https://github.com/netbox-community/netbox/issues/20775) - Fix `TypeError` exception when bulk renaming unnamed devices
* [#20822](https://github.com/netbox-community/netbox/issues/20822) - Add missing `auto_sync_enabled` field in bulk edit forms
* [#20827](https://github.com/netbox-community/netbox/issues/20827) - Fix UI styling issue when toggling between light and dark mode
* [#20839](https://github.com/netbox-community/netbox/issues/20839) - Fix filtering by object type in UI for custom links and saved filters
* [#20840](https://github.com/netbox-community/netbox/issues/20840) - Remove extraneous references to airflow for RackType model
* [#20844](https://github.com/netbox-community/netbox/issues/20844) - Fix object type filter for L2VPN terminations
* [#20859](https://github.com/netbox-community/netbox/issues/20859) - Prevent dashboard crash due to exception raised by a widget
* [#20865](https://github.com/netbox-community/netbox/issues/20865) - Enforce proper min/max values for latitude & longitude fields
---
## v4.4.6 (2025-11-11)
### Enhancements
* [#14171](https://github.com/netbox-community/netbox/issues/14171) - Support VLAN assignment for device & VM interfaces being bulk imported
* [#20297](https://github.com/netbox-community/netbox/issues/20297) - Introduce additional coaxial cable types
### Bug Fixes
* [#20378](https://github.com/netbox-community/netbox/issues/20378) - Prevent exception when attempting to delete a data source utilized by a custom script
* [#20645](https://github.com/netbox-community/netbox/issues/20645) - CSVChoiceField should defer to model field's default value when CSV field is empty
* [#20647](https://github.com/netbox-community/netbox/issues/20647) - Improve handling of empty strings during bulk imports
* [#20653](https://github.com/netbox-community/netbox/issues/20653) - Fix filtering of jobs by object type ID
* [#20660](https://github.com/netbox-community/netbox/issues/20660) - Optimize loading of custom script modules from remote storage
* [#20670](https://github.com/netbox-community/netbox/issues/20670) - Improve validation of related objects during bulk import
* [#20688](https://github.com/netbox-community/netbox/issues/20688) - Suppress non-harmful "No active configuration revision found" warning message
* [#20697](https://github.com/netbox-community/netbox/issues/20697) - Prevent duplication of signals which increment/decrement related object counts
* [#20699](https://github.com/netbox-community/netbox/issues/20699) - Ensure proper ordering of changelog entries resulting from cascading deletions
* [#20713](https://github.com/netbox-community/netbox/issues/20713) - Ensure a pre-change snapshot is recorded on virtual chassis members being added/removed
* [#20721](https://github.com/netbox-community/netbox/issues/20721) - Fix breadcrumb navigation links in UI for background tasks
* [#20738](https://github.com/netbox-community/netbox/issues/20738) - Deleting a virtual chassis should nullify the `vc_position` of all former members
* [#20750](https://github.com/netbox-community/netbox/issues/20750) - Fix cloning of permissions when only one action is enabled
* [#20755](https://github.com/netbox-community/netbox/issues/20755) - Prevent duplicate results under certain conditions when filtering providers
* [#20771](https://github.com/netbox-community/netbox/issues/20771) - Comments are required when creating a new journal entry
* [#20774](https://github.com/netbox-community/netbox/issues/20774) - Bulk action button labels should be translated
---
## v4.4.5 (2025-10-28)
### Enhancements
* [#19751](https://github.com/netbox-community/netbox/issues/19751) - Disable occupied module bays in form dropdowns when installing a new module
* [#20301](https://github.com/netbox-community/netbox/issues/20301) - Add a "dismiss all" option to the notifications dropdown
* [#20399](https://github.com/netbox-community/netbox/issues/20399) - Add `assigned` and `primary` boolean filters for MAC addresses (see the query example following this list)
* [#20567](https://github.com/netbox-community/netbox/issues/20567) - Add contacts column to services table
* [#20675](https://github.com/netbox-community/netbox/issues/20675) - Enable [NetBox Copilot](https://netboxlabs.com/products/netbox-copilot/) integration
* [#20692](https://github.com/netbox-community/netbox/issues/20692) - Add contacts column to IP addresses table
* [#20700](https://github.com/netbox-community/netbox/issues/20700) - Add contacts table column for various additional models
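The new `assigned` and `primary` filters from #20399 can be exercised through the REST API roughly as follows. This is a usage sketch only: the NetBox URL and token are placeholders, and error handling is omitted.

```python
# Hypothetical usage of the MAC address filters added by #20399.
# The URL and token below are placeholders for a real NetBox instance.
import requests

NETBOX_URL = "https://netbox.example.com"
HEADERS = {"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"}

# MAC addresses currently assigned to a device or VM interface
assigned = requests.get(
    f"{NETBOX_URL}/api/dcim/mac-addresses/",
    params={"assigned": "true"},
    headers=HEADERS,
)
print("assigned:", assigned.json()["count"])

# MAC addresses designated as the primary MAC of an interface
primary = requests.get(
    f"{NETBOX_URL}/api/dcim/mac-addresses/",
    params={"primary": "true"},
    headers=HEADERS,
)
print("primary:", primary.json()["count"])
```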
### Bug Fixes
* [#19872](https://github.com/netbox-community/netbox/issues/19872) - Ensure custom script validation failures display error messages
* [#20389](https://github.com/netbox-community/netbox/issues/20389) - Fix "select all" behavior for bulk rename views
* [#20422](https://github.com/netbox-community/netbox/issues/20422) - Enable filtering of aggregates and prefixes by family in GraphQL API
* [#20459](https://github.com/netbox-community/netbox/issues/20459) - Fix validation of `is_oob` & `is_primary` fields under IP address bulk import
* [#20466](https://github.com/netbox-community/netbox/issues/20466) - Fix querying of devices with a primary IP assigned in GraphQL API
* [#20498](https://github.com/netbox-community/netbox/issues/20498) - Enforce the validation regex (if set) for custom URL fields
* [#20524](https://github.com/netbox-community/netbox/issues/20524) - Raise a validation error when attempting to schedule a custom script for a past date/time (see the sketch following this list)
* [#20541](https://github.com/netbox-community/netbox/issues/20541) - Fix resolution of GraphQL object fields which rely on custom filters
* [#20551](https://github.com/netbox-community/netbox/issues/20551) - Fix automatic slug generation in quick-add UI form
* [#20606](https://github.com/netbox-community/netbox/issues/20606) - Enable copying of values from table columns rendered as badges
* [#20641](https://github.com/netbox-community/netbox/issues/20641) - Fix `AttributeError` exception raised by the object changes REST API endpoint
* [#20646](https://github.com/netbox-community/netbox/issues/20646) - Prevent cables from connecting to objects marked as connected
* [#20655](https://github.com/netbox-community/netbox/issues/20655) - Fix `FieldError` exception when attempting to sort permissions list by actions
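A minimal sketch of the guard described in #20524: a custom script scheduled for a time already in the past should be rejected before it is queued. This is not NetBox's exact code; the function name is illustrative.

```python
# Illustrative only -- a past-dated schedule should raise a validation error
# rather than being silently queued (per #20524).
from datetime import datetime, timezone


def validate_scheduled_time(scheduled_at: datetime) -> None:
    """Reject a scheduled start time that has already passed."""
    if scheduled_at < datetime.now(timezone.utc):
        raise ValueError("Scheduled time must be in the future.")


validate_scheduled_time(datetime(2030, 1, 1, tzinfo=timezone.utc))   # OK
# validate_scheduled_time(datetime(2020, 1, 1, tzinfo=timezone.utc)) # raises ValueError
```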
---
## v4.4.4 (2025-10-15)
### Bug Fixes

View File

@@ -1,57 +0,0 @@
from django.utils.translation import gettext as _
from account.models import UserToken
from netbox.tables import NetBoxTable, columns
__all__ = (
'UserTokenTable',
)
TOKEN = """<samp><span id="token_{{ record.pk }}">{{ record }}</span></samp>"""
ALLOWED_IPS = """{{ value|join:", " }}"""
COPY_BUTTON = """
{% if settings.ALLOW_TOKEN_RETRIEVAL %}
{% copy_content record.pk prefix="token_" color="success" %}
{% endif %}
"""
class UserTokenTable(NetBoxTable):
"""
Table for users to manage their own API tokens under account views.
"""
key = columns.TemplateColumn(
verbose_name=_('Key'),
template_code=TOKEN,
)
write_enabled = columns.BooleanColumn(
verbose_name=_('Write Enabled')
)
created = columns.DateTimeColumn(
timespec='minutes',
verbose_name=_('Created'),
)
expires = columns.DateTimeColumn(
timespec='minutes',
verbose_name=_('Expires'),
)
last_used = columns.DateTimeColumn(
verbose_name=_('Last Used'),
)
allowed_ips = columns.TemplateColumn(
verbose_name=_('Allowed IPs'),
template_code=ALLOWED_IPS
)
actions = columns.ActionsColumn(
actions=('edit', 'delete'),
extra_buttons=COPY_BUTTON
)
class Meta(NetBoxTable.Meta):
model = UserToken
fields = (
'pk', 'id', 'key', 'description', 'write_enabled', 'created', 'expires', 'last_used', 'allowed_ips',
)

View File

@@ -26,8 +26,9 @@ from extras.tables import BookmarkTable, NotificationTable, SubscriptionTable
from netbox.authentication import get_auth_backend_display, get_saml_idps
from netbox.config import get_config
from netbox.views import generic
from users import forms, tables
from users import forms
from users.models import UserConfig
from users.tables import TokenTable
from utilities.request import safe_for_redirect
from utilities.string import remove_linebreaks
from utilities.views import register_model_view
@@ -328,7 +329,8 @@ class UserTokenListView(LoginRequiredMixin, View):
def get(self, request):
tokens = UserToken.objects.filter(user=request.user)
table = tables.UserTokenTable(tokens)
table = TokenTable(tokens)
table.columns.hide('user')
table.configure(request)
return render(request, 'account/token_list.html', {
@@ -343,11 +345,9 @@ class UserTokenView(LoginRequiredMixin, View):
def get(self, request, pk):
token = get_object_or_404(UserToken.objects.filter(user=request.user), pk=pk)
key = token.key if settings.ALLOW_TOKEN_RETRIEVAL else None
return render(request, 'account/token.html', {
'object': token,
'key': key,
})

View File

@@ -89,6 +89,8 @@ class ProviderFilterSet(NetBoxModelFilterSet, ContactModelFilterSet):
return queryset.filter(
Q(name__icontains=value) |
Q(description__icontains=value) |
Q(accounts__account__icontains=value) |
Q(accounts__name__icontains=value) |
Q(comments__icontains=value)
)

View File

@@ -2,12 +2,13 @@ from typing import List
import strawberry
import strawberry_django
from strawberry_django.pagination import OffsetPaginated
from .types import *
@strawberry.type(name="Query")
class CircuitsQuery:
class CircuitsQueryV1:
circuit: CircuitType = strawberry_django.field()
circuit_list: List[CircuitType] = strawberry_django.field()
@@ -40,3 +41,41 @@ class CircuitsQuery:
virtual_circuit_type: VirtualCircuitTypeType = strawberry_django.field()
virtual_circuit_type_list: List[VirtualCircuitTypeType] = strawberry_django.field()
@strawberry.type(name="Query")
class CircuitsQuery:
circuit: CircuitType = strawberry_django.field()
circuit_list: OffsetPaginated[CircuitType] = strawberry_django.offset_paginated()
circuit_termination: CircuitTerminationType = strawberry_django.field()
circuit_termination_list: OffsetPaginated[CircuitTerminationType] = strawberry_django.offset_paginated()
circuit_type: CircuitTypeType = strawberry_django.field()
circuit_type_list: OffsetPaginated[CircuitTypeType] = strawberry_django.offset_paginated()
circuit_group: CircuitGroupType = strawberry_django.field()
circuit_group_list: OffsetPaginated[CircuitGroupType] = strawberry_django.offset_paginated()
circuit_group_assignment: CircuitGroupAssignmentType = strawberry_django.field()
circuit_group_assignment_list: OffsetPaginated[CircuitGroupAssignmentType] = strawberry_django.offset_paginated()
provider: ProviderType = strawberry_django.field()
provider_list: OffsetPaginated[ProviderType] = strawberry_django.offset_paginated()
provider_account: ProviderAccountType = strawberry_django.field()
provider_account_list: OffsetPaginated[ProviderAccountType] = strawberry_django.offset_paginated()
provider_network: ProviderNetworkType = strawberry_django.field()
provider_network_list: OffsetPaginated[ProviderNetworkType] = strawberry_django.offset_paginated()
virtual_circuit: VirtualCircuitType = strawberry_django.field()
virtual_circuit_list: OffsetPaginated[VirtualCircuitType] = strawberry_django.offset_paginated()
virtual_circuit_termination: VirtualCircuitTerminationType = strawberry_django.field()
virtual_circuit_termination_list: OffsetPaginated[VirtualCircuitTerminationType] = (
strawberry_django.offset_paginated()
)
virtual_circuit_type: VirtualCircuitTypeType = strawberry_django.field()
virtual_circuit_type_list: OffsetPaginated[VirtualCircuitTypeType] = strawberry_django.offset_paginated()
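For context, a query against the paginated fields defined above might look roughly like the following. The endpoint path, token, and the exact shape of the paginated payload (the `pagination` argument and the `results`/`totalCount` fields) follow strawberry-django's `OffsetPaginated` conventions and should be verified against the schema of the target NetBox version.

```python
# Hedged sketch: fetching the first page of circuits from the paginated GraphQL API.
# URL and token are placeholders; field names assume strawberry-django's OffsetPaginated.
import requests

query = """
{
  circuit_list(pagination: {offset: 0, limit: 25}) {
    totalCount
    results {
      id
      cid
    }
  }
}
"""

resp = requests.post(
    "https://netbox.example.com/graphql/",
    json={"query": query},
    headers={"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"},
)
print(resp.json())
```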

View File

@@ -83,7 +83,6 @@ class ProviderBulkEditView(generic.BulkEditView):
@register_model_view(Provider, 'bulk_rename', path='rename', detail=False)
class ProviderBulkRenameView(generic.BulkRenameView):
queryset = Provider.objects.all()
filterset = filtersets.ProviderFilterSet
@register_model_view(Provider, 'bulk_delete', path='delete', detail=False)
@@ -151,7 +150,6 @@ class ProviderAccountBulkEditView(generic.BulkEditView):
@register_model_view(ProviderAccount, 'bulk_rename', path='rename', detail=False)
class ProviderAccountBulkRenameView(generic.BulkRenameView):
queryset = ProviderAccount.objects.all()
filterset = filtersets.ProviderAccountFilterSet
@register_model_view(ProviderAccount, 'bulk_delete', path='delete', detail=False)
@@ -228,7 +226,6 @@ class ProviderNetworkBulkEditView(generic.BulkEditView):
@register_model_view(ProviderNetwork, 'bulk_rename', path='rename', detail=False)
class ProviderNetworkBulkRenameView(generic.BulkRenameView):
queryset = ProviderNetwork.objects.all()
filterset = filtersets.ProviderNetworkFilterSet
@register_model_view(ProviderNetwork, 'bulk_delete', path='delete', detail=False)
@@ -293,7 +290,6 @@ class CircuitTypeBulkEditView(generic.BulkEditView):
@register_model_view(CircuitType, 'bulk_rename', path='rename', detail=False)
class CircuitTypeBulkRenameView(generic.BulkRenameView):
queryset = CircuitType.objects.all()
filterset = filtersets.CircuitTypeFilterSet
@register_model_view(CircuitType, 'bulk_delete', path='delete', detail=False)
@@ -366,7 +362,6 @@ class CircuitBulkEditView(generic.BulkEditView):
class CircuitBulkRenameView(generic.BulkRenameView):
queryset = Circuit.objects.all()
field_name = 'cid'
filterset = filtersets.CircuitFilterSet
@register_model_view(Circuit, 'bulk_delete', path='delete', detail=False)
@@ -562,7 +557,6 @@ class CircuitGroupBulkEditView(generic.BulkEditView):
@register_model_view(CircuitGroup, 'bulk_rename', path='rename', detail=False)
class CircuitGroupBulkRenameView(generic.BulkRenameView):
queryset = CircuitGroup.objects.all()
filterset = filtersets.CircuitGroupFilterSet
@register_model_view(CircuitGroup, 'bulk_delete', path='delete', detail=False)
@@ -678,7 +672,6 @@ class VirtualCircuitTypeBulkEditView(generic.BulkEditView):
@register_model_view(VirtualCircuitType, 'bulk_rename', path='rename', detail=False)
class VirtualCircuitTypeBulkRenameView(generic.BulkRenameView):
queryset = VirtualCircuitType.objects.all()
filterset = filtersets.VirtualCircuitTypeFilterSet
@register_model_view(VirtualCircuitType, 'bulk_delete', path='delete', detail=False)
@@ -751,7 +744,6 @@ class VirtualCircuitBulkEditView(generic.BulkEditView):
class VirtualCircuitBulkRenameView(generic.BulkRenameView):
queryset = VirtualCircuit.objects.all()
field_name = 'cid'
filterset = filtersets.VirtualCircuitFilterSet
@register_model_view(VirtualCircuit, 'bulk_delete', path='delete', detail=False)

View File

@@ -12,7 +12,6 @@ from drf_spectacular.utils import Direction
from netbox.api.fields import ChoiceField
from netbox.api.serializers import WritableNestedSerializer
from netbox.api.viewsets import NetBoxModelViewSet
# see netbox.api.routers.NetBoxRouter
BULK_ACTIONS = ("bulk_destroy", "bulk_partial_update", "bulk_update")
@@ -50,11 +49,6 @@ class ChoiceFieldFix(OpenApiSerializerFieldExtension):
)
def viewset_handles_bulk_create(view):
"""Check if view automatically provides list-based bulk create"""
return isinstance(view, NetBoxModelViewSet)
class NetBoxAutoSchema(AutoSchema):
"""
Overrides to drf_spectacular.openapi.AutoSchema to fix following issues:
@@ -134,36 +128,6 @@ class NetBoxAutoSchema(AutoSchema):
return response_serializers
def _get_request_for_media_type(self, serializer, direction='request'):
"""
Override to generate oneOf schema for serializers that support both
single object and array input (NetBoxModelViewSet POST operations).
Refs: #20638
"""
# Get the standard schema first
schema, required = super()._get_request_for_media_type(serializer, direction)
# If this serializer supports arrays (marked in get_request_serializer),
# wrap the schema in oneOf to allow single object OR array
if (
direction == 'request' and
schema is not None and
getattr(self.view, 'action', None) == 'create' and
viewset_handles_bulk_create(self.view)
):
return {
'oneOf': [
schema, # Single object
{
'type': 'array',
'items': schema, # Array of objects
}
]
}, required
return schema, required
def _get_serializer_name(self, serializer, direction, bypass_extensions=False) -> str:
name = super()._get_serializer_name(serializer, direction, bypass_extensions)

View File

@@ -1,13 +1,8 @@
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from core.choices import *
from core.models import Job
from netbox.api.exceptions import SerializerNotFound
from netbox.api.fields import ChoiceField, ContentTypeField
from netbox.api.serializers import BaseModelSerializer
from users.api.serializers_.users import UserSerializer
from utilities.api import get_serializer_for_model
__all__ = (
'JobSerializer',
@@ -23,28 +18,11 @@ class JobSerializer(BaseModelSerializer):
object_type = ContentTypeField(
read_only=True
)
object = serializers.SerializerMethodField(
read_only=True
)
class Meta:
model = Job
fields = [
'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'object', 'name', 'status', 'created',
'scheduled', 'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
'id', 'url', 'display_url', 'display', 'object_type', 'object_id', 'name', 'status', 'created', 'scheduled',
'interval', 'started', 'completed', 'user', 'data', 'error', 'job_id', 'log_entries',
]
brief_fields = ('url', 'created', 'completed', 'user', 'status')
@extend_schema_field(serializers.JSONField(allow_null=True))
def get_object(self, obj):
"""
Serialize a nested representation of the object.
"""
if obj.object is None:
return None
try:
serializer = get_serializer_for_model(obj.object)
except SerializerNotFound:
return obj.object_repr
context = {'request': self.context['request']}
return serializer(obj.object, nested=True, context=context).data

View File

@@ -9,7 +9,6 @@ from drf_spectacular.utils import OpenApiParameter, extend_schema
from rest_framework import viewsets
from rest_framework.decorators import action
from rest_framework.exceptions import PermissionDenied
from rest_framework.permissions import IsAdminUser
from rest_framework.response import Response
from rest_framework.routers import APIRootView
from rest_framework.viewsets import ReadOnlyModelViewSet
@@ -24,7 +23,7 @@ from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.pagination import LimitOffsetListPagination
from netbox.api.viewsets import NetBoxModelViewSet, NetBoxReadOnlyModelViewSet
from utilities.api import IsSuperuser
from . import serializers
@@ -100,7 +99,7 @@ class BaseRQViewSet(viewsets.ViewSet):
"""
Base class for RQ view sets. Provides a list() method. Subclasses must implement get_data().
"""
permission_classes = [IsAdminUser]
permission_classes = [IsSuperuser]
serializer_class = None
def get_data(self):

View File

@@ -80,10 +80,6 @@ class JobFilterSet(BaseFilterSet):
method='search',
label=_('Search'),
)
object_type_id = django_filters.ModelMultipleChoiceFilter(
queryset=ObjectType.objects.with_feature('jobs'),
field_name='object_type_id',
)
object_type = ContentTypeFilter()
created = django_filters.DateTimeFilter()
created__before = django_filters.DateTimeFilter(
@@ -128,7 +124,7 @@ class JobFilterSet(BaseFilterSet):
class Meta:
model = Job
fields = ('id', 'object_type', 'object_type_id', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')
fields = ('id', 'object_type', 'object_id', 'name', 'interval', 'status', 'user', 'job_id')
def search(self, queryset, name, value):
if not value.strip():

View File

@@ -70,13 +70,13 @@ class JobFilterForm(SavedFiltersMixin, FilterForm):
model = Job
fieldsets = (
FieldSet('q', 'filter_id'),
FieldSet('object_type_id', 'status', name=_('Attributes')),
FieldSet('object_type', 'status', name=_('Attributes')),
FieldSet(
'created__before', 'created__after', 'scheduled__before', 'scheduled__after', 'started__before',
'started__after', 'completed__before', 'completed__after', 'user', name=_('Creation')
),
)
object_type_id = ContentTypeChoiceField(
object_type = ContentTypeChoiceField(
label=_('Object Type'),
queryset=ObjectType.objects.with_feature('jobs'),
required=False,

View File

@@ -166,8 +166,8 @@ class ConfigRevisionForm(forms.ModelForm, metaclass=ConfigFormMetaclass):
FieldSet('CUSTOM_VALIDATORS', 'PROTECTION_RULES', name=_('Validation')),
FieldSet('DEFAULT_USER_PREFERENCES', name=_('User Preferences')),
FieldSet(
'MAINTENANCE_MODE', 'COPILOT_ENABLED', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION',
'MAPS_URL', name=_('Miscellaneous'),
'MAINTENANCE_MODE', 'GRAPHQL_ENABLED', 'CHANGELOG_RETENTION', 'JOB_RETENTION', 'MAPS_URL',
name=_('Miscellaneous')
),
FieldSet('comment', name=_('Config Revision'))
)

View File

@@ -2,14 +2,24 @@ from typing import List
import strawberry
import strawberry_django
from strawberry_django.pagination import OffsetPaginated
from .types import *
@strawberry.type(name="Query")
class CoreQuery:
class CoreQueryV1:
data_file: DataFileType = strawberry_django.field()
data_file_list: List[DataFileType] = strawberry_django.field()
data_source: DataSourceType = strawberry_django.field()
data_source_list: List[DataSourceType] = strawberry_django.field()
@strawberry.type(name="Query")
class CoreQuery:
data_file: DataFileType = strawberry_django.field()
data_file_list: OffsetPaginated[DataFileType] = strawberry_django.offset_paginated()
data_source: DataSourceType = strawberry_django.field()
data_source_list: OffsetPaginated[DataSourceType] = strawberry_django.offset_paginated()

View File

@@ -1,3 +0,0 @@
# TODO: Remove this module in NetBox v4.5
# Provided for backward compatibility
from .object_types import *

View File

@@ -6,6 +6,7 @@ from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import models
from django.core.files.storage import storages
from django.urls import reverse
from django.utils.translation import gettext as _
from ..choices import ManagedFileRootPathChoices
@@ -63,6 +64,9 @@ class ManagedFile(SyncedDataMixin, models.Model):
def __str__(self):
return self.name
def get_absolute_url(self):
return reverse('core:managedfile', args=[self.pk])
@property
def name(self):
return self.file_path

View File

@@ -1,4 +1,4 @@
from django.utils.translation import gettext_lazy as _
from django.utils.translation import gettext as _
from netbox.object_actions import ObjectAction

View File

@@ -3,7 +3,6 @@ from threading import local
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models import CASCADE
from django.db.models.fields.reverse_related import ManyToManyRel, ManyToOneRel
from django.db.models.signals import m2m_changed, post_migrate, post_save, pre_delete
from django.dispatch import receiver, Signal
@@ -221,8 +220,14 @@ def handle_deleted_object(sender, instance, **kwargs):
obj.snapshot() # Ensure the change record includes the "before" state
if type(relation) is ManyToManyRel:
getattr(obj, related_field_name).remove(instance)
elif type(relation) is ManyToOneRel and relation.null and relation.on_delete is not CASCADE:
elif type(relation) is ManyToOneRel and relation.field.null is True:
setattr(obj, related_field_name, None)
# make sure the object hasn't been deleted - in case of
# deletion chaining of related objects
try:
obj.refresh_from_db()
except ObjectDoesNotExist:
continue
obj.save()
# Enqueue the object for event processing

View File

@@ -8,6 +8,7 @@ from rq.job import Job as RQ_Job, JobStatus
from rq.registry import FailedJobRegistry, StartedJobRegistry
from rest_framework import status
from users.constants import TOKEN_PREFIX
from users.models import Token, User
from utilities.testing import APITestCase, APIViewTestCases, TestCase
from utilities.testing.utils import disable_logging
@@ -107,14 +108,14 @@ class ObjectTypeTest(APITestCase):
def test_list_objects(self):
object_type_count = ObjectType.objects.count()
response = self.client.get(reverse('extras-api:objecttype-list'), **self.header)
response = self.client.get(reverse('core-api:objecttype-list'), **self.header)
self.assertHttpStatus(response, status.HTTP_200_OK)
self.assertEqual(response.data['count'], object_type_count)
def test_get_object(self):
object_type = ObjectType.objects.first()
url = reverse('extras-api:objecttype-detail', kwargs={'pk': object_type.pk})
url = reverse('core-api:objecttype-detail', kwargs={'pk': object_type.pk})
self.assertHttpStatus(self.client.get(url, **self.header), status.HTTP_200_OK)
@@ -134,12 +135,9 @@ class BackgroundTaskTestCase(TestCase):
Create a user and token for API calls.
"""
# Create the test user and assign permissions
self.user = User.objects.create_user(username='testuser')
self.user.is_staff = True
self.user.is_active = True
self.user.save()
self.user = User.objects.create_user(username='testuser', is_active=True)
self.token = Token.objects.create(user=self.user)
self.header = {'HTTP_AUTHORIZATION': f'Token {self.token.key}'}
self.header = {'HTTP_AUTHORIZATION': f'Bearer {TOKEN_PREFIX}{self.token.key}.{self.token.token}'}
# Clear all queues prior to running each test
get_queue('default').connection.flushall()
@@ -150,13 +148,11 @@ class BackgroundTaskTestCase(TestCase):
url = reverse('core-api:rqqueue-list')
# Attempt to load view without permission
self.user.is_staff = False
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
@@ -165,7 +161,16 @@ class BackgroundTaskTestCase(TestCase):
self.assertIn('low', str(response.content))
def test_background_queue(self):
response = self.client.get(reverse('core-api:rqqueue-detail', args=['default']), **self.header)
url = reverse('core-api:rqqueue-detail', args=['default'])
# Attempt to load view without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertIn('default', str(response.content))
self.assertIn('oldest_job_timestamp', str(response.content))
@@ -174,8 +179,16 @@ class BackgroundTaskTestCase(TestCase):
def test_background_task_list(self):
queue = get_queue('default')
queue.enqueue(self.dummy_job_default)
url = reverse('core-api:rqtask-list')
response = self.client.get(reverse('core-api:rqtask-list'), **self.header)
# Attempt to load view without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertIn('origin', str(response.content))
self.assertIn('core.tests.test_api.BackgroundTaskTestCase.dummy_job_default()', str(response.content))
@@ -183,8 +196,16 @@ class BackgroundTaskTestCase(TestCase):
def test_background_task(self):
queue = get_queue('default')
job = queue.enqueue(self.dummy_job_default)
url = reverse('core-api:rqtask-detail', args=[job.id])
response = self.client.get(reverse('core-api:rqtask-detail', args=[job.id]), **self.header)
# Attempt to load view without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertIn(str(job.id), str(response.content))
self.assertIn('origin', str(response.content))
@@ -194,45 +215,65 @@ class BackgroundTaskTestCase(TestCase):
def test_background_task_delete(self):
queue = get_queue('default')
job = queue.enqueue(self.dummy_job_default)
url = reverse('core-api:rqtask-delete', args=[job.id])
response = self.client.post(reverse('core-api:rqtask-delete', args=[job.id]), **self.header)
# Attempt to load view without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_superuser = True
self.user.save()
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertFalse(RQ_Job.exists(job.id, connection=queue.connection))
queue = get_queue('default')
self.assertNotIn(job.id, queue.job_ids)
def test_background_task_requeue(self):
queue = get_queue('default')
# Enqueue & run a job that will fail
queue = get_queue('default')
job = queue.enqueue(self.dummy_job_failing)
worker = get_worker('default')
with disable_logging():
worker.work(burst=True)
self.assertTrue(job.is_failed)
url = reverse('core-api:rqtask-requeue', args=[job.id])
# Attempt to requeue the job without permission
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 403)
# Re-enqueue the failed job and check that its status has been reset
response = self.client.post(reverse('core-api:rqtask-requeue', args=[job.id]), **self.header)
self.user.is_superuser = True
self.user.save()
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 200)
job = RQ_Job.fetch(job.id, queue.connection)
self.assertFalse(job.is_failed)
def test_background_task_enqueue(self):
queue = get_queue('default')
# Enqueue some jobs that each depends on its predecessor
queue = get_queue('default')
job = previous_job = None
for _ in range(0, 3):
job = queue.enqueue(self.dummy_job_default, depends_on=previous_job)
previous_job = job
url = reverse('core-api:rqtask-enqueue', args=[job.id])
# Check that the last job to be enqueued has a status of deferred
self.assertIsNotNone(job)
self.assertEqual(job.get_status(), JobStatus.DEFERRED)
self.assertIsNone(job.enqueued_at)
# Attempt to force-enqueue the job without permission
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 403)
# Force-enqueue the deferred job
response = self.client.post(reverse('core-api:rqtask-enqueue', args=[job.id]), **self.header)
self.user.is_superuser = True
self.user.save()
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 200)
# Check that job's status is updated correctly
@@ -242,19 +283,27 @@ class BackgroundTaskTestCase(TestCase):
def test_background_task_stop(self):
queue = get_queue('default')
worker = get_worker('default')
job = queue.enqueue(self.dummy_job_default)
worker.prepare_job_execution(job)
url = reverse('core-api:rqtask-stop', args=[job.id])
self.assertEqual(job.get_status(), JobStatus.STARTED)
response = self.client.post(reverse('core-api:rqtask-stop', args=[job.id]), **self.header)
# Attempt to stop the task without permission
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 403)
# Stop the task
self.user.is_superuser = True
self.user.save()
response = self.client.post(url, **self.header)
self.assertEqual(response.status_code, 200)
with disable_logging():
worker.monitor_work_horse(job, queue) # Sets the job as Failed and removes from Started
started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection)
self.assertEqual(len(started_job_registry), 0)
# Verify that the task was cancelled
canceled_job_registry = FailedJobRegistry(queue.name, connection=queue.connection)
self.assertEqual(len(canceled_job_registry), 1)
self.assertIn(job.id, canceled_job_registry)
@@ -262,19 +311,34 @@ class BackgroundTaskTestCase(TestCase):
def test_worker_list(self):
worker1 = get_worker('default', name=uuid.uuid4().hex)
worker1.register_birth()
worker2 = get_worker('high')
worker2.register_birth()
url = reverse('core-api:rqworker-list')
response = self.client.get(reverse('core-api:rqworker-list'), **self.header)
# Attempt to fetch the worker list without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Fetch the worker list
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertIn(str(worker1.name), str(response.content))
def test_worker(self):
worker1 = get_worker('default', name=uuid.uuid4().hex)
worker1.register_birth()
url = reverse('core-api:rqworker-detail', args=[worker1.name])
response = self.client.get(reverse('core-api:rqworker-detail', args=[worker1.name]), **self.header)
# Attempt to fetch a worker without permission
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 403)
# Fetch the worker
self.user.is_superuser = True
self.user.save()
response = self.client.get(url, **self.header)
self.assertEqual(response.status_code, 200)
self.assertIn(str(worker1.name), str(response.content))
self.assertIn('birth_date', str(response.content))

View File

@@ -5,16 +5,14 @@ from rest_framework import status
from core.choices import ObjectChangeActionChoices
from core.models import ObjectChange, ObjectType
from dcim.choices import InterfaceTypeChoices, ModuleStatusChoices, SiteStatusChoices
from dcim.models import (
Cable, CableTermination, Device, DeviceRole, DeviceType, Manufacturer, Module, ModuleBay, ModuleType, Interface,
Site,
)
from dcim.choices import SiteStatusChoices
from dcim.models import Site, CableTermination, Device, DeviceType, DeviceRole, Interface, Cable
from extras.choices import *
from extras.models import CustomField, CustomFieldChoiceSet, Tag
from utilities.testing import APITestCase
from utilities.testing.utils import create_tags, create_test_device, post_data
from utilities.testing.utils import create_tags, post_data
from utilities.testing.views import ModelViewTestCase
from dcim.models import Manufacturer
class ChangeLogViewTest(ModelViewTestCase):
@@ -624,64 +622,3 @@ class ChangeLogAPITest(APITestCase):
self.assertEqual(objectchange.prechange_data['name'], 'Site 1')
self.assertEqual(objectchange.prechange_data['slug'], 'site-1')
self.assertEqual(objectchange.postchange_data, None)
def test_deletion_ordering(self):
"""
Check that the cascading deletion of dependent objects is recorded in the correct order.
"""
device = create_test_device('device1')
module_bay = ModuleBay.objects.create(device=device, name='Module Bay 1')
module_type = ModuleType.objects.create(manufacturer=Manufacturer.objects.first(), model='Module Type 1')
self.add_permissions('dcim.add_module', 'dcim.add_interface', 'dcim.delete_module')
self.assertEqual(ObjectChange.objects.count(), 0) # Sanity check
# Create a new Module
data = {
'device': device.pk,
'module_bay': module_bay.pk,
'module_type': module_type.pk,
'status': ModuleStatusChoices.STATUS_ACTIVE,
}
url = reverse('dcim-api:module-list')
response = self.client.post(url, data, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
module = device.modules.first()
# Create an Interface on the Module
data = {
'device': device.pk,
'module': module.pk,
'name': 'Interface 1',
'type': InterfaceTypeChoices.TYPE_1GE_FIXED,
}
url = reverse('dcim-api:interface-list')
response = self.client.post(url, data, format='json', **self.header)
self.assertHttpStatus(response, status.HTTP_201_CREATED)
interface = device.interfaces.first()
# Delete the Module
url = reverse('dcim-api:module-detail', kwargs={'pk': module.pk})
response = self.client.delete(url, **self.header)
self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
self.assertEqual(Module.objects.count(), 0)
self.assertEqual(Interface.objects.count(), 0)
# Verify the creation of the expected ObjectChange records. We should see four total records, in this order:
# 1. Module created
# 2. Interface created
# 3. Interface deleted
# 4. Module deleted
changes = ObjectChange.objects.order_by('time')
self.assertEqual(len(changes), 4)
self.assertEqual(changes[0].changed_object_type, ContentType.objects.get_for_model(Module))
self.assertEqual(changes[0].changed_object_id, module.pk)
self.assertEqual(changes[0].action, ObjectChangeActionChoices.ACTION_CREATE)
self.assertEqual(changes[1].changed_object_type, ContentType.objects.get_for_model(Interface))
self.assertEqual(changes[1].changed_object_id, interface.pk)
self.assertEqual(changes[1].action, ObjectChangeActionChoices.ACTION_CREATE)
self.assertEqual(changes[2].changed_object_type, ContentType.objects.get_for_model(Interface))
self.assertEqual(changes[2].changed_object_id, interface.pk)
self.assertEqual(changes[2].action, ObjectChangeActionChoices.ACTION_DELETE)
self.assertEqual(changes[3].changed_object_type, ContentType.objects.get_for_model(Module))
self.assertEqual(changes[3].changed_object_id, module.pk)
self.assertEqual(changes[3].action, ObjectChangeActionChoices.ACTION_DELETE)

View File

@@ -1,108 +0,0 @@
"""
Unit tests for OpenAPI schema generation.
Refs: #20638
"""
import json
from django.test import TestCase
class OpenAPISchemaTestCase(TestCase):
"""Tests for OpenAPI schema generation."""
def setUp(self):
"""Fetch schema via API endpoint."""
response = self.client.get('/api/schema/', {'format': 'json'})
self.assertEqual(response.status_code, 200)
self.schema = json.loads(response.content)
def test_post_operation_documents_single_or_array(self):
"""
POST operations on NetBoxModelViewSet endpoints should document
support for both single objects and arrays via oneOf.
Refs: #20638
"""
# Test representative endpoints across different apps
test_paths = [
'/api/core/data-sources/',
'/api/dcim/sites/',
'/api/users/users/',
'/api/ipam/ip-addresses/',
]
for path in test_paths:
with self.subTest(path=path):
operation = self.schema['paths'][path]['post']
# Get the request body schema
request_schema = operation['requestBody']['content']['application/json']['schema']
# Should have oneOf with two options
self.assertIn('oneOf', request_schema, f"POST {path} should have oneOf schema")
self.assertEqual(
len(request_schema['oneOf']), 2,
f"POST {path} oneOf should have exactly 2 options"
)
# First option: single object (has $ref or properties)
single_schema = request_schema['oneOf'][0]
self.assertTrue(
'$ref' in single_schema or 'properties' in single_schema,
f"POST {path} first oneOf option should be single object"
)
# Second option: array of objects
array_schema = request_schema['oneOf'][1]
self.assertEqual(
array_schema['type'], 'array',
f"POST {path} second oneOf option should be array"
)
self.assertIn('items', array_schema, f"POST {path} array should have items")
def test_bulk_update_operations_require_array_only(self):
"""
Bulk update/patch operations should require arrays only, not oneOf.
They don't support single object input.
Refs: #20638
"""
test_paths = [
'/api/dcim/sites/',
'/api/users/users/',
]
for path in test_paths:
for method in ['put', 'patch']:
with self.subTest(path=path, method=method):
operation = self.schema['paths'][path][method]
request_schema = operation['requestBody']['content']['application/json']['schema']
# Should be array-only, not oneOf
self.assertNotIn(
'oneOf', request_schema,
f"{method.upper()} {path} should NOT have oneOf (array-only)"
)
self.assertEqual(
request_schema['type'], 'array',
f"{method.upper()} {path} should require array"
)
self.assertIn(
'items', request_schema,
f"{method.upper()} {path} array should have items"
)
def test_bulk_delete_requires_array(self):
"""
Bulk delete operations should require arrays.
Refs: #20638
"""
path = '/api/dcim/sites/'
operation = self.schema['paths'][path]['delete']
request_schema = operation['requestBody']['content']['application/json']['schema']
# Should be array-only
self.assertNotIn('oneOf', request_schema, "DELETE should NOT have oneOf")
self.assertEqual(request_schema['type'], 'array', "DELETE should require array")
self.assertIn('items', request_schema, "DELETE array should have items")

View File

@@ -158,7 +158,7 @@ class BackgroundTaskTestCase(TestCase):
def setUp(self):
super().setUp()
self.user.is_staff = True
self.user.is_superuser = True
self.user.is_active = True
self.user.save()
@@ -171,13 +171,13 @@ class BackgroundTaskTestCase(TestCase):
url = reverse('core:background_queue_list')
# Attempt to load view without permission
self.user.is_staff = False
self.user.is_superuser = False
self.user.save()
response = self.client.get(url)
self.assertEqual(response.status_code, 403)
# Load view with permission
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
@@ -356,7 +356,7 @@ class SystemTestCase(TestCase):
def setUp(self):
super().setUp()
self.user.is_staff = True
self.user.is_superuser = True
self.user.save()
def test_system_view_default(self):

View File

@@ -125,7 +125,6 @@ class DataSourceBulkEditView(generic.BulkEditView):
@register_model_view(DataSource, 'bulk_rename', path='rename', detail=False)
class DataSourceBulkRenameView(generic.BulkRenameView):
queryset = DataSource.objects.all()
filterset = filtersets.DataSourceFilterSet
@register_model_view(DataSource, 'bulk_delete', path='delete', detail=False)
@@ -373,7 +372,7 @@ class ConfigRevisionRestoreView(ContentTypePermissionRequiredMixin, View):
class BaseRQView(UserPassesTestMixin, View):
def test_func(self):
return self.request.user.is_staff
return self.request.user.is_superuser
class BackgroundQueueListView(TableMixin, BaseRQView):
@@ -556,7 +555,7 @@ class WorkerView(BaseRQView):
class SystemView(UserPassesTestMixin, View):
def test_func(self):
return self.request.user.is_staff
return self.request.user.is_superuser
def get(self, request):
@@ -639,7 +638,7 @@ class BasePluginView(UserPassesTestMixin, View):
CACHE_KEY_CATALOG_ERROR = 'plugins-catalog-error'
def test_func(self):
return self.request.user.is_staff
return self.request.user.is_superuser
def get_cached_plugins(self, request):
catalog_plugins = {}

View File

@@ -1,10 +1,8 @@
from django.contrib.contenttypes.models import ContentType
from drf_spectacular.types import OpenApiTypes
from drf_spectacular.utils import extend_schema_field
from rest_framework import serializers
from dcim.choices import *
from dcim.constants import *
from dcim.models import Cable, CablePath, CableTermination
from netbox.api.fields import ChoiceField, ContentTypeField
from netbox.api.serializers import BaseModelSerializer, GenericObjectSerializer, NetBoxModelSerializer
@@ -51,9 +49,11 @@ class TracedCableSerializer(BaseModelSerializer):
class CableTerminationSerializer(NetBoxModelSerializer):
termination_type = ContentTypeField(
queryset=ContentType.objects.filter(CABLE_TERMINATION_MODELS)
read_only=True,
)
termination = serializers.SerializerMethodField(
read_only=True,
)
termination = serializers.SerializerMethodField(read_only=True)
class Meta:
model = CableTermination
@@ -61,6 +61,8 @@ class CableTerminationSerializer(NetBoxModelSerializer):
'id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id',
'termination', 'created', 'last_updated',
]
read_only_fields = fields
brief_fields = ('id', 'url', 'display', 'cable', 'cable_end', 'termination_type', 'termination_id')
@extend_schema_field(serializers.JSONField(allow_null=True))
def get_termination(self, obj):

View File

@@ -16,7 +16,7 @@ from extras.api.mixins import ConfigContextQuerySetMixin, RenderConfigMixin
from netbox.api.authentication import IsAuthenticatedOrLoginNotRequired
from netbox.api.metadata import ContentTypeMetadata
from netbox.api.pagination import StripCountAnnotationsPaginator
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin
from netbox.api.viewsets import NetBoxModelViewSet, MPTTLockedMixin, NetBoxReadOnlyModelViewSet
from netbox.api.viewsets.mixins import SequentialBulkCreatesMixin
from utilities.api import get_serializer_for_model
from utilities.query_functions import CollateAsChar
@@ -563,7 +563,7 @@ class CableViewSet(NetBoxModelViewSet):
filterset_class = filtersets.CableFilterSet
class CableTerminationViewSet(NetBoxModelViewSet):
class CableTerminationViewSet(NetBoxReadOnlyModelViewSet):
metadata_class = ContentTypeMetadata
queryset = CableTermination.objects.all()
serializer_class = serializers.CableTerminationSerializer

View File

@@ -461,7 +461,6 @@ class PowerPortTypeChoices(ChoiceSet):
# Molex
TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
# Direct current (DC)
TYPE_DC = 'dc-terminal'
@@ -589,7 +588,6 @@ class PowerPortTypeChoices(ChoiceSet):
('Molex', (
(TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
(TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
(TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
(TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
)),
('DC', (
@@ -712,7 +710,6 @@ class PowerOutletTypeChoices(ChoiceSet):
# Molex
TYPE_MOLEX_MICRO_FIT_1X2 = 'molex-micro-fit-1x2'
TYPE_MOLEX_MICRO_FIT_2X2 = 'molex-micro-fit-2x2'
TYPE_MOLEX_MICRO_FIT_2X3 = 'molex-micro-fit-2x3'
TYPE_MOLEX_MICRO_FIT_2X4 = 'molex-micro-fit-2x4'
# Direct current (DC)
TYPE_DC = 'dc-terminal'
@@ -834,7 +831,6 @@ class PowerOutletTypeChoices(ChoiceSet):
('Molex', (
(TYPE_MOLEX_MICRO_FIT_1X2, 'Molex Micro-Fit 1x2'),
(TYPE_MOLEX_MICRO_FIT_2X2, 'Molex Micro-Fit 2x2'),
(TYPE_MOLEX_MICRO_FIT_2X3, 'Molex Micro-Fit 2x3'),
(TYPE_MOLEX_MICRO_FIT_2X4, 'Molex Micro-Fit 2x4'),
)),
('DC', (
@@ -1740,15 +1736,6 @@ class CableTypeChoices(ChoiceSet):
# Copper - Coaxial
TYPE_COAXIAL = 'coaxial'
TYPE_RG_6 = 'rg-6'
TYPE_RG_8 = 'rg-8'
TYPE_RG_11 = 'rg-11'
TYPE_RG_59 = 'rg-59'
TYPE_RG_62 = 'rg-62'
TYPE_RG_213 = 'rg-213'
TYPE_LMR_100 = 'lmr-100'
TYPE_LMR_200 = 'lmr-200'
TYPE_LMR_400 = 'lmr-400'
# Fiber Optic - Multimode
TYPE_MMF = 'mmf'
@@ -1798,15 +1785,6 @@ class CableTypeChoices(ChoiceSet):
_('Copper - Coaxial'),
(
(TYPE_COAXIAL, 'Coaxial'),
(TYPE_RG_6, 'RG-6'),
(TYPE_RG_8, 'RG-8'),
(TYPE_RG_11, 'RG-11'),
(TYPE_RG_59, 'RG-59'),
(TYPE_RG_62, 'RG-62'),
(TYPE_RG_213, 'RG-213'),
(TYPE_LMR_100, 'LMR-100'),
(TYPE_LMR_200, 'LMR-200'),
(TYPE_LMR_400, 'LMR-400'),
),
),
(

View File

@@ -14,16 +14,16 @@ from netbox.filtersets import (
AttributeFiltersMixin, BaseFilterSet, ChangeLoggedModelFilterSet, NestedGroupModelFilterSet, NetBoxModelFilterSet,
OrganizationalModelFilterSet,
)
from tenancy.filtersets import ContactModelFilterSet, TenancyFilterSet
from tenancy.filtersets import TenancyFilterSet, ContactModelFilterSet
from tenancy.models import *
from users.models import User
from utilities.filters import (
ContentTypeFilter, MultiValueCharFilter, MultiValueMACAddressFilter, MultiValueNumberFilter, MultiValueWWNFilter,
NumericArrayFilter, TreeNodeMultipleChoiceFilter,
)
from virtualization.models import Cluster, ClusterGroup, VirtualMachine, VMInterface
from virtualization.models import Cluster, ClusterGroup, VMInterface, VirtualMachine
from vpn.models import L2VPN
from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
from wireless.choices import WirelessRoleChoices, WirelessChannelChoices
from wireless.models import WirelessLAN, WirelessLink
from .choices import *
from .constants import *
@@ -1288,6 +1288,7 @@ class DeviceFilterSet(
Q(name__icontains=value) |
Q(virtual_chassis__name__icontains=value) |
Q(serial__icontains=value.strip()) |
Q(inventoryitems__serial__icontains=value.strip()) |
Q(asset_tag__icontains=value.strip()) |
Q(description__icontains=value.strip()) |
Q(comments__icontains=value) |
@@ -1806,14 +1807,6 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
queryset=VMInterface.objects.all(),
label=_('VM interface (ID)'),
)
assigned = django_filters.BooleanFilter(
method='filter_assigned',
label=_('Is assigned'),
)
primary = django_filters.BooleanFilter(
method='filter_primary',
label=_('Is primary'),
)
class Meta:
model = MACAddress
@@ -1850,29 +1843,6 @@ class MACAddressFilterSet(NetBoxModelFilterSet):
vminterface__in=interface_ids
)
def filter_assigned(self, queryset, name, value):
params = {
'assigned_object_type__isnull': True,
'assigned_object_id__isnull': True,
}
if value:
return queryset.exclude(**params)
else:
return queryset.filter(**params)
def filter_primary(self, queryset, name, value):
interface_mac_ids = Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
'primary_mac_address_id', flat=True
)
vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
'primary_mac_address_id', flat=True
)
query = Q(pk__in=interface_mac_ids) | Q(pk__in=vminterface_mac_ids)
if value:
return queryset.filter(query)
else:
return queryset.exclude(query)
class CommonInterfaceFilterSet(django_filters.FilterSet):
mode = django_filters.MultipleChoiceFilter(

View File

@@ -9,8 +9,7 @@ from dcim.choices import *
from dcim.constants import *
from dcim.models import *
from extras.models import ConfigTemplate
from ipam.choices import VLANQinQRoleChoices
from ipam.models import VLAN, VRF, IPAddress, VLANGroup
from ipam.models import VRF, IPAddress
from netbox.choices import *
from netbox.forms import NetBoxModelImportForm
from tenancy.models import Tenant
@@ -18,7 +17,7 @@ from utilities.forms.fields import (
CSVChoiceField, CSVContentTypeField, CSVModelChoiceField, CSVModelMultipleChoiceField, CSVTypedChoiceField,
SlugField,
)
from virtualization.models import Cluster, VirtualMachine, VMInterface
from virtualization.models import Cluster, VMInterface, VirtualMachine
from wireless.choices import WirelessRoleChoices
from .common import ModuleCommonForm
@@ -472,30 +471,14 @@ class ModuleTypeImportForm(NetBoxModelImportForm):
required=False,
help_text=_('Unit for module weight')
)
attribute_data = forms.JSONField(
label=_('Attributes'),
required=False,
help_text=_('Attribute values for the assigned profile, passed as a dictionary')
)
class Meta:
model = ModuleType
fields = [
'manufacturer', 'model', 'part_number', 'description', 'airflow', 'weight', 'weight_unit', 'profile',
'attribute_data', 'comments', 'tags',
'comments', 'tags'
]
def clean(self):
super().clean()
# Attribute data may be included only if a profile is specified
if self.cleaned_data.get('attribute_data') and not self.cleaned_data.get('profile'):
raise forms.ValidationError(_("Profile must be specified if attribute data is provided."))
# Default attribute_data to an empty dictionary if a profile is specified (to enforce schema validation)
if self.cleaned_data.get('profile') and not self.cleaned_data.get('attribute_data'):
self.cleaned_data['attribute_data'] = {}
class DeviceRoleImportForm(NetBoxModelImportForm):
parent = CSVModelChoiceField(
@@ -955,7 +938,7 @@ class InterfaceImportForm(NetBoxModelImportForm):
required=False,
to_field_name='name',
help_text=mark_safe(
_('VDC names separated by commas, encased with double quotes. Example:') + ' <code>"vdc1,vdc2,vdc3"</code>'
_('VDC names separated by commas, encased with double quotes. Example:') + ' <code>vdc1,vdc2,vdc3</code>'
)
)
type = CSVChoiceField(
@@ -984,41 +967,7 @@ class InterfaceImportForm(NetBoxModelImportForm):
label=_('Mode'),
choices=InterfaceModeChoices,
required=False,
help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)'),
)
vlan_group = CSVModelChoiceField(
label=_('VLAN group'),
queryset=VLANGroup.objects.all(),
required=False,
to_field_name='name',
help_text=_('Filter VLANs available for assignment by group'),
)
untagged_vlan = CSVModelChoiceField(
label=_('Untagged VLAN'),
queryset=VLAN.objects.all(),
required=False,
to_field_name='vid',
help_text=_('Assigned untagged VLAN ID (filtered by VLAN group)'),
)
tagged_vlans = CSVModelMultipleChoiceField(
label=_('Tagged VLANs'),
queryset=VLAN.objects.all(),
required=False,
to_field_name='vid',
help_text=mark_safe(
_(
'Assigned tagged VLAN IDs separated by commas, encased with double quotes '
'(filtered by VLAN group). Example:'
)
+ ' <code>"100,200,300"</code>'
),
)
qinq_svlan = CSVModelChoiceField(
label=_('Q-in-Q Service VLAN'),
queryset=VLAN.objects.filter(qinq_role=VLANQinQRoleChoices.ROLE_SERVICE),
required=False,
to_field_name='vid',
help_text=_('Assigned Q-in-Q Service VLAN ID (filtered by VLAN group)'),
help_text=_('IEEE 802.1Q operational mode (for L2 interfaces)')
)
vrf = CSVModelChoiceField(
label=_('VRF'),
@@ -1039,8 +988,7 @@ class InterfaceImportForm(NetBoxModelImportForm):
fields = (
'device', 'name', 'label', 'parent', 'bridge', 'lag', 'type', 'speed', 'duplex', 'enabled',
'mark_connected', 'wwn', 'vdcs', 'mtu', 'mgmt_only', 'description', 'poe_mode', 'poe_type', 'mode',
'vlan_group', 'untagged_vlan', 'tagged_vlans', 'qinq_svlan', 'vrf', 'rf_role', 'rf_channel',
'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
'vrf', 'rf_role', 'rf_channel', 'rf_channel_frequency', 'rf_channel_width', 'tx_power', 'tags'
)
def __init__(self, data=None, *args, **kwargs):
@@ -1057,13 +1005,6 @@ class InterfaceImportForm(NetBoxModelImportForm):
self.fields['lag'].queryset = self.fields['lag'].queryset.filter(**params)
self.fields['vdcs'].queryset = self.fields['vdcs'].queryset.filter(**params)
# Limit choices for VLANs to the assigned VLAN group
if vlan_group := data.get('vlan_group'):
params = {f"group__{self.fields['vlan_group'].to_field_name}": vlan_group}
self.fields['untagged_vlan'].queryset = self.fields['untagged_vlan'].queryset.filter(**params)
self.fields['tagged_vlans'].queryset = self.fields['tagged_vlans'].queryset.filter(**params)
self.fields['qinq_svlan'].queryset = self.fields['qinq_svlan'].queryset.filter(**params)
def clean_enabled(self):
# Make sure enabled is True when it's not included in the uploaded data
if 'enabled' not in self.data:

View File

@@ -278,6 +278,11 @@ class RackBaseFilterForm(NetBoxModelFilterSetForm):
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
airflow = forms.MultipleChoiceField(
label=_('Airflow'),
choices=add_blank_choice(RackAirflowChoices),
required=False
)
weight = forms.DecimalField(
label=_('Weight'),
required=False,
@@ -376,11 +381,6 @@ class RackFilterForm(TenancyFilterForm, ContactModelFilterForm, RackBaseFilterFo
},
label=_('Rack type')
)
airflow = forms.MultipleChoiceField(
label=_('Airflow'),
choices=add_blank_choice(RackAirflowChoices),
required=False
)
serial = forms.CharField(
label=_('Serial'),
required=False
@@ -1676,16 +1676,12 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
model = MACAddress
fieldsets = (
FieldSet('q', 'filter_id', 'tag'),
FieldSet('mac_address', name=_('Attributes')),
FieldSet(
'device_id', 'virtual_machine_id', 'assigned', 'primary',
name=_('Assignments'),
),
FieldSet('mac_address', 'device_id', 'virtual_machine_id', name=_('MAC address')),
)
selector_fields = ('filter_id', 'q', 'device_id', 'virtual_machine_id')
mac_address = forms.CharField(
required=False,
label=_('MAC address'),
label=_('MAC address')
)
device_id = DynamicModelMultipleChoiceField(
queryset=Device.objects.all(),
@@ -1697,20 +1693,6 @@ class MACAddressFilterForm(NetBoxModelFilterSetForm):
required=False,
label=_('Assigned VM'),
)
assigned = forms.NullBooleanField(
required=False,
label=_('Assigned to an interface'),
widget=forms.Select(
choices=BOOLEAN_WITH_BLANK_CHOICES
),
)
primary = forms.NullBooleanField(
required=False,
label=_('Primary MAC of an interface'),
widget=forms.Select(
choices=BOOLEAN_WITH_BLANK_CHOICES
),
)
tag = TagFilterField(model)

View File

@@ -269,8 +269,7 @@ class RackForm(TenancyForm, NetBoxModelForm):
label=_('Rack Type'),
queryset=RackType.objects.all(),
required=False,
selector=True,
help_text=_("Select a pre-defined rack type, or set physical characteristics below."),
help_text=_("Select a pre-defined rack type, or set physical characteristics below.")
)
comments = CommentField()
@@ -756,10 +755,7 @@ class ModuleForm(ModuleCommonForm, NetBoxModelForm):
queryset=ModuleBay.objects.all(),
query_params={
'device_id': '$device'
},
context={
'disabled': 'installed_module',
},
}
)
module_type = DynamicModelChoiceField(
label=_('Module type'),

View File

@@ -453,7 +453,6 @@ class VirtualChassisCreateForm(NetBoxModelForm):
if instance.pk and self.cleaned_data['members']:
initial_position = self.cleaned_data.get('initial_position', 1)
for i, member in enumerate(self.cleaned_data['members'], start=initial_position):
member.snapshot()
member.virtual_chassis = instance
member.vc_position = i
member.save()

View File

@@ -18,9 +18,7 @@ from netbox.graphql.filter_mixins import (
ImageAttachmentFilterMixin,
WeightFilterMixin,
)
from tenancy.graphql.filter_mixins import ContactFilterMixin, TenancyFilterMixin
from virtualization.models import VMInterface
from tenancy.graphql.filter_mixins import TenancyFilterMixin, ContactFilterMixin
from .filter_mixins import (
CabledObjectModelFilterMixin,
ComponentModelFilterMixin,
@@ -421,24 +419,6 @@ class MACAddressFilter(PrimaryModelFilterMixin):
)
assigned_object_id: ID | None = strawberry_django.filter_field()
@strawberry_django.filter_field()
def assigned(self, value: bool, prefix) -> Q:
return Q(**{f'{prefix}assigned_object_id__isnull': (not value)})
@strawberry_django.filter_field()
def primary(self, value: bool, prefix) -> Q:
interface_mac_ids = models.Interface.objects.filter(primary_mac_address_id__isnull=False).values_list(
'primary_mac_address_id', flat=True
)
vminterface_mac_ids = VMInterface.objects.filter(primary_mac_address_id__isnull=False).values_list(
'primary_mac_address_id', flat=True
)
query = Q(**{f'{prefix}pk__in': interface_mac_ids}) | Q(**{f'{prefix}pk__in': vminterface_mac_ids})
if value:
return Q(query)
else:
return ~Q(query)
@strawberry_django.filter_type(models.Interface, lookups=True)
class InterfaceFilter(ModularComponentModelFilterMixin, InterfaceBaseFilterMixin, CabledObjectModelFilterMixin):

View File

@@ -2,12 +2,13 @@ from typing import List
import strawberry
import strawberry_django
from strawberry_django.pagination import OffsetPaginated
from .types import *
@strawberry.type(name="Query")
class DCIMQuery:
class DCIMQueryV1:
cable: CableType = strawberry_django.field()
cable_list: List[CableType] = strawberry_django.field()
@@ -136,3 +137,137 @@ class DCIMQuery:
virtual_device_context: VirtualDeviceContextType = strawberry_django.field()
virtual_device_context_list: List[VirtualDeviceContextType] = strawberry_django.field()
@strawberry.type(name="Query")
class DCIMQuery:
cable: CableType = strawberry_django.field()
cable_list: OffsetPaginated[CableType] = strawberry_django.offset_paginated()
console_port: ConsolePortType = strawberry_django.field()
console_port_list: OffsetPaginated[ConsolePortType] = strawberry_django.offset_paginated()
console_port_template: ConsolePortTemplateType = strawberry_django.field()
console_port_template_list: OffsetPaginated[ConsolePortTemplateType] = strawberry_django.offset_paginated()
console_server_port: ConsoleServerPortType = strawberry_django.field()
console_server_port_list: OffsetPaginated[ConsoleServerPortType] = strawberry_django.offset_paginated()
console_server_port_template: ConsoleServerPortTemplateType = strawberry_django.field()
console_server_port_template_list: OffsetPaginated[ConsoleServerPortTemplateType] = (
strawberry_django.offset_paginated()
)
device: DeviceType = strawberry_django.field()
device_list: OffsetPaginated[DeviceType] = strawberry_django.offset_paginated()
device_bay: DeviceBayType = strawberry_django.field()
device_bay_list: OffsetPaginated[DeviceBayType] = strawberry_django.offset_paginated()
device_bay_template: DeviceBayTemplateType = strawberry_django.field()
device_bay_template_list: OffsetPaginated[DeviceBayTemplateType] = strawberry_django.offset_paginated()
device_role: DeviceRoleType = strawberry_django.field()
device_role_list: OffsetPaginated[DeviceRoleType] = strawberry_django.offset_paginated()
device_type: DeviceTypeType = strawberry_django.field()
device_type_list: OffsetPaginated[DeviceTypeType] = strawberry_django.offset_paginated()
front_port: FrontPortType = strawberry_django.field()
front_port_list: OffsetPaginated[FrontPortType] = strawberry_django.offset_paginated()
front_port_template: FrontPortTemplateType = strawberry_django.field()
front_port_template_list: OffsetPaginated[FrontPortTemplateType] = strawberry_django.offset_paginated()
mac_address: MACAddressType = strawberry_django.field()
mac_address_list: OffsetPaginated[MACAddressType] = strawberry_django.offset_paginated()
interface: InterfaceType = strawberry_django.field()
interface_list: OffsetPaginated[InterfaceType] = strawberry_django.offset_paginated()
interface_template: InterfaceTemplateType = strawberry_django.field()
interface_template_list: OffsetPaginated[InterfaceTemplateType] = strawberry_django.offset_paginated()
inventory_item: InventoryItemType = strawberry_django.field()
inventory_item_list: OffsetPaginated[InventoryItemType] = strawberry_django.offset_paginated()
inventory_item_role: InventoryItemRoleType = strawberry_django.field()
inventory_item_role_list: OffsetPaginated[InventoryItemRoleType] = strawberry_django.offset_paginated()
inventory_item_template: InventoryItemTemplateType = strawberry_django.field()
inventory_item_template_list: OffsetPaginated[InventoryItemTemplateType] = strawberry_django.offset_paginated()
location: LocationType = strawberry_django.field()
location_list: OffsetPaginated[LocationType] = strawberry_django.offset_paginated()
manufacturer: ManufacturerType = strawberry_django.field()
manufacturer_list: OffsetPaginated[ManufacturerType] = strawberry_django.offset_paginated()
module: ModuleType = strawberry_django.field()
module_list: OffsetPaginated[ModuleType] = strawberry_django.offset_paginated()
module_bay: ModuleBayType = strawberry_django.field()
module_bay_list: OffsetPaginated[ModuleBayType] = strawberry_django.offset_paginated()
module_bay_template: ModuleBayTemplateType = strawberry_django.field()
module_bay_template_list: OffsetPaginated[ModuleBayTemplateType] = strawberry_django.offset_paginated()
module_type_profile: ModuleTypeProfileType = strawberry_django.field()
module_type_profile_list: OffsetPaginated[ModuleTypeProfileType] = strawberry_django.offset_paginated()
module_type: ModuleTypeType = strawberry_django.field()
module_type_list: OffsetPaginated[ModuleTypeType] = strawberry_django.offset_paginated()
platform: PlatformType = strawberry_django.field()
platform_list: OffsetPaginated[PlatformType] = strawberry_django.offset_paginated()
power_feed: PowerFeedType = strawberry_django.field()
power_feed_list: OffsetPaginated[PowerFeedType] = strawberry_django.offset_paginated()
power_outlet: PowerOutletType = strawberry_django.field()
power_outlet_list: OffsetPaginated[PowerOutletType] = strawberry_django.offset_paginated()
power_outlet_template: PowerOutletTemplateType = strawberry_django.field()
power_outlet_template_list: OffsetPaginated[PowerOutletTemplateType] = strawberry_django.offset_paginated()
power_panel: PowerPanelType = strawberry_django.field()
power_panel_list: OffsetPaginated[PowerPanelType] = strawberry_django.offset_paginated()
power_port: PowerPortType = strawberry_django.field()
power_port_list: OffsetPaginated[PowerPortType] = strawberry_django.offset_paginated()
power_port_template: PowerPortTemplateType = strawberry_django.field()
power_port_template_list: OffsetPaginated[PowerPortTemplateType] = strawberry_django.offset_paginated()
rack_type: RackTypeType = strawberry_django.field()
rack_type_list: OffsetPaginated[RackTypeType] = strawberry_django.offset_paginated()
rack: RackType = strawberry_django.field()
rack_list: OffsetPaginated[RackType] = strawberry_django.offset_paginated()
rack_reservation: RackReservationType = strawberry_django.field()
rack_reservation_list: OffsetPaginated[RackReservationType] = strawberry_django.offset_paginated()
rack_role: RackRoleType = strawberry_django.field()
rack_role_list: OffsetPaginated[RackRoleType] = strawberry_django.offset_paginated()
rear_port: RearPortType = strawberry_django.field()
rear_port_list: OffsetPaginated[RearPortType] = strawberry_django.offset_paginated()
rear_port_template: RearPortTemplateType = strawberry_django.field()
rear_port_template_list: OffsetPaginated[RearPortTemplateType] = strawberry_django.offset_paginated()
region: RegionType = strawberry_django.field()
region_list: OffsetPaginated[RegionType] = strawberry_django.offset_paginated()
site: SiteType = strawberry_django.field()
site_list: OffsetPaginated[SiteType] = strawberry_django.offset_paginated()
site_group: SiteGroupType = strawberry_django.field()
site_group_list: OffsetPaginated[SiteGroupType] = strawberry_django.offset_paginated()
virtual_chassis: VirtualChassisType = strawberry_django.field()
virtual_chassis_list: OffsetPaginated[VirtualChassisType] = strawberry_django.offset_paginated()
virtual_device_context: VirtualDeviceContextType = strawberry_django.field()
virtual_device_context_list: OffsetPaginated[VirtualDeviceContextType] = strawberry_django.offset_paginated()
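For illustration, a query against one of the new paginated DCIM fields might look like the sketch below. It assumes strawberry_django's stock OffsetPaginated wrapper (a pagination argument taking offset/limit, with total_count and results on the returned object) and that the schema keeps snake_case names as the existing fields above do; the endpoint URL and token are placeholders, not part of this change.

import requests

# Placeholder endpoint and credentials -- adjust for a real deployment.
NETBOX_GRAPHQL_URL = "https://netbox.example.com/graphql/"
HEADERS = {"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"}

# Request the first 50 devices via the paginated device_list field.
QUERY = """
query {
  device_list(pagination: {offset: 0, limit: 50}) {
    total_count
    results {
      id
      name
    }
  }
}
"""

response = requests.post(NETBOX_GRAPHQL_URL, json={"query": QUERY}, headers=HEADERS)
payload = response.json()["data"]["device_list"]
print(payload["total_count"], len(payload["results"]))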

View File

@@ -1,69 +0,0 @@
import decimal
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('dcim', '0215_rackreservation_status'),
]
operations = [
migrations.AlterField(
model_name='device',
name='latitude',
field=models.DecimalField(
blank=True,
decimal_places=6,
max_digits=8,
null=True,
validators=[
django.core.validators.MinValueValidator(decimal.Decimal('-90.0')),
django.core.validators.MaxValueValidator(decimal.Decimal('90.0'))
],
),
),
migrations.AlterField(
model_name='device',
name='longitude',
field=models.DecimalField(
blank=True,
decimal_places=6,
max_digits=9,
null=True,
validators=[
django.core.validators.MinValueValidator(decimal.Decimal('-180.0')),
django.core.validators.MaxValueValidator(decimal.Decimal('180.0'))
],
),
),
migrations.AlterField(
model_name='site',
name='latitude',
field=models.DecimalField(
blank=True,
decimal_places=6,
max_digits=8,
null=True,
validators=[
django.core.validators.MinValueValidator(decimal.Decimal('-90.0')),
django.core.validators.MaxValueValidator(decimal.Decimal('90.0'))
],
),
),
migrations.AlterField(
model_name='site',
name='longitude',
field=models.DecimalField(
blank=True,
decimal_places=6,
max_digits=9,
null=True,
validators=[
django.core.validators.MinValueValidator(decimal.Decimal('-180.0')),
django.core.validators.MaxValueValidator(decimal.Decimal('180.0'))
],
),
),
]

View File

@@ -10,7 +10,6 @@ from django.utils.translation import gettext_lazy as _
from core.models import ObjectType
from dcim.choices import *
from dcim.constants import *
from dcim.exceptions import UnsupportedCablePath
from dcim.fields import PathField
from dcim.utils import decompile_path_node, object_to_path_node
from netbox.choices import ColorChoices
@@ -29,6 +28,8 @@ __all__ = (
'CableTermination',
)
from ..exceptions import UnsupportedCablePath
trace_paths = Signal()
@@ -392,17 +393,6 @@ class CableTermination(ChangeLoggedModel):
def clean(self):
super().clean()
# Disallow connecting a cable to any termination object that is
# explicitly flagged as "mark connected".
termination = getattr(self, 'termination', None)
if termination is not None and getattr(termination, "mark_connected", False):
raise ValidationError(
_("Cannot connect a cable to {obj_parent} > {obj} because it is marked as connected.").format(
obj_parent=termination.parent_object,
obj=termination,
)
)
# Check for existing termination
qs = CableTermination.objects.filter(
termination_type=self.termination_type,
@@ -414,14 +404,14 @@ class CableTermination(ChangeLoggedModel):
existing_termination = qs.first()
if existing_termination is not None:
raise ValidationError(
_("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}").format(
_("Duplicate termination found for {app_label}.{model} {termination_id}: cable {cable_pk}".format(
app_label=self.termination_type.app_label,
model=self.termination_type.model,
termination_id=self.termination_id,
cable_pk=existing_termination.cable.pk
)
))
)
# Validate the interface type (if applicable)
# Validate interface type (if applicable)
if self.termination_type.model == 'interface' and self.termination.type in NONCONNECTABLE_IFACE_TYPES:
raise ValidationError(
_("Cables cannot be terminated to {type_display} interfaces").format(
@@ -614,7 +604,7 @@ class CablePath(models.Model):
Cable or WirelessLink connects (interfaces, console ports, circuit termination, etc.). All terminations must be
of the same type and must belong to the same parent object.
"""
from circuits.models import CircuitTermination, Circuit
from circuits.models import CircuitTermination
if not terminations:
return None
@@ -636,11 +626,8 @@ class CablePath(models.Model):
raise UnsupportedCablePath(_("All mid-span terminations must have the same termination type"))
# All mid-span terminations must all be attached to the same device
if (
not isinstance(terminations[0], PathEndpoint) and
not isinstance(terminations[0].parent_object, Circuit) and
not all(t.parent_object == terminations[0].parent_object for t in terminations[1:])
):
if (not isinstance(terminations[0], PathEndpoint) and not
all(t.parent_object == terminations[0].parent_object for t in terminations[1:])):
raise UnsupportedCablePath(_("All mid-span terminations must have the same parent object"))
# Check for a split path (e.g. rear port fanning out to multiple front ports with
@@ -784,39 +771,32 @@ class CablePath(models.Model):
elif isinstance(remote_terminations[0], CircuitTermination):
# Follow a CircuitTermination to its corresponding CircuitTermination (A to Z or vice versa)
qs = Q()
for remote_termination in remote_terminations:
qs |= Q(
circuit=remote_termination.circuit,
term_side='Z' if remote_termination.term_side == 'A' else 'A'
)
# Get all circuit terminations
circuit_terminations = CircuitTermination.objects.filter(qs)
if not circuit_terminations.exists():
if len(remote_terminations) > 1:
is_split = True
break
elif all([ct._provider_network for ct in circuit_terminations]):
circuit_termination = CircuitTermination.objects.filter(
circuit=remote_terminations[0].circuit,
term_side='Z' if remote_terminations[0].term_side == 'A' else 'A'
).first()
if circuit_termination is None:
break
elif circuit_termination._provider_network:
# Circuit terminates to a ProviderNetwork
path.extend([
[object_to_path_node(ct) for ct in circuit_terminations],
[object_to_path_node(ct._provider_network) for ct in circuit_terminations],
[object_to_path_node(circuit_termination)],
[object_to_path_node(circuit_termination._provider_network)],
])
is_complete = True
break
elif all([ct.termination and not ct.cable for ct in circuit_terminations]):
elif circuit_termination.termination and not circuit_termination.cable:
# Circuit terminates to a Region/Site/etc.
path.extend([
[object_to_path_node(ct) for ct in circuit_terminations],
[object_to_path_node(ct.termination) for ct in circuit_terminations],
[object_to_path_node(circuit_termination)],
[object_to_path_node(circuit_termination.termination)],
])
break
elif any([ct.cable in links for ct in circuit_terminations]):
# No valid path
is_split = True
break
terminations = circuit_terminations
terminations = [circuit_termination]
else:
# Check for non-symmetric path

View File

@@ -681,8 +681,8 @@ class ModuleBayTemplate(ModularComponentTemplateModel):
def instantiate(self, **kwargs):
return self.component_model(
name=self.resolve_name(kwargs.get('module')),
label=self.resolve_label(kwargs.get('module')),
name=self.name,
label=self.label,
position=self.position,
**kwargs
)

View File

@@ -646,10 +646,6 @@ class Device(
decimal_places=6,
blank=True,
null=True,
validators=[
MinValueValidator(decimal.Decimal('-90.0')),
MaxValueValidator(decimal.Decimal('90.0'))
],
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
)
longitude = models.DecimalField(
@@ -658,10 +654,6 @@ class Device(
decimal_places=6,
blank=True,
null=True,
validators=[
MinValueValidator(decimal.Decimal('-180.0')),
MaxValueValidator(decimal.Decimal('180.0'))
],
help_text=_("GPS coordinate in decimal format (xx.yyyyyy)")
)
services = GenericRelation(
@@ -1162,6 +1154,7 @@ class VirtualChassis(PrimaryModel):
})
def delete(self, *args, **kwargs):
# Check for LAG interfaces split across member chassis
interfaces = Interface.objects.filter(
device__in=self.members.all(),
@@ -1175,13 +1168,6 @@ class VirtualChassis(PrimaryModel):
"interfaces."
).format(self=self, interfaces=InterfaceSpeedChoices))
# Clear vc_position and vc_priority on member devices BEFORE calling super().delete()
# This must be done here because on_delete=SET_NULL executes before pre_delete signal
for device in self.members.all():
device.vc_position = None
device.vc_priority = None
device.save()
return super().delete(*args, **kwargs)

View File

@@ -7,6 +7,7 @@ from django.utils.translation import gettext_lazy as _
from jsonschema.exceptions import ValidationError as JSONValidationError
from dcim.choices import *
from dcim.constants import MODULE_TOKEN
from dcim.utils import update_interface_bridges
from extras.models import ConfigContextModel, CustomField
from netbox.models import PrimaryModel
@@ -330,6 +331,7 @@ class Module(PrimaryModel, ConfigContextModel):
else:
# ModuleBays must be saved individually for MPTT
for instance in create_instances:
instance.name = instance.name.replace(MODULE_TOKEN, str(self.module_bay.position))
instance.save()
update_fields = ['module']
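For context, MODULE_TOKEN is the literal '{module}' placeholder defined in dcim.constants, so the replacement above rewrites nested component names against the position of the module bay receiving the module. A minimal sketch of the substitution (the template name and position are illustrative):

from dcim.constants import MODULE_TOKEN  # the '{module}' placeholder

template_name = 'SFP {module}-21'   # nested component template name
parent_position = 'A'               # position of the parent module bay

resolved = template_name.replace(MODULE_TOKEN, str(parent_position))
assert resolved == 'SFP A-21'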

View File

@@ -1,8 +1,5 @@
import decimal
from django.contrib.contenttypes.fields import GenericRelation
from django.core.exceptions import ValidationError
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
from django.utils.translation import gettext_lazy as _
from timezone_field import TimeZoneField
@@ -213,10 +210,6 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
decimal_places=6,
blank=True,
null=True,
validators=[
MinValueValidator(decimal.Decimal('-90.0')),
MaxValueValidator(decimal.Decimal('90.0'))
],
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
)
longitude = models.DecimalField(
@@ -225,10 +218,6 @@ class Site(ContactsMixin, ImageAttachmentsMixin, PrimaryModel):
decimal_places=6,
blank=True,
null=True,
validators=[
MinValueValidator(decimal.Decimal('-180.0')),
MaxValueValidator(decimal.Decimal('180.0'))
],
help_text=_('GPS coordinate in decimal format (xx.yyyyyy)')
)

View File

@@ -1,4 +1,4 @@
from django.utils.translation import gettext_lazy as _
from django.utils.translation import gettext as _
from netbox.object_actions import ObjectAction

View File

@@ -1,6 +1,6 @@
import logging
from django.db.models.signals import post_save, post_delete
from django.db.models.signals import post_save, post_delete, pre_delete
from django.dispatch import receiver
from dcim.choices import CableEndChoices, LinkStatusChoices
@@ -85,6 +85,18 @@ def assign_virtualchassis_master(instance, created, **kwargs):
master.save()
@receiver(pre_delete, sender=VirtualChassis)
def clear_virtualchassis_members(instance, **kwargs):
"""
When a VirtualChassis is deleted, nullify the vc_position and vc_priority fields of its prior members.
"""
devices = Device.objects.filter(virtual_chassis=instance.pk)
for device in devices:
device.vc_position = None
device.vc_priority = None
device.save()
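A minimal sketch of the behavior this receiver provides (the device and chassis setup are illustrative; only the position/priority clearing performed above is asserted):

from dcim.models import Device, VirtualChassis

# Assume an existing device that belongs to a virtual chassis.
vc = VirtualChassis.objects.create(name='VC 1')
member = Device.objects.get(name='TestDevice1')   # illustrative lookup
member.virtual_chassis = vc
member.vc_position = 1
member.vc_priority = 10
member.save()

# Deleting the chassis fires pre_delete, which clears the member's
# vc_position and vc_priority before the chassis row is removed.
vc.delete()
member.refresh_from_db()
assert member.vc_position is None
assert member.vc_priority is None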
#
# Cables
#

View File

@@ -1174,9 +1174,6 @@ class MACAddressTable(NetBoxTable):
orderable=False,
verbose_name=_('Parent')
)
is_primary = columns.BooleanColumn(
verbose_name=_('Primary')
)
tags = columns.TagColumn(
url_name='dcim:macaddress_list'
)
@@ -1187,7 +1184,7 @@ class MACAddressTable(NetBoxTable):
class Meta(DeviceComponentTable.Meta):
model = models.MACAddress
fields = (
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'is_primary',
'comments', 'tags', 'created', 'last_updated',
'pk', 'id', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description', 'comments', 'tags',
'created', 'last_updated',
)
default_columns = ('pk', 'mac_address', 'assigned_object_parent', 'assigned_object', 'description')

View File

@@ -100,7 +100,7 @@ class RackTypeTable(NetBoxTable):
model = RackType
fields = (
'pk', 'id', 'model', 'manufacturer', 'form_factor', 'u_height', 'starting_unit', 'width', 'outer_width',
'outer_height', 'outer_depth', 'mounting_depth', 'weight', 'max_weight', 'description',
'outer_height', 'outer_depth', 'mounting_depth', 'airflow', 'weight', 'max_weight', 'description',
'comments', 'instance_count', 'tags', 'created', 'last_updated',
)
default_columns = (

View File

@@ -2376,6 +2376,33 @@ class CableTest(APIViewTestCases.APIViewTestCase):
]
class CableTerminationTest(
APIViewTestCases.GetObjectViewTestCase,
APIViewTestCases.ListObjectsViewTestCase,
):
model = CableTermination
brief_fields = ['cable', 'cable_end', 'display', 'id', 'termination_id', 'termination_type', 'url']
@classmethod
def setUpTestData(cls):
device1 = create_test_device('Device 1')
device2 = create_test_device('Device 2')
interfaces = []
for device in (device1, device2):
for i in range(0, 10):
interfaces.append(Interface(device=device, type=InterfaceTypeChoices.TYPE_1GE_FIXED, name=f'eth{i}'))
Interface.objects.bulk_create(interfaces)
cables = (
Cable(a_terminations=[interfaces[0]], b_terminations=[interfaces[10]], label='Cable 1'),
Cable(a_terminations=[interfaces[1]], b_terminations=[interfaces[11]], label='Cable 2'),
Cable(a_terminations=[interfaces[2]], b_terminations=[interfaces[12]], label='Cable 3'),
)
for cable in cables:
cable.save()
class ConnectedDeviceTest(APITestCase):
@classmethod

View File

@@ -2270,80 +2270,6 @@ class CablePathTestCase(TestCase):
CableTraceSVG(interface1).render()
CableTraceSVG(interface2).render()
def test_223_interface_to_interface_via_multiple_circuit_terminations(self):
provider = Provider.objects.first()
circuit_type = CircuitType.objects.first()
circuit1 = self.circuit
circuit2 = Circuit.objects.create(provider=provider, type=circuit_type, cid='Circuit 2')
interface1 = Interface.objects.create(device=self.device, name='Interface 1')
interface2 = Interface.objects.create(device=self.device, name='Interface 2')
circuittermination1_A = CircuitTermination.objects.create(
circuit=circuit1,
termination=self.site,
term_side='A'
)
circuittermination1_Z = CircuitTermination.objects.create(
circuit=circuit1,
termination=self.site,
term_side='Z'
)
circuittermination2_A = CircuitTermination.objects.create(
circuit=circuit2,
termination=self.site,
term_side='A'
)
circuittermination2_Z = CircuitTermination.objects.create(
circuit=circuit2,
termination=self.site,
term_side='Z'
)
# Create cables
cable1 = Cable(
a_terminations=[interface1],
b_terminations=[circuittermination1_A, circuittermination2_A]
)
cable2 = Cable(
a_terminations=[interface2],
b_terminations=[circuittermination1_Z, circuittermination2_Z]
)
cable1.save()
cable2.save()
self.assertEqual(CablePath.objects.count(), 2)
path1 = self.assertPathExists(
(
interface1,
cable1,
(circuittermination1_A, circuittermination2_A),
(circuittermination1_Z, circuittermination2_Z),
cable2,
interface2
),
is_active=True,
is_complete=True,
)
interface1.refresh_from_db()
self.assertPathIsSet(interface1, path1)
path2 = self.assertPathExists(
(
interface2,
cable2,
(circuittermination1_Z, circuittermination2_Z),
(circuittermination1_A, circuittermination2_A),
cable1,
interface1
),
is_active=True,
is_complete=True,
)
interface2.refresh_from_db()
self.assertPathIsSet(interface2, path2)
def test_301_create_path_via_existing_cable(self):
"""
[IF1] --C1-- [FP1] [RP1] --C2-- [RP2] [FP2] --C3-- [IF2]
@@ -2584,33 +2510,3 @@ class CablePathTestCase(TestCase):
is_active=True
)
self.assertEqual(CablePath.objects.count(), 0)
def test_402_exclude_circuit_loopback(self):
interface = Interface.objects.create(device=self.device, name='Interface 1')
circuittermination1 = CircuitTermination.objects.create(
circuit=self.circuit,
termination=self.site,
term_side='A'
)
circuittermination2 = CircuitTermination.objects.create(
circuit=self.circuit,
termination=self.site,
term_side='Z'
)
# Create cables
cable = Cable(
a_terminations=[interface],
b_terminations=[circuittermination1, circuittermination2]
)
cable.save()
path = self.assertPathExists(
(interface, cable, (circuittermination1, circuittermination2)),
is_active=True,
is_complete=False,
is_split=True
)
self.assertEqual(CablePath.objects.count(), 1)
interface.refresh_from_db()
self.assertPathIsSet(interface, path)

View File

@@ -10,7 +10,7 @@ from netbox.choices import ColorChoices, WeightUnitChoices
from tenancy.models import Tenant, TenantGroup
from users.models import User
from utilities.testing import ChangeLoggedFilterSetTests, create_test_device, create_test_virtualmachine
from virtualization.models import Cluster, ClusterGroup, ClusterType, VirtualMachine, VMInterface
from virtualization.models import Cluster, ClusterType, ClusterGroup, VMInterface, VirtualMachine
from wireless.choices import WirelessChannelChoices, WirelessRoleChoices
from wireless.models import WirelessLink
@@ -7164,20 +7164,9 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
MACAddress(mac_address='00-00-00-05-01-01', assigned_object=vm_interfaces[1]),
MACAddress(mac_address='00-00-00-06-01-01', assigned_object=vm_interfaces[2]),
MACAddress(mac_address='00-00-00-06-01-02', assigned_object=vm_interfaces[2]),
# unassigned
MACAddress(mac_address='00-00-00-07-01-01'),
)
MACAddress.objects.bulk_create(mac_addresses)
# Set MAC addresses as primary
for idx, interface in enumerate(interfaces):
interface.primary_mac_address = mac_addresses[idx]
interface.save()
for idx, vm_interface in enumerate(vm_interfaces):
# Offset by 4 for device MACs
vm_interface.primary_mac_address = mac_addresses[idx + 4]
vm_interface.save()
def test_mac_address(self):
params = {'mac_address': ['00-00-00-01-01-01', '00-00-00-02-01-01']}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
@@ -7209,15 +7198,3 @@ class MACAddressTestCase(TestCase, ChangeLoggedFilterSetTests):
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
params = {'vminterface': [vm_interfaces[0].name, vm_interfaces[1].name]}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
def test_assigned(self):
params = {'assigned': True}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 8)
params = {'assigned': False}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
def test_primary(self):
params = {'primary': True}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 6)
params = {'primary': False}
self.assertEqual(self.filterset(params, self.queryset).qs.count(), 3)

View File

@@ -792,54 +792,8 @@ class ModuleBayTestCase(TestCase):
)
device.consoleports.first()
@tag('regression') # #19918
def test_nested_module_bay_label_resolution(self):
"""Test that nested module bay labels properly resolve {module} placeholders"""
manufacturer = Manufacturer.objects.first()
site = Site.objects.first()
device_role = DeviceRole.objects.first()
# Create device type with module bay template (position='A')
device_type = DeviceType.objects.create(
manufacturer=manufacturer,
model='Device with Bays',
slug='device-with-bays'
)
ModuleBayTemplate.objects.create(
device_type=device_type,
name='Bay A',
position='A'
)
# Create module type with nested bay template using {module} placeholder
module_type = ModuleType.objects.create(
manufacturer=manufacturer,
model='Module with Nested Bays'
)
ModuleBayTemplate.objects.create(
module_type=module_type,
name='SFP {module}-21',
label='{module}-21',
position='21'
)
# Create device and install module
device = Device.objects.create(
name='Test Device',
device_type=device_type,
role=device_role,
site=site
)
module_bay = device.modulebays.get(name='Bay A')
module = Module.objects.create(
device=device,
module_bay=module_bay,
module_type=module_type
)
# Verify nested bay label resolves {module} to parent position
nested_bay = module.modulebays.get(name='SFP A-21')
self.assertEqual(nested_bay.label, 'A-21')
def test_nested_module_token(self):
pass
class CableTestCase(TestCase):
@@ -1013,18 +967,6 @@ class CableTestCase(TestCase):
with self.assertRaises(ValidationError):
cable.clean()
def test_cannot_cable_to_mark_connected(self):
"""
Test that a cable cannot be connected to an interface marked as connected.
"""
device1 = Device.objects.get(name='TestDevice1')
interface1 = Interface.objects.get(device__name='TestDevice2', name='eth1')
mark_connected_interface = Interface(device=device1, name='mark_connected1', mark_connected=True)
cable = Cable(a_terminations=[mark_connected_interface], b_terminations=[interface1])
with self.assertRaises(ValidationError):
cable.clean()
class VirtualDeviceContextTestCase(TestCase):
@@ -1077,92 +1019,3 @@ class VirtualDeviceContextTestCase(TestCase):
vdc2 = VirtualDeviceContext(device=device, name="VDC 2", identifier=1, status='active')
with self.assertRaises(ValidationError):
vdc2.full_clean()
class VirtualChassisTestCase(TestCase):
@classmethod
def setUpTestData(cls):
site = Site.objects.create(name='Test Site 1', slug='test-site-1')
manufacturer = Manufacturer.objects.create(name='Test Manufacturer 1', slug='test-manufacturer-1')
devicetype = DeviceType.objects.create(
manufacturer=manufacturer, model='Test Device Type 1', slug='test-device-type-1'
)
role = DeviceRole.objects.create(
name='Test Device Role 1', slug='test-device-role-1', color='ff0000'
)
Device.objects.create(
device_type=devicetype, role=role, name='TestDevice1', site=site
)
Device.objects.create(
device_type=devicetype, role=role, name='TestDevice2', site=site
)
def test_virtualchassis_deletion_clears_vc_position(self):
"""
Test that when a VirtualChassis is deleted, member devices have their
vc_position and vc_priority fields set to None.
"""
devices = Device.objects.all()
device1 = devices[0]
device2 = devices[1]
# Create a VirtualChassis with two member devices
vc = VirtualChassis.objects.create(name='Test VC', master=device1)
device1.virtual_chassis = vc
device1.vc_position = 1
device1.vc_priority = 10
device1.save()
device2.virtual_chassis = vc
device2.vc_position = 2
device2.vc_priority = 20
device2.save()
# Verify devices are members of the VC with positions set
device1.refresh_from_db()
device2.refresh_from_db()
self.assertEqual(device1.virtual_chassis, vc)
self.assertEqual(device1.vc_position, 1)
self.assertEqual(device1.vc_priority, 10)
self.assertEqual(device2.virtual_chassis, vc)
self.assertEqual(device2.vc_position, 2)
self.assertEqual(device2.vc_priority, 20)
# Delete the VirtualChassis
vc.delete()
# Verify devices have vc_position and vc_priority set to None
device1.refresh_from_db()
device2.refresh_from_db()
self.assertIsNone(device1.virtual_chassis)
self.assertIsNone(device1.vc_position)
self.assertIsNone(device1.vc_priority)
self.assertIsNone(device2.virtual_chassis)
self.assertIsNone(device2.vc_position)
self.assertIsNone(device2.vc_priority)
def test_virtualchassis_duplicate_vc_position(self):
"""
Test that two devices cannot be assigned to the same vc_position
within the same VirtualChassis.
"""
devices = Device.objects.all()
device1 = devices[0]
device2 = devices[1]
# Create a VirtualChassis
vc = VirtualChassis.objects.create(name='Test VC')
# Assign first device to vc_position 1
device1.virtual_chassis = vc
device1.vc_position = 1
device1.full_clean()
device1.save()
# Try to assign second device to the same vc_position
device2.virtual_chassis = vc
device2.vc_position = 1
with self.assertRaises(ValidationError):
device2.full_clean()

View File

@@ -986,131 +986,6 @@ inventory-items:
ii1 = InventoryItemTemplate.objects.first()
self.assertEqual(ii1.name, 'Inventory Item 1')
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
def test_import_error_numbering(self):
# Add all required permissions to the test user
self.add_permissions(
'dcim.view_devicetype',
'dcim.add_devicetype',
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
'dcim.add_devicebaytemplate',
'dcim.add_inventoryitemtemplate',
)
import_data = '''
---
manufacturer: Manufacturer 1
model: TEST-2001
slug: test-2001
u_height: 1
module-bays:
- name: Module Bay 1-1
- name: Module Bay 1-2
---
- manufacturer: Manufacturer 1
model: TEST-2002
slug: test-2002
u_height: 1
module-bays:
- name: Module Bay 2-1
- name: Module Bay 2-2
- not_name: Module Bay 2-3
- manufacturer: Manufacturer 1
model: TEST-2003
slug: test-2003
u_height: 1
module-bays:
- name: Module Bay 3-1
'''
form_data = {
'data': import_data,
'format': 'yaml'
}
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
self.assertHttpStatus(response, 200)
self.assertContains(response, "Record 2 module-bays[3].name: This field is required.")
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
def test_import_nolist(self):
# Add all required permissions to the test user
self.add_permissions(
'dcim.view_devicetype',
'dcim.add_devicetype',
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
'dcim.add_devicebaytemplate',
'dcim.add_inventoryitemtemplate',
)
for value in ('', 'null', '3', '"My console port"', '{name: "My other console port"}'):
with self.subTest(value=value):
import_data = f'''
manufacturer: Manufacturer 1
model: TEST-3000
slug: test-3000
u_height: 1
console-ports: {value}
'''
form_data = {
'data': import_data,
'format': 'yaml'
}
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
self.assertHttpStatus(response, 200)
self.assertContains(response, "Record 1 console-ports: Must be a list.")
@override_settings(EXEMPT_VIEW_PERMISSIONS=['*'])
def test_import_nodict(self):
# Add all required permissions to the test user
self.add_permissions(
'dcim.view_devicetype',
'dcim.add_devicetype',
'dcim.add_consoleporttemplate',
'dcim.add_consoleserverporttemplate',
'dcim.add_powerporttemplate',
'dcim.add_poweroutlettemplate',
'dcim.add_interfacetemplate',
'dcim.add_frontporttemplate',
'dcim.add_rearporttemplate',
'dcim.add_modulebaytemplate',
'dcim.add_devicebaytemplate',
'dcim.add_inventoryitemtemplate',
)
for value in ('', 'null', '3', '"My console port"', '["My other console port"]'):
with self.subTest(value=value):
import_data = f'''
manufacturer: Manufacturer 1
model: TEST-4000
slug: test-4000
u_height: 1
console-ports:
- {value}
'''
form_data = {
'data': import_data,
'format': 'yaml'
}
response = self.client.post(reverse('dcim:devicetype_bulk_import'), data=form_data, follow=True)
self.assertHttpStatus(response, 200)
self.assertContains(response, "Record 1 console-ports[1]: Must be a dictionary.")
def test_export_objects(self):
url = reverse('dcim:devicetype_list')
self.add_permissions('dcim.view_devicetype')
@@ -2959,19 +2834,10 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
}
cls.csv_data = (
"device,name,type,vrf.pk,poe_mode,poe_type,mode,untagged_vlan,tagged_vlans",
(
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
),
(
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
),
(
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af,"
f"tagged,{vlans[0].vid},'{','.join([str(v.vid) for v in vlans[1:4]])}'"
),
"device,name,type,vrf.pk,poe_mode,poe_type",
f"Device 1,Interface 4,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
f"Device 1,Interface 5,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
f"Device 1,Interface 6,1000base-t,{vrfs[0].pk},pse,type1-ieee802.3af",
)
cls.csv_update_data = (
@@ -3019,43 +2885,6 @@ class InterfaceTestCase(ViewTestCases.DeviceComponentViewTestCase):
self.client.post(self._get_url('bulk_delete'), data)
self.assertEqual(device.interfaces.count(), 4) # Child & parent were both deleted
def test_rename_select_all_spans_pages(self):
"""
Tests the bulk rename functionality for interfaces spanning multiple pages in the UI.
"""
device_name = 'DeviceRename'
device = create_test_device(device_name)
# Create > default page size (25) so selection spans multiple pages
for i in range(37):
Interface.objects.create(device=device, name=f'eth{i}')
self.add_permissions('dcim.change_interface')
# Filter to this device's interfaces to simulate a real list filter
get_qs = {'device_id': Device.objects.get(name=device_name).pk}
post_url = f'{self._get_url("bulk_rename")}?device_id={get_qs["device_id"]}'
# Preview step: ensure 37 selected (not just one page)
data = {'_preview': '1', '_all': '1', 'find': 'eth', 'replace': 'xe'}
response = self.client.post(post_url, data=data)
self.assertHttpStatus(response, 200)
self.assertEqual(len(response.context['selected_objects']), 37)
# Extract pk[] just like the browser would submit on Apply
# (either from the form's initial, or from selected_objects)
pk_list = response.context['form'].initial.get('pk')
if not pk_list:
pk_list = [obj.pk for obj in response.context['selected_objects']]
pk_list = [str(pk) for pk in pk_list]
# Apply step: include pk[] in the POST
apply_data = {'_apply': '1', '_all': '1', 'find': 'eth', 'replace': 'xe', 'pk': pk_list}
response = self.client.post(post_url, data=apply_data)
# On success the view redirects back to the return URL
self.assertHttpStatus(response, 302)
self.assertEqual(Interface.objects.filter(device=device, name__startswith='xe').count(), 37)
class FrontPortTestCase(ViewTestCases.DeviceComponentViewTestCase):
model = FrontPort

View File

@@ -295,7 +295,6 @@ class RegionBulkEditView(generic.BulkEditView):
@register_model_view(Region, 'bulk_rename', path='rename', detail=False)
class RegionBulkRenameView(generic.BulkRenameView):
queryset = Region.objects.all()
filterset = filtersets.RegionFilterSet
@register_model_view(Region, 'bulk_delete', path='delete', detail=False)
@@ -427,7 +426,6 @@ class SiteGroupBulkEditView(generic.BulkEditView):
@register_model_view(SiteGroup, 'bulk_rename', path='rename', detail=False)
class SiteGroupBulkRenameView(generic.BulkRenameView):
queryset = SiteGroup.objects.all()
filterset = filtersets.SiteGroupFilterSet
@register_model_view(SiteGroup, 'bulk_delete', path='delete', detail=False)
@@ -518,7 +516,6 @@ class SiteBulkEditView(generic.BulkEditView):
@register_model_view(Site, 'bulk_rename', path='rename', detail=False)
class SiteBulkRenameView(generic.BulkRenameView):
queryset = Site.objects.all()
filterset = filtersets.SiteFilterSet
@register_model_view(Site, 'bulk_delete', path='delete', detail=False)
@@ -628,7 +625,6 @@ class LocationBulkEditView(generic.BulkEditView):
@register_model_view(Location, 'bulk_rename', path='rename', detail=False)
class LocationBulkRenameView(generic.BulkRenameView):
queryset = Location.objects.all()
filterset = filtersets.LocationFilterSet
@register_model_view(Location, 'bulk_delete', path='delete', detail=False)
@@ -699,7 +695,6 @@ class RackRoleBulkEditView(generic.BulkEditView):
@register_model_view(RackRole, 'bulk_rename', path='rename', detail=False)
class RackRoleBulkRenameView(generic.BulkRenameView):
queryset = RackRole.objects.all()
filterset = filtersets.RackRoleFilterSet
@register_model_view(RackRole, 'bulk_delete', path='delete', detail=False)
@@ -765,7 +760,6 @@ class RackTypeBulkEditView(generic.BulkEditView):
class RackTypeBulkRenameView(generic.BulkRenameView):
queryset = RackType.objects.all()
field_name = 'model'
filterset = filtersets.RackTypeFilterSet
@register_model_view(RackType, 'bulk_delete', path='delete', detail=False)
@@ -950,7 +944,6 @@ class RackBulkEditView(generic.BulkEditView):
@register_model_view(Rack, 'bulk_rename', path='rename', detail=False)
class RackBulkRenameView(generic.BulkRenameView):
queryset = Rack.objects.all()
filterset = filtersets.RackFilterSet
@register_model_view(Rack, 'bulk_delete', path='delete', detail=False)
@@ -1090,7 +1083,6 @@ class ManufacturerBulkEditView(generic.BulkEditView):
@register_model_view(Manufacturer, 'bulk_rename', path='rename', detail=False)
class ManufacturerBulkRenameView(generic.BulkRenameView):
queryset = Manufacturer.objects.all()
filterset = filtersets.ManufacturerFilterSet
@register_model_view(Manufacturer, 'bulk_delete', path='delete', detail=False)
@@ -1344,7 +1336,6 @@ class DeviceTypeBulkEditView(generic.BulkEditView):
class DeviceTypeBulkRenameView(generic.BulkRenameView):
queryset = DeviceType.objects.all()
field_name = 'model'
filterset = filtersets.DeviceTypeFilterSet
@register_model_view(DeviceType, 'bulk_delete', path='delete', detail=False)
@@ -1406,7 +1397,6 @@ class ModuleTypeProfileBulkEditView(generic.BulkEditView):
@register_model_view(ModuleTypeProfile, 'bulk_rename', path='rename', detail=False)
class ModuleTypeProfileBulkRenameView(generic.BulkRenameView):
queryset = ModuleTypeProfile.objects.all()
filterset = filtersets.ModuleTypeProfileFilterSet
@register_model_view(ModuleTypeProfile, 'bulk_delete', path='delete', detail=False)
@@ -1622,7 +1612,6 @@ class ModuleTypeBulkEditView(generic.BulkEditView):
@register_model_view(ModuleType, 'bulk_rename', path='rename', detail=False)
class ModuleTypeBulkRenameView(generic.BulkRenameView):
queryset = ModuleType.objects.all()
filterset = filtersets.ModuleTypeFilterSet
@register_model_view(ModuleType, 'bulk_delete', path='delete', detail=False)
@@ -2111,7 +2100,6 @@ class DeviceRoleBulkEditView(generic.BulkEditView):
@register_model_view(DeviceRole, 'bulk_rename', path='rename', detail=False)
class DeviceRoleBulkRenameView(generic.BulkRenameView):
queryset = DeviceRole.objects.all()
filterset = filtersets.DeviceRoleFilterSet
@register_model_view(DeviceRole, 'bulk_delete', path='delete', detail=False)
@@ -2187,7 +2175,6 @@ class PlatformBulkEditView(generic.BulkEditView):
@register_model_view(Platform, 'bulk_rename', path='rename', detail=False)
class PlatformBulkRenameView(generic.BulkRenameView):
queryset = Platform.objects.all()
filterset = filtersets.PlatformFilterSet
@register_model_view(Platform, 'bulk_delete', path='delete', detail=False)
@@ -2595,7 +2582,6 @@ class ConsolePortBulkEditView(generic.BulkEditView):
@register_model_view(ConsolePort, 'bulk_rename', path='rename', detail=False)
class ConsolePortBulkRenameView(generic.BulkRenameView):
queryset = ConsolePort.objects.all()
filterset = filtersets.ConsolePortFilterSet
@register_model_view(ConsolePort, 'bulk_disconnect', path='disconnect', detail=False)
@@ -2666,7 +2652,6 @@ class ConsoleServerPortBulkEditView(generic.BulkEditView):
@register_model_view(ConsoleServerPort, 'bulk_rename', path='rename', detail=False)
class ConsoleServerPortBulkRenameView(generic.BulkRenameView):
queryset = ConsoleServerPort.objects.all()
filterset = filtersets.ConsoleServerPortFilterSet
@register_model_view(ConsoleServerPort, 'bulk_disconnect', path='disconnect', detail=False)
@@ -2737,7 +2722,6 @@ class PowerPortBulkEditView(generic.BulkEditView):
@register_model_view(PowerPort, 'bulk_rename', path='rename', detail=False)
class PowerPortBulkRenameView(generic.BulkRenameView):
queryset = PowerPort.objects.all()
filterset = filtersets.PowerPortFilterSet
@register_model_view(PowerPort, 'bulk_disconnect', path='disconnect', detail=False)
@@ -2808,7 +2792,6 @@ class PowerOutletBulkEditView(generic.BulkEditView):
@register_model_view(PowerOutlet, 'bulk_rename', path='rename', detail=False)
class PowerOutletBulkRenameView(generic.BulkRenameView):
queryset = PowerOutlet.objects.all()
filterset = filtersets.PowerOutletFilterSet
@register_model_view(PowerOutlet, 'bulk_disconnect', path='disconnect', detail=False)
@@ -2951,7 +2934,6 @@ class InterfaceBulkEditView(generic.BulkEditView):
@register_model_view(Interface, 'bulk_rename', path='rename', detail=False)
class InterfaceBulkRenameView(generic.BulkRenameView):
queryset = Interface.objects.all()
filterset = filtersets.InterfaceFilterSet
@register_model_view(Interface, 'bulk_disconnect', path='disconnect', detail=False)
@@ -3023,7 +3005,6 @@ class FrontPortBulkEditView(generic.BulkEditView):
@register_model_view(FrontPort, 'bulk_rename', path='rename', detail=False)
class FrontPortBulkRenameView(generic.BulkRenameView):
queryset = FrontPort.objects.all()
filterset = filtersets.FrontPortFilterSet
@register_model_view(FrontPort, 'bulk_disconnect', path='disconnect', detail=False)
@@ -3099,7 +3080,6 @@ class RearPortBulkRenameView(generic.BulkRenameView):
@register_model_view(RearPort, 'bulk_disconnect', path='disconnect', detail=False)
class RearPortBulkDisconnectView(BulkDisconnectView):
queryset = RearPort.objects.all()
filterset = filtersets.RearPortFilterSet
@register_model_view(RearPort, 'bulk_delete', path='delete', detail=False)
@@ -3165,7 +3145,6 @@ class ModuleBayBulkEditView(generic.BulkEditView):
@register_model_view(ModuleBay, 'bulk_rename', path='rename', detail=False)
class ModuleBayBulkRenameView(generic.BulkRenameView):
queryset = ModuleBay.objects.all()
filterset = filtersets.ModuleBayFilterSet
@register_model_view(ModuleBay, 'bulk_delete', path='delete', detail=False)
@@ -3308,7 +3287,6 @@ class DeviceBayBulkEditView(generic.BulkEditView):
@register_model_view(DeviceBay, 'bulk_rename', path='rename', detail=False)
class DeviceBayBulkRenameView(generic.BulkRenameView):
queryset = DeviceBay.objects.all()
filterset = filtersets.DeviceBayFilterSet
@register_model_view(DeviceBay, 'bulk_delete', path='delete', detail=False)
@@ -3370,7 +3348,6 @@ class InventoryItemBulkEditView(generic.BulkEditView):
@register_model_view(InventoryItem, 'bulk_rename', path='rename', detail=False)
class InventoryItemBulkRenameView(generic.BulkRenameView):
queryset = InventoryItem.objects.all()
filterset = filtersets.InventoryItemFilterSet
@register_model_view(InventoryItem, 'bulk_delete', path='delete', detail=False)
@@ -3454,7 +3431,6 @@ class InventoryItemRoleBulkEditView(generic.BulkEditView):
@register_model_view(InventoryItemRole, 'bulk_rename', path='rename', detail=False)
class InventoryItemRoleBulkRenameView(generic.BulkRenameView):
queryset = InventoryItemRole.objects.all()
filterset = filtersets.InventoryItemRoleFilterSet
@register_model_view(InventoryItemRole, 'bulk_delete', path='delete', detail=False)
@@ -3658,7 +3634,6 @@ class CableBulkEditView(generic.BulkEditView):
class CableBulkRenameView(generic.BulkRenameView):
queryset = Cable.objects.all()
field_name = 'label'
filterset = filtersets.CableFilterSet
@register_model_view(Cable, 'bulk_delete', path='delete', detail=False)
@@ -3779,7 +3754,6 @@ class VirtualChassisEditView(ObjectPermissionRequiredMixin, GetReturnURLMixin, V
def post(self, request, pk):
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
virtual_chassis.snapshot()
VCMemberFormSet = modelformset_factory(
model=Device,
form=forms.DeviceVCMembershipForm,
@@ -3832,7 +3806,9 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
return 'dcim.change_virtualchassis'
def get(self, request, pk):
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
initial_data = {k: request.GET[k] for k in request.GET}
member_select_form = forms.VCMemberSelectForm(initial=initial_data)
membership_form = forms.DeviceVCMembershipForm(initial=initial_data)
@@ -3845,20 +3821,20 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
})
def post(self, request, pk):
virtual_chassis = get_object_or_404(self.queryset, pk=pk)
member_select_form = forms.VCMemberSelectForm(request.POST)
if member_select_form.is_valid():
device = member_select_form.cleaned_data['device']
device.snapshot()
device.virtual_chassis = virtual_chassis
data = {
'vc_position': request.POST['vc_position'],
'vc_priority': request.POST['vc_priority'],
}
data = {k: request.POST[k] for k in ['vc_position', 'vc_priority']}
membership_form = forms.DeviceVCMembershipForm(data=data, validate_vc_position=True, instance=device)
if membership_form.is_valid():
membership_form.save()
messages.success(request, mark_safe(
_('Added member <a href="{url}">{device}</a>').format(
@@ -3868,9 +3844,11 @@ class VirtualChassisAddMemberView(ObjectPermissionRequiredMixin, GetReturnURLMix
if '_addanother' in request.POST and safe_for_redirect(request.get_full_path()):
return redirect(request.get_full_path())
return redirect(self.get_return_url(request, device))
else:
membership_form = forms.DeviceVCMembershipForm(data=request.POST)
return render(request, 'dcim/virtualchassis_add_member.html', {
@@ -3888,6 +3866,7 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
return 'dcim.change_device'
def get(self, request, pk):
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
form = ConfirmationForm(initial=request.GET)
@@ -3898,6 +3877,7 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
})
def post(self, request, pk):
device = get_object_or_404(self.queryset, pk=pk, virtual_chassis__isnull=False)
form = ConfirmationForm(request.POST)
@@ -3911,11 +3891,13 @@ class VirtualChassisRemoveMemberView(ObjectPermissionRequiredMixin, GetReturnURL
return redirect(device.get_absolute_url())
if form.is_valid():
device.snapshot()
device.virtual_chassis = None
device.vc_position = None
device.vc_priority = None
device.save()
devices = Device.objects.filter(pk=device.pk)
for device in devices:
device.virtual_chassis = None
device.vc_position = None
device.vc_priority = None
device.save()
msg = _('Removed {device} from virtual chassis {chassis}').format(
device=device,
@@ -3949,7 +3931,6 @@ class VirtualChassisBulkEditView(generic.BulkEditView):
@register_model_view(VirtualChassis, 'bulk_rename', path='rename', detail=False)
class VirtualChassisBulkRenameView(generic.BulkRenameView):
queryset = VirtualChassis.objects.all()
filterset = filtersets.VirtualChassisFilterSet
@register_model_view(VirtualChassis, 'bulk_delete', path='delete', detail=False)
@@ -4012,7 +3993,6 @@ class PowerPanelBulkEditView(generic.BulkEditView):
@register_model_view(PowerPanel, 'bulk_rename', path='rename', detail=False)
class PowerPanelBulkRenameView(generic.BulkRenameView):
queryset = PowerPanel.objects.all()
filterset = filtersets.PowerPanelFilterSet
@register_model_view(PowerPanel, 'bulk_delete', path='delete', detail=False)
@@ -4070,7 +4050,6 @@ class PowerFeedBulkEditView(generic.BulkEditView):
@register_model_view(PowerFeed, 'bulk_rename', path='rename', detail=False)
class PowerFeedBulkRenameView(generic.BulkRenameView):
queryset = PowerFeed.objects.all()
filterset = filtersets.PowerFeedFilterSet
@register_model_view(PowerFeed, 'bulk_disconnect', path='disconnect', detail=False)
@@ -4149,7 +4128,6 @@ class VirtualDeviceContextBulkEditView(generic.BulkEditView):
@register_model_view(VirtualDeviceContext, 'bulk_rename', path='rename', detail=False)
class VirtualDeviceContextBulkRenameView(generic.BulkRenameView):
queryset = VirtualDeviceContext.objects.all()
filterset = filtersets.VirtualDeviceContextFilterSet
@register_model_view(VirtualDeviceContext, 'bulk_delete', path='delete', detail=False)

View File

@@ -23,6 +23,6 @@ class ConfigTemplateSerializer(ChangeLogMessageSerializer, TaggableModelSerializ
fields = [
'id', 'url', 'display_url', 'display', 'name', 'description', 'environment_params', 'template_code',
'mime_type', 'file_name', 'file_extension', 'as_attachment', 'data_source', 'data_path', 'data_file',
'auto_sync_enabled', 'data_synced', 'tags', 'created', 'last_updated',
'data_synced', 'tags', 'created', 'last_updated',
]
brief_fields = ('id', 'url', 'display', 'name', 'description')

View File

@@ -5,7 +5,6 @@ from rest_framework import serializers
from core.api.serializers_.jobs import JobSerializer
from extras.models import Script
from netbox.api.serializers import ValidatedModelSerializer
from utilities.datetime import local_now
__all__ = (
'ScriptDetailSerializer',
@@ -67,31 +66,11 @@ class ScriptInputSerializer(serializers.Serializer):
interval = serializers.IntegerField(required=False, allow_null=True)
def validate_schedule_at(self, value):
"""
Validates the specified schedule time for a script execution.
"""
if value:
if not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
if value < local_now():
raise serializers.ValidationError(_('Scheduled time must be in the future.'))
if value and not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
return value
def validate_interval(self, value):
"""
Validates the provided interval based on the script's scheduling configuration.
"""
if value and not self.context['script'].python_class.scheduling_enabled:
raise serializers.ValidationError(_('Scheduling is not enabled for this script.'))
raise serializers.ValidationError(_("Scheduling is not enabled for this script."))
return value
def validate(self, data):
"""
Validates the given data and ensures the necessary fields are populated.
"""
# Set the schedule_at time to now if only an interval is provided
# while handling the case where schedule_at is null.
if data.get('interval') and not data.get('schedule_at'):
data['schedule_at'] = local_now()
return super().validate(data)

View File

@@ -1,10 +1,8 @@
from django.urls import include, path
from core.api.views import ObjectTypeViewSet
from netbox.api.routers import NetBoxRouter
from . import views
router = NetBoxRouter()
router.APIRootView = views.ExtrasRootView
@@ -29,9 +27,6 @@ router.register('config-context-profiles', views.ConfigContextProfileViewSet)
router.register('config-templates', views.ConfigTemplateViewSet)
router.register('scripts', views.ScriptViewSet, basename='script')
# TODO: Remove in NetBox v4.5
router.register('object-types', ObjectTypeViewSet)
app_name = 'extras-api'
urlpatterns = [
path('dashboard/', views.DashboardView.as_view(), name='dashboard'),

View File

@@ -267,14 +267,6 @@ class ScriptViewSet(ModelViewSet):
_ignore_model_permissions = True
lookup_value_regex = '[^/]+' # Allow dots
def initial(self, request, *args, **kwargs):
super().initial(request, *args, **kwargs)
# Restrict the view's QuerySet to allow only the permitted objects
if request.user.is_authenticated:
action = 'run' if request.method == 'POST' else 'view'
self.queryset = self.queryset.restrict(request.user, action)
def _get_script(self, pk):
# If pk is numeric, retrieve script by ID
if pk.isnumeric():
@@ -298,12 +290,10 @@ class ScriptViewSet(ModelViewSet):
"""
Run a Script identified by its numeric PK or module & name and return the pending Job as the result
"""
if not request.user.has_perm('extras.run_script'):
raise PermissionDenied("This user does not have permission to run scripts.")
script = self._get_script(pk)
if not request.user.has_perm('extras.run_script', obj=script):
raise PermissionDenied("This user does not have permission to run this script.")
input_serializer = serializers.ScriptInputSerializer(
data=request.data,
context={'script': script}

View File

@@ -209,10 +209,7 @@ class ObjectCountsWidget(DashboardWidget):
url = get_action_url(model, action='list')
except NoReverseMatch:
url = None
try:
qs = model.objects.restrict(request.user, 'view')
except AttributeError:
qs = model.objects.all()
qs = model.objects.restrict(request.user, 'view')
# Apply any specified filters
if url and (filters := self.config.get('filters')):
params = dict_to_querydict(filters)

View File

@@ -134,18 +134,11 @@ def process_event_rules(event_rules, object_type, event_type, data, username=Non
# Enqueue a Job to record the script's execution
from extras.jobs import ScriptJob
params = {
"instance": event_rule.action_object,
"name": script.name,
"user": user,
"data": event_data
}
if snapshots:
params["snapshots"] = snapshots
if request:
params["request"] = copy_safe_request(request)
ScriptJob.enqueue(
**params
instance=event_rule.action_object,
name=script.name,
user=user,
data=event_data
)
# Notification groups

View File

@@ -398,12 +398,8 @@ class ConfigTemplateBulkEditForm(ChangelogMessageMixin, BulkEditForm):
required=False,
widget=BulkEditNullBooleanSelect()
)
auto_sync_enabled = forms.NullBooleanField(
label=_('Auto sync enabled'),
required=False,
widget=BulkEditNullBooleanSelect()
)
nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension', 'auto_sync_enabled',)
nullable_fields = ('description', 'mime_type', 'file_name', 'file_extension')
class ImageAttachmentBulkEditForm(ChangelogMessageMixin, BulkEditForm):

View File

@@ -5,7 +5,7 @@ from django.contrib.postgres.forms import SimpleArrayField
from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import gettext_lazy as _
from core.models import DataFile, DataSource, ObjectType
from core.models import ObjectType
from extras.choices import *
from extras.models import *
from netbox.events import get_event_type_choices
@@ -160,41 +160,14 @@ class ConfigContextProfileImportForm(NetBoxModelImportForm):
class ConfigTemplateImportForm(CSVModelForm):
data_source = CSVModelChoiceField(
label=_('Data source'),
queryset=DataSource.objects.all(),
required=False,
to_field_name='name',
help_text=_('Data source which provides the data file')
)
data_file = CSVModelChoiceField(
label=_('Data file'),
queryset=DataFile.objects.all(),
required=False,
to_field_name='path',
help_text=_('Data file containing the template code')
)
auto_sync_enabled = forms.BooleanField(
required=False,
label=_('Auto sync enabled'),
help_text=_("Enable automatic synchronization of template content when the data file is updated")
)
class Meta:
model = ConfigTemplate
fields = (
'name', 'description', 'template_code', 'data_source', 'data_file', 'auto_sync_enabled',
'environment_params', 'mime_type', 'file_name', 'file_extension', 'as_attachment', 'tags',
'name', 'description', 'template_code', 'environment_params', 'mime_type', 'file_name', 'file_extension',
'as_attachment', 'tags',
)
def clean(self):
super().clean()
# Make sure template_code is None when it's not included in the uploaded data
if not self.data.get('template_code') and not self.data.get('data_file'):
raise forms.ValidationError(_("Must specify either local content or a data file"))
return self.cleaned_data['template_code']
class SavedFilterImportForm(CSVModelForm):
object_types = CSVMultipleContentTypeField(
@@ -299,10 +272,6 @@ class JournalEntryImportForm(NetBoxModelImportForm):
choices=JournalEntryKindChoices,
help_text=_('The classification of entry')
)
comments = forms.CharField(
label=_('Comments'),
required=True
)
class Meta:
model = JournalEntry

View File

@@ -42,20 +42,17 @@ class CustomFieldFilterForm(SavedFiltersMixin, FilterForm):
model = CustomField
fieldsets = (
FieldSet('q', 'filter_id'),
FieldSet('object_type_id', 'type', 'group_name', 'weight', 'required', 'unique', name=_('Attributes')),
FieldSet('choice_set_id', 'related_object_type_id', name=_('Type Options')),
FieldSet(
'type', 'related_object_type_id', 'group_name', 'weight', 'required', 'unique', 'choice_set_id',
name=_('Attributes')
),
FieldSet('ui_visible', 'ui_editable', 'is_cloneable', name=_('Behavior')),
FieldSet('validation_minimum', 'validation_maximum', 'validation_regex', name=_('Validation')),
)
object_type_id = ContentTypeMultipleChoiceField(
related_object_type_id = ContentTypeMultipleChoiceField(
queryset=ObjectType.objects.with_feature('custom_fields'),
required=False,
label=_('Object types'),
)
related_object_type_id = ContentTypeMultipleChoiceField(
queryset=ObjectType.objects.public(),
required=False,
label=_('Related object type'),
label=_('Related object type')
)
type = forms.MultipleChoiceField(
choices=CustomFieldTypeChoices,
@@ -139,12 +136,12 @@ class CustomLinkFilterForm(SavedFiltersMixin, FilterForm):
model = CustomLink
fieldsets = (
FieldSet('q', 'filter_id'),
FieldSet('object_type_id', 'enabled', 'new_window', 'weight', name=_('Attributes')),
FieldSet('object_type', 'enabled', 'new_window', 'weight', name=_('Attributes')),
)
object_type_id = ContentTypeMultipleChoiceField(
object_type = ContentTypeMultipleChoiceField(
label=_('Object types'),
queryset=ObjectType.objects.with_feature('custom_links'),
required=False,
required=False
)
enabled = forms.NullBooleanField(
label=_('Enabled'),
@@ -233,12 +230,12 @@ class SavedFilterFilterForm(SavedFiltersMixin, FilterForm):
model = SavedFilter
fieldsets = (
FieldSet('q', 'filter_id'),
FieldSet('object_type_id', 'enabled', 'shared', 'weight', name=_('Attributes')),
FieldSet('object_type', 'enabled', 'shared', 'weight', name=_('Attributes')),
)
object_type_id = ContentTypeMultipleChoiceField(
object_type = ContentTypeMultipleChoiceField(
label=_('Object types'),
queryset=ObjectType.objects.public(),
required=False,
required=False
)
enabled = forms.NullBooleanField(
label=_('Enabled'),
@@ -479,7 +476,7 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
model = ConfigTemplate
fieldsets = (
FieldSet('q', 'filter_id', 'tag'),
FieldSet('data_source_id', 'data_file_id', 'auto_sync_enabled', name=_('Data')),
FieldSet('data_source_id', 'data_file_id', name=_('Data')),
FieldSet('mime_type', 'file_name', 'file_extension', 'as_attachment', name=_('Rendering'))
)
data_source_id = DynamicModelMultipleChoiceField(
@@ -495,13 +492,6 @@ class ConfigTemplateFilterForm(SavedFiltersMixin, FilterForm):
'source_id': '$data_source_id'
}
)
auto_sync_enabled = forms.NullBooleanField(
label=_('Auto sync enabled'),
required=False,
widget=forms.Select(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
tag = TagFilterField(ConfigTemplate)
mime_type = forms.CharField(
required=False,


@@ -793,7 +793,7 @@ class JournalEntryForm(NetBoxModelForm):
label=_('Kind'),
choices=JournalEntryKindChoices
)
comments = CommentField(required=True)
comments = CommentField()
class Meta:
model = JournalEntry


@@ -2,12 +2,13 @@ from typing import List
import strawberry
import strawberry_django
from strawberry_django.pagination import OffsetPaginated
from .types import *
@strawberry.type(name="Query")
class ExtrasQuery:
class ExtrasQueryV1:
config_context: ConfigContextType = strawberry_django.field()
config_context_list: List[ConfigContextType] = strawberry_django.field()
@@ -58,3 +59,57 @@ class ExtrasQuery:
event_rule: EventRuleType = strawberry_django.field()
event_rule_list: List[EventRuleType] = strawberry_django.field()
@strawberry.type(name="Query")
class ExtrasQuery:
config_context: ConfigContextType = strawberry_django.field()
config_context_list: OffsetPaginated[ConfigContextType] = strawberry_django.offset_paginated()
config_context_profile: ConfigContextProfileType = strawberry_django.field()
config_context_profile_list: OffsetPaginated[ConfigContextProfileType] = strawberry_django.offset_paginated()
config_template: ConfigTemplateType = strawberry_django.field()
config_template_list: OffsetPaginated[ConfigTemplateType] = strawberry_django.offset_paginated()
custom_field: CustomFieldType = strawberry_django.field()
custom_field_list: OffsetPaginated[CustomFieldType] = strawberry_django.offset_paginated()
custom_field_choice_set: CustomFieldChoiceSetType = strawberry_django.field()
custom_field_choice_set_list: OffsetPaginated[CustomFieldChoiceSetType] = strawberry_django.offset_paginated()
custom_link: CustomLinkType = strawberry_django.field()
custom_link_list: OffsetPaginated[CustomLinkType] = strawberry_django.offset_paginated()
export_template: ExportTemplateType = strawberry_django.field()
export_template_list: OffsetPaginated[ExportTemplateType] = strawberry_django.offset_paginated()
image_attachment: ImageAttachmentType = strawberry_django.field()
image_attachment_list: OffsetPaginated[ImageAttachmentType] = strawberry_django.offset_paginated()
saved_filter: SavedFilterType = strawberry_django.field()
saved_filter_list: OffsetPaginated[SavedFilterType] = strawberry_django.offset_paginated()
table_config: TableConfigType = strawberry_django.field()
table_config_list: OffsetPaginated[TableConfigType] = strawberry_django.offset_paginated()
journal_entry: JournalEntryType = strawberry_django.field()
journal_entry_list: OffsetPaginated[JournalEntryType] = strawberry_django.offset_paginated()
notification: NotificationType = strawberry_django.field()
notification_list: OffsetPaginated[NotificationType] = strawberry_django.offset_paginated()
notification_group: NotificationGroupType = strawberry_django.field()
notification_group_list: OffsetPaginated[NotificationGroupType] = strawberry_django.offset_paginated()
subscription: SubscriptionType = strawberry_django.field()
subscription_list: OffsetPaginated[SubscriptionType] = strawberry_django.offset_paginated()
tag: TagType = strawberry_django.field()
tag_list: OffsetPaginated[TagType] = strawberry_django.offset_paginated()
webhook: WebhookType = strawberry_django.field()
webhook_list: OffsetPaginated[WebhookType] = strawberry_django.offset_paginated()
event_rule: EventRuleType = strawberry_django.field()
event_rule_list: OffsetPaginated[EventRuleType] = strawberry_django.offset_paginated()
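A usage sketch for the paginated list fields above: the pagination argument and the totalCount/results envelope follow strawberry-django's OffsetPaginated conventions rather than anything shown in this diff, and the endpoint URL, token, and tag field names are placeholders.

import requests

# Hypothetical client call against a paginated list field; the URL, token,
# and exact response envelope are assumptions for illustration.
query = """
query {
  tag_list(pagination: {offset: 0, limit: 25}) {
    totalCount
    results { id name slug }
  }
}
"""
response = requests.post(
    "https://netbox.example.com/graphql/",
    json={"query": query},
    headers={"Authorization": "Token 0123456789abcdef0123456789abcdef01234567"},
    timeout=30,
)
print(response.json())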


@@ -2,14 +2,11 @@ import logging
import traceback
from contextlib import ExitStack
from django.db import router, transaction
from django.db import DEFAULT_DB_ALIAS
from django.db import transaction
from django.utils.translation import gettext as _
from core.signals import clear_events
from dcim.models import Device
from extras.models import Script as ScriptModel
from netbox.context_managers import event_tracking
from netbox.jobs import JobRunner
from netbox.registry import registry
from utilities.exceptions import AbortScript, AbortTransaction
@@ -45,21 +42,10 @@ class ScriptJob(JobRunner):
# A script can modify multiple models so need to do an atomic lock on
# both the default database (for non ChangeLogged models) and potentially
# any other database (for ChangeLogged models)
changeloged_db = router.db_for_write(Device)
with transaction.atomic(using=DEFAULT_DB_ALIAS):
# If branch database is different from default, wrap in a second atomic transaction
# Note: Don't add any extra code between the two atomic transactions,
# otherwise the changes might get committed to the default database
# if there are any raised exceptions.
if changeloged_db != DEFAULT_DB_ALIAS:
with transaction.atomic(using=changeloged_db):
script.output = script.run(data, commit)
if not commit:
raise AbortTransaction()
else:
script.output = script.run(data, commit)
if not commit:
raise AbortTransaction()
with transaction.atomic():
script.output = script.run(data, commit)
if not commit:
raise AbortTransaction()
except AbortTransaction:
script.log_info(message=_("Database changes have been reverted automatically."))
if script.failed:
@@ -122,14 +108,14 @@ class ScriptJob(JobRunner):
script.request = request
self.logger.debug(f"Request ID: {request.id if request else None}")
# Execute the script. If commit is True, wrap it with the event_tracking context manager to ensure we process
# change logging, event rules, etc.
if commit:
self.logger.info("Executing script (commit enabled)")
with ExitStack() as stack:
for request_processor in registry['request_processors']:
stack.enter_context(request_processor(request))
self.run_script(script, request, data, commit)
else:
self.logger.warning("Executing script (commit disabled)")
with ExitStack() as stack:
for request_processor in registry['request_processors']:
if not commit and request_processor is event_tracking:
continue
stack.enter_context(request_processor(request))
self.run_script(script, request, data, commit)
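The commit/dry-run behavior above boils down to running inside a single atomic block and raising an exception on purpose to force a rollback. A minimal standalone sketch; the script object's run()/log_info() API is assumed for illustration, and AbortTransaction is redefined locally as a stand-in for utilities.exceptions.AbortTransaction.

from django.db import transaction

class AbortTransaction(Exception):
    """Raised deliberately to roll back a dry run."""

def run_with_optional_commit(script, data, commit):
    # Sketch of the pattern above: run inside one atomic block and abort the
    # transaction when commit is disabled so nothing is persisted.
    try:
        with transaction.atomic():
            script.output = script.run(data, commit)
            if not commit:
                raise AbortTransaction()
    except AbortTransaction:
        script.log_info("Database changes have been reverted automatically.")
    return script.output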


@@ -535,15 +535,6 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
# URL
elif self.type == CustomFieldTypeChoices.TYPE_URL:
field = LaxURLField(assume_scheme='https', required=required, initial=initial)
if self.validation_regex:
field.validators = [
RegexValidator(
regex=self.validation_regex,
message=mark_safe(_("Values must match this regex: <code>{regex}</code>").format(
regex=escape(self.validation_regex)
))
)
]
# JSON
elif self.type == CustomFieldTypeChoices.TYPE_JSON:
@@ -693,13 +684,6 @@ class CustomField(CloningMixin, ExportTemplatesMixin, ChangeLoggedModel):
if self.validation_regex and not re.match(self.validation_regex, value):
raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))
# Validate URL field
elif self.type == CustomFieldTypeChoices.TYPE_URL:
if type(value) is not str:
raise ValidationError(_("Value must be a string."))
if self.validation_regex and not re.match(self.validation_regex, value):
raise ValidationError(_("Value must match regex '{regex}'").format(regex=self.validation_regex))
# Validate integer
elif self.type == CustomFieldTypeChoices.TYPE_INTEGER:
if type(value) is not int:


@@ -30,7 +30,8 @@ class CustomStoragesLoader(importlib.abc.Loader):
return None # Use default module creation
def exec_module(self, module):
with storages["scripts"].open(self.filename, 'rb') as f:
storage = storages.create_storage(storages.backends["scripts"])
with storage.open(self.filename, 'rb') as f:
code = f.read()
exec(code, module.__dict__)
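The change above swaps the process-wide cached handle for a storage instance built fresh from the same settings. A small sketch of the two access patterns, assuming a "scripts" alias is configured in STORAGES; the filename is a placeholder.

from django.core.files.storage import storages

# Cached backend instance, shared for the life of the process:
shared_backend = storages["scripts"]

# Fresh instance constructed from the same configuration on each call; useful
# if the backend keeps per-use state (the apparent motivation for the change above).
fresh_backend = storages.create_storage(storages.backends["scripts"])

with fresh_backend.open("my_script.py", "rb") as f:  # placeholder filename
    source = f.read()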


@@ -126,7 +126,7 @@ class ScriptModule(PythonModuleMixin, JobsMixin, ManagedFile):
ordered.extend(script_objects.values())
return ordered
@cached_property
@property
def module_scripts(self):
def _get_name(cls):


@@ -1,12 +1,9 @@
import inspect
import json
import logging
import os
import re
import yaml
from django import forms
from django.conf import settings
from django.core.files.storage import storages
from django.core.validators import RegexValidator
from django.utils import timezone
@@ -490,7 +487,7 @@ class BaseScript:
if self.fieldsets:
fieldsets.extend(self.fieldsets)
else:
fields = list(name for name, _ in self._get_vars().items())
fields = list(name for name, __ in self._get_vars().items())
fieldsets.append((_('Script Data'), fields))
# Append the default fieldset if defined in the Meta class
@@ -582,40 +579,6 @@ class BaseScript:
self._log(message, obj, level=LogLevelChoices.LOG_FAILURE)
self.failed = True
#
# Convenience functions
#
def load_yaml(self, filename):
"""
Return data from a YAML file
"""
# TODO: DEPRECATED: Remove this method in v4.5
self._log(
_("load_yaml is deprecated and will be removed in v4.5"),
level=LogLevelChoices.LOG_WARNING
)
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
with open(file_path, 'r') as datafile:
data = yaml.load(datafile, Loader=yaml.SafeLoader)
return data
def load_json(self, filename):
"""
Return data from a JSON file
"""
# TODO: DEPRECATED: Remove this method in v4.5
self._log(
_("load_json is deprecated and will be removed in v4.5"),
level=LogLevelChoices.LOG_WARNING
)
file_path = os.path.join(settings.SCRIPTS_ROOT, filename)
with open(file_path, 'r') as datafile:
data = json.load(datafile)
return data
#
# Legacy Report functionality
#
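If these convenience methods are removed, the same behavior is a few lines of standard library plus PyYAML. SCRIPTS_ROOT and safe YAML loading are carried over from the methods above; the helper name here is made up.

import json
import os
import yaml
from django.conf import settings

def load_script_data(filename):
    # Hypothetical replacement for the deprecated load_yaml()/load_json()
    # helpers above; chooses the parser from the file extension.
    path = os.path.join(settings.SCRIPTS_ROOT, filename)
    with open(path, 'r') as datafile:
        if filename.endswith(('.yml', '.yaml')):
            return yaml.safe_load(datafile)
        return json.load(datafile)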


@@ -632,10 +632,6 @@ class ConfigTemplateTable(NetBoxTable):
orderable=False,
verbose_name=_('Synced')
)
auto_sync_enabled = columns.BooleanColumn(
verbose_name=_('Auto Sync Enabled'),
orderable=False,
)
mime_type = tables.Column(
verbose_name=_('MIME Type')
)
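For reference, a boolean column of this shape in plain django-tables2, used here as a stand-in for NetBox's columns.BooleanColumn; the table and field names are illustrative only.

import django_tables2 as tables

class ConfigTemplateSyncTable(tables.Table):
    # Illustrative minimal table mirroring the column definition above.
    name = tables.Column()
    auto_sync_enabled = tables.BooleanColumn(
        verbose_name='Auto Sync Enabled',
        orderable=False,  # typically disabled for derived or annotated values
    )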

Some files were not shown because too many files have changed in this diff.