Mirror of https://github.com/netbox-community/netbox.git (synced 2025-08-09 09:08:15 -06:00)

Commit 3df04efecc: Merge branch 'develop' into 14829-simple_condition_without_and_or_does_not_work_in_event_rule
.github/ISSUE_TEMPLATE/bug_report.yaml (vendored, 2 lines changed)

@@ -26,7 +26,7 @@ body:
 attributes:
 label: NetBox Version
 description: What version of NetBox are you currently running?
-placeholder: v4.0.2
+placeholder: v4.0.3
 validations:
 required: true
 - type: dropdown
.github/ISSUE_TEMPLATE/feature_request.yaml (vendored, 2 lines changed)

@@ -14,7 +14,7 @@ body:
 attributes:
 label: NetBox version
 description: What version of NetBox are you currently running?
-placeholder: v4.0.2
+placeholder: v4.0.3
 validations:
 required: true
 - type: dropdown
@@ -30,4 +30,3 @@ jobs:
 This is a reminder that additional information is needed in order to further
 triage this issue. If the requested details are not provided, the issue will
 soon be closed automatically.
-start-date: 2024-05-14
.github/workflows/close-stale-issues.yml (vendored, 21 lines changed)

@@ -17,18 +17,19 @@ jobs:
 steps:
 - uses: actions/stale@v9
 with:
+# General parameters
+operations-per-run: 100
+remove-stale-when-updated: false
+
+# Issue parameters
 close-issue-message: >
 This issue has been automatically closed due to lack of activity. In an
 effort to reduce noise, please do not comment any further. Note that the
 core maintainers may elect to reopen this issue at a later date if deemed
 necessary.
-close-pr-message: >
-This PR has been automatically closed due to lack of activity.
-days-before-stale: 90
-days-before-close: 30
-exempt-issue-labels: 'status: accepted,status: blocked,status: needs milestone'
-operations-per-run: 100
-remove-stale-when-updated: false
+days-before-issue-stale: 90
+days-before-issue-close: 30
+exempt-issue-labels: 'status: accepted,status: backlog,status: blocked'
 stale-issue-label: 'pending closure'
 stale-issue-message: >
 This issue has been automatically marked as stale because it has not had
@@ -38,6 +39,12 @@ jobs:
 process by "bumping" the issue; doing so will result in its immediate closure
 and you may be barred from participating in any future discussions. Please see
 our [contributing guide](https://github.com/netbox-community/netbox/blob/develop/CONTRIBUTING.md).
+
+# Pull request parameters
+close-pr-message: >
+This PR has been automatically closed due to lack of activity.
+days-before-pr-stale: 15
+days-before-pr-close: 15
 stale-pr-label: 'pending closure'
 stale-pr-message: >
 This PR has been automatically marked as stale because it has not had
.gitignore (vendored, 1 line changed)

@@ -21,6 +21,7 @@ local_settings.py
 !upgrade.sh
 fabfile.py
 gunicorn.py
+uwsgi.ini
 netbox.log
 netbox.pid
 .DS_Store
@@ -179,6 +179,9 @@
 "usb-micro-ab",
 "usb-3-b",
 "usb-3-micro-b",
+"molex-micro-fit-1x2",
+"molex-micro-fit-2x2",
+"molex-micro-fit-2x4",
 "dc-terminal",
 "saf-d-grid",
 "neutrik-powercon-20",
@@ -281,6 +284,9 @@
 "usb-a",
 "usb-micro-b",
 "usb-c",
+"molex-micro-fit-1x2",
+"molex-micro-fit-2x2",
+"molex-micro-fit-2x4",
 "dc-terminal",
 "hdot-cx",
 "saf-d-grid",
@@ -375,6 +381,8 @@
 "gsm",
 "cdma",
 "lte",
+"4g",
+"5g",
 "sonet-oc3",
 "sonet-oc12",
 "sonet-oc48",
@@ -408,12 +416,15 @@
 "e3",
 "xdsl",
 "docsis",
+"bpon",
+"epon",
+"10g-epon",
 "gpon",
 "xg-pon",
 "xgs-pon",
 "ng-pon2",
-"epon",
-"10g-epon",
+"25g-pon",
+"50g-pon",
 "cisco-stackwise",
 "cisco-stackwise-plus",
 "cisco-flexstack",
docs/_theme/main.html (vendored, 4 lines changed)

@@ -2,8 +2,8 @@

 {% block site_meta %}
 {{ super() }}
-{# Disable search indexing unless we're building for ReadTheDocs #}
-{% if not config.extra.readthedocs %}
+{# Disable search indexing unless we're building for public consumption #}
+{% if not config.extra.build_public %}
 <meta name="robots" content="noindex">
 {% endif %}
 {% endblock %}
@@ -94,15 +94,25 @@ REDIS = {
 }
 ```

-!!! note
-If you are upgrading from a NetBox release older than v2.7.0, please note that the Redis connection configuration
-settings have changed. Manual modification to bring the `REDIS` section inline with the above specification is
-necessary
-
 !!! warning
 It is highly recommended to keep the task and cache databases separate. Using the same database number on the
 same Redis instance for both may result in queued background tasks being lost during cache flushing events.

+### UNIX Socket Support
+
+Redis may alternatively be configured by specifying a complete URL instead of individual components. This approach supports the use of a UNIX socket connection. For example:
+
+```python
+REDIS = {
+'tasks': {
+'URL': 'unix:///run/redis-netbox/redis.sock?db=0'
+},
+'caching': {
+'URL': 'unix:///run/redis-netbox/redis.sock?db=1'
+},
+}
+```
+
 ### Using Redis Sentinel

 If you are using [Redis Sentinel](https://redis.io/topics/sentinel) for high-availability purposes, there is minimal
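The documentation hunk above only shows the configuration syntax. As a quick orientation for what a URL-style entry resolves to, the sketch below uses the redis-py client directly; it is not part of the diff, and the socket path is simply the example path from the documentation.

```python
# Minimal sketch (not NetBox code): a unix:// URL, including the db selector in
# the query string, maps directly onto a socket connection in redis-py.
import redis

conn = redis.Redis.from_url("unix:///run/redis-netbox/redis.sock?db=0")
conn.ping()  # raises a ConnectionError if the socket is unreachable
```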
@@ -86,15 +86,7 @@ This will automatically update the schema file at `contrib/generated_schema.json

 ### Update & Compile Translations

-Log into [Transifex](https://app.transifex.com/netbox-community/netbox/dashboard/) to download the updated string maps. Download the resource (portable object, or `.po`) file for each language and save them to `netbox/translations/$lang/LC_MESSAGES/django.po`, overwriting the current files. (Be sure to click the **Download for use** link.)
-
-
-
-Once the resource files for all languages have been updated, compile the machine object (`.mo`) files using the `compilemessages` management command:
-
-```nohighlight
-./manage.py compilemessages
-```
+Updated language translations should be pulled from [Transifex](https://app.transifex.com/netbox-community/netbox/dashboard/) and re-compiled for each new release. Follow the documented process for [updating translated strings](./translations.md#updating-translated-strings) to do this.

 ### Update Version and Changelog

@@ -6,17 +6,38 @@ All language translations in NetBox are generated from the source file found at

 Reviewers log into Transifex and navigate to their designated language(s) to translate strings. The initial translation for most strings will be machine-generated via the AWS Translate service. Human reviewers are responsible for reviewing these translations and making corrections where necessary.

-Immediately prior to each NetBox release, the translation maps for all completed languages will be downloaded from Transifex, compiled, and checked into the NetBox code base by a maintainer.
-
 ## Updating Translation Sources

-To update the English `.po` file from which all translations are derived, use the `makemessages` management command:
+To update the English `.po` file from which all translations are derived, use the `makemessages` management command (ignoring the `project-static/` directory):

 ```nohighlight
-./manage.py makemessages -l en
+./manage.py makemessages -l en -i "project-static/*"
 ```

-Then, commit the change and push to the `develop` branch on GitHub. After some time, any new strings will appear for translation on Transifex automatically.
+Then, commit the change and push to the `develop` branch on GitHub. Any new strings will appear for translation on Transifex automatically.

+## Updating Translated Strings
+
+Typically, translated strings need to be updated only as part of the NetBox [release process](./release-checklist.md).
+
+To update translated strings, start by initiating a sync from Transifex. From the Transifex dashboard, navigate to Settings > Integrations > GitHub > Manage, and click the **Manual Sync** button at top right.
+
+
+
+Enter a threshold percentage of 1 (to ensure all translations are captured) and select the `develop` branch, then click **Sync**. This will initiate a pull request to GitHub to update any newly modified translation (`.po`) files.
+
+!!! tip
+The new PR should appear within a few minutes. If it does not, check that there are in fact new translations to be added.
+
+
+
+Once the PR has been merged, the updated strings need to be compiled into new `.mo` files so they can be used by the application. Update the `develop` branch locally to pull in the changes from the Transifex PR, then run Django's [`compilemessages`](https://docs.djangoproject.com/en/stable/ref/django-admin/#django-admin-compilemessages) management command:
+
+```nohighlight
+./manage.py compilemessages
+```
+
+Once any new `.mo` files have been generated, they need to be committed and pushed back up to GitHub. (Again, this is typically done as part of publishing a new NetBox release.)
+
 ## Proposing New Languages

Binary file not shown. (Before: 54 KiB)

BIN docs/media/development/transifex_pull_request.png (new file)
Binary file not shown. (After: 108 KiB)

BIN docs/media/development/transifex_sync.png (new file)
Binary file not shown. (After: 42 KiB)
@@ -89,13 +89,13 @@ This form facilitates editing multiple objects in bulk. Unlike a model form, thi
 from django import forms
 from django.utils.translation import gettext_lazy as _
 from dcim.models import Site
-from netbox.forms import NetBoxModelImportForm
+from netbox.forms import NetBoxModelBulkEditForm
 from utilities.forms import CommentField, DynamicModelChoiceField
 from utilities.forms.rendering import FieldSet
 from .models import MyModel, MyModelStatusChoices


-class MyModelEditForm(NetBoxModelImportForm):
+class MyModelBulkEditForm(NetBoxModelBulkEditForm):
 name = forms.CharField(
 required=False
 )
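To make the corrected example easier to read in isolation, here is a slightly fuller sketch. The `model`, `fieldsets`, and `nullable_fields` attributes are assumptions based on the general NetBoxModelBulkEditForm pattern and are not part of this diff; `MyModel` is the hypothetical model used by the docs page.

```python
# Sketch only; attributes beyond those shown in the diff are assumptions.
from django import forms
from django.utils.translation import gettext_lazy as _

from netbox.forms import NetBoxModelBulkEditForm
from utilities.forms import CommentField
from utilities.forms.rendering import FieldSet
from .models import MyModel


class MyModelBulkEditForm(NetBoxModelBulkEditForm):
    model = MyModel  # the model being edited in bulk (assumed attribute)

    name = forms.CharField(
        required=False
    )
    comments = CommentField()

    fieldsets = (
        FieldSet('name', name=_('Attributes')),  # assumed grouping
    )
    nullable_fields = ('name', 'comments')  # fields that may be cleared in bulk (assumed)
```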
@@ -1,22 +1,38 @@
 # NetBox v4.0

-## v4.0.3 (FUTURE)
+## v4.0.4 (FUTURE)

+---
+
+## v4.0.3 (2024-05-22)
+
 ### Enhancements

 * [#12984](https://github.com/netbox-community/netbox/issues/12984) - Add Molex Micro-Fit power port & outlet types
+* [#13764](https://github.com/netbox-community/netbox/issues/13764) - Enable contact assignments for aggregates, prefixes, IP ranges, and IP addresses
 * [#14639](https://github.com/netbox-community/netbox/issues/14639) - Add Ukrainian translation support
+* [#14653](https://github.com/netbox-community/netbox/issues/14653) - Add an inventory items table column for all device components
 * [#14686](https://github.com/netbox-community/netbox/issues/14686) - Add German translation support
 * [#14855](https://github.com/netbox-community/netbox/issues/14855) - Add Chinese translation support
+* [#14948](https://github.com/netbox-community/netbox/issues/14948) - Introduce the `has_virtual_device_context` filter for devices
 * [#15353](https://github.com/netbox-community/netbox/issues/15353) - Improve error reporting when custom scripts fail to load
 * [#15496](https://github.com/netbox-community/netbox/issues/15496) - Implement dedicated views for management of circuit terminations
+* [#15603](https://github.com/netbox-community/netbox/issues/15603) - Add 4G & 5G cellular interface types
+* [#15962](https://github.com/netbox-community/netbox/issues/15962) - Enable UNIX socket connections for Redis

 ### Bug Fixes

 * [#13293](https://github.com/netbox-community/netbox/issues/13293) - Limit interface selector for IP address to current device/VM
 * [#14953](https://github.com/netbox-community/netbox/issues/14953) - Ensure annotated count fields are present in REST API response data when creating new objects
 * [#14982](https://github.com/netbox-community/netbox/issues/14982) - Fix OpenAPI schema definition for SerializedPKRelatedFields
+* [#15082](https://github.com/netbox-community/netbox/issues/15082) - Strip whitespace from choice values & labels when creating a custom field choice set
 * [#16138](https://github.com/netbox-community/netbox/issues/16138) - Fix support for referencing users & groups in object permissions
+* [#16145](https://github.com/netbox-community/netbox/issues/16145) - Restore ability to reference custom scripts via module & name in REST API
+* [#16164](https://github.com/netbox-community/netbox/issues/16164) - Correct display of selected values in UI when filtering object list by a null value
+* [#16173](https://github.com/netbox-community/netbox/issues/16173) - Fix TypeError exception when viewing object list with no pagination preference defined
+* [#16228](https://github.com/netbox-community/netbox/issues/16228) - Fix permissions enforcement for GraphQL queries of users & groups
+* [#16232](https://github.com/netbox-community/netbox/issues/16232) - Preserve bulk action checkboxes on dynamic tables when using pagination
+* [#16240](https://github.com/netbox-community/netbox/issues/16240) - Fixed NoReverseMatch exception when adding circuit terminations to an object counts dashboard widget

 ---

@@ -42,7 +42,7 @@ plugins:
 show_root_toc_entry: false
 show_source: false
 extra:
-readthedocs: !ENV READTHEDOCS
+build_public: !ENV BUILD_PUBLIC
 social:
 - icon: fontawesome/brands/github
 link: https://github.com/netbox-community/netbox
@@ -48,6 +48,7 @@ class ProviderIndex(SearchIndex):
 display_attrs = ('description',)


+@register_search
 class ProviderAccountIndex(SearchIndex):
 model = models.ProviderAccount
 fields = (
@@ -828,6 +828,7 @@ class InterfaceTypeChoices(ChoiceSet):
 TYPE_100ME_FIXED = '100base-tx'
 TYPE_100ME_T1 = '100base-t1'
 TYPE_1GE_FIXED = '1000base-t'
+TYPE_1GE_TX_FIXED = '1000base-tx'
 TYPE_1GE_GBIC = '1000base-x-gbic'
 TYPE_1GE_SFP = '1000base-x-sfp'
 TYPE_2GE_FIXED = '2.5gbase-t'
@@ -892,6 +893,8 @@ class InterfaceTypeChoices(ChoiceSet):
 TYPE_GSM = 'gsm'
 TYPE_CDMA = 'cdma'
 TYPE_LTE = 'lte'
+TYPE_4G = '4g'
+TYPE_5G = '5g'

 # SONET
 TYPE_SONET_OC3 = 'sonet-oc3'
@@ -939,12 +942,15 @@ class InterfaceTypeChoices(ChoiceSet):
 TYPE_DOCSIS = 'docsis'

 # PON
+TYPE_BPON = 'bpon'
+TYPE_EPON = 'epon'
+TYPE_10G_EPON = '10g-epon'
 TYPE_GPON = 'gpon'
 TYPE_XG_PON = 'xg-pon'
 TYPE_XGS_PON = 'xgs-pon'
 TYPE_NG_PON2 = 'ng-pon2'
-TYPE_EPON = 'epon'
-TYPE_10G_EPON = '10g-epon'
+TYPE_25G_PON = '25g-pon'
+TYPE_50G_PON = '50g-pon'

 # Stacking
 TYPE_STACKWISE = 'cisco-stackwise'
@@ -982,6 +988,7 @@ class InterfaceTypeChoices(ChoiceSet):
 (TYPE_100ME_FIXED, '100BASE-TX (10/100ME)'),
 (TYPE_100ME_T1, '100BASE-T1 (10/100ME Single Pair)'),
 (TYPE_1GE_FIXED, '1000BASE-T (1GE)'),
+(TYPE_1GE_TX_FIXED, '1000BASE-TX (1GE)'),
 (TYPE_2GE_FIXED, '2.5GBASE-T (2.5GE)'),
 (TYPE_5GE_FIXED, '5GBASE-T (5GE)'),
 (TYPE_10GE_FIXED, '10GBASE-T (10GE)'),
@@ -1060,6 +1067,8 @@ class InterfaceTypeChoices(ChoiceSet):
 (TYPE_GSM, 'GSM'),
 (TYPE_CDMA, 'CDMA'),
 (TYPE_LTE, 'LTE'),
+(TYPE_4G, '4G'),
+(TYPE_5G, '5G'),
 )
 ),
 (
@@ -1128,12 +1137,15 @@ class InterfaceTypeChoices(ChoiceSet):
 (
 'PON',
 (
-(TYPE_GPON, 'GPON (2.5 Gbps / 1.25 Gps)'),
+(TYPE_BPON, 'BPON (622 Mbps / 155 Mbps)'),
+(TYPE_EPON, 'EPON (1 Gbps)'),
+(TYPE_10G_EPON, '10G-EPON (10 Gbps)'),
+(TYPE_GPON, 'GPON (2.5 Gbps / 1.25 Gbps)'),
 (TYPE_XG_PON, 'XG-PON (10 Gbps / 2.5 Gbps)'),
 (TYPE_XGS_PON, 'XGS-PON (10 Gbps)'),
 (TYPE_NG_PON2, 'NG-PON2 (TWDM-PON) (4x10 Gbps)'),
-(TYPE_EPON, 'EPON (1 Gbps)'),
-(TYPE_10G_EPON, '10G-EPON (10 Gbps)'),
+(TYPE_25G_PON, '25G-PON (25 Gbps)'),
+(TYPE_50G_PON, '50G-PON (50 Gbps)'),
 )
 ),
 (
@@ -1100,6 +1100,10 @@ class DeviceFilterSet(
 queryset=IPAddress.objects.all(),
 label=_('OOB IP (ID)'),
 )
+has_virtual_device_context = django_filters.BooleanFilter(
+method='_has_virtual_device_context',
+label=_('Has virtual device context'),
+)

 class Meta:
 model = Device
@@ -1176,6 +1180,12 @@ class DeviceFilterSet(
 def _device_bays(self, queryset, name, value):
 return queryset.exclude(devicebays__isnull=value)

+def _has_virtual_device_context(self, queryset, name, value):
+params = Q(vdcs__isnull=False)
+if value:
+return queryset.filter(params).distinct()
+return queryset.exclude(params)
+

 class VirtualDeviceContextFilterSet(NetBoxModelFilterSet, TenancyFilterSet, PrimaryIPFilterSet):
 device_id = django_filters.ModelMultipleChoiceFilter(
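As an orientation aid, the snippet below exercises the new filter from a Django shell. It is illustrative only: the parameter values mirror the test case later in this diff, and an already-populated database is assumed.

```python
# Illustrative only (not part of the diff).
from dcim.filtersets import DeviceFilterSet
from dcim.models import Device

# Devices with at least one virtual device context assigned
with_vdc = DeviceFilterSet({'has_virtual_device_context': 'true'}, queryset=Device.objects.all()).qs

# Devices with no virtual device contexts
without_vdc = DeviceFilterSet({'has_virtual_device_context': 'false'}, queryset=Device.objects.all()).qs

print(with_vdc.count(), without_vdc.count())
```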
@@ -657,6 +657,7 @@ class DeviceFilterForm(
 ),
 FieldSet(
 'has_primary_ip', 'has_oob_ip', 'virtual_chassis_member', 'config_template_id', 'local_context_data',
+'has_virtual_device_context',
 name=_('Miscellaneous')
 )
 )
@@ -813,6 +814,13 @@ class DeviceFilterForm(
 choices=BOOLEAN_WITH_BLANK_CHOICES
 )
 )
+has_virtual_device_context = forms.NullBooleanField(
+required=False,
+label=_('Has virtual device contexts'),
+widget=forms.Select(
+choices=BOOLEAN_WITH_BLANK_CHOICES
+)
+)
 tag = TagFilterField(model)

@@ -313,6 +313,10 @@ class ModularDeviceComponentTable(DeviceComponentTable):
 verbose_name=_('Module'),
 linkify=True
 )
+inventory_items = columns.ManyToManyColumn(
+linkify_item=True,
+verbose_name=_('Inventory Items'),
+)


 class CableTerminationTable(NetBoxTable):
@@ -366,7 +370,7 @@ class ConsolePortTable(ModularDeviceComponentTable, PathEndpointTable):
 model = models.ConsolePort
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'speed', 'description',
-'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'tags', 'created', 'last_updated',
+'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'inventory_items', 'tags', 'created', 'last_updated',
 )
 default_columns = ('pk', 'name', 'device', 'label', 'type', 'speed', 'description')

@@ -410,7 +414,7 @@ class ConsoleServerPortTable(ModularDeviceComponentTable, PathEndpointTable):
 model = models.ConsoleServerPort
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'speed', 'description',
-'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'tags', 'created', 'last_updated',
+'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'inventory_items', 'tags', 'created', 'last_updated',
 )
 default_columns = ('pk', 'name', 'device', 'label', 'type', 'speed', 'description')

@@ -461,8 +465,8 @@ class PowerPortTable(ModularDeviceComponentTable, PathEndpointTable):
 model = models.PowerPort
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'description', 'mark_connected',
-'maximum_draw', 'allocated_draw', 'cable', 'cable_color', 'link_peer', 'connection', 'tags', 'created',
-'last_updated',
+'maximum_draw', 'allocated_draw', 'cable', 'cable_color', 'link_peer', 'connection', 'inventory_items',
+'tags', 'created', 'last_updated',
 )
 default_columns = ('pk', 'name', 'device', 'label', 'type', 'maximum_draw', 'allocated_draw', 'description')

@@ -513,8 +517,8 @@ class PowerOutletTable(ModularDeviceComponentTable, PathEndpointTable):
 model = models.PowerOutlet
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'description', 'power_port',
-'feed_leg', 'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'tags', 'created',
-'last_updated',
+'feed_leg', 'mark_connected', 'cable', 'cable_color', 'link_peer', 'connection', 'inventory_items',
+'tags', 'created', 'last_updated',
 )
 default_columns = ('pk', 'name', 'device', 'label', 'type', 'power_port', 'feed_leg', 'description')

@@ -618,10 +622,6 @@ class InterfaceTable(ModularDeviceComponentTable, BaseInterfaceTable, PathEndpoi
 verbose_name=_('VRF'),
 linkify=True
 )
-inventory_items = columns.ManyToManyColumn(
-linkify_item=True,
-verbose_name=_('Inventory Items'),
-)
 tags = columns.TagColumn(
 url_name='dcim:interface_list'
 )
@@ -713,8 +713,8 @@ class FrontPortTable(ModularDeviceComponentTable, CableTerminationTable):
 model = models.FrontPort
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'color', 'rear_port',
-'rear_port_position', 'description', 'mark_connected', 'cable', 'cable_color', 'link_peer', 'tags',
-'created', 'last_updated',
+'rear_port_position', 'description', 'mark_connected', 'cable', 'cable_color', 'link_peer',
+'inventory_items', 'tags', 'created', 'last_updated',
 )
 default_columns = (
 'pk', 'name', 'device', 'label', 'type', 'color', 'rear_port', 'rear_port_position', 'description',
@@ -766,7 +766,7 @@ class RearPortTable(ModularDeviceComponentTable, CableTerminationTable):
 model = models.RearPort
 fields = (
 'pk', 'id', 'name', 'device', 'module_bay', 'module', 'label', 'type', 'color', 'positions', 'description',
-'mark_connected', 'cable', 'cable_color', 'link_peer', 'tags', 'created', 'last_updated',
+'mark_connected', 'cable', 'cable_color', 'link_peer', 'inventory_items', 'tags', 'created', 'last_updated',
 )
 default_columns = ('pk', 'name', 'device', 'label', 'type', 'color', 'description')

@@ -2103,6 +2103,9 @@ class DeviceTestCase(TestCase, ChangeLoggedFilterSetTests):
 Device.objects.filter(pk=devices[0].pk).update(virtual_chassis=virtual_chassis, vc_position=1, vc_priority=1)
 Device.objects.filter(pk=devices[1].pk).update(virtual_chassis=virtual_chassis, vc_position=2, vc_priority=2)

+# VirtualDeviceContext assignment for filtering
+VirtualDeviceContext.objects.create(device=devices[0], name="VDC 1", identifier=1, status='active')
+
 def test_q(self):
 params = {'q': 'foobar1'}
 self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
@@ -2336,6 +2339,12 @@ class DeviceTestCase(TestCase, ChangeLoggedFilterSetTests):
 params = {'tenant_group': [tenant_groups[0].slug, tenant_groups[1].slug]}
 self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)

+def test_has_virtual_device_context(self):
+params = {'has_virtual_device_context': 'true'}
+self.assertEqual(self.filterset(params, self.queryset).qs.count(), 1)
+params = {'has_virtual_device_context': 'false'}
+self.assertEqual(self.filterset(params, self.queryset).qs.count(), 2)
+

 class ModuleTestCase(TestCase, ChangeLoggedFilterSetTests):
 queryset = Module.objects.all()
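Because NetBox filter sets also back the REST API list endpoints, the same parameter should be usable as a query string on `/api/dcim/devices/`. The call below is a hypothetical client example (host and token are placeholders), not something taken from this diff.

```python
# Hypothetical REST API usage of the new filter; host and token are placeholders.
import requests

resp = requests.get(
    "https://netbox.example.com/api/dcim/devices/",
    params={"has_virtual_device_context": "true"},
    headers={"Authorization": "Token 0123456789abcdef"},
)
resp.raise_for_status()
print(resp.json()["count"])  # number of devices with at least one VDC
```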
@@ -30,6 +30,16 @@ class ObjectChangeSerializer(BaseModelSerializer):
 changed_object = serializers.SerializerMethodField(
 read_only=True
 )
+prechange_data = serializers.JSONField(
+source='prechange_data_clean',
+read_only=True,
+allow_null=True
+)
+postchange_data = serializers.JSONField(
+source='postchange_data_clean',
+read_only=True,
+allow_null=True
+)

 class Meta:
 model = ObjectChange
@@ -43,7 +43,7 @@ class JournalEntrySerializer(NetBoxModelSerializer):
 def validate(self, data):

 # Validate that the parent object exists
-if 'assigned_object_type' in data and 'assigned_object_id' in data:
+if not self.nested and 'assigned_object_type' in data and 'assigned_object_id' in data:
 try:
 data['assigned_object_type'].get_object_for_this_type(id=data['assigned_object_id'])
 except ObjectDoesNotExist:
@@ -51,10 +51,7 @@ class JournalEntrySerializer(NetBoxModelSerializer):
 f"Invalid assigned_object: {data['assigned_object_type']} ID {data['assigned_object_id']}"
 )

-# Enforce model validation
-super().validate(data)
-
-return data
+return super().validate(data)

 @extend_schema_field(serializers.JSONField(allow_null=True))
 def get_assigned_object(self, instance):
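The second hunk is the general Django REST Framework contract in miniature: `validate()` should return whatever `super().validate()` returns rather than the raw input. A generic sketch of that pattern (not NetBox code, serializer name is hypothetical):

```python
# Generic DRF pattern only; ExampleSerializer is hypothetical.
from rest_framework import serializers


class ExampleSerializer(serializers.Serializer):
    name = serializers.CharField()

    def validate(self, data):
        if not data["name"].strip():
            raise serializers.ValidationError("name must not be blank")
        # Let the parent class finish validation and return its result
        return super().validate(data)
```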
@@ -1,3 +1,4 @@
+from django.http import Http404
 from django.shortcuts import get_object_or_404
 from django_rq.queues import get_connection
 from rest_framework import status
@@ -215,21 +216,32 @@ class ScriptViewSet(ModelViewSet):
 _ignore_model_permissions = True
 lookup_value_regex = '[^/]+' # Allow dots

+def _get_script(self, pk):
+# If pk is numeric, retrieve script by ID
+if pk.isnumeric():
+return get_object_or_404(self.queryset, pk=pk)
+
+# Default to retrieval by module & name
+try:
+module_name, script_name = pk.split('.', maxsplit=1)
+except ValueError:
+raise Http404
+return get_object_or_404(self.queryset, module__file_path=f'{module_name}.py', name=script_name)
+
 def retrieve(self, request, pk):
-script = get_object_or_404(self.queryset, pk=pk)
+script = self._get_script(pk)
 serializer = serializers.ScriptDetailSerializer(script, context={'request': request})

 return Response(serializer.data)

 def post(self, request, pk):
 """
-Run a Script identified by the id and return the pending Job as the result
+Run a Script identified by its numeric PK or module & name and return the pending Job as the result
 """

 if not request.user.has_perm('extras.run_script'):
 raise PermissionDenied("This user does not have permission to run scripts.")

-script = get_object_or_404(self.queryset, pk=pk)
+script = self._get_script(pk)
 input_serializer = serializers.ScriptInputSerializer(
 data=request.data,
 context={'script': script}
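This restores addressing a script as `<module>.<ScriptName>` in the URL, as `_get_script()` shows. A hypothetical client call might look like the sketch below; the host, token, module name, script name, and request body fields are placeholders, and the endpoint path assumes the standard `/api/extras/scripts/` route.

```python
# Hypothetical client usage; names and token are placeholders.
import requests

url = "https://netbox.example.com/api/extras/scripts/my_scripts.MyCustomScript/"
headers = {"Authorization": "Token 0123456789abcdef"}

# Retrieve the script detail by module & name instead of numeric ID
detail = requests.get(url, headers=headers)
detail.raise_for_status()

# Run the script; the response describes the pending Job
run = requests.post(url, json={"data": {}, "commit": False}, headers=headers)
run.raise_for_status()
print(run.json())
```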
@@ -122,7 +122,7 @@ class CustomFieldChoiceSetForm(forms.ModelForm):
 label = label.replace('\\:', ':')
 except ValueError:
 value, label = line, line
-data.append((value, label))
+data.append((value.strip(), label.strip()))
 return data

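The one-line fix is easiest to see with a simplified, standalone version of the parsing logic. This sketch is not the real form code (which also handles escaped colons); it only demonstrates why stripping whitespace matters for `value:label` input lines.

```python
# Simplified sketch of the "value:label" parsing, not the actual form code.
def parse_choice(line: str) -> tuple[str, str]:
    try:
        value, label = line.split(':', 1)
    except ValueError:
        value, label = line, line  # no explicit label; reuse the value
    return value.strip(), label.strip()


assert parse_choice("4 : Four") == ("4", "Four")
assert parse_choice(" standalone ") == ("standalone", "standalone")
```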
@@ -1,12 +1,17 @@
+from functools import cached_property
+
 from django.conf import settings
 from django.contrib.contenttypes.fields import GenericForeignKey
 from django.core.exceptions import ValidationError
 from django.db import models
 from django.urls import reverse
 from django.utils.translation import gettext_lazy as _
+from mptt.models import MPTTModel

 from core.models import ObjectType
 from extras.choices import *
+from netbox.models.features import ChangeLoggingMixin
+from utilities.data import shallow_compare_dict
 from ..querysets import ObjectChangeQuerySet

 __all__ = (
@@ -136,6 +141,71 @@ class ObjectChange(models.Model):
 def get_action_color(self):
 return ObjectChangeActionChoices.colors.get(self.action)

-@property
+@cached_property
 def has_changes(self):
 return self.prechange_data != self.postchange_data
+
+@cached_property
+def diff_exclude_fields(self):
+"""
+Return a set of attributes which should be ignored when calculating a diff
+between the pre- and post-change data. (For instance, it would not make
+sense to compare the "last updated" times as these are expected to differ.)
+"""
+model = self.changed_object_type.model_class()
+attrs = set()
+
+# Exclude auto-populated change tracking fields
+if issubclass(model, ChangeLoggingMixin):
+attrs.update({'created', 'last_updated'})
+
+# Exclude MPTT-internal fields
+if issubclass(model, MPTTModel):
+attrs.update({'level', 'lft', 'rght', 'tree_id'})
+
+return attrs
+
+def get_clean_data(self, prefix):
+"""
+Return only the pre-/post-change attributes which are relevant for calculating a diff.
+"""
+ret = {}
+change_data = getattr(self, f'{prefix}_data') or {}
+for k, v in change_data.items():
+if k not in self.diff_exclude_fields and not k.startswith('_'):
+ret[k] = v
+return ret
+
+@cached_property
+def prechange_data_clean(self):
+return self.get_clean_data('prechange')
+
+@cached_property
+def postchange_data_clean(self):
+return self.get_clean_data('postchange')
+
+def diff(self):
+"""
+Return a dictionary of pre- and post-change values for attributes which have changed.
+"""
+prechange_data = self.prechange_data_clean
+postchange_data = self.postchange_data_clean
+
+# Determine which attributes have changed
+if self.action == ObjectChangeActionChoices.ACTION_CREATE:
+changed_attrs = sorted(postchange_data.keys())
+elif self.action == ObjectChangeActionChoices.ACTION_DELETE:
+changed_attrs = sorted(prechange_data.keys())
+else:
+# TODO: Support deep (recursive) comparison
+changed_data = shallow_compare_dict(prechange_data, postchange_data)
+changed_attrs = sorted(changed_data.keys())
+
+return {
+'pre': {
+k: prechange_data.get(k) for k in changed_attrs
+},
+'post': {
+k: postchange_data.get(k) for k in changed_attrs
+},
+}
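A brief illustration of how the new properties are meant to be consumed; this usage is inferred from the methods above and from the view changes later in this diff, and the import path is assumed for NetBox 4.0.

```python
# Illustrative usage; assumes an existing ObjectChange record.
from extras.models import ObjectChange  # import path assumed

oc = ObjectChange.objects.filter(action='update').first()
if oc:
    before = oc.prechange_data_clean   # excluded/private keys (e.g. '_name') removed
    after = oc.postchange_data_clean
    delta = oc.diff()
    print(delta['pre'])                # changed attributes with their pre-change values
    print(delta['post'])               # the same attributes with their post-change values
```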
@@ -75,6 +75,10 @@ class ChangeLogViewTest(ModelViewTestCase):
 self.assertEqual(oc.postchange_data['custom_fields']['cf2'], form_data['cf_cf2'])
 self.assertEqual(oc.postchange_data['tags'], ['Tag 1', 'Tag 2'])

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.postchange_data)
+self.assertNotIn('_name', oc.postchange_data_clean)
+
 def test_update_object(self):
 site = Site(name='Site 1', slug='site-1')
 site.save()
@@ -112,6 +116,12 @@ class ChangeLogViewTest(ModelViewTestCase):
 self.assertEqual(oc.postchange_data['custom_fields']['cf2'], form_data['cf_cf2'])
 self.assertEqual(oc.postchange_data['tags'], ['Tag 3'])

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.prechange_data)
+self.assertNotIn('_name', oc.prechange_data_clean)
+self.assertIn('_name', oc.postchange_data)
+self.assertNotIn('_name', oc.postchange_data_clean)
+
 def test_delete_object(self):
 site = Site(
 name='Site 1',
@@ -142,6 +152,10 @@ class ChangeLogViewTest(ModelViewTestCase):
 self.assertEqual(oc.prechange_data['tags'], ['Tag 1', 'Tag 2'])
 self.assertEqual(oc.postchange_data, None)

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.prechange_data)
+self.assertNotIn('_name', oc.prechange_data_clean)
+
 def test_bulk_update_objects(self):
 sites = (
 Site(name='Site 1', slug='site-1', status=SiteStatusChoices.STATUS_ACTIVE),
@@ -338,6 +352,10 @@ class ChangeLogAPITest(APITestCase):
 self.assertEqual(oc.postchange_data['custom_fields'], data['custom_fields'])
 self.assertEqual(oc.postchange_data['tags'], ['Tag 1', 'Tag 2'])

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.postchange_data)
+self.assertNotIn('_name', oc.postchange_data_clean)
+
 def test_update_object(self):
 site = Site(name='Site 1', slug='site-1')
 site.save()
@@ -370,6 +388,12 @@ class ChangeLogAPITest(APITestCase):
 self.assertEqual(oc.postchange_data['custom_fields'], data['custom_fields'])
 self.assertEqual(oc.postchange_data['tags'], ['Tag 3'])

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.prechange_data)
+self.assertNotIn('_name', oc.prechange_data_clean)
+self.assertIn('_name', oc.postchange_data)
+self.assertNotIn('_name', oc.postchange_data_clean)
+
 def test_delete_object(self):
 site = Site(
 name='Site 1',
@@ -398,6 +422,10 @@ class ChangeLogAPITest(APITestCase):
 self.assertEqual(oc.prechange_data['tags'], ['Tag 1', 'Tag 2'])
 self.assertEqual(oc.postchange_data, None)

+# Check that private attributes were included in raw data but not display data
+self.assertIn('_name', oc.prechange_data)
+self.assertNotIn('_name', oc.prechange_data_clean)
+
 def test_bulk_create_objects(self):
 data = (
 {
@@ -723,15 +723,15 @@ class ObjectChangeView(generic.ObjectView):

 if not instance.prechange_data and instance.action in ['update', 'delete'] and prev_change:
 non_atomic_change = True
-prechange_data = prev_change.postchange_data
+prechange_data = prev_change.postchange_data_clean
 else:
 non_atomic_change = False
-prechange_data = instance.prechange_data
+prechange_data = instance.prechange_data_clean

 if prechange_data and instance.postchange_data:
 diff_added = shallow_compare_dict(
 prechange_data or dict(),
-instance.postchange_data or dict(),
+instance.postchange_data_clean or dict(),
 exclude=['last_updated'],
 )
 diff_removed = {
@@ -168,6 +168,7 @@ class PrefixFilterForm(TenancyFilterForm, NetBoxModelFilterSetForm):
 'within_include', 'family', 'status', 'role_id', 'mask_length', 'is_pool', 'mark_utilized',
 name=_('Addressing')
 ),
+FieldSet('vlan_id', name=_('VLAN Assignment')),
 FieldSet('vrf_id', 'present_in_vrf_id', name=_('VRF')),
 FieldSet('region_id', 'site_group_id', 'site_id', name=_('Location')),
 FieldSet('tenant_group_id', 'tenant_id', name=_('Tenant')),
@@ -249,6 +250,12 @@ class PrefixFilterForm(TenancyFilterForm, NetBoxModelFilterSetForm):
 choices=BOOLEAN_WITH_BLANK_CHOICES
 )
 )
+vlan_id = DynamicModelMultipleChoiceField(
+queryset=VLAN.objects.all(),
+required=False,
+label=_('VLAN'),
+)
+
 tag = TagFilterField(model)

@@ -18,6 +18,7 @@ from ipam.querysets import PrefixQuerySet
 from ipam.validators import DNSValidator
 from netbox.config import get_config
 from netbox.models import OrganizationalModel, PrimaryModel
+from netbox.models.features import ContactsMixin

 __all__ = (
 'Aggregate',
@@ -74,7 +75,7 @@ class RIR(OrganizationalModel):
 return reverse('ipam:rir', args=[self.pk])


-class Aggregate(GetAvailablePrefixesMixin, PrimaryModel):
+class Aggregate(ContactsMixin, GetAvailablePrefixesMixin, PrimaryModel):
 """
 An aggregate exists at the root level of the IP address space hierarchy in NetBox. Aggregates are used to organize
 the hierarchy and track the overall utilization of available address space. Each Aggregate is assigned to a RIR.
@@ -206,7 +207,7 @@ class Role(OrganizationalModel):
 return reverse('ipam:role', args=[self.pk])


-class Prefix(GetAvailablePrefixesMixin, PrimaryModel):
+class Prefix(ContactsMixin, GetAvailablePrefixesMixin, PrimaryModel):
 """
 A Prefix represents an IPv4 or IPv6 network, including mask length. Prefixes can optionally be assigned to Sites and
 VRFs. A Prefix must be assigned a status and may optionally be assigned a used-define Role. A Prefix can also be
@@ -486,7 +487,7 @@ class Prefix(GetAvailablePrefixesMixin, PrimaryModel):
 return min(utilization, 100)


-class IPRange(PrimaryModel):
+class IPRange(ContactsMixin, PrimaryModel):
 """
 A range of IP addresses, defined by start and end addresses.
 """
@@ -695,7 +696,7 @@ class IPRange(PrimaryModel):
 return min(float(child_count) / self.size * 100, 100)


-class IPAddress(PrimaryModel):
+class IPAddress(ContactsMixin, PrimaryModel):
 """
 An IPAddress represents an individual IPv4 or IPv6 address and its mask. The mask length should match what is
 configured in the real world. (Typically, only loopback interfaces are configured with /32 or /128 masks.) Like
@@ -8,6 +8,7 @@ from django.utils.translation import gettext_lazy as _
 from ipam.choices import *
 from ipam.constants import *
 from netbox.models import PrimaryModel
+from netbox.models.features import ContactsMixin
 from utilities.data import array_to_string

 __all__ = (
@@ -62,7 +63,7 @@ class ServiceTemplate(ServiceBase, PrimaryModel):
 return reverse('ipam:servicetemplate', args=[self.pk])


-class Service(ServiceBase, PrimaryModel):
+class Service(ContactsMixin, ServiceBase, PrimaryModel):
 """
 A Service represents a layer-four service (e.g. HTTP or SSH) running on a Device or VirtualMachine. A Service may
 optionally be tied to one or more specific IPAddresses belonging to its parent.
@@ -9,6 +9,7 @@ from circuits.models import Provider
 from dcim.filtersets import InterfaceFilterSet
 from dcim.models import Interface, Site
 from netbox.views import generic
+from tenancy.views import ObjectContactsView
 from utilities.query import count_related
 from utilities.tables import get_table_ordering
 from utilities.views import ViewTab, register_model_view
@@ -405,6 +406,11 @@ class AggregateBulkDeleteView(generic.BulkDeleteView):
     table = tables.AggregateTable


+@register_model_view(Aggregate, 'contacts')
+class AggregateContactsView(ObjectContactsView):
+    queryset = Aggregate.objects.all()
+
+
 #
 # Prefix/VLAN roles
 #
@@ -643,6 +649,11 @@ class PrefixBulkDeleteView(generic.BulkDeleteView):
     table = tables.PrefixTable


+@register_model_view(Prefix, 'contacts')
+class PrefixContactsView(ObjectContactsView):
+    queryset = Prefix.objects.all()
+
+
 #
 # IP Ranges
 #
@@ -726,6 +737,11 @@ class IPRangeBulkDeleteView(generic.BulkDeleteView):
     table = tables.IPRangeTable


+@register_model_view(IPRange, 'contacts')
+class IPRangeContactsView(ObjectContactsView):
+    queryset = IPRange.objects.all()
+
+
 #
 # IP addresses
 #
@@ -893,6 +909,11 @@ class IPAddressRelatedIPsView(generic.ObjectChildrenView):
         return parent.get_related_ips().restrict(request.user, 'view')


+@register_model_view(IPAddress, 'contacts')
+class IPAddressContactsView(ObjectContactsView):
+    queryset = IPAddress.objects.all()
+
+
 #
 # VLAN groups
 #
@@ -1259,3 +1280,8 @@ class ServiceBulkDeleteView(generic.BulkDeleteView):
     queryset = Service.objects.prefetch_related('device', 'virtual_machine')
     filterset = filtersets.ServiceFilterSet
     table = tables.ServiceTable
+
+
+@register_model_view(Service, 'contacts')
+class ServiceContactsView(ObjectContactsView):
+    queryset = Service.objects.all()
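The view hunks above pair each IPAM model's new ContactsMixin (added in the model hunks earlier) with a registered 'contacts' tab. As a rough sketch of the same two-part pattern applied to any other model — the Widget model below is invented for illustration, not part of this commit — the pieces fit together like this:

# Hypothetical sketch only: "Widget" is an invented example model, not code
# from this commit. Within an installed NetBox app, the same pattern applies:
from netbox.models import PrimaryModel
from netbox.models.features import ContactsMixin
from tenancy.views import ObjectContactsView
from utilities.views import register_model_view


class Widget(ContactsMixin, PrimaryModel):
    """An example model that can now have contacts assigned to it."""


@register_model_view(Widget, 'contacts')
class WidgetContactsView(ObjectContactsView):
    queryset = Widget.objects.all()

The decorator exposes the tab on the model's detail view, while the mixin provides the contact assignment relation that the view lists.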
@@ -25,7 +25,7 @@ from utilities.string import trailing_slash
 # Environment setup
 #

-VERSION = '4.0.3-dev'
+VERSION = '4.0.4-dev'
 HOSTNAME = platform.node()
 # Set the base directory two levels up
 BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -242,6 +242,7 @@ if 'tasks' not in REDIS:
 TASKS_REDIS = REDIS['tasks']
 TASKS_REDIS_HOST = TASKS_REDIS.get('HOST', 'localhost')
 TASKS_REDIS_PORT = TASKS_REDIS.get('PORT', 6379)
+TASKS_REDIS_URL = TASKS_REDIS.get('URL')
 TASKS_REDIS_SENTINELS = TASKS_REDIS.get('SENTINELS', [])
 TASKS_REDIS_USING_SENTINEL = all([
     isinstance(TASKS_REDIS_SENTINELS, (list, tuple)),
@@ -270,7 +271,7 @@ CACHING_REDIS_SENTINEL_SERVICE = REDIS['caching'].get('SENTINEL_SERVICE', 'defau
 CACHING_REDIS_PROTO = 'rediss' if REDIS['caching'].get('SSL', False) else 'redis'
 CACHING_REDIS_SKIP_TLS_VERIFY = REDIS['caching'].get('INSECURE_SKIP_TLS_VERIFY', False)
 CACHING_REDIS_CA_CERT_PATH = REDIS['caching'].get('CA_CERT_PATH', False)
-CACHING_REDIS_URL = f'{CACHING_REDIS_PROTO}://{CACHING_REDIS_USERNAME_HOST}:{CACHING_REDIS_PORT}/{CACHING_REDIS_DATABASE}'
+CACHING_REDIS_URL = REDIS['caching'].get('URL', f'{CACHING_REDIS_PROTO}://{CACHING_REDIS_USERNAME_HOST}:{CACHING_REDIS_PORT}/{CACHING_REDIS_DATABASE}')

 # Configure Django's default cache to use Redis
 CACHES = {
@@ -678,6 +679,12 @@ if TASKS_REDIS_USING_SENTINEL:
             'socket_connect_timeout': TASKS_REDIS_SENTINEL_TIMEOUT
         },
     }
+elif TASKS_REDIS_URL:
+    RQ_PARAMS = {
+        'URL': TASKS_REDIS_URL,
+        'SSL': TASKS_REDIS_SSL,
+        'SSL_CERT_REQS': None if TASKS_REDIS_SKIP_TLS_VERIFY else 'required',
+    }
 else:
     RQ_PARAMS = {
         'HOST': TASKS_REDIS_HOST,
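The settings hunks above let both the task queue and cache connections be defined by a single Redis URL instead of discrete HOST/PORT values. A minimal configuration.py sketch, assuming a typical deployment — the hostnames and credentials are placeholders, not values from this commit:

# Hypothetical configuration.py excerpt; the URLs below are placeholders.
# Per the settings changes above, a 'URL' key is used for the RQ connection
# (unless Sentinel is configured) and overrides the URL NetBox would
# otherwise assemble for caching.
REDIS = {
    'tasks': {
        'URL': 'rediss://user:password@redis.example.com:6379/0',
        'SSL': True,
    },
    'caching': {
        'URL': 'redis://redis.example.com:6379/1',
    },
}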
@@ -1,4 +1,5 @@
 from copy import deepcopy
+from functools import cached_property

 import django_tables2 as tables
 from django.contrib.auth.models import AnonymousUser
@@ -189,6 +190,7 @@ class NetBoxTable(BaseTable):
     actions = columns.ActionsColumn()

     exempt_columns = ('pk', 'actions')
+    embedded = False

     class Meta(BaseTable.Meta):
         pass
@@ -218,12 +220,12 @@ class NetBoxTable(BaseTable):

         super().__init__(*args, extra_columns=extra_columns, **kwargs)

-    @property
+    @cached_property
    def htmx_url(self):
         """
         Return the base HTML request URL for embedded tables.
         """
-        if getattr(self, 'embedded', False):
+        if self.embedded:
             viewname = get_viewname(self._meta.model, action='list')
             try:
                 return reverse(viewname)
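Switching htmx_url from @property to functools.cached_property means the reverse() lookup runs once per table instance rather than on every access, and the new class-level embedded flag replaces the earlier getattr() probe. A small standalone illustration of the caching behavior (not NetBox code):

from functools import cached_property


class Demo:
    calls = 0

    @cached_property
    def value(self):
        # The body runs only on first access; the result is then stored
        # on the instance and reused for subsequent lookups.
        Demo.calls += 1
        return 'computed'


d = Demo()
d.value
d.value
print(Demo.calls)  # 1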
@@ -163,7 +163,7 @@ class ObjectListView(BaseMultiObjectView, ActionsMixin, TableMixin):

         # If this is an HTMX request, return only the rendered table HTML
         if htmx_partial(request):
-            if not request.htmx.target:
+            if request.GET.get('embedded', False):
                 table.embedded = True
                 # Hide selection checkboxes
                 if 'pk' in table.base_columns:
BIN netbox/project-static/dist/netbox.css vendored (binary file not shown)
@@ -30,7 +30,7 @@
     "gridstack": "10.1.2",
     "htmx.org": "1.9.12",
     "query-string": "9.0.0",
-    "sass": "1.77.1",
+    "sass": "1.77.2",
     "tom-select": "2.3.1",
     "typeface-inter": "3.18.1",
     "typeface-roboto-mono": "1.1.13"
@@ -1,7 +1,7 @@
 // Global variables

 // Set base fonts
-$font-family-base: 'Inter';
+$font-family-sans-serif: 'Inter';
 // See https://github.com/tabler/tabler/issues/1812
 $font-family-monospace: 'Roboto Mono';

@@ -1,7 +1,7 @@
 // Serialized data from change records
 pre.change-data {
-  padding-right: 0;
-  padding-left: 0;
+  border-radius: 0;
+  padding: 0;

   // Display each line individually for highlighting
   > span {
@@ -2482,10 +2482,10 @@ safe-regex-test@^1.0.3:
     es-errors "^1.3.0"
     is-regex "^1.1.4"

-sass@1.77.1:
-  version "1.77.1"
-  resolved "https://registry.yarnpkg.com/sass/-/sass-1.77.1.tgz#018cdfb206afd14724030c02e9fefd8f30a76cd0"
-  integrity sha512-OMEyfirt9XEfyvocduUIOlUSkWOXS/LAt6oblR/ISXCTukyavjex+zQNm51pPCOiFKY1QpWvEH1EeCkgyV3I6w==
+sass@1.77.2:
+  version "1.77.2"
+  resolved "https://registry.yarnpkg.com/sass/-/sass-1.77.2.tgz#18d4ed2eefc260cdc8099c5439ec1303fd5863aa"
+  integrity sha512-eb4GZt1C3avsX3heBNlrc7I09nyT00IUuo4eFhAbeXWU2fvA7oXI53SxODVAA+zgZCk9aunAZgO+losjR3fAwA==
   dependencies:
     chokidar ">=3.0.0 <4.0.0"
     immutable "^4.0.0"
@@ -112,7 +112,7 @@
       {% if object.prechange_data %}
         {% spaceless %}
           <pre class="change-data">
-            {% for k, v in object.prechange_data.items %}
+            {% for k, v in object.prechange_data_clean.items %}
               <span{% if k in diff_removed %} class="removed"{% endif %}>{{ k }}: {{ v|json }}</span>
             {% endfor %}
           </pre>
@@ -132,7 +132,7 @@
       {% if object.postchange_data %}
         {% spaceless %}
           <pre class="change-data">
-            {% for k, v in object.postchange_data.items %}
+            {% for k, v in object.postchange_data_clean.items %}
               <span{% if k in diff_added %} class="added"{% endif %}>{{ k }}: {{ v|json }}</span>
             {% endfor %}
           </pre>
Additional binary files not shown; several large file diffs suppressed.
@@ -1,13 +1,10 @@
 from typing import List

-import strawberry
 import strawberry_django
 from django.contrib.auth import get_user_model
-from django.contrib.auth.models import Group
-from strawberry import auto
-from users import filtersets
+from netbox.graphql.types import BaseObjectType
 from users.models import Group
-from utilities.querysets import RestrictedQuerySet
 from .filters import *

 __all__ = (
@@ -21,17 +18,16 @@ __all__ = (
     fields=['id', 'name'],
     filters=GroupFilter
 )
-class GroupType:
+class GroupType(BaseObjectType):
     pass


 @strawberry_django.type(
     get_user_model(),
     fields=[
-        'id', 'username', 'password', 'first_name', 'last_name', 'email', 'is_staff',
-        'is_active', 'date_joined', 'groups',
+        'id', 'username', 'first_name', 'last_name', 'email', 'is_staff', 'is_active', 'date_joined', 'groups',
     ],
     filters=UserFilter
 )
-class UserType:
+class UserType(BaseObjectType):
     groups: List[GroupType]
@@ -197,6 +197,6 @@ class DynamicModelMultipleChoiceField(DynamicModelChoiceMixin, forms.ModelMultip
         # string 'null'. This will check for that condition and gracefully handle the conversion to a NoneType.
         if self.null_option is not None and settings.FILTERS_NULL_CHOICE_VALUE in value:
             value = [v for v in value if v != settings.FILTERS_NULL_CHOICE_VALUE]
-            return [None, *value]
+            return [None, *super().clean(value)]

         return super().clean(value)
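The one-line fix above matters because ModelMultipleChoiceField.clean() converts submitted primary keys into model instances; previously the raw strings were returned alongside the None placeholder whenever the null option was selected. A toy illustration of the difference, using plain Python stand-ins for the Django form machinery (all names here are invented for the example):

NULL_VALUE = 'null'  # stands in for settings.FILTERS_NULL_CHOICE_VALUE


def fake_super_clean(values):
    # Stand-in for ModelMultipleChoiceField.clean(): maps PKs to objects.
    return [{'pk': int(v)} for v in values]


def clean_old(values):
    values = [v for v in values if v != NULL_VALUE]
    return [None, *values]  # raw strings leaked through


def clean_new(values):
    values = [v for v in values if v != NULL_VALUE]
    return [None, *fake_super_clean(values)]  # instances, as the filter expects


print(clean_old(['null', '1', '2']))   # [None, '1', '2']
print(clean_new(['null', '1', '2']))   # [None, {'pk': 1}, {'pk': 2}]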
@@ -87,7 +87,7 @@ def get_paginate_count(request):
            pass

    if request.user.is_authenticated:
-        per_page = request.user.config.get('pagination.per_page', config.PAGINATE_COUNT)
+        per_page = request.user.config.get('pagination.per_page') or config.PAGINATE_COUNT
        return _max_allowed(per_page)

    return _max_allowed(config.PAGINATE_COUNT)
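The pagination change above falls back to the configured default not only when no per-page preference is stored but also when the stored value is falsy (e.g. 0 or None), which a get() default alone would not catch. A small illustration with a plain dict standing in for the user's config store:

PAGINATE_COUNT = 50  # stands in for config.PAGINATE_COUNT

stored_preferences = {'pagination.per_page': 0}  # saved, but falsy

# Old behaviour: the default is ignored because the key exists.
per_page_old = stored_preferences.get('pagination.per_page', PAGINATE_COUNT)

# New behaviour: falsy values also fall back to the configured default.
per_page_new = stored_preferences.get('pagination.per_page') or PAGINATE_COUNT

print(per_page_old, per_page_new)  # 0 50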
@@ -2,7 +2,6 @@ import json

 from django.contrib.contenttypes.models import ContentType
 from django.core import serializers
-from mptt.models import MPTTModel

 from extras.utils import is_taggable

@@ -16,8 +15,7 @@ def serialize_object(obj, resolve_tags=True, extra=None, exclude=None):
     """
     Return a generic JSON representation of an object using Django's built-in serializer. (This is used for things like
     change logging, not the REST API.) Optionally include a dictionary to supplement the object data. A list of keys
-    can be provided to exclude them from the returned dictionary. Private fields (prefaced with an underscore) are
-    implicitly excluded.
+    can be provided to exclude them from the returned dictionary.

     Args:
         obj: The object to serialize
@@ -30,11 +28,6 @@ def serialize_object(obj, resolve_tags=True, extra=None, exclude=None):
     data = json.loads(json_str)[0]['fields']
     exclude = exclude or []

-    # Exclude any MPTTModel fields
-    if issubclass(obj.__class__, MPTTModel):
-        for field in ['level', 'lft', 'rght', 'tree_id']:
-            data.pop(field)
-
     # Include custom_field_data as "custom_fields"
     if hasattr(obj, 'custom_field_data'):
         data['custom_fields'] = data.pop('custom_field_data')
@@ -45,9 +38,9 @@ def serialize_object(obj, resolve_tags=True, extra=None, exclude=None):
         tags = getattr(obj, '_tags', None) or obj.tags.all()
         data['tags'] = sorted([tag.name for tag in tags])

-    # Skip excluded and private (prefixes with an underscore) attributes
+    # Skip any excluded attributes
     for key in list(data.keys()):
-        if key in exclude or (isinstance(key, str) and key.startswith('_')):
+        if key in exclude:
             data.pop(key)

     # Append any extra data
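With the implicit underscore filtering removed from serialize_object(), private fields now survive into the stored change data, and the changelog template hunks earlier in this diff switch to prechange_data_clean / postchange_data_clean for display instead. The sketch below shows what such a display-time cleaning step could look like; this is an assumption about the behavior of those properties, not code taken from this commit:

def clean_change_data(data):
    # Hypothetical helper: drop private (underscore-prefixed) keys for display.
    if not data:
        return data
    return {key: value for key, value in data.items() if not key.startswith('_')}


print(clean_change_data({'name': 'demo', '_name': 'demo'}))  # {'name': 'demo'}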
@@ -1,5 +1,5 @@
 <div class="htmx-container table-responsive"
-  hx-get="{% url viewname %}{% if url_params %}?{{ url_params.urlencode }}{% endif %}"
+  hx-get="{% url viewname %}?embedded=True{% if url_params %}&{{ url_params.urlencode }}{% endif %}"
   hx-target="this"
   hx-trigger="load" hx-select=".htmx-container" hx-swap="outerHTML"
 ></div>
@@ -20,17 +20,17 @@ feedparser==6.0.11
 gunicorn==22.0.0
 Jinja2==3.1.4
 Markdown==3.6
-mkdocs-material==9.5.22
+mkdocs-material==9.5.24
 mkdocstrings[python-legacy]==0.25.1
 netaddr==1.2.1
 nh3==0.2.17
 Pillow==10.3.0
 psycopg[c,pool]==3.1.19
 PyYAML==6.0.1
-requests==2.31.0
+requests==2.32.2
 social-auth-app-django==5.4.1
 social-auth-core==4.5.4
-strawberry-graphql==0.229.0
+strawberry-graphql==0.230.0
 strawberry-graphql-django==0.40.0
 svgwrite==1.4.3
 tablib==3.6.1