9856 merge feature
.github/ISSUE_TEMPLATE/bug_report.yaml (2 changes, vendored)

@@ -23,7 +23,7 @@ body:
    attributes:
      label: NetBox Version
      description: What version of NetBox are you currently running?
      placeholder: v3.6.9
      placeholder: v3.7.2
    validations:
      required: true
  - type: dropdown
.github/ISSUE_TEMPLATE/config.yml (3 changes, vendored)

@@ -7,6 +7,9 @@ contact_links:
  - name: ❓ Discussion
    url: https://github.com/netbox-community/netbox/discussions
    about: "If you're just looking for help, try starting a discussion instead."
  - name: 🌎 Correct a Translation
    url: https://explore.transifex.com/netbox-community/netbox/
    about: "Spot an incorrect translation? You can propose a fix on Transifex."
  - name: 💡 Plugin Idea
    url: https://plugin-ideas.netbox.dev
    about: "Have an idea for a plugin? Head over to the ideas board!"
.github/ISSUE_TEMPLATE/feature_request.yaml (2 changes, vendored)

@@ -14,7 +14,7 @@ body:
    attributes:
      label: NetBox version
      description: What version of NetBox are you currently running?
      placeholder: v3.6.9
      placeholder: v3.7.2
    validations:
      required: true
  - type: dropdown
.github/workflows/ci.yml (7 changes, vendored)

@@ -9,8 +9,8 @@ jobs:
      NETBOX_CONFIGURATION: netbox.configuration_testing
    strategy:
      matrix:
        python-version: ['3.10', '3.11']
        node-version: ['14.x']
        python-version: ['3.10', '3.11', '3.12']
        node-version: ['18.x']
    services:
      redis:
        image: redis
@@ -68,6 +68,9 @@ jobs:
      - name: Collect static files
        run: python netbox/manage.py collectstatic --no-input

      - name: Check for missing migrations
        run: python netbox/manage.py makemigrations --check

      - name: Check PEP8 compliance
        run: pycodestyle --ignore=W504,E501 --exclude=node_modules netbox/
.github/workflows/lock.yml (4 changes, vendored)

@@ -9,13 +9,15 @@ on:
permissions:
  issues: write
  pull-requests: write
  discussions: write

jobs:
  lock:
    runs-on: ubuntu-latest
    steps:
      - uses: dessant/lock-threads@v4
      - uses: dessant/lock-threads@v5
        with:
          issue-inactive-days: 90
          pr-inactive-days: 30
          discussion-inactive-days: 180
          issue-lock-reason: 'resolved'
@@ -86,12 +86,16 @@ intake policy](https://github.com/netbox-community/netbox/wiki/Issue-Intake-Poli

* In most cases, it is not necessary to add a changelog entry: A maintainer will take care of this when the PR is merged. (This helps avoid merge conflicts resulting from multiple PRs being submitted simultaneously.)

* All code submissions should meet the following criteria (CI will enforce these checks):
* All code submissions must meet the following criteria (CI will enforce these checks where feasible):
    * Consist entirely of original work
    * Python syntax is valid
    * All tests pass when run with `./manage.py test`
    * PEP 8 compliance is enforced, with the exception that lines may be greater than 80 characters in length

> [!CAUTION]
> Any contributions which include AI-generated or reproduced content will be rejected.

* Some other tips to keep in mind:
    * If you'd like to volunteer for someone else's issue, please post a comment on that issue letting us know. (This will allow the maintainers to assign it to you.)
    * Check out our [developer docs](https://docs.netbox.dev/en/stable/development/getting-started/) for tips on setting up your development environment.

@@ -117,8 +121,6 @@ We're always looking for motivated individuals to join the maintainers team and

We generally ask that maintainers dedicate around four hours of work to the project each week on average, which includes both hands-on development and project management tasks such as issue triage. Maintainers are also encouraged (but not required) to attend our bi-weekly Zoom call to catch up on recent items.

Many maintainers petition their employer to grant some of their paid time to work on NetBox. In doing so, your employer becomes eligible to be featured as a [NetBox sponsor](https://github.com/netbox-community/netbox/wiki/Sponsorship).

Interested? You can contact our lead maintainer, Jeremy Stretch, at jeremy@netbox.dev or on the [NetDev Community Slack](https://netdev.chat/). We'd love to have you on the team!

## :heart: Other Ways to Contribute
README.md (153 changes)

@@ -1,86 +1,129 @@
<div align="center">
  <img src="https://raw.githubusercontent.com/netbox-community/netbox/develop/docs/netbox_logo.svg" width="400" alt="NetBox logo" />
  <p>The premier source of truth powering network automation</p>
  <img src="https://github.com/netbox-community/netbox/workflows/CI/badge.svg?branch=master" alt="CI status" />
  <p><strong>The cornerstone of every automated network</strong></p>
  <a href="https://github.com/netbox-community/netbox/releases"><img src="https://img.shields.io/github/v/release/netbox-community/netbox" alt="Latest release" /></a>
  <a href="https://github.com/netbox-community/netbox/blob/master/LICENSE.txt"><img src="https://img.shields.io/badge/license-Apache_2.0-blue.svg" alt="License" /></a>
  <a href="https://github.com/netbox-community/netbox/graphs/contributors"><img src="https://img.shields.io/github/contributors/netbox-community/netbox?color=blue" alt="Contributors" /></a>
  <a href="https://github.com/netbox-community/netbox/stargazers"><img src="https://img.shields.io/github/stars/netbox-community/netbox?style=flat" alt="GitHub stars" /></a>
  <a href="https://explore.transifex.com/netbox-community/netbox/"><img src="https://img.shields.io/badge/languages-6-blue" alt="Languages supported" /></a>
  <a href="https://github.com/netbox-community/netbox/actions/workflows/ci.yml"><img src="https://github.com/netbox-community/netbox/workflows/CI/badge.svg?branch=master" alt="CI status" /></a>
  <p></p>
</div>

NetBox is the leading solution for modeling and documenting modern networks. By
combining the traditional disciplines of IP address management (IPAM) and
datacenter infrastructure management (DCIM) with powerful APIs and extensions,
NetBox provides the ideal "source of truth" to power network automation.
Available as open source software under the Apache 2.0 license, NetBox serves
as the cornerstone for network automation in thousands of organizations.
NetBox exists to empower network engineers. Since its release in 2016, it has become the go-to solution for modeling and documenting network infrastructure for thousands of organizations worldwide. As a successor to legacy IPAM and DCIM applications, NetBox provides a cohesive, extensive, and accessible data model for all things networked. By providing a single robust user interface and programmable APIs for everything from cable maps to device configurations, NetBox serves as the central source of truth for the modern network.

* **Physical infrastructure:** Accurately model the physical world, from global regions down to individual racks of gear. Then connect everything - network, console, and power!
* **Modern IPAM:** All the standard IPAM functionality you expect, plus VRF import/export tracking, VLAN management, and overlay support.
* **Data circuits:** Confidently manage the delivery of critical circuits from various service providers, modeled seamlessly alongside your own infrastructure.
* **Power tracking:** Map the distribution of power from upstream sources to individual feeds and outlets.
* **Organization:** Manage tenant and contact assignments natively.
* **Powerful search:** Easily find anything you need using a single global search function.
* **Comprehensive logging:** Leverage both automatic change logging and user-submitted journal entries to track your network's growth over time.
* **Endless customization:** Custom fields, custom links, tags, export templates, custom validation, reports, scripts, and more!
* **Flexible permissions:** An advanced permissions systems enables very flexible delegation of permissions.
* **Integrations:** Easily connect NetBox to your other tooling via its REST & GraphQL APIs.
* **Plugins:** Not finding what you need in the core application? Try one of many community plugins - or build your own!
<p align="center">
  <a href="#netboxs-role">NetBox's Role</a> |
  <a href="#why-netbox">Why NetBox?</a> |
  <a href="#getting-started">Getting Started</a> |
  <a href="#get-involved">Get Involved</a> |
  <a href="#project-stats">Project Stats</a> |
  <a href="#screenshots">Screenshots</a>
</p>


<p align="center">
  <img src="docs/media/screenshots/home-light.png" width="600" alt="NetBox user interface screenshot" />
</p>

## NetBox's Role

NetBox functions as the **source of truth** for your network infrastructure. Its job is to define and validate the _intended state_ of all network components and resources. NetBox does not interact with network nodes directly; rather, it makes this data available programmatically to purpose-built automation, monitoring, and assurance tools. This separation of duties enables the construction of a robust yet flexible automation system.

<p align="center">
  <img src="docs/media/misc/reference_architecture.png" alt="Reference network automation architecture" />
</p>

The diagram above illustrates the recommended deployment architecture for an automated network, leveraging NetBox as the central authority for network state. This approach allows your team to swap out individual tools to meet changing needs while retaining a predictable, modular workflow.

## Why NetBox?

### Comprehensive Data Model

Racks, devices, cables, IP addresses, VLANs, circuits, power, VPNs, and lots more: NetBox is built for networks. Its comprehensive and thoroughly inter-linked data model provides for natural and highly structured modeling of myriad network primitives that just isn't possible using general-purpose tools. And there's no need to waste time contemplating how to build out a database: Everything is ready to go upon installation.

### Focused Development

NetBox strives to meet a singular goal: Provide the best available solution for making network infrastructure programmatically accessible. Unlike "all-in-one" tools which awkwardly bolt on half-baked features in an attempt to check every box, NetBox is committed to its core function. NetBox provides the best possible solution for modeling network infrastructure, and provides rich APIs for integrating with tools that excel in other areas of network automation.

### Extensible and Customizable

No two networks are exactly the same. Users are empowered to extend NetBox's native data model with custom fields and tags to best suit their unique needs. You can even write your own plugins to introduce entirely new objects and functionality!

### Flexible Permissions

NetBox includes a fully customizable permission system, which affords administrators incredible granularity when assigning roles to users and groups. Want to restrict certain users to working only with cabling and not be able to change IP addresses? Or maybe each team should have access only to a particular tenant? NetBox enables you to craft roles as you see fit.

### Custom Validation & Protection Rules

The data you put into NetBox is crucial to network operations. In addition to its robust native validation rules, NetBox provides mechanisms for administrators to define their own custom validation rules for objects. Custom validation can be used both to ensure new or modified objects adhere to a set of rules, and to prevent the deletion of objects which don't meet certain criteria. (For example, you might want to prevent the deletion of a device with an "active" status.)

### Device Configuration Rendering

NetBox can render user-created Jinja2 templates to generate device configurations from its own data. Configuration templates can be uploaded individually or pulled automatically from an external source, such as a git repository. Rendered configurations can be retrieved via the REST API for application directly to network devices via a provisioning tool such as Ansible or Salt.

### Custom Scripts

Complex workflows, such as provisioning a new branch office, can be tedious to carry out via the user interface. NetBox allows you to write and upload custom scripts that can be run directly from the UI. Scripts prompt users for input and then automate the necessary tasks to greatly simplify otherwise burdensome processes.

### Automated Events

Users can define event rules to automatically trigger a custom script or outbound webhook in response to a NetBox event. For example, you might want to automatically update a network monitoring service whenever a new device is added to NetBox, or update a DHCP server when an IP range is allocated.

### Comprehensive Change Logging

NetBox automatically logs the creation, modification, and deletion of all managed objects, providing a thorough change history. Changes can be attributed to the executing user, and related changes are grouped automatically by request ID.

> [!NOTE]
> A complete list of NetBox's myriad features can be found in [the introductory documentation](https://docs.netbox.dev/en/stable/introduction/).

## Getting Started

<div align="center">

  [](https://github.com/netbox-community/netbox)

  [](https://github.com/netbox-community/netbox-docker)

  [](https://netboxlabs.com/netbox-cloud/)

</div>

* Just want to explore? Check out [our public demo](https://demo.netbox.dev/) right now!
* The [official documentation](https://docs.netbox.dev) offers a comprehensive introduction.
* Check out [our wiki](https://github.com/netbox-community/netbox/wiki/Community-Contributions) for even more projects to get the most out of NetBox!

<p align="center">
  <a href="https://netboxlabs.com/netbox-cloud/"><img src="docs/media/misc/netbox_cloud.png" alt="NetBox Cloud" /></a><br />
  Looking for an enterprise solution? Check out <strong><a href="https://netboxlabs.com/netbox-cloud/">NetBox Cloud</a></strong>!
</p>

## Get Involved

* Follow [@NetBoxOfficial](https://twitter.com/NetBoxOfficial) on Twitter!
* Join the conversation on [the discussion forum](https://github.com/netbox-community/netbox/discussions) and [Slack](https://netdev.chat/)!
* Already a power user? You can [suggest a feature](https://github.com/netbox-community/netbox/issues/new?assignees=&labels=type%3A+feature&template=feature_request.yaml) or [report a bug](https://github.com/netbox-community/netbox/issues/new?assignees=&labels=type%3A+bug&template=bug_report.yaml) on GitHub.
* Contributions from the community are encouraged and appreciated! Check out our [contributing guide](CONTRIBUTING.md) to get started.
* [Share your idea](https://plugin-ideas.netbox.dev/) for a new plugin, or [learn how to build one](https://github.com/netbox-community/netbox-plugin-tutorial) yourself!

## Project Stats

<div align="center">
  <p align="center">
    <a href="https://github.com/netbox-community/netbox/commits"><img src="https://images.repography.com/29023055/netbox-community/netbox/recent-activity/whQtEr_TGD9PhW1BPlhlEQ5jnrgQ0KJpm-LlGtpoGO0/3Kx_iWUSBRJ5-AI4QwJEJWrUDEz3KrX2lvh8aYE0WXY_timeline.svg" alt="Timeline graph"></a>
    <a href="https://github.com/netbox-community/netbox/issues"><img src="https://images.repography.com/29023055/netbox-community/netbox/recent-activity/whQtEr_TGD9PhW1BPlhlEQ5jnrgQ0KJpm-LlGtpoGO0/3Kx_iWUSBRJ5-AI4QwJEJWrUDEz3KrX2lvh8aYE0WXY_issues.svg" alt="Issues graph"></a>
    <a href="https://github.com/netbox-community/netbox/pulls"><img src="https://images.repography.com/29023055/netbox-community/netbox/recent-activity/whQtEr_TGD9PhW1BPlhlEQ5jnrgQ0KJpm-LlGtpoGO0/3Kx_iWUSBRJ5-AI4QwJEJWrUDEz3KrX2lvh8aYE0WXY_prs.svg" alt="Pull requests graph"></a>
    <a href="https://github.com/netbox-community/netbox/graphs/contributors"><img src="https://images.repography.com/29023055/netbox-community/netbox/recent-activity/whQtEr_TGD9PhW1BPlhlEQ5jnrgQ0KJpm-LlGtpoGO0/3Kx_iWUSBRJ5-AI4QwJEJWrUDEz3KrX2lvh8aYE0WXY_users.svg" alt="Top contributors"></a>
    <br />Stats via <a href="https://repography.com">Repography</a>
</div>

## Sponsors

<div align="center">

  [](https://netboxlabs.com)

  [](https://try.digitalocean.com/developer-cloud)

  [](https://sentry.io)
  <br />
  [](https://metal.equinix.com)

  [](https://onemindservices.com)

</div>
</p>

## Screenshots

")




<p align="center">
  <strong>NetBox Dashboard (Light Mode)</strong><br />
  <img src="docs/media/screenshots/home-light.png" width="600" alt="NetBox dashboard (light mode)" />
</p>
<p align="center">
  <strong>NetBox Dashboard (Dark Mode)</strong><br />
  <img src="docs/media/screenshots/home-dark.png" width="600" alt="NetBox dashboard (dark mode)" />
</p>
<p align="center">
  <strong>Prefixes List</strong><br />
  <img src="docs/media/screenshots/prefixes-list.png" width="600" alt="Prefixes list" />
</p>
<p align="center">
  <strong>Rack View</strong><br />
  <img src="docs/media/screenshots/rack.png" width="600" alt="Rack view" />
</p>
<p align="center">
  <strong>Cable Trace</strong><br />
  <img src="docs/media/screenshots/cable-trace.png" width="600" alt="Cable trace" />
</p>
@@ -1,10 +1,6 @@
# HTML sanitizer
# https://github.com/mozilla/bleach/blob/main/CHANGES
bleach

# The Python web framework on which NetBox is built
# https://docs.djangoproject.com/en/stable/releases/
Django<5.0
Django<5.1

# Django middleware which permits cross-domain API requests
# https://github.com/adamchainz/django-cors-headers/blob/main/CHANGELOG.rst
@@ -22,6 +18,10 @@ django-filter
# https://github.com/flavors/django-graphiql-debug-toolbar/blob/main/CHANGES.rst
django-graphiql-debug-toolbar

# HTMX utilities for Django
# https://django-htmx.readthedocs.io/en/latest/changelog.html
django-htmx

# Modified Preorder Tree Traversal (recursive nesting of objects)
# Pinned to 0.14.0; 0.15.0 requires Python 3.9+
# https://github.com/django-mptt/django-mptt/blob/main/CHANGELOG.rst
@@ -108,6 +108,10 @@ mkdocstrings[python-legacy]
# https://github.com/netaddr/netaddr/blob/master/CHANGELOG
netaddr

# Python bindings to the ammonia HTML sanitization library.
# https://github.com/messense/nh3
nh3

# Fork of PIL (Python Imaging Library) for image processing
# https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst
Pillow
@@ -73,7 +73,7 @@ You should be redirected to Microsoft's authentication portal. Enter the usernam

If successful, you will be redirected back to the NetBox UI, and will be logged in as the AD user. You can verify this by navigating to your profile (using the button at top right).

This user account has been replicated locally to NetBox, and can now be assigned groups and permissions within the NetBox admin UI.
This user account has been replicated locally to NetBox, and can now be assigned groups and permissions.

## Troubleshooting
@@ -67,4 +67,4 @@ You should be redirected to Okta's authentication portal. Enter the username/ema

If successful, you will be redirected back to the NetBox UI, and will be logged in as the Okta user. You can verify this by navigating to your profile (using the button at top right).

This user account has been replicated locally to NetBox, and can now be assigned groups and permissions within the NetBox admin UI.
This user account has been replicated locally to NetBox, and can now be assigned groups and permissions.
@@ -2,9 +2,9 @@

## Local Authentication

Local user accounts and groups can be created in NetBox under the "Authentication and Authorization" section of the administrative user interface. This interface is available only to users with the "staff" permission enabled.
Local user accounts and groups can be created in NetBox under the "Authentication" section in the "Admin" menu. This section is available only to users with the "staff" permission enabled.

At a minimum, each user account must have a username and password set. User accounts may also denote a first name, last name, and email address. [Permissions](../permissions.md) may also be assigned to users and/or groups within the admin UI.
At a minimum, each user account must have a username and password set. User accounts may also denote a first name, last name, and email address. [Permissions](../permissions.md) may also be assigned to individual users and/or groups as needed.

## Remote Authentication
@@ -10,6 +10,9 @@ The time zone NetBox will use when dealing with dates and times. It is recommend

You may define custom formatting for date and times. For detailed instructions on writing format strings, please see [the Django documentation](https://docs.djangoproject.com/en/stable/ref/templates/builtins/#date). Default formats are listed below.

!!! note
    These system defaults will be overridden by a user's selected language/locale when [localization](./system.md#enable_localization) is enabled.

```python
DATE_FORMAT = 'N j, Y'               # June 26, 2016
SHORT_DATE_FORMAT = 'Y-m-d'          # 2016-06-26
@@ -46,4 +46,4 @@ The configuration file may be modified at any time. However, the WSGI service (e
$ sudo systemctl restart netbox
```

Configuration parameters which are set via the admin UI (those listed under "dynamic settings") take effect immediately.
Dynamic configuration parameters (those which can be modified via the UI) take effect immediately.
@@ -99,6 +99,14 @@ The maximum size (in bytes) of an incoming HTTP request (i.e. `GET` or `POST` da

---

## DJANGO_ADMIN_ENABLED

Default: False

Setting this to True installs the `django.contrib.admin` app and enables the [Django admin UI](https://docs.djangoproject.com/en/5.0/ref/contrib/admin/). This may be necessary to support older plugins which do not integrate with the native NetBox interface.
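For illustration, the new flag is simply set in the NetBox configuration module; a minimal sketch, assuming the standard `netbox/netbox/configuration.py` location (all other required settings omitted for brevity):

```python
# netbox/netbox/configuration.py — minimal sketch; only the new flag is shown,
# the usual required settings (ALLOWED_HOSTS, DATABASE, etc.) are omitted here.

# Installs django.contrib.admin and re-enables the legacy Django admin UI,
# e.g. to support an older plugin that has no native NetBox views.
DJANGO_ADMIN_ENABLED = True
```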
---

## ENFORCE_GLOBAL_UNIQUE

!!! tip "Dynamic Configuration Parameter"
@@ -69,15 +69,7 @@ Email is sent from NetBox only for critical events or if configured for [logging

Default: False

Determines if localization features are enabled or not. This should only be enabled for development or testing purposes as netbox is not yet fully localized. Turning this on will localize numeric and date formats (overriding what is set for DATE_FORMAT) based on the browser locale as well as translate certain strings from third party modules.

---

## GIT_PATH

Default: `git`

The system path to the `git` executable, used by the synchronization backend for remote git repositories.
Determines if localization features are enabled or not. This should only be enabled for development or testing purposes as netbox is not yet fully localized. Turning this on will localize numeric and date formats (overriding any configured [system defaults](./date-time.md#date-and-time-formatting)) based on the browser locale as well as translate certain strings from third party modules.

---
@@ -288,9 +288,9 @@ An IPv4 or IPv6 network with a mask. Returns a `netaddr.IPNetwork` object. Two a
## Running Custom Scripts

!!! note
    To run a custom script, a user must be assigned via permissions for `Extras > Script`, `Extras > ScriptModule`, and `Core > ManagedFile` objects. They must also be assigned the `extras.run_script` permission. This is achieved by assigning the user (or group) a permission on the Script object and specifying the `run` action in the admin UI as shown below.
    To run a custom script, a user must be assigned permissions for `Extras > Script`, `Extras > Script Module`, and `Core > Managed File` objects. They must also be assigned the `extras.run_script` permission. This is achieved by assigning the user (or group) a permission on the Script object and specifying the `run` action in "Permissions" as shown below.


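For illustration, the same grant can also be made programmatically from the NetBox shell; a rough sketch using the `ObjectPermission` model (the username and permission name are placeholders, and the content-type lookup for the Script object is an assumption — adjust to your environment and NetBox version):

```python
# Rough sketch: grant the "run" action on Script objects to one user, run from
# the NetBox shell (./manage.py nbshell). Names below are placeholders/assumptions.
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from users.models import ObjectPermission

user = get_user_model().objects.get(username='alice')  # placeholder account

perm = ObjectPermission.objects.create(
    name='Run custom scripts',   # placeholder name
    actions=['run'],             # the non-CRUD action referenced by extras.run_script
)
perm.object_types.add(ContentType.objects.get(app_label='extras', model='script'))
perm.users.add(user)
```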
### Via the Web UI
@@ -132,9 +132,9 @@ Once you have created a report, it will appear in the reports list. Initially, r
## Running Reports

!!! note
    To run a report, a user must be assigned via permissions for `Extras > Report`, `Extras > ReportModule`, and `Core > ManagedFile` objects. They must also be assigned the `extras.run_report` permission. This is achieved by assigning the user (or group) a permission on the Report object and specifying the `run` action in the admin UI as shown below.
    To run a report, a user must be assigned permissions for `Extras > Report`, `Extras > Report Module`, and `Core > Managed File` objects. They must also be assigned the `extras.run_report` permission. This is achieved by assigning the user (or group) a permission on the Report object and specifying the `run` action in "Permissions" as shown below.



### Via the Web UI
@@ -80,6 +80,18 @@ Run the following command to update the device type definition validation schema

This will automatically update the schema file at `contrib/generated_schema.json`.

### Update & Compile Translations

Log into [Transifex](https://app.transifex.com/netbox-community/netbox/dashboard/) to download the updated string maps. Download the resource (portable object, or `.po`) file for each language and save them to `netbox/translations/$lang/LC_MESSAGES/django.po`, overwriting the current files. (Be sure to click the **Download for use** link.)



Once the resource files for all languages have been updated, compile the machine object (`.mo`) files using the `compilemessages` management command:

```nohighlight
./manage.py compilemessages
```

### Update Version and Changelog

* Update the `VERSION` constant in `settings.py` to the new release version.
@@ -90,7 +102,7 @@ Commit these changes to the `develop` branch and push upstream.

### Verify CI Build Status

Ensure that continuous integration testing on the `develop` branch is completing successfully. If it fails, take action to correct the failure before proceding with the release.
Ensure that continuous integration testing on the `develop` branch is completing successfully. If it fails, take action to correct the failure before proceeding with the release.

### Submit a Pull Request
docs/development/translations.md (30 changes, new file)

@@ -0,0 +1,30 @@
# Translations

NetBox coordinates all translation work using the [Transifex](https://explore.transifex.com/netbox-community/netbox/) platform. Signing up for a Transifex account is free.

All language translations in NetBox are generated from the source file found at `netbox/translations/en/LC_MESSAGES/django.po`. This file contains the original English strings with empty mappings, and is generated as part of NetBox's release process. Transifex updates source strings from this file on a recurring basis, so new translation strings will appear in the platform automatically as it is updated in the code base.

Reviewers log into Transifex and navigate to their designated language(s) to translate strings. The initial translation for most strings will be machine-generated via the AWS Translate service. Human reviewers are responsible for reviewing these translations and making corrections where necessary.

Immediately prior to each NetBox release, the translation maps for all completed languages will be downloaded from Transifex, compiled, and checked into the NetBox code base by a maintainer.

## Updating Translation Sources

To update the English `.po` file from which all translations are derived, use the `makemessages` management command:

```nohighlight
./manage.py makemessages -l en
```

Then, commit the change and push to the `develop` branch on GitHub. After some time, any new strings will appear for translation on Transifex automatically.

## Proposing New Languages

If you'd like to add support for a new language to NetBox, the first step is to [submit a GitHub issue](https://github.com/netbox-community/netbox/issues/new?assignees=&labels=type%3A+translation&projects=&template=translation.yaml) to capture the proposal. While we'd like to add as many languages as possible, we do need to limit the rate at which new languages are added. New languages will be selected according to community interest and the number of volunteers who sign up as translators.

Once a proposed language has been approved, a NetBox maintainer will:

* Add it to the Transifex platform
* Designate one or more reviewers
* Create the initial machine-generated translations for review
* Add it to the list of supported languages
@@ -39,7 +39,7 @@ When rendered for a specific NetBox device, the template's `device` variable wil

### Context Data

The objet for which the configuration is being rendered is made available as template context as `device` or `virtualmachine` for devices and virtual machines, respectively. Additionally, NetBox model classes can be accessed by the app or plugin in which they reside. For example:
The object for which the configuration is being rendered is made available as template context as `device` or `virtualmachine` for devices and virtual machines, respectively. Additionally, NetBox model classes can be accessed by the app or plugin in which they reside. For example:

```
There are {{ dcim.Site.objects.count() }} sites.
@@ -70,6 +70,11 @@ This request will trigger resolution of the device's preferred config template i

If no config template has been assigned to any of these three objects, the request will fail.

The configuration can be rendered as JSON or as plaintext by setting the `Accept:` HTTP header. For example:

* `Accept: application/json`
* `Accept: text/plain`
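As an illustration of the `Accept` header behavior, a minimal sketch using Python's `requests` library; the host, token, device ID, and the `render-config` endpoint path are assumptions and should be checked against the REST API documentation for your NetBox version:

```python
# Minimal sketch: request a device's rendered configuration as plain text.
# Host, token, device ID, and endpoint path are illustrative assumptions.
import requests

url = "https://netbox.example.com/api/dcim/devices/123/render-config/"
headers = {
    "Authorization": "Token 0123456789abcdef",  # placeholder API token
    "Accept": "text/plain",                     # or "application/json" for a JSON envelope
}

response = requests.post(url, headers=headers, timeout=30)
response.raise_for_status()
print(response.text)  # the rendered device configuration
```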
### General Purpose Use

NetBox config templates can also be rendered without being tied to any specific device, using a separate general purpose REST API endpoint. Any data included with a POST request to this endpoint will be passed as context data for the template.
@@ -28,4 +28,4 @@ For more detail, see the reference documentation for NetBox's [conditional logic

## Event Rule Processing

When a change is detected, any resulting events are placed into a Redis queue for processing. This allows the user's request to complete without needing to wait for the outgoing event(s) to be processed. The events are then extracted from the queue by the `rqworker` process. The current event queue and any failed events can be inspected in the admin UI under System > Background Tasks.
When a change is detected, any resulting events are placed into a Redis queue for processing. This allows the user's request to complete without needing to wait for the outgoing event(s) to be processed. The events are then extracted from the queue by the `rqworker` process. The current event queue and any failed events can be inspected under System > Background Tasks.
@@ -1,6 +1,6 @@
# Synchronized Data

Several models in NetBox support the automatic synchronization of local data from a designated remote source. For example, [configuration templates](./configuration-rendering.md) defined in NetBox can source their content from text files stored in a remote git repository. This accomplished using the core [data source](../models/core/datasource.md) and [data file](../models/core/datafile.md) models.
Several models in NetBox support the automatic synchronization of local data from a designated remote source. For example, [configuration templates](./configuration-rendering.md) defined in NetBox can source their content from text files stored in a remote git repository. This is accomplished using the core [data source](../models/core/datasource.md) and [data file](../models/core/datafile.md) models.

To enable remote data synchronization, the NetBox administrator first designates one or more remote data sources. NetBox currently supports the following source types:
@@ -4,7 +4,7 @@

NetBox is the leading solution for modeling and documenting modern networks. By combining the traditional disciplines of IP address management (IPAM) and datacenter infrastructure management (DCIM) with powerful APIs and extensions, NetBox provides the ideal "source of truth" to power network automation. Read on to discover why thousands of organizations worldwide put NetBox at the heart of their infrastructure.

[](./media/screenshots/netbox-ui.png)
[](./media/screenshots/home-light.png)

## :material-server-network: Built for Networks
@@ -7,7 +7,7 @@ This section of the documentation discusses installing and configuring the NetBo
Begin by installing all system packages required by NetBox and its dependencies.

!!! warning "Python 3.10 or later required"
    NetBox requires Python 3.10 or 3.11.
    NetBox supports Python 3.10, 3.11, and 3.12.

=== "Ubuntu"
@@ -58,3 +58,6 @@ You should see output similar to the following:
If the NetBox service fails to start, issue the command `journalctl -eu netbox` to check for log messages that may indicate the problem.

Once you've verified that the WSGI workers are up and running, move on to HTTP server setup.

!!! note
    There is a bug in the current stable release of gunicorn (v21.2.0) where automatic restarts of the worker processes can result in 502 errors under heavy load. (See [gunicorn bug #3038](https://github.com/benoitc/gunicorn/issues/3038) for more detail.) Users who encounter this issue may opt to downgrade to an earlier, unaffected release of gunicorn (`pip install gunicorn==20.1.0`). Note, however, that this earlier release does not officially support Python 3.11.
@@ -18,11 +18,11 @@ The following sections detail how to set up a new instance of NetBox:

## Requirements

| Dependency | Minimum Version |
|------------|-----------------|
| Python | 3.10 |
| PostgreSQL | 12 |
| Redis | 4.0 |
| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| PostgreSQL | 12+ |
| Redis | 4.0+ |
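As a quick illustration of the Python requirement above, a small pre-flight check that can be run in the target environment before installing (purely illustrative; not part of the documented procedure):

```python
# Pre-flight check: confirm the interpreter satisfies NetBox's supported range.
import sys

assert sys.version_info >= (3, 10), f"Python 3.10+ required, found {sys.version}"
print(f"OK: running Python {sys.version.split()[0]}")
```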
Below is a simplified overview of the NetBox application stack for reference:
@@ -17,11 +17,11 @@ Prior to upgrading your NetBox instance, be sure to carefully review all [releas

NetBox requires the following dependencies:

| Dependency | Minimum Version |
|------------|-----------------|
| Python | 3.10 |
| PostgreSQL | 12 |
| Redis | 4.0 |
| Dependency | Supported Versions |
|------------|--------------------|
| Python | 3.10, 3.11, 3.12 |
| PostgreSQL | 12+ |
| Redis | 4.0+ |

## 3. Install the Latest Release
@@ -73,9 +73,9 @@ If no body template is specified, the request body will be populated with a JSON

## Webhook Processing

Using [Event Rules](../features/event-rules.md), when a change is detected, any resulting webhooks are placed into a Redis queue for processing. This allows the user's request to complete without needing to wait for the outgoing webhook(s) to be processed. The webhooks are then extracted from the queue by the `rqworker` process and HTTP requests are sent to their respective destinations. The current webhook queue and any failed webhooks can be inspected in the admin UI under System > Background Tasks.
Using [Event Rules](../features/event-rules.md), when a change is detected, any resulting webhooks are placed into a Redis queue for processing. This allows the user's request to complete without needing to wait for the outgoing webhook(s) to be processed. The webhooks are then extracted from the queue by the `rqworker` process and HTTP requests are sent to their respective destinations. The current webhook queue and any failed webhooks can be inspected under System > Background Tasks.

A request is considered successful if the response has a 2XX status code; otherwise, the request is marked as having failed. Failed requests may be retried manually via the admin UI.
A request is considered successful if the response has a 2XX status code; otherwise, the request is marked as having failed. Failed requests may be requeued manually under System > Background Tasks.

## Troubleshooting

@@ -106,6 +106,6 @@ Content-Type: application/x-www-form-urlencoded
------------
```

Note that `webhook_receiver` does not actually _do_ anything with the information received: It merely prints the request headers and body for inspection.
Note that `webhook_receiver` does not actually _do_ anything with the information received: It merely prints the request headers and body for inspection. If you don't see any output, check that the `rqworker` process is running and that webhook events are being placed into the queue.

Now, when the NetBox webhook is triggered and processed, you should see its headers and content appear in the terminal where the webhook receiver is listening. If you don't, check that the `rqworker` process is running and that webhook events are being placed into the queue (visible under the NetBox admin UI).
Webhook results can be found in the NetBox admin UI under the Background Tasks section. You can see any finished or failed runs, as well as the error log for failed webhooks.
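For readers without the bundled `webhook_receiver` tool at hand, a minimal stand-in can be sketched with the Python standard library alone; it simply echoes each incoming request, and the listening port is an arbitrary placeholder:

```python
# Minimal stand-in for a webhook receiver: prints the headers and body of each
# incoming POST, mirroring what the webhook_receiver tool described above does.
from http.server import BaseHTTPRequestHandler, HTTPServer


class EchoHandler(BaseHTTPRequestHandler):
    def do_POST(self):
        length = int(self.headers.get("Content-Length", 0))
        body = self.rfile.read(length)
        print(self.headers)          # request headers, including any configured signature header
        print(body.decode("utf-8"))  # JSON payload generated by the webhook
        self.send_response(200)
        self.end_headers()


if __name__ == "__main__":
    # Port 9000 is an arbitrary choice; point the webhook's payload URL at it.
    HTTPServer(("0.0.0.0", 9000), EchoHandler).serve_forever()
```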
BIN  docs/media/development/transifex_download.png  (new file; 54 KiB)
BIN  docs/media/misc/netbox_cloud.png  (new file; 6.8 KiB)
BIN  docs/media/misc/reference_architecture.png  (new file; 46 KiB)
BIN  docs/media/screenshots/home-light.png  (new file; 309 KiB)
@@ -14,7 +14,7 @@ The IKE version employed (v1 or v2).

### Mode

The IKE mode employed (main or aggressive).
The mode employed (main or aggressive) when IKEv1 is in use. This setting is not supported for IKEv2.

### Proposals
@@ -47,3 +47,14 @@ class ReminderWidget(DashboardWidget):
    def render(self, request):
        return self.config.get('content')
```

## Initialization

To register the widget, it becomes essential to import the widget module. The recommended approach is to accomplish this within the `ready` method situated in your `PluginConfig`:

```python
class FooBarConfig(PluginConfig):
    def ready(self):
        super().ready()
        from . import widgets  # point this to the above widget module you created
```
@@ -20,4 +20,4 @@ backends = [MyDataBackend]
!!! tip
    The path to the list of search indexes can be modified by setting `data_backends` in the PluginConfig instance.

::: core.data_backends.DataBackend
::: netbox.data_backends.DataBackend
@@ -1,5 +1,60 @@
# NetBox v3.7

## v3.7.3 (FUTURE)

---

## v3.7.2 (2024-02-05)

### Enhancements

* [#13729](https://github.com/netbox-community/netbox/issues/13729) - Omit sensitive data source parameters from change log data
* [#14645](https://github.com/netbox-community/netbox/issues/14645) - Limit the number of assigned IP addresses displayed under interfaces list

### Bug Fixes

* [#14500](https://github.com/netbox-community/netbox/issues/14500) - Optimize calculation of available child prefixes & ranges when viewing a prefix
* [#14511](https://github.com/netbox-community/netbox/issues/14511) - Fix GraphQL support for interfaces connected to provider networks
* [#14572](https://github.com/netbox-community/netbox/issues/14572) - Correct the number of jobs listed for individual report & script modules
* [#14703](https://github.com/netbox-community/netbox/issues/14703) - Revert to the default layout when encountering a misconfigured dashboard
* [#14755](https://github.com/netbox-community/netbox/issues/14755) - Fix validation of choice values & labels when creating a custom field choice set via the REST API
* [#14838](https://github.com/netbox-community/netbox/issues/14838) - Avoid corrupting JSON data when changing the action type while editing an event rule
* [#14839](https://github.com/netbox-community/netbox/issues/14839) - Fix form validation error when attempting to terminate a tunnel to a virtual machine interface
* [#14840](https://github.com/netbox-community/netbox/issues/14840) - Fix `NoReverseMatch` exception when rendering a custom field which references a user
* [#14847](https://github.com/netbox-community/netbox/issues/14847) - IKE policy mode may be set inly when IKEv1 is selected
* [#14851](https://github.com/netbox-community/netbox/issues/14851) - Automatically remove any associated bookmarks when deleting a user
* [#14879](https://github.com/netbox-community/netbox/issues/14879) - Include custom fields in REST API representation of data sources
* [#14885](https://github.com/netbox-community/netbox/issues/14885) - Add missing "group" field to VPN tunnel creation form
* [#14892](https://github.com/netbox-community/netbox/issues/14892) - Fix exception when running report/script via command line due to missing username
* [#14920](https://github.com/netbox-community/netbox/issues/14920) - Include button to display available status choices when bulk importing virtual device contexts
* [#14945](https://github.com/netbox-community/netbox/issues/14945) - Fix "select all" button for device type components
* [#14947](https://github.com/netbox-community/netbox/issues/14947) - Ensure that application & removal of tags is always recorded in an object's change log
* [#14962](https://github.com/netbox-community/netbox/issues/14962) - Fix config context rendering for VMs assigned directly to a site (rather than via a cluster)
* [#14999](https://github.com/netbox-community/netbox/issues/14999) - Fix "create & add another" link for interface FHRP group assignment
* [#15015](https://github.com/netbox-community/netbox/issues/15015) - Pre-populate assigned tenant when allocating next available IP address under prefix view
* [#15020](https://github.com/netbox-community/netbox/issues/15020) - Automatically update all VMs when changing a cluster's assigned site
* [#15025](https://github.com/netbox-community/netbox/issues/15025) - The `can_add()` template filter should accept a model (not an instance)

---

## v3.7.1 (2024-01-17)

### Bug Fixes

* [#13844](https://github.com/netbox-community/netbox/issues/13844) - Use `available_at_site` filter when filtering VLANs under prefix form
* [#14663](https://github.com/netbox-community/netbox/issues/14663) - Fix tunnel creation when setting initial termination to a VM interface
* [#14706](https://github.com/netbox-community/netbox/issues/14706) - Relax one-to-one mapping of tunnel termination to IP address
* [#14709](https://github.com/netbox-community/netbox/issues/14709) - Fix typo in tunnel termination type choice name
* [#14749](https://github.com/netbox-community/netbox/issues/14749) - Remove errant translation wrapper from `installed_device` on DeviceBay
* [#14778](https://github.com/netbox-community/netbox/issues/14778) - Custom field API serializer should accept null values for all optional fields
* [#14791](https://github.com/netbox-community/netbox/issues/14791) - Hide available prefixes when searching within a parent prefix
* [#14793](https://github.com/netbox-community/netbox/issues/14793) - Add missing Diffie-Hellman group 15
* [#14816](https://github.com/netbox-community/netbox/issues/14816) - Ensure default contact assignment ordering is consistent
* [#14817](https://github.com/netbox-community/netbox/issues/14817) - Relax required fields for IKE & IPSec models on bulk import
* [#14827](https://github.com/netbox-community/netbox/issues/14827) - Ensure all matching event rules are processed in response to an event

---

## v3.7.0 (2023-12-29)

### Breaking Changes
docs/release-notes/version-4.0.md (28 changes, new file)

@@ -0,0 +1,28 @@
# NetBox v4.0

## v4.0.0 (FUTURE)

### New Features

#### Complete UI Refresh ([#12128](https://github.com/netbox-community/netbox/issues/12128))

The NetBox user interface has been completely refreshed and updated.

### Enhancements

* [#12851](https://github.com/netbox-community/netbox/issues/12851) - Replace bleach HTML sanitization library with nh3
* [#14637](https://github.com/netbox-community/netbox/issues/14637) - Upgrade to Django 5.0
* [#14672](https://github.com/netbox-community/netbox/issues/14672) - Add support for Python 3.12
* [#14728](https://github.com/netbox-community/netbox/issues/14728) - The plugins list view has been moved from the legacy admin UI to the main NetBox UI
* [#14729](https://github.com/netbox-community/netbox/issues/14729) - All background task views have been moved from the legacy admin UI to the main NetBox UI

### Other Changes

* [#12325](https://github.com/netbox-community/netbox/issues/12325) - The Django admin UI is now disabled by default (set `DJANGO_ADMIN_ENABLED` to True to enable it)
* [#12795](https://github.com/netbox-community/netbox/issues/12795) - NetBox now uses a custom User model rather than the stock model provided by Django
* [#13647](https://github.com/netbox-community/netbox/issues/13647) - Squash all database migrations prior to v3.7
* [#14092](https://github.com/netbox-community/netbox/issues/14092) - Remove backward compatibility for importing plugin resources from `extras.plugins` (now `netbox.plugins`)
* [#14638](https://github.com/netbox-community/netbox/issues/14638) - Drop support for Python 3.8 and 3.9
* [#14657](https://github.com/netbox-community/netbox/issues/14657) - Remove backward compatibility for old permissions mapping under `ActionsMixin`
* [#14658](https://github.com/netbox-community/netbox/issues/14658) - Remove backward compatibility for importing `process_webhook()` (now `extras.webhooks.send_webhook()`)
* [#14740](https://github.com/netbox-community/netbox/issues/14740) - Remove the obsolete `BootstrapMixin` form mixin class
@@ -286,6 +286,7 @@ nav:
    - User Preferences: 'development/user-preferences.md'
    - Web UI: 'development/web-ui.md'
    - Internationalization: 'development/internationalization.md'
    - Translations: 'development/translations.md'
    - Release Checklist: 'development/release-checklist.md'
    - git Cheat Sheet: 'development/git-cheat-sheet.md'
  - Release Notes:
@@ -2,8 +2,8 @@ import logging

from django.conf import settings
from django.contrib import messages
from django.contrib.auth import login as auth_login, logout as auth_logout
from django.contrib.auth import update_session_auth_hash
from django.contrib.auth import login as auth_login, logout as auth_logout, update_session_auth_hash
from django.contrib.auth.forms import AuthenticationForm, PasswordChangeForm
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.models import update_last_login
from django.contrib.auth.signals import user_logged_in
@@ -72,7 +72,7 @@ class LoginView(View):
        return auth_backends

    def get(self, request):
        form = forms.LoginForm(request)
        form = AuthenticationForm(request)

        if request.user.is_authenticated:
            logger = logging.getLogger('netbox.auth.login')
@@ -85,7 +85,7 @@ class LoginView(View):

    def post(self, request):
        logger = logging.getLogger('netbox.auth.login')
        form = forms.LoginForm(request, data=request.POST)
        form = AuthenticationForm(request, data=request.POST)

        if form.is_valid():
            logger.debug("Login form validation was successful")
@@ -220,7 +220,7 @@ class ChangePasswordView(LoginRequiredMixin, View):
            messages.warning(request, "LDAP-authenticated user credentials cannot be changed within NetBox.")
            return redirect('account:profile')

        form = forms.PasswordChangeForm(user=request.user)
        form = PasswordChangeForm(user=request.user)

        return render(request, self.template_name, {
            'form': form,
@@ -228,7 +228,7 @@ class ChangePasswordView(LoginRequiredMixin, View):
        })

    def post(self, request):
        form = forms.PasswordChangeForm(user=request.user, data=request.POST)
        form = PasswordChangeForm(user=request.user, data=request.POST)
        if form.is_valid():
            form.save()
            update_session_auth_hash(request, form.user)
@@ -7,7 +7,6 @@ from django.utils.safestring import mark_safe
from django.utils.translation import gettext_lazy as _
from netbox.forms import NetBoxModelImportForm
from tenancy.models import Tenant
from utilities.forms import BootstrapMixin
from utilities.forms.fields import CSVChoiceField, CSVModelChoiceField, SlugField

__all__ = (
@@ -112,7 +111,7 @@ class CircuitImportForm(NetBoxModelImportForm):
    ]


class CircuitTerminationImportForm(BootstrapMixin, forms.ModelForm):
class CircuitTerminationImportForm(forms.ModelForm):
    site = CSVModelChoiceField(
        label=_('Site'),
        queryset=Site.objects.all(),
@@ -1,20 +0,0 @@
# Generated by Django 3.2.8 on 2021-10-21 14:50

from django.db import migrations
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('extras', '0062_clear_secrets_changelog'),
        ('circuits', '0002_squashed_0029'),
    ]

    operations = [
        migrations.AddField(
            model_name='circuittype',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
    ]
netbox/circuits/migrations/0003_squashed_0037.py (127 changes, new file)

@@ -0,0 +1,127 @@
import taggit.managers
from django.db import migrations, models

import utilities.json


class Migration(migrations.Migration):

    replaces = [
        ('circuits', '0003_extend_tag_support'),
        ('circuits', '0004_rename_cable_peer'),
        ('circuits', '0032_provider_service_id'),
        ('circuits', '0033_standardize_id_fields'),
        ('circuits', '0034_created_datetimefield'),
        ('circuits', '0035_provider_asns'),
        ('circuits', '0036_circuit_termination_date_tags_custom_fields'),
        ('circuits', '0037_new_cabling_models')
    ]

    dependencies = [
        ('ipam', '0047_squashed_0053'),
        ('extras', '0002_squashed_0059'),
        ('circuits', '0002_squashed_0029'),
    ]

    operations = [
        migrations.AddField(
            model_name='circuittype',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.RenameField(
            model_name='circuittermination',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='circuittermination',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.AddField(
            model_name='providernetwork',
            name='service_id',
            field=models.CharField(blank=True, max_length=100),
        ),
        migrations.AlterField(
            model_name='circuit',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='circuittermination',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='provider',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='providernetwork',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='circuittermination',
            name='_link_peer_id',
            field=models.PositiveBigIntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='circuit',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='circuittermination',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='provider',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='providernetwork',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='provider',
            name='asns',
            field=models.ManyToManyField(blank=True, related_name='providers', to='ipam.asn'),
        ),
        migrations.AddField(
            model_name='circuit',
            name='termination_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='circuittermination',
            name='custom_field_data',
            field=models.JSONField(blank=True, default=dict, encoder=utilities.json.CustomFieldJSONEncoder),
        ),
        migrations.AddField(
            model_name='circuittermination',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='circuittermination',
            name='cable_end',
            field=models.CharField(blank=True, max_length=1),
        ),
    ]
@ -1,21 +0,0 @@
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0003_extend_tag_support'),
    ]

    operations = [
        migrations.RenameField(
            model_name='circuittermination',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='circuittermination',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
    ]
@ -1,17 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0004_rename_cable_peer'),
        ('dcim', '0145_site_remove_deprecated_fields'),
    ]

    operations = [
        migrations.AddField(
            model_name='providernetwork',
            name='service_id',
            field=models.CharField(blank=True, max_length=100),
        ),
    ]
@ -1,44 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0032_provider_service_id'),
    ]

    operations = [
        # Model IDs
        migrations.AlterField(
            model_name='circuit',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='circuittermination',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='provider',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),
        migrations.AlterField(
            model_name='providernetwork',
            name='id',
            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False),
        ),

        # GFK IDs
        migrations.AlterField(
            model_name='circuittermination',
            name='_link_peer_id',
            field=models.PositiveBigIntegerField(blank=True, null=True),
        ),
    ]
@ -1,38 +0,0 @@
# Generated by Django 4.0.2 on 2022-02-08 18:54

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0033_standardize_id_fields'),
    ]

    operations = [
        migrations.AlterField(
            model_name='circuit',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='circuittermination',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='circuittype',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='provider',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='providernetwork',
            name='created',
            field=models.DateTimeField(auto_now_add=True, null=True),
        ),
    ]
@ -1,19 +0,0 @@
# Generated by Django 4.0.3 on 2022-03-30 20:27

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ipam', '0057_created_datetimefield'),
        ('circuits', '0034_created_datetimefield'),
    ]

    operations = [
        migrations.AddField(
            model_name='provider',
            name='asns',
            field=models.ManyToManyField(blank=True, related_name='providers', to='ipam.asn'),
        ),
    ]
@ -1,28 +0,0 @@
from utilities.json import CustomFieldJSONEncoder
from django.db import migrations, models
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0035_provider_asns'),
    ]

    operations = [
        migrations.AddField(
            model_name='circuit',
            name='termination_date',
            field=models.DateField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='circuittermination',
            name='custom_field_data',
            field=models.JSONField(blank=True, default=dict, encoder=CustomFieldJSONEncoder),
        ),
        migrations.AddField(
            model_name='circuittermination',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
    ]
@ -1,16 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0036_circuit_termination_date_tags_custom_fields'),
    ]

    operations = [
        migrations.AddField(
            model_name='circuittermination',
            name='cable_end',
            field=models.CharField(blank=True, max_length=1),
        ),
    ]
@ -1,20 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0037_new_cabling_models'),
        ('dcim', '0160_populate_cable_ends'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='circuittermination',
            name='_link_peer_id',
        ),
        migrations.RemoveField(
            model_name='circuittermination',
            name='_link_peer_type',
        ),
    ]
@ -1,46 +1,83 @@
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
from django.db import migrations, models

import utilities.json


def create_provideraccounts_from_providers(apps, schema_editor):
    """
    Migrate Account in Provider model to separate account model
    """
    Provider = apps.get_model('circuits', 'Provider')
    ProviderAccount = apps.get_model('circuits', 'ProviderAccount')

    provider_accounts = []
    for provider in Provider.objects.all():
        if provider.account:
            provider_accounts.append(ProviderAccount(
                provider=provider,
                account=provider.account
            ))
    ProviderAccount.objects.bulk_create(provider_accounts, batch_size=100)


def restore_providers_from_provideraccounts(apps, schema_editor):
    """
    Restore Provider account values from auto-generated ProviderAccounts
    """
    ProviderAccount = apps.get_model('circuits', 'ProviderAccount')
    provider_accounts = ProviderAccount.objects.order_by('pk')
    for provideraccount in provider_accounts:
        if provider_accounts.filter(provider=provideraccount.provider)[0] == provideraccount:
            provideraccount.provider.account = provideraccount.account
            provideraccount.provider.save()


class Migration(migrations.Migration):

    dependencies = [
        ('extras', '0084_staging'),
    replaces = [
        ('circuits', '0038_cabling_cleanup'),
        ('circuits', '0039_unique_constraints'),
        ('circuits', '0040_provider_remove_deprecated_fields'),
        ('circuits', '0041_standardize_description_comments'),
        ('circuits', '0042_provideraccount')
    ]

    dependencies = [
        ('circuits', '0037_new_cabling_models'),
        ('dcim', '0160_populate_cable_ends'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='circuittermination',
            name='_link_peer_id',
        ),
        migrations.RemoveField(
            model_name='circuittermination',
            name='_link_peer_type',
        ),
        migrations.RemoveConstraint(
            model_name='providernetwork',
            name='circuits_providernetwork_provider_name',
        ),
        migrations.AlterUniqueTogether(
            name='circuit',
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name='circuittermination',
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name='providernetwork',
            unique_together=set(),
        ),
        migrations.AddConstraint(
            model_name='circuit',
            constraint=models.UniqueConstraint(fields=('provider', 'cid'), name='circuits_circuit_unique_provider_cid'),
        ),
        migrations.AddConstraint(
            model_name='circuittermination',
            constraint=models.UniqueConstraint(fields=('circuit', 'term_side'), name='circuits_circuittermination_unique_circuit_term_side'),
        ),
        migrations.AddConstraint(
            model_name='providernetwork',
            constraint=models.UniqueConstraint(fields=('provider', 'name'), name='circuits_providernetwork_unique_provider_name'),
        ),
        migrations.RemoveField(
            model_name='provider',
            name='admin_contact',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='asn',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='noc_contact',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='portal_url',
        ),
        migrations.AddField(
            model_name='provider',
            name='description',
            field=models.CharField(blank=True, max_length=200),
        ),
        migrations.CreateModel(
            name='ProviderAccount',
            fields=[
@ -67,9 +104,6 @@ class Migration(migrations.Migration):
            model_name='provideraccount',
            constraint=models.UniqueConstraint(fields=('provider', 'account'), name='circuits_provideraccount_unique_provider_account'),
        ),
        migrations.RunPython(
            create_provideraccounts_from_providers, restore_providers_from_provideraccounts
        ),
        migrations.RemoveField(
            model_name='provider',
            name='account',
@ -77,7 +111,7 @@
        migrations.AddField(
            model_name='circuit',
            name='provider_account',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='circuits', to='circuits.provideraccount', null=True, blank=True),
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='circuits', to='circuits.provideraccount'),
            preserve_default=False,
        ),
        migrations.AlterModelOptions(
@ -1,39 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0038_cabling_cleanup'),
    ]

    operations = [
        migrations.RemoveConstraint(
            model_name='providernetwork',
            name='circuits_providernetwork_provider_name',
        ),
        migrations.AlterUniqueTogether(
            name='circuit',
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name='circuittermination',
            unique_together=set(),
        ),
        migrations.AlterUniqueTogether(
            name='providernetwork',
            unique_together=set(),
        ),
        migrations.AddConstraint(
            model_name='circuit',
            constraint=models.UniqueConstraint(fields=('provider', 'cid'), name='circuits_circuit_unique_provider_cid'),
        ),
        migrations.AddConstraint(
            model_name='circuittermination',
            constraint=models.UniqueConstraint(fields=('circuit', 'term_side'), name='circuits_circuittermination_unique_circuit_term_side'),
        ),
        migrations.AddConstraint(
            model_name='providernetwork',
            constraint=models.UniqueConstraint(fields=('provider', 'name'), name='circuits_providernetwork_unique_provider_name'),
        ),
    ]
@ -1,59 +0,0 @@
import os

from django.db import migrations
from django.db.utils import DataError


def check_legacy_data(apps, schema_editor):
    """
    Abort the migration if any legacy provider fields still contain data.
    """
    Provider = apps.get_model('circuits', 'Provider')

    provider_count = Provider.objects.exclude(asn__isnull=True).count()
    if provider_count and 'NETBOX_DELETE_LEGACY_DATA' not in os.environ:
        raise DataError(
            f"Unable to proceed with deleting asn field from Provider model: Found {provider_count} "
            f"providers with legacy ASN data. Please ensure all legacy provider ASN data has been "
            f"migrated to ASN objects before proceeding. Or, set the NETBOX_DELETE_LEGACY_DATA "
            f"environment variable to bypass this safeguard and delete all legacy provider ASN data."
        )

    provider_count = Provider.objects.exclude(admin_contact='', noc_contact='', portal_url='').count()
    if provider_count and 'NETBOX_DELETE_LEGACY_DATA' not in os.environ:
        raise DataError(
            f"Unable to proceed with deleting contact fields from Provider model: Found {provider_count} "
            f"providers with legacy contact data. Please ensure all legacy provider contact data has been "
            f"migrated to contact objects before proceeding. Or, set the NETBOX_DELETE_LEGACY_DATA "
            f"environment variable to bypass this safeguard and delete all legacy provider contact data."
        )


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0039_unique_constraints'),
    ]

    operations = [
        migrations.RunPython(
            code=check_legacy_data,
            reverse_code=migrations.RunPython.noop
        ),
        migrations.RemoveField(
            model_name='provider',
            name='admin_contact',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='asn',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='noc_contact',
        ),
        migrations.RemoveField(
            model_name='provider',
            name='portal_url',
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 4.1.2 on 2022-11-03 18:24

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('circuits', '0040_provider_remove_deprecated_fields'),
    ]

    operations = [
        migrations.AddField(
            model_name='provider',
            name='description',
            field=models.CharField(blank=True, max_length=200),
        ),
    ]
@ -36,7 +36,7 @@ class DataSourceSerializer(NetBoxModelSerializer):
        model = DataSource
        fields = [
            'id', 'url', 'display', 'name', 'type', 'source_url', 'enabled', 'status', 'description', 'comments',
            'parameters', 'ignore_rules', 'created', 'last_updated', 'file_count',
            'parameters', 'ignore_rules', 'custom_fields', 'created', 'last_updated', 'file_count',
        ]

26
netbox/core/constants.py
Normal file
@ -0,0 +1,26 @@
from dataclasses import dataclass

from django.utils.translation import gettext_lazy as _
from rq.job import JobStatus

__all__ = (
    'RQ_TASK_STATUSES',
)


@dataclass
class Status:
    label: str
    color: str


RQ_TASK_STATUSES = {
    JobStatus.QUEUED: Status(_('Queued'), 'cyan'),
    JobStatus.FINISHED: Status(_('Finished'), 'green'),
    JobStatus.FAILED: Status(_('Failed'), 'red'),
    JobStatus.STARTED: Status(_('Started'), 'blue'),
    JobStatus.DEFERRED: Status(_('Deferred'), 'gray'),
    JobStatus.SCHEDULED: Status(_('Scheduled'), 'purple'),
    JobStatus.STOPPED: Status(_('Stopped'), 'orange'),
    JobStatus.CANCELED: Status(_('Cancelled'), 'yellow'),
}
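For context, a minimal sketch of how this mapping is consumed (it mirrors the RQJobStatusColumn added later in this PR; the snippet itself is illustrative only and not part of the diff):

from rq.job import JobStatus

from core.constants import RQ_TASK_STATUSES

# Look up the display metadata for a raw RQ job status.
status = RQ_TASK_STATUSES.get(JobStatus.FAILED)
print(status.label, status.color)  # "Failed" / "red"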
@ -21,7 +21,7 @@ class DataSourceBulkEditForm(NetBoxModelBulkEditForm):
    enabled = forms.NullBooleanField(
        required=False,
        widget=BulkEditNullBooleanSelect(),
        label=_('Enforce unique space')
        label=_('Enabled')
    )
    description = forms.CharField(
        label=_('Description'),
@ -11,7 +11,7 @@ from netbox.config import get_config, PARAMS
from netbox.forms import NetBoxModelForm
from netbox.registry import registry
from netbox.utils import get_data_backend_choices
from utilities.forms import BootstrapMixin, get_field_value
from utilities.forms import get_field_value
from utilities.forms.fields import CommentField
from utilities.forms.widgets import HTMXSelect

@ -138,7 +138,7 @@ class ConfigFormMetaclass(forms.models.ModelFormMetaclass):
        return super().__new__(mcs, name, bases, attrs)


class ConfigRevisionForm(BootstrapMixin, forms.ModelForm, metaclass=ConfigFormMetaclass):
class ConfigRevisionForm(forms.ModelForm, metaclass=ConfigFormMetaclass):
    """
    Form for creating a new ConfigRevision.
    """
@ -9,9 +9,9 @@ class Command(_Command):
        """
        This built-in management command enables the creation of new database schema migration files, which should
        never be required by and ordinary user. We prevent this command from executing unless the configuration
        indicates that the user is a developer (i.e. configuration.DEVELOPER == True).
        indicates that the user is a developer (i.e. configuration.DEVELOPER == True), or it was run with --check.
        """
        if not settings.DEVELOPER:
        if not kwargs['check_changes'] and not settings.DEVELOPER:
            raise CommandError(
                "This command is available for development purposes only. It will\n"
                "NOT resolve any issues with missing or unapplied migrations. For assistance,\n"
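For context, a minimal sketch of what this change permits, assuming a standard Django setup: `check_changes` is the dest of the `--check` flag (as the override above references), so a non-developer environment can now run the check without tripping the DEVELOPER guard. Illustrative only, not part of the diff:

from django.core.management import call_command

# Fails fast (SystemExit with a non-zero code) if a model change has no
# corresponding migration file; no migration files are written.
call_command('makemigrations', check_changes=True, dry_run=True)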
@ -1,18 +1,26 @@
# Generated by Django 4.1.5 on 2023-02-02 02:37

import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import taggit.managers
from django.conf import settings
from django.db import migrations, models

import utilities.json


class Migration(migrations.Migration):

    initial = True
    replaces = [
        ('core', '0001_initial'),
        ('core', '0002_managedfile'),
        ('core', '0003_job'),
        ('core', '0004_replicate_jobresults'),
        ('core', '0005_job_created_auto_now')
    ]

    dependencies = [
        ('extras', '0084_staging'),
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('extras', '0002_squashed_0059'),
    ]

    operations = [
@ -71,13 +79,61 @@ class Migration(migrations.Migration):
                ('datafile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='core.datafile')),
                ('object_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='contenttypes.contenttype')),
            ],
        ),
        migrations.AddIndex(
            model_name='autosyncrecord',
            index=models.Index(fields=['object_type', 'object_id'], name='core_autosy_object__c17bac_idx'),
            options={
                'indexes': [models.Index(fields=['object_type', 'object_id'], name='core_autosy_object__c17bac_idx')],
            },
        ),
        migrations.AddConstraint(
            model_name='autosyncrecord',
            constraint=models.UniqueConstraint(fields=('object_type', 'object_id'), name='core_autosyncrecord_object'),
        ),
        migrations.CreateModel(
            name='ManagedFile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
                ('data_path', models.CharField(blank=True, editable=False, max_length=1000)),
                ('data_synced', models.DateTimeField(blank=True, editable=False, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(blank=True, editable=False, null=True)),
                ('file_root', models.CharField(max_length=1000)),
                ('file_path', models.FilePathField(editable=False)),
                ('data_file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='core.datafile')),
                ('data_source', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='core.datasource')),
                ('auto_sync_enabled', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ('file_root', 'file_path'),
                'indexes': [models.Index(fields=['file_root', 'file_path'], name='core_managedfile_root_path')],
            },
        ),
        migrations.AddConstraint(
            model_name='managedfile',
            constraint=models.UniqueConstraint(fields=('file_root', 'file_path'), name='core_managedfile_unique_root_path'),
        ),
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
                ('object_id', models.PositiveBigIntegerField(blank=True, null=True)),
                ('name', models.CharField(max_length=200)),
                ('created', models.DateTimeField()),
                ('scheduled', models.DateTimeField(blank=True, null=True)),
                ('interval', models.PositiveIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)])),
                ('started', models.DateTimeField(blank=True, null=True)),
                ('completed', models.DateTimeField(blank=True, null=True)),
                ('status', models.CharField(default='pending', max_length=30)),
                ('data', models.JSONField(blank=True, null=True)),
                ('job_id', models.UUIDField(unique=True)),
                ('object_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='jobs', to='contenttypes.contenttype')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
        migrations.AlterField(
            model_name='job',
            name='created',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
@ -1,40 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-23 17:35

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ManagedFile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
                ('data_path', models.CharField(blank=True, editable=False, max_length=1000)),
                ('data_synced', models.DateTimeField(blank=True, editable=False, null=True)),
                ('created', models.DateTimeField(auto_now_add=True)),
                ('last_updated', models.DateTimeField(blank=True, editable=False, null=True)),
                ('file_root', models.CharField(max_length=1000)),
                ('file_path', models.FilePathField(editable=False)),
                ('data_file', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='core.datafile')),
                ('data_source', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='+', to='core.datasource')),
                ('auto_sync_enabled', models.BooleanField(default=False)),
            ],
            options={
                'ordering': ('file_root', 'file_path'),
            },
        ),
        migrations.AddIndex(
            model_name='managedfile',
            index=models.Index(fields=['file_root', 'file_path'], name='core_managedfile_root_path'),
        ),
        migrations.AddConstraint(
            model_name='managedfile',
            constraint=models.UniqueConstraint(fields=('file_root', 'file_path'), name='core_managedfile_unique_root_path'),
        ),
    ]
@ -1,39 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-27 15:02

from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('core', '0002_managedfile'),
    ]

    operations = [
        migrations.CreateModel(
            name='Job',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
                ('object_id', models.PositiveBigIntegerField(blank=True, null=True)),
                ('name', models.CharField(max_length=200)),
                ('created', models.DateTimeField()),
                ('scheduled', models.DateTimeField(blank=True, null=True)),
                ('interval', models.PositiveIntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1)])),
                ('started', models.DateTimeField(blank=True, null=True)),
                ('completed', models.DateTimeField(blank=True, null=True)),
                ('status', models.CharField(default='pending', max_length=30)),
                ('data', models.JSONField(blank=True, null=True)),
                ('job_id', models.UUIDField(unique=True)),
                ('object_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='jobs', to='contenttypes.contenttype')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ['-created'],
            },
        ),
    ]
@ -1,46 +0,0 @@
from django.db import migrations


def replicate_jobresults(apps, schema_editor):
    """
    Replicate existing JobResults to the new Jobs table before deleting the old JobResults table.
    """
    Job = apps.get_model('core', 'Job')
    JobResult = apps.get_model('extras', 'JobResult')

    jobs = []
    for job_result in JobResult.objects.order_by('pk').iterator(chunk_size=100):
        jobs.append(
            Job(
                object_type=job_result.obj_type,
                name=job_result.name,
                created=job_result.created,
                scheduled=job_result.scheduled,
                interval=job_result.interval,
                started=job_result.started,
                completed=job_result.completed,
                user=job_result.user,
                status=job_result.status,
                data=job_result.data,
                job_id=job_result.job_id,
            )
        )
        if len(jobs) == 100:
            Job.objects.bulk_create(jobs)
            jobs = []
    if jobs:
        Job.objects.bulk_create(jobs)


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0003_job'),
    ]

    operations = [
        migrations.RunPython(
            code=replicate_jobresults,
            reverse_code=migrations.RunPython.noop
        ),
    ]
@ -1,18 +0,0 @@
# Generated by Django 4.1.7 on 2023-03-27 17:28

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('core', '0004_replicate_jobresults'),
    ]

    operations = [
        migrations.AlterField(
            model_name='job',
            name='created',
            field=models.DateTimeField(auto_now_add=True),
        ),
    ]
@ -14,6 +14,7 @@ from django.utils import timezone
from django.utils.module_loading import import_string
from django.utils.translation import gettext as _

from netbox.constants import CENSOR_TOKEN, CENSOR_TOKEN_CHANGED
from netbox.models import PrimaryModel
from netbox.models.features import JobsMixin
from netbox.registry import registry
@ -130,6 +131,28 @@ class DataSource(JobsMixin, PrimaryModel):
                'source_url': f"URLs for local sources must start with file:// (or specify no scheme)"
            })

    def to_objectchange(self, action):
        objectchange = super().to_objectchange(action)

        # Censor any backend parameters marked as sensitive in the serialized data
        pre_change_params = {}
        post_change_params = {}
        if objectchange.prechange_data:
            pre_change_params = objectchange.prechange_data.get('parameters') or {}  # parameters may be None
        if objectchange.postchange_data:
            post_change_params = objectchange.postchange_data.get('parameters') or {}
        for param in self.backend_class.sensitive_parameters:
            if post_change_params.get(param):
                if post_change_params[param] != pre_change_params.get(param):
                    # Set the "changed" token if the parameter's value has been modified
                    post_change_params[param] = CENSOR_TOKEN_CHANGED
                else:
                    post_change_params[param] = CENSOR_TOKEN
            if pre_change_params.get(param):
                pre_change_params[param] = CENSOR_TOKEN

        return objectchange

    def enqueue_sync_job(self, request):
        """
        Enqueue a background job to synchronize the DataSource by calling sync().
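For context, a standalone sketch of the censoring rule implemented in to_objectchange() above. The token values here are placeholders for illustration; the real ones come from netbox.constants:

CENSOR_TOKEN = '********'            # placeholder value, illustration only
CENSOR_TOKEN_CHANGED = '***CHANGED***'  # placeholder value, illustration only

def censor(pre_params, post_params, sensitive=('password',)):
    # A sensitive parameter is masked with CENSOR_TOKEN; if its value differs
    # between the pre- and post-change snapshots, the post-change side is set to
    # CENSOR_TOKEN_CHANGED to signal that the secret was modified.
    pre, post = dict(pre_params), dict(post_params)
    for param in sensitive:
        if post.get(param):
            if post[param] != pre.get(param):
                post[param] = CENSOR_TOKEN_CHANGED
            else:
                post[param] = CENSOR_TOKEN
        if pre.get(param):
            pre[param] = CENSOR_TOKEN
    return pre, post

print(censor({'password': 'old'}, {'password': 'new'}))
# ({'password': '********'}, {'password': '***CHANGED***'})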
@ -1,3 +1,5 @@
from .config import *
from .data import *
from .jobs import *
from .tasks import *
from .plugins import *
|
||||
import django_tables2 as tables
|
||||
from django.utils.safestring import mark_safe
|
||||
|
||||
from core.constants import RQ_TASK_STATUSES
|
||||
from netbox.registry import registry
|
||||
|
||||
__all__ = (
|
||||
'BackendTypeColumn',
|
||||
'RQJobStatusColumn',
|
||||
)
|
||||
|
||||
|
||||
@ -18,3 +21,16 @@ class BackendTypeColumn(tables.Column):
|
||||
|
||||
def value(self, value):
|
||||
return value
|
||||
|
||||
|
||||
class RQJobStatusColumn(tables.Column):
|
||||
"""
|
||||
Render a colored label for the status of an RQ job.
|
||||
"""
|
||||
def render(self, value):
|
||||
status = RQ_TASK_STATUSES.get(value)
|
||||
return mark_safe(f'<span class="badge text-bg-{status.color}">{status.label}</span>')
|
||||
|
||||
def value(self, value):
|
||||
status = RQ_TASK_STATUSES.get(value)
|
||||
return status.label
|
||||
|
39
netbox/core/tables/plugins.py
Normal file
@ -0,0 +1,39 @@
import django_tables2 as tables
from django.utils.translation import gettext_lazy as _

from netbox.tables import BaseTable

__all__ = (
    'PluginTable',
)


class PluginTable(BaseTable):
    name = tables.Column(
        accessor=tables.A('verbose_name'),
        verbose_name=_('Name')
    )
    version = tables.Column(
        verbose_name=_('Version')
    )
    package = tables.Column(
        accessor=tables.A('name'),
        verbose_name=_('Package')
    )
    author = tables.Column(
        verbose_name=_('Author')
    )
    author_email = tables.Column(
        verbose_name=_('Author Email')
    )
    description = tables.Column(
        verbose_name=_('Description')
    )

    class Meta(BaseTable.Meta):
        empty_text = _('No plugins found')
        fields = (
            'name', 'version', 'package', 'author', 'author_email', 'description',
        )
        default_columns = (
            'name', 'version', 'package', 'author', 'author_email', 'description',
        )
134
netbox/core/tables/tasks.py
Normal file
@ -0,0 +1,134 @@
import django_tables2 as tables
from django.utils.translation import gettext_lazy as _
from django_tables2.utils import A

from core.tables.columns import RQJobStatusColumn
from netbox.tables import BaseTable


class BackgroundQueueTable(BaseTable):
    name = tables.Column(
        verbose_name=_("Name")
    )
    jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "queued"]),
        verbose_name=_("Queued")
    )
    oldest_job_timestamp = tables.Column(
        verbose_name=_("Oldest Task")
    )
    started_jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "started"]),
        verbose_name=_("Active")
    )
    deferred_jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "deferred"]),
        verbose_name=_("Deferred")
    )
    finished_jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "finished"]),
        verbose_name=_("Finished")
    )
    failed_jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "failed"]),
        verbose_name=_("Failed")
    )
    scheduled_jobs = tables.Column(
        linkify=("core:background_task_list", [A("index"), "scheduled"]),
        verbose_name=_("Scheduled")
    )
    workers = tables.Column(
        linkify=("core:worker_list", [A("index")]),
        verbose_name=_("Workers")
    )
    host = tables.Column(
        accessor="connection_kwargs__host",
        verbose_name=_("Host")
    )
    port = tables.Column(
        accessor="connection_kwargs__port",
        verbose_name=_("Port")
    )
    db = tables.Column(
        accessor="connection_kwargs__db",
        verbose_name=_("DB")
    )
    pid = tables.Column(
        accessor="scheduler__pid",
        verbose_name=_("Scheduler PID")
    )

    class Meta(BaseTable.Meta):
        empty_text = _('No queues found')
        fields = (
            'name', 'jobs', 'oldest_job_timestamp', 'started_jobs', 'deferred_jobs', 'finished_jobs', 'failed_jobs',
            'scheduled_jobs', 'workers', 'host', 'port', 'db', 'pid',
        )
        default_columns = (
            'name', 'jobs', 'started_jobs', 'deferred_jobs', 'finished_jobs', 'failed_jobs', 'scheduled_jobs',
            'workers',
        )


class BackgroundTaskTable(BaseTable):
    id = tables.Column(
        linkify=("core:background_task", [A("id")]),
        verbose_name=_("ID")
    )
    created_at = tables.DateTimeColumn(
        verbose_name=_("Created")
    )
    enqueued_at = tables.DateTimeColumn(
        verbose_name=_("Enqueued")
    )
    ended_at = tables.DateTimeColumn(
        verbose_name=_("Ended")
    )
    status = RQJobStatusColumn(
        verbose_name=_("Status"),
        accessor='get_status'
    )
    callable = tables.Column(
        empty_values=(),
        verbose_name=_("Callable")
    )

    class Meta(BaseTable.Meta):
        empty_text = _('No tasks found')
        fields = (
            'id', 'created_at', 'enqueued_at', 'ended_at', 'status', 'callable',
        )
        default_columns = (
            'id', 'created_at', 'enqueued_at', 'ended_at', 'status', 'callable',
        )

    def render_callable(self, value, record):
        try:
            return record.func_name
        except Exception as e:
            return repr(e)


class WorkerTable(BaseTable):
    name = tables.Column(
        linkify=("core:worker", [A("name")]),
        verbose_name=_("Name")
    )
    state = tables.Column(
        verbose_name=_("State")
    )
    birth_date = tables.DateTimeColumn(
        verbose_name=_("Birth")
    )
    pid = tables.Column(
        verbose_name=_("PID")
    )

    class Meta(BaseTable.Meta):
        empty_text = _('No workers found')
        fields = (
            'name', 'state', 'birth_date', 'pid',
        )
        default_columns = (
            'name', 'state', 'birth_date', 'pid',
        )
@ -1,8 +1,6 @@
from datetime import datetime
from datetime import datetime, timezone

from django.test import TestCase
from django.utils import timezone

from utilities.testing import ChangeLoggedFilterSetTests
from ..choices import *
from ..filtersets import *
122
netbox/core/tests/test_models.py
Normal file
@ -0,0 +1,122 @@
from django.test import TestCase

from core.models import DataSource
from extras.choices import ObjectChangeActionChoices
from netbox.constants import CENSOR_TOKEN, CENSOR_TOKEN_CHANGED


class DataSourceChangeLoggingTestCase(TestCase):

    def test_password_added_on_create(self):
        datasource = DataSource.objects.create(
            name='Data Source 1',
            type='git',
            source_url='http://localhost/',
            parameters={
                'username': 'jeff',
                'password': 'foobar123',
            }
        )

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_CREATE)
        self.assertIsNone(objectchange.prechange_data)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], CENSOR_TOKEN_CHANGED)

    def test_password_added_on_update(self):
        datasource = DataSource.objects.create(
            name='Data Source 1',
            type='git',
            source_url='http://localhost/'
        )
        datasource.snapshot()

        # Add a blank password
        datasource.parameters = {
            'username': 'jeff',
            'password': '',
        }

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertIsNone(objectchange.prechange_data['parameters'])
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], '')

        # Add a password
        datasource.parameters = {
            'username': 'jeff',
            'password': 'foobar123',
        }

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], CENSOR_TOKEN_CHANGED)

    def test_password_changed(self):
        datasource = DataSource.objects.create(
            name='Data Source 1',
            type='git',
            source_url='http://localhost/',
            parameters={
                'username': 'jeff',
                'password': 'password1',
            }
        )
        datasource.snapshot()

        # Change the password
        datasource.parameters['password'] = 'password2'

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertEqual(objectchange.prechange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.prechange_data['parameters']['password'], CENSOR_TOKEN)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], CENSOR_TOKEN_CHANGED)

    def test_password_removed_on_update(self):
        datasource = DataSource.objects.create(
            name='Data Source 1',
            type='git',
            source_url='http://localhost/',
            parameters={
                'username': 'jeff',
                'password': 'foobar123',
            }
        )
        datasource.snapshot()

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertEqual(objectchange.prechange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.prechange_data['parameters']['password'], CENSOR_TOKEN)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], CENSOR_TOKEN)

        # Remove the password
        datasource.parameters['password'] = ''

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertEqual(objectchange.prechange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.prechange_data['parameters']['password'], CENSOR_TOKEN)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'jeff')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], '')

    def test_password_not_modified(self):
        datasource = DataSource.objects.create(
            name='Data Source 1',
            type='git',
            source_url='http://localhost/',
            parameters={
                'username': 'username1',
                'password': 'foobar123',
            }
        )
        datasource.snapshot()

        # Remove the password
        datasource.parameters['username'] = 'username2'

        objectchange = datasource.to_objectchange(ObjectChangeActionChoices.ACTION_UPDATE)
        self.assertEqual(objectchange.prechange_data['parameters']['username'], 'username1')
        self.assertEqual(objectchange.prechange_data['parameters']['password'], CENSOR_TOKEN)
        self.assertEqual(objectchange.postchange_data['parameters']['username'], 'username2')
        self.assertEqual(objectchange.postchange_data['parameters']['password'], CENSOR_TOKEN)
@ -1,6 +1,16 @@
from django.utils import timezone
import logging
import uuid
from datetime import datetime

from utilities.testing import ViewTestCases, create_tags
from django.urls import reverse
from django.utils import timezone
from django_rq import get_queue
from django_rq.settings import QUEUES_MAP
from django_rq.workers import get_worker
from rq.job import Job as RQ_Job, JobStatus
from rq.registry import DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, StartedJobRegistry

from utilities.testing import TestCase, ViewTestCases, create_tags
from ..models import *

@ -87,3 +97,211 @@ class DataFileTestCase(
            ),
        )
        DataFile.objects.bulk_create(data_files)


class BackgroundTaskTestCase(TestCase):
    user_permissions = ()

    # Dummy worker functions
    @staticmethod
    def dummy_job_default():
        return "Job finished"

    @staticmethod
    def dummy_job_high():
        return "Job finished"

    @staticmethod
    def dummy_job_failing():
        raise Exception("Job failed")

    def setUp(self):
        super().setUp()
        self.user.is_staff = True
        self.user.is_active = True
        self.user.save()

        # Clear all queues prior to running each test
        get_queue('default').connection.flushall()
        get_queue('high').connection.flushall()
        get_queue('low').connection.flushall()

    def test_background_queue_list(self):
        url = reverse('core:background_queue_list')

        # Attempt to load view without permission
        self.user.is_staff = False
        self.user.save()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)

        # Load view with permission
        self.user.is_staff = True
        self.user.save()
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertIn('default', str(response.content))
        self.assertIn('high', str(response.content))
        self.assertIn('low', str(response.content))

    def test_background_tasks_list_default(self):
        queue = get_queue('default')
        queue.enqueue(self.dummy_job_default)
        queue_index = QUEUES_MAP['default']

        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_default', str(response.content))

    def test_background_tasks_list_high(self):
        queue = get_queue('high')
        queue.enqueue(self.dummy_job_high)
        queue_index = QUEUES_MAP['high']

        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'queued']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_high', str(response.content))

    def test_background_tasks_list_finished(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)
        queue_index = QUEUES_MAP['default']

        registry = FinishedJobRegistry(queue.name, queue.connection)
        registry.add(job, 2)
        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'finished']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_default', str(response.content))

    def test_background_tasks_list_failed(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)
        queue_index = QUEUES_MAP['default']

        registry = FailedJobRegistry(queue.name, queue.connection)
        registry.add(job, 2)
        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'failed']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_default', str(response.content))

    def test_background_tasks_scheduled(self):
        queue = get_queue('default')
        queue.enqueue_at(datetime.now(), self.dummy_job_default)
        queue_index = QUEUES_MAP['default']

        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'scheduled']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_default', str(response.content))

    def test_background_tasks_list_deferred(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)
        queue_index = QUEUES_MAP['default']

        registry = DeferredJobRegistry(queue.name, queue.connection)
        registry.add(job, 2)
        response = self.client.get(reverse('core:background_task_list', args=[queue_index, 'deferred']))
        self.assertEqual(response.status_code, 200)
        self.assertIn('BackgroundTaskTestCase.dummy_job_default', str(response.content))

    def test_background_task(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)

        response = self.client.get(reverse('core:background_task', args=[job.id]))
        self.assertEqual(response.status_code, 200)
        self.assertIn('Background Tasks', str(response.content))
        self.assertIn(str(job.id), str(response.content))
        self.assertIn('Callable', str(response.content))
        self.assertIn('Meta', str(response.content))
        self.assertIn('Keyword Arguments', str(response.content))

    def test_background_task_delete(self):
        queue = get_queue('default')
        job = queue.enqueue(self.dummy_job_default)

        response = self.client.post(reverse('core:background_task_delete', args=[job.id]), {'confirm': True})
        self.assertEqual(response.status_code, 302)
        self.assertFalse(RQ_Job.exists(job.id, connection=queue.connection))
        self.assertNotIn(job.id, queue.job_ids)

    def test_background_task_requeue(self):
        queue = get_queue('default')

        # Enqueue & run a job that will fail
        job = queue.enqueue(self.dummy_job_failing)
        worker = get_worker('default')
        worker.work(burst=True)
        self.assertTrue(job.is_failed)

        # Re-enqueue the failed job and check that its status has been reset
        response = self.client.get(reverse('core:background_task_requeue', args=[job.id]))
        self.assertEqual(response.status_code, 302)
        self.assertFalse(job.is_failed)

    def test_background_task_enqueue(self):
        queue = get_queue('default')

        # Enqueue some jobs that each depends on its predecessor
        job = previous_job = None
        for _ in range(0, 3):
            job = queue.enqueue(self.dummy_job_default, depends_on=previous_job)
            previous_job = job

        # Check that the last job to be enqueued has a status of deferred
        self.assertIsNotNone(job)
        self.assertEqual(job.get_status(), JobStatus.DEFERRED)
        self.assertIsNone(job.enqueued_at)

        # Force-enqueue the deferred job
        response = self.client.get(reverse('core:background_task_enqueue', args=[job.id]))
        self.assertEqual(response.status_code, 302)

        # Check that job's status is updated correctly
        job = queue.fetch_job(job.id)
        self.assertEqual(job.get_status(), JobStatus.QUEUED)
        self.assertIsNotNone(job.enqueued_at)

    def test_background_task_stop(self):
        queue = get_queue('default')

        worker = get_worker('default')
        job = queue.enqueue(self.dummy_job_default)
        worker.prepare_job_execution(job)

        self.assertEqual(job.get_status(), JobStatus.STARTED)

        # Stop those jobs using the view
        started_job_registry = StartedJobRegistry(queue.name, connection=queue.connection)
        self.assertEqual(len(started_job_registry), 1)
        response = self.client.get(reverse('core:background_task_stop', args=[job.id]))
        self.assertEqual(response.status_code, 302)
        worker.monitor_work_horse(job, queue)  # Sets the job as Failed and removes from Started
        self.assertEqual(len(started_job_registry), 0)

        canceled_job_registry = FailedJobRegistry(queue.name, connection=queue.connection)
        self.assertEqual(len(canceled_job_registry), 1)
        self.assertIn(job.id, canceled_job_registry)

    def test_worker_list(self):
        worker1 = get_worker('default', name=uuid.uuid4().hex)
        worker1.register_birth()

        worker2 = get_worker('high')
        worker2.register_birth()

        queue_index = QUEUES_MAP['default']
        response = self.client.get(reverse('core:worker_list', args=[queue_index]))
        self.assertEqual(response.status_code, 200)
        self.assertIn(str(worker1.name), str(response.content))
        self.assertNotIn(str(worker2.name), str(response.content))

    def test_worker(self):
        worker1 = get_worker('default', name=uuid.uuid4().hex)
        worker1.register_birth()

        response = self.client.get(reverse('core:worker', args=[worker1.name]))
        self.assertEqual(response.status_code, 200)
        self.assertIn(str(worker1.name), str(response.content))
        self.assertIn('Birth', str(response.content))
        self.assertIn('Total working time', str(response.content))
@ -25,6 +25,17 @@ urlpatterns = (
    path('jobs/<int:pk>/', views.JobView.as_view(), name='job'),
    path('jobs/<int:pk>/delete/', views.JobDeleteView.as_view(), name='job_delete'),

    # Background Tasks
    path('background-queues/', views.BackgroundQueueListView.as_view(), name='background_queue_list'),
    path('background-queues/<int:queue_index>/<str:status>/', views.BackgroundTaskListView.as_view(), name='background_task_list'),
    path('background-tasks/<str:job_id>/', views.BackgroundTaskView.as_view(), name='background_task'),
    path('background-tasks/<str:job_id>/delete/', views.BackgroundTaskDeleteView.as_view(), name='background_task_delete'),
    path('background-tasks/<str:job_id>/requeue/', views.BackgroundTaskRequeueView.as_view(), name='background_task_requeue'),
    path('background-tasks/<str:job_id>/enqueue/', views.BackgroundTaskEnqueueView.as_view(), name='background_task_enqueue'),
    path('background-tasks/<str:job_id>/stop/', views.BackgroundTaskStopView.as_view(), name='background_task_stop'),
    path('background-workers/<int:queue_index>/', views.WorkerListView.as_view(), name='worker_list'),
    path('background-workers/<str:key>/', views.WorkerView.as_view(), name='worker'),

    # Config revisions
    path('config-revisions/', views.ConfigRevisionListView.as_view(), name='configrevision_list'),
    path('config-revisions/add/', views.ConfigRevisionEditView.as_view(), name='configrevision_add'),
@ -35,4 +46,6 @@
    # Configuration
    path('config/', views.ConfigView.as_view(), name='config'),

    # Plugins
    path('plugins/', views.PluginListView.as_view(), name='plugin_list'),
)
@ -1,12 +1,30 @@
|
||||
from django.apps import apps
|
||||
from django.conf import settings
|
||||
from django.contrib import messages
|
||||
from django.contrib.auth.mixins import UserPassesTestMixin
|
||||
from django.core.cache import cache
|
||||
from django.http import HttpResponseForbidden
|
||||
from django.http import HttpResponseForbidden, Http404
|
||||
from django.shortcuts import get_object_or_404, redirect, render
|
||||
from django.urls import reverse
|
||||
from django.utils.translation import gettext_lazy as _
|
||||
from django.views.generic import View
|
||||
from django_rq.queues import get_queue_by_index, get_redis_connection
|
||||
from django_rq.settings import QUEUES_MAP, QUEUES_LIST
|
||||
from django_rq.utils import get_jobs, get_statistics, stop_jobs
|
||||
from rq import requeue_job
|
||||
from rq.exceptions import NoSuchJobError
|
||||
from rq.job import Job as RQ_Job, JobStatus as RQJobStatus
|
||||
from rq.registry import (
|
||||
DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, ScheduledJobRegistry, StartedJobRegistry,
|
||||
)
|
||||
from rq.worker import Worker
|
||||
from rq.worker_registration import clean_worker_registry
|
||||
|
||||
from netbox.config import get_config, PARAMS
|
||||
from netbox.views import generic
|
||||
from netbox.views.generic.base import BaseObjectView
|
||||
from netbox.views.generic.mixins import TableMixin
|
||||
from utilities.forms import ConfirmationForm
|
||||
from utilities.utils import count_related
|
||||
from utilities.views import ContentTypePermissionRequiredMixin, register_model_view
|
||||
from . import filtersets, forms, tables
|
||||
@ -232,3 +250,297 @@ class ConfigRevisionRestoreView(ContentTypePermissionRequiredMixin, View):
|
||||
messages.success(request, f"Restored configuration revision #{pk}")
|
||||
|
||||
return redirect(candidate_config.get_absolute_url())
|
||||
|
||||
|
||||
#
|
||||
# Background Tasks (RQ)
|
||||
#
|
||||
|
||||
class BaseRQView(UserPassesTestMixin, View):
|
||||
|
||||
def test_func(self):
|
||||
return self.request.user.is_staff
|
||||
|
||||
|
||||
class BackgroundQueueListView(TableMixin, BaseRQView):
|
||||
table = tables.BackgroundQueueTable
|
||||
|
||||
def get(self, request):
|
||||
data = get_statistics(run_maintenance_tasks=True)["queues"]
|
||||
table = self.get_table(data, request, bulk_actions=False)
|
||||
|
||||
return render(request, 'core/rq_queue_list.html', {
|
||||
'table': table,
|
||||
})
|
||||
|
||||
|
||||
class BackgroundTaskListView(TableMixin, BaseRQView):
|
||||
table = tables.BackgroundTaskTable
|
||||
|
||||
def get_table_data(self, request, queue, status):
|
||||
jobs = []
|
||||
|
||||
# Call get_jobs() to returned queued tasks
        if status == RQJobStatus.QUEUED:
            return queue.get_jobs()

        # For other statuses, determine the registry to list (or raise a 404 for invalid statuses)
        try:
            registry_cls = {
                RQJobStatus.STARTED: StartedJobRegistry,
                RQJobStatus.DEFERRED: DeferredJobRegistry,
                RQJobStatus.FINISHED: FinishedJobRegistry,
                RQJobStatus.FAILED: FailedJobRegistry,
                RQJobStatus.SCHEDULED: ScheduledJobRegistry,
            }[status]
        except KeyError:
            raise Http404
        registry = registry_cls(queue.name, queue.connection)

        job_ids = registry.get_job_ids()
        if status != RQJobStatus.DEFERRED:
            jobs = get_jobs(queue, job_ids, registry)
        else:
            # Deferred jobs require special handling
            for job_id in job_ids:
                try:
                    jobs.append(RQ_Job.fetch(job_id, connection=queue.connection, serializer=queue.serializer))
                except NoSuchJobError:
                    pass

        if jobs and status == RQJobStatus.SCHEDULED:
            for job in jobs:
                job.scheduled_at = registry.get_scheduled_time(job)

        return jobs

    def get(self, request, queue_index, status):
        queue = get_queue_by_index(queue_index)
        data = self.get_table_data(request, queue, status)
        table = self.get_table(data, request, False)

        # If this is an HTMX request, return only the rendered table HTML
        if request.htmx:
            return render(request, 'htmx/table.html', {
                'table': table,
            })

        return render(request, 'core/rq_task_list.html', {
            'table': table,
            'queue': queue,
            'status': status,
        })

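A standalone sketch of the status-to-registry lookup used by BackgroundTaskListView above, runnable with plain RQ outside Django; the queue name and Redis connection are assumptions for illustration, not values taken from NetBox.

# Illustrative sketch of mapping an RQ job status to its registry, mirroring
# BackgroundTaskListView.get_table_data() above. Queue name and Redis
# connection are assumptions for the example.
from redis import Redis
from rq import Queue
from rq.job import JobStatus
from rq.registry import (
    DeferredJobRegistry, FailedJobRegistry, FinishedJobRegistry, ScheduledJobRegistry, StartedJobRegistry,
)

REGISTRY_MAP = {
    JobStatus.STARTED: StartedJobRegistry,
    JobStatus.DEFERRED: DeferredJobRegistry,
    JobStatus.FINISHED: FinishedJobRegistry,
    JobStatus.FAILED: FailedJobRegistry,
    JobStatus.SCHEDULED: ScheduledJobRegistry,
}


def job_ids_for(queue, status):
    """Return the job IDs held by the queue or registry matching the given status."""
    if status == JobStatus.QUEUED:
        return queue.get_job_ids()
    registry = REGISTRY_MAP[status](queue.name, queue.connection)
    return registry.get_job_ids()


queue = Queue('default', connection=Redis())
print(job_ids_for(queue, JobStatus.FAILED))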
class BackgroundTaskView(BaseRQView):

    def get(self, request, job_id):
        # all the RQ queues should use the same connection
        config = QUEUES_LIST[0]
        try:
            job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
        except NoSuchJobError:
            raise Http404(_("Job {job_id} not found").format(job_id=job_id))

        queue_index = QUEUES_MAP[job.origin]
        queue = get_queue_by_index(queue_index)

        try:
            exc_info = job._exc_info
        except AttributeError:
            exc_info = None

        return render(request, 'core/rq_task.html', {
            'queue': queue,
            'job': job,
            'queue_index': queue_index,
            'dependency_id': job._dependency_id,
            'exc_info': exc_info,
        })

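The same fetch-by-ID pattern recurs in the delete, requeue, enqueue and stop views below; a condensed sketch of it using plain RQ follows. The Redis connection and the job ID are assumptions for illustration.

# Condensed sketch of the fetch-by-ID pattern shared by the task views above
# and below: load the job, then recover its queue from job.origin. The Redis
# connection and job ID are illustrative assumptions.
from redis import Redis
from rq import Queue
from rq.exceptions import NoSuchJobError
from rq.job import Job


def fetch_job_and_queue(job_id, connection):
    try:
        job = Job.fetch(job_id, connection=connection)
    except NoSuchJobError:
        return None, None
    # job.origin holds the name of the queue the job was enqueued on
    queue = Queue(job.origin, connection=connection)
    return job, queue


job, queue = fetch_job_and_queue('some-job-id', Redis())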
class BackgroundTaskDeleteView(BaseRQView):

    def get(self, request, job_id):
        if not request.htmx:
            return redirect(reverse('core:background_queue_list'))

        form = ConfirmationForm(initial=request.GET)

        return render(request, 'htmx/delete_form.html', {
            'object_type': 'background task',
            'object': job_id,
            'form': form,
            'form_url': reverse('core:background_task_delete', kwargs={'job_id': job_id})
        })

    def post(self, request, job_id):
        form = ConfirmationForm(request.POST)

        if form.is_valid():
            # all the RQ queues should use the same connection
            config = QUEUES_LIST[0]
            try:
                job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
            except NoSuchJobError:
                raise Http404(_("Job {job_id} not found").format(job_id=job_id))

            queue_index = QUEUES_MAP[job.origin]
            queue = get_queue_by_index(queue_index)

            # Remove job id from queue and delete the actual job
            queue.connection.lrem(queue.key, 0, job.id)
            job.delete()
            messages.success(request, f'Deleted job {job_id}')
        else:
            messages.error(request, f'Error deleting job: {form.errors[0]}')

        return redirect(reverse('core:background_queue_list'))


class BackgroundTaskRequeueView(BaseRQView):

    def get(self, request, job_id):
        # all the RQ queues should use the same connection
        config = QUEUES_LIST[0]
        try:
            job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
        except NoSuchJobError:
            raise Http404(_("Job {job_id} not found").format(job_id=job_id))

        queue_index = QUEUES_MAP[job.origin]
        queue = get_queue_by_index(queue_index)

        requeue_job(job_id, connection=queue.connection, serializer=queue.serializer)
        messages.success(request, f'You have successfully requeued: {job_id}')
        return redirect(reverse('core:background_task', args=[job_id]))


class BackgroundTaskEnqueueView(BaseRQView):

    def get(self, request, job_id):
        # all the RQ queues should use the same connection
        config = QUEUES_LIST[0]
        try:
            job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
        except NoSuchJobError:
            raise Http404(_("Job {job_id} not found").format(job_id=job_id))

        queue_index = QUEUES_MAP[job.origin]
        queue = get_queue_by_index(queue_index)

        try:
            # _enqueue_job is new in RQ 1.14, this is used to enqueue
            # job regardless of its dependencies
            queue._enqueue_job(job)
        except AttributeError:
            queue.enqueue_job(job)

        # Remove job from correct registry if needed
        if job.get_status() == RQJobStatus.DEFERRED:
            registry = DeferredJobRegistry(queue.name, queue.connection)
            registry.remove(job)
        elif job.get_status() == RQJobStatus.FINISHED:
            registry = FinishedJobRegistry(queue.name, queue.connection)
            registry.remove(job)
        elif job.get_status() == RQJobStatus.SCHEDULED:
            registry = ScheduledJobRegistry(queue.name, queue.connection)
            registry.remove(job)

        messages.success(request, f'You have successfully enqueued: {job_id}')
        return redirect(reverse('core:background_task', args=[job_id]))


class BackgroundTaskStopView(BaseRQView):

    def get(self, request, job_id):
        # all the RQ queues should use the same connection
        config = QUEUES_LIST[0]
        try:
            job = RQ_Job.fetch(job_id, connection=get_redis_connection(config['connection_config']),)
        except NoSuchJobError:
            raise Http404(_("Job {job_id} not found").format(job_id=job_id))

        queue_index = QUEUES_MAP[job.origin]
        queue = get_queue_by_index(queue_index)

        stopped, _ = stop_jobs(queue, job_id)
        if len(stopped) == 1:
            messages.success(request, f'You have successfully stopped {job_id}')
        else:
            messages.error(request, f'Failed to stop {job_id}')

        return redirect(reverse('core:background_task', args=[job_id]))

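For reference, the plain-RQ primitive for stopping a job that is currently executing is a stop command published to the worker; the sketch below shows it in isolation. The Redis connection and job ID are assumptions, and the call raises if the job does not exist or is not running.

# Minimal sketch of stopping a running RQ job with plain RQ. Connection and
# job ID are illustrative assumptions.
from redis import Redis
from rq.command import send_stop_job_command
from rq.exceptions import InvalidJobOperation, NoSuchJobError

connection = Redis()
try:
    send_stop_job_command(connection, 'some-job-id')  # signals the worker running this job to stop it
except (InvalidJobOperation, NoSuchJobError):
    # Raised when the job is not currently executing or does not exist
    pass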
class WorkerListView(TableMixin, BaseRQView):
    table = tables.WorkerTable

    def get_table_data(self, request, queue):
        clean_worker_registry(queue)
        all_workers = Worker.all(queue.connection)
        workers = [worker for worker in all_workers if queue.name in worker.queue_names()]
        return workers

    def get(self, request, queue_index):
        queue = get_queue_by_index(queue_index)
        data = self.get_table_data(request, queue)

        table = self.get_table(data, request, False)

        # If this is an HTMX request, return only the rendered table HTML
        if request.htmx:
            if request.htmx.target != 'object_list':
                table.embedded = True
                # Hide selection checkboxes
                if 'pk' in table.base_columns:
                    table.columns.hide('pk')
            return render(request, 'htmx/table.html', {
                'table': table,
                'queue': queue,
            })

        return render(request, 'core/rq_worker_list.html', {
            'table': table,
            'queue': queue,
        })


class WorkerView(BaseRQView):

    def get(self, request, key):
        # all the RQ queues should use the same connection
        config = QUEUES_LIST[0]
        worker = Worker.find_by_key('rq:worker:' + key, connection=get_redis_connection(config['connection_config']))
        # Convert microseconds to milliseconds
        worker.total_working_time = worker.total_working_time / 1000

        return render(request, 'core/rq_worker.html', {
            'worker': worker,
            'job': worker.get_current_job(),
            'total_working_time': worker.total_working_time * 1000,
        })


#
# Plugins
#

class PluginListView(UserPassesTestMixin, View):

    def test_func(self):
        return self.request.user.is_staff

    def get(self, request):
        plugins = [
            # Look up app config by package name
            apps.get_app_config(plugin.rsplit('.', 1)[-1]) for plugin in settings.PLUGINS
        ]
        table = tables.PluginTable(plugins, user=request.user)
        table.configure(request)

        return render(request, 'core/plugin_list.html', {
            'plugins': plugins,
            'active_tab': 'api-tokens',
            'table': table,
        })

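To make the plugin lookup above concrete, a small sketch of how a dotted package path from settings.PLUGINS maps to a registered Django AppConfig; the plugin name is a made-up example and the snippet assumes a configured Django project with that plugin installed.

# Illustrative sketch of the AppConfig lookup performed by PluginListView above.
# 'acme.netbox_example_plugin' is a made-up entry, not a real plugin.
from django.apps import apps

PLUGINS = ['acme.netbox_example_plugin']

for plugin in PLUGINS:
    label = plugin.rsplit('.', 1)[-1]         # 'netbox_example_plugin'
    app_config = apps.get_app_config(label)   # raises LookupError if the app is not installed
    print(app_config.verbose_name)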
@ -4,7 +4,7 @@ from django.utils.translation import gettext_lazy as _
from dcim.models import *
from extras.models import Tag
from netbox.forms.mixins import CustomFieldsMixin
from utilities.forms import BootstrapMixin, form_from_model
from utilities.forms import form_from_model
from utilities.forms.fields import DynamicModelMultipleChoiceField, ExpandableNameField
from .object_create import ComponentCreateForm

@ -26,7 +26,7 @@ __all__ = (
# Device components
#

class DeviceBulkAddComponentForm(BootstrapMixin, CustomFieldsMixin, ComponentCreateForm):
class DeviceBulkAddComponentForm(CustomFieldsMixin, ComponentCreateForm):
    pk = forms.ModelMultipleChoiceField(
        queryset=Device.objects.all(),
        widget=forms.MultipleHiddenInput()
@ -727,7 +727,7 @@ class PowerOutletImportForm(NetBoxModelImportForm):
        help_text=_('Local power port which feeds this outlet')
    )
    feed_leg = CSVChoiceField(
        label=_('Feed lag'),
        label=_('Feed leg'),
        choices=PowerOutletFeedLegChoices,
        required=False,
        help_text=_('Electrical phase (for three-phase circuits)')
@ -1359,6 +1359,10 @@ class VirtualDeviceContextImportForm(NetBoxModelImportForm):
        to_field_name='name',
        help_text='Assigned tenant'
    )
    status = CSVChoiceField(
        label=_('Status'),
        choices=VirtualDeviceContextStatusChoices,
    )

    class Meta:
        fields = [
@ -11,7 +11,7 @@ from extras.models import ConfigTemplate
from ipam.models import ASN, IPAddress, VLAN, VLANGroup, VRF
from netbox.forms import NetBoxModelForm
from tenancy.forms import TenancyForm
from utilities.forms import BootstrapMixin, add_blank_choice
from utilities.forms import add_blank_choice
from utilities.forms.fields import (
    CommentField, ContentTypeChoiceField, DynamicModelChoiceField, DynamicModelMultipleChoiceField, JSONField,
    NumericArrayField, SlugField,
@ -748,7 +748,7 @@ class DeviceVCMembershipForm(forms.ModelForm):
        return vc_position


class VCMemberSelectForm(BootstrapMixin, forms.Form):
class VCMemberSelectForm(forms.Form):
    device = DynamicModelChoiceField(
        label=_('Device'),
        queryset=Device.objects.all(),
@ -771,7 +771,7 @@ class VCMemberSelectForm(BootstrapMixin, forms.Form):
# Device component templates
#

class ComponentTemplateForm(BootstrapMixin, forms.ModelForm):
class ComponentTemplateForm(forms.ModelForm):
    device_type = DynamicModelChoiceField(
        label=_('Device type'),
        queryset=DeviceType.objects.all()
@ -1272,7 +1272,7 @@ class DeviceBayForm(DeviceComponentForm):
        ]


class PopulateDeviceBayForm(BootstrapMixin, forms.Form):
class PopulateDeviceBayForm(forms.Form):
    installed_device = forms.ModelChoiceField(
        queryset=Device.objects.all(),
        label=_('Child Device'),
@ -3,7 +3,6 @@ from django.utils.translation import gettext_lazy as _

from dcim.choices import InterfacePoEModeChoices, InterfacePoETypeChoices, InterfaceTypeChoices, PortTypeChoices
from dcim.models import *
from utilities.forms import BootstrapMixin
from wireless.choices import WirelessRoleChoices

__all__ = (
@ -24,11 +23,7 @@ __all__ = (
# Component template import forms
#

class ComponentTemplateImportForm(BootstrapMixin, forms.ModelForm):
    pass


class ConsolePortTemplateImportForm(ComponentTemplateImportForm):
class ConsolePortTemplateImportForm(forms.ModelForm):

    class Meta:
        model = ConsolePortTemplate
@ -37,7 +32,7 @@ class ConsolePortTemplateImportForm(ComponentTemplateImportForm):
        ]


class ConsoleServerPortTemplateImportForm(ComponentTemplateImportForm):
class ConsoleServerPortTemplateImportForm(forms.ModelForm):

    class Meta:
        model = ConsoleServerPortTemplate
@ -46,7 +41,7 @@ class ConsoleServerPortTemplateImportForm(ComponentTemplateImportForm):
        ]


class PowerPortTemplateImportForm(ComponentTemplateImportForm):
class PowerPortTemplateImportForm(forms.ModelForm):

    class Meta:
        model = PowerPortTemplate
@ -55,7 +50,7 @@ class PowerPortTemplateImportForm(ComponentTemplateImportForm):
        ]


class PowerOutletTemplateImportForm(ComponentTemplateImportForm):
class PowerOutletTemplateImportForm(forms.ModelForm):
    power_port = forms.ModelChoiceField(
        label=_('Power port'),
        queryset=PowerPortTemplate.objects.all(),
@ -84,7 +79,7 @@ class PowerOutletTemplateImportForm(ComponentTemplateImportForm):
        return module_type


class InterfaceTemplateImportForm(ComponentTemplateImportForm):
class InterfaceTemplateImportForm(forms.ModelForm):
    type = forms.ChoiceField(
        label=_('Type'),
        choices=InterfaceTypeChoices.CHOICES
@ -113,7 +108,7 @@ class InterfaceTemplateImportForm(ComponentTemplateImportForm):
        ]


class FrontPortTemplateImportForm(ComponentTemplateImportForm):
class FrontPortTemplateImportForm(forms.ModelForm):
    type = forms.ChoiceField(
        label=_('Type'),
        choices=PortTypeChoices.CHOICES
@ -145,7 +140,7 @@ class FrontPortTemplateImportForm(ComponentTemplateImportForm):
        ]


class RearPortTemplateImportForm(ComponentTemplateImportForm):
class RearPortTemplateImportForm(forms.ModelForm):
    type = forms.ChoiceField(
        label=_('Type'),
        choices=PortTypeChoices.CHOICES
@ -158,7 +153,7 @@ class RearPortTemplateImportForm(ComponentTemplateImportForm):
        ]


class ModuleBayTemplateImportForm(ComponentTemplateImportForm):
class ModuleBayTemplateImportForm(forms.ModelForm):

    class Meta:
        model = ModuleBayTemplate
@ -167,7 +162,7 @@ class ModuleBayTemplateImportForm(ComponentTemplateImportForm):
        ]


class DeviceBayTemplateImportForm(ComponentTemplateImportForm):
class DeviceBayTemplateImportForm(forms.ModelForm):

    class Meta:
        model = DeviceBayTemplate
@ -176,7 +171,7 @@ class DeviceBayTemplateImportForm(ComponentTemplateImportForm):
        ]


class InventoryItemTemplateImportForm(ComponentTemplateImportForm):
class InventoryItemTemplateImportForm(forms.ModelForm):
    parent = forms.ModelChoiceField(
        label=_('Parent'),
        queryset=InventoryItemTemplate.objects.all(),
@ -1,6 +1,6 @@
import graphene
from circuits.graphql.types import CircuitTerminationType
from circuits.models import CircuitTermination
from circuits.graphql.types import CircuitTerminationType, ProviderNetworkType
from circuits.models import CircuitTermination, ProviderNetwork
from dcim.graphql.types import (
    ConsolePortTemplateType,
    ConsolePortType,
@ -167,3 +167,42 @@ class InventoryItemComponentType(graphene.Union):
            return PowerPortType
        if type(instance) is RearPort:
            return RearPortType


class ConnectedEndpointType(graphene.Union):
    class Meta:
        types = (
            CircuitTerminationType,
            ConsolePortType,
            ConsoleServerPortType,
            FrontPortType,
            InterfaceType,
            PowerFeedType,
            PowerOutletType,
            PowerPortType,
            ProviderNetworkType,
            RearPortType,
        )

    @classmethod
    def resolve_type(cls, instance, info):
        if type(instance) is CircuitTermination:
            return CircuitTerminationType
        if type(instance) is ConsolePort:
            return ConsolePortType
        if type(instance) is ConsoleServerPort:
            return ConsoleServerPortType
        if type(instance) is FrontPort:
            return FrontPortType
        if type(instance) is Interface:
            return InterfaceType
        if type(instance) is PowerFeed:
            return PowerFeedType
        if type(instance) is PowerOutlet:
            return PowerOutletType
        if type(instance) is PowerPort:
            return PowerPortType
        if type(instance) is ProviderNetwork:
            return ProviderNetworkType
        if type(instance) is RearPort:
            return RearPortType
@ -13,7 +13,7 @@ class CabledObjectMixin:


class PathEndpointMixin:
    connected_endpoints = graphene.List('dcim.graphql.gfk_mixins.LinkPeerType')
    connected_endpoints = graphene.List('dcim.graphql.gfk_mixins.ConnectedEndpointType')

    def resolve_connected_endpoints(self, info):
        # Handle empty values
@ -34,7 +34,7 @@ class Command(BaseCommand):
        Draw a simple progress bar 20 increments wide illustrating the specified percentage.
        """
        bar_size = int(percentage / 5)
        self.stdout.write(f"\r [{'#' * bar_size}{' ' * (20-bar_size)}] {int(percentage)}%", ending='')
        self.stdout.write(f"\r [{'#' * bar_size}{' ' * (20 - bar_size)}] {int(percentage)}%", ending='')

    def handle(self, *model_names, **options):
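A quick worked example of the bar arithmetic touched above: each 5% fills one of the 20 slots, so 45% renders nine '#' characters followed by eleven spaces. The value is illustrative.

# Worked example of the progress-bar arithmetic above (percentage is illustrative).
percentage = 45
bar_size = int(percentage / 5)  # 9 filled increments out of 20
print(f"[{'#' * bar_size}{' ' * (20 - bar_size)}] {int(percentage)}%")
# [#########           ] 45%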
@ -1,21 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0130_sitegroup'),
    ]

    operations = [
        migrations.AlterField(
            model_name='consoleport',
            name='speed',
            field=models.PositiveIntegerField(blank=True, null=True),
        ),
        migrations.AlterField(
            model_name='consoleserverport',
            name='speed',
            field=models.PositiveIntegerField(blank=True, null=True),
        ),
    ]
1194
netbox/dcim/migrations/0131_squashed_0159.py
Normal file
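The 1,194-line squashed migration itself is collapsed in this listing, so its contents are not shown. Purely as background on what such a file looks like, here is an illustrative (not actual) sketch of a Django squashed migration; the replaces entries reuse names of migrations deleted in this diff.

# Illustrative shape of a squashed Django migration; this is NOT the actual
# content of 0131_squashed_0159.py, which is collapsed above.
from django.db import migrations


class Migration(migrations.Migration):

    replaces = [
        ('dcim', '0131_consoleport_speed'),
        ('dcim', '0132_cable_length'),
        # ... every replaced migration through 0159
    ]

    dependencies = [
        ('dcim', '0130_sitegroup'),
    ]

    operations = [
        # The flattened operations of the replaced migrations go here.
    ]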
@ -1,16 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0131_consoleport_speed'),
    ]

    operations = [
        migrations.AlterField(
            model_name='cable',
            name='length',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=8, null=True),
        ),
    ]
@ -1,32 +0,0 @@
from django.db import migrations
import utilities.fields


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0132_cable_length'),
    ]

    operations = [
        migrations.AddField(
            model_name='frontport',
            name='color',
            field=utilities.fields.ColorField(blank=True, max_length=6),
        ),
        migrations.AddField(
            model_name='frontporttemplate',
            name='color',
            field=utilities.fields.ColorField(blank=True, max_length=6),
        ),
        migrations.AddField(
            model_name='rearport',
            name='color',
            field=utilities.fields.ColorField(blank=True, max_length=6),
        ),
        migrations.AddField(
            model_name='rearporttemplate',
            name='color',
            field=utilities.fields.ColorField(blank=True, max_length=6),
        ),
    ]
@ -1,23 +0,0 @@
import dcim.fields
import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0133_port_colors'),
    ]

    operations = [
        migrations.AddField(
            model_name='interface',
            name='wwn',
            field=dcim.fields.WWNField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='interface',
            name='bridge',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bridge_interfaces', to='dcim.interface'),
        ),
    ]
@ -1,23 +0,0 @@
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('tenancy', '0002_tenant_ordering'),
        ('dcim', '0134_interface_wwn_bridge'),
    ]

    operations = [
        migrations.AddField(
            model_name='location',
            name='tenant',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='locations', to='tenancy.tenant'),
        ),
        migrations.AddField(
            model_name='cable',
            name='tenant',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='cables', to='tenancy.tenant'),
        ),
    ]
@ -1,21 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0135_tenancy_extensions'),
    ]

    operations = [
        migrations.AddField(
            model_name='devicetype',
            name='airflow',
            field=models.CharField(blank=True, max_length=50),
        ),
        migrations.AddField(
            model_name='device',
            name='airflow',
            field=models.CharField(blank=True, max_length=50),
        ),
    ]
@ -1,83 +0,0 @@
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0136_device_airflow'),
    ]

    operations = [
        migrations.AlterField(
            model_name='region',
            name='name',
            field=models.CharField(max_length=100),
        ),
        migrations.AlterField(
            model_name='region',
            name='slug',
            field=models.SlugField(max_length=100),
        ),
        migrations.AlterField(
            model_name='sitegroup',
            name='name',
            field=models.CharField(max_length=100),
        ),
        migrations.AlterField(
            model_name='sitegroup',
            name='slug',
            field=models.SlugField(max_length=100),
        ),
        migrations.AlterUniqueTogether(
            name='location',
            unique_together=set(),
        ),
        migrations.AddConstraint(
            model_name='location',
            constraint=models.UniqueConstraint(fields=('site', 'parent', 'name'), name='dcim_location_parent_name'),
        ),
        migrations.AddConstraint(
            model_name='location',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('site', 'name'), name='dcim_location_name'),
        ),
        migrations.AddConstraint(
            model_name='location',
            constraint=models.UniqueConstraint(fields=('site', 'parent', 'slug'), name='dcim_location_parent_slug'),
        ),
        migrations.AddConstraint(
            model_name='location',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('site', 'slug'), name='dcim_location_slug'),
        ),
        migrations.AddConstraint(
            model_name='region',
            constraint=models.UniqueConstraint(fields=('parent', 'name'), name='dcim_region_parent_name'),
        ),
        migrations.AddConstraint(
            model_name='region',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('name',), name='dcim_region_name'),
        ),
        migrations.AddConstraint(
            model_name='region',
            constraint=models.UniqueConstraint(fields=('parent', 'slug'), name='dcim_region_parent_slug'),
        ),
        migrations.AddConstraint(
            model_name='region',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('slug',), name='dcim_region_slug'),
        ),
        migrations.AddConstraint(
            model_name='sitegroup',
            constraint=models.UniqueConstraint(fields=('parent', 'name'), name='dcim_sitegroup_parent_name'),
        ),
        migrations.AddConstraint(
            model_name='sitegroup',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('name',), name='dcim_sitegroup_name'),
        ),
        migrations.AddConstraint(
            model_name='sitegroup',
            constraint=models.UniqueConstraint(fields=('parent', 'slug'), name='dcim_sitegroup_parent_slug'),
        ),
        migrations.AddConstraint(
            model_name='sitegroup',
            constraint=models.UniqueConstraint(condition=models.Q(('parent', None)), fields=('slug',), name='dcim_sitegroup_slug'),
        ),
    ]
@ -1,50 +0,0 @@
# Generated by Django 3.2.8 on 2021-10-21 14:50

from django.db import migrations
import taggit.managers


class Migration(migrations.Migration):

    dependencies = [
        ('extras', '0062_clear_secrets_changelog'),
        ('dcim', '0137_relax_uniqueness_constraints'),
    ]

    operations = [
        migrations.AddField(
            model_name='devicerole',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='location',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='manufacturer',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='platform',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='rackrole',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='region',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
        migrations.AddField(
            model_name='sitegroup',
            name='tags',
            field=taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag'),
        ),
    ]
@ -1,91 +0,0 @@
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0138_extend_tag_support'),
    ]

    operations = [
        migrations.RenameField(
            model_name='consoleport',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='consoleport',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='consoleserverport',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='consoleserverport',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='frontport',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='frontport',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='interface',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='interface',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='powerfeed',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='powerfeed',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='poweroutlet',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='poweroutlet',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='powerport',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='powerport',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
        migrations.RenameField(
            model_name='rearport',
            old_name='_cable_peer_id',
            new_name='_link_peer_id',
        ),
        migrations.RenameField(
            model_name='rearport',
            old_name='_cable_peer_type',
            new_name='_link_peer_type',
        ),
    ]
@ -1,49 +0,0 @@
from django.db import migrations, models
import django.core.validators
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0139_rename_cable_peer'),
        ('wireless', '0001_wireless'),
    ]

    operations = [
        migrations.AddField(
            model_name='interface',
            name='rf_role',
            field=models.CharField(blank=True, max_length=30),
        ),
        migrations.AddField(
            model_name='interface',
            name='rf_channel',
            field=models.CharField(blank=True, max_length=50),
        ),
        migrations.AddField(
            model_name='interface',
            name='rf_channel_frequency',
            field=models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True),
        ),
        migrations.AddField(
            model_name='interface',
            name='rf_channel_width',
            field=models.DecimalField(blank=True, decimal_places=3, max_digits=7, null=True),
        ),
        migrations.AddField(
            model_name='interface',
            name='tx_power',
            field=models.PositiveSmallIntegerField(blank=True, null=True, validators=[django.core.validators.MaxValueValidator(127)]),
        ),
        migrations.AddField(
            model_name='interface',
            name='wireless_lans',
            field=models.ManyToManyField(blank=True, related_name='interfaces', to='wireless.WirelessLAN'),
        ),
        migrations.AddField(
            model_name='interface',
            name='wireless_link',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wireless.wirelesslink'),
        ),
    ]
@ -1,19 +0,0 @@
# Generated by Django 3.2.8 on 2021-11-02 16:16

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('ipam', '0053_asn_model'),
        ('dcim', '0140_wireless'),
    ]

    operations = [
        migrations.AddField(
            model_name='site',
            name='asns',
            field=models.ManyToManyField(blank=True, related_name='sites', to='ipam.ASN'),
        ),
    ]
@ -1,29 +0,0 @@
from django.db import migrations

OLD_VALUE = '128gfc-sfp28'
NEW_VALUE = '128gfc-qsfp28'


def correct_type(apps, schema_editor):
    """
    Correct TYPE_128GFC_QSFP28 interface type.
    """
    Interface = apps.get_model('dcim', 'Interface')
    InterfaceTemplate = apps.get_model('dcim', 'InterfaceTemplate')

    for model in (Interface, InterfaceTemplate):
        model.objects.filter(type=OLD_VALUE).update(type=NEW_VALUE)


class Migration(migrations.Migration):

    dependencies = [
        ('dcim', '0141_asn_model'),
    ]

    operations = [
        migrations.RunPython(
            code=correct_type,
            reverse_code=migrations.RunPython.noop
        ),
    ]