diff --git a/.cookiecutter.json b/.cookiecutter.json
index cf6b786ec..779e3c36e 100644
--- a/.cookiecutter.json
+++ b/.cookiecutter.json
@@ -21,16 +21,13 @@
"_drift_manager": {
"template": "https://github.com/nautobot/cookiecutter-nautobot-app.git",
"template_dir": "nautobot-app",
- "template_ref": "refs/tags/nautobot-app-v2.4.2",
+ "template_ref": "nautobot-app-v2.5.1",
"cookie_dir": "",
"branch_prefix": "drift-manager",
"pull_request_strategy": "create",
- "post_actions": [
- "ruff",
- "poetry"
- ],
+ "post_actions": [],
"draft": false,
- "baked_commit_ref": "cc1a95931455ed52b0a30b22257e5c8fbdcd212d"
+ "baked_commit_ref": "bef141f8bdbc25beec0f7138010447b8e8e9fee4"
}
}
}
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 8d0594cba..5c20df5aa 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,6 +1,8 @@
---
name: 🐛 Bug Report
about: Report a reproducible bug in the current release of nautobot-golden-config
+labels:
+ - "type: bug"
---
### Environment
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 3506acdeb..31f9f647e 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,7 +1,8 @@
---
name: ✨ Feature Request
about: Propose a new feature or enhancement
-
+labels:
+ - "type: feature"
---
### Environment
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 7cf1631ed..3013ce766 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -27,6 +27,7 @@
Please feel free to update todos to keep track of your own notes for WIP PRs.
-->
- [ ] Explanation of Change(s)
+- [ ] Added change log fragment(s) (for more information see [the documentation](https://docs.nautobot.com/projects/core/en/stable/development/core/#creating-changelog-fragments))
- [ ] Attached Screenshots, Payload Example
- [ ] Unit, Integration Tests
- [ ] Documentation Updates (when adding/changing features)
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 8d8559c4b..da6d73ff4 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -82,12 +82,26 @@ jobs:
poetry-version: "1.8.5"
- name: "Linting: yamllint"
run: "poetry run invoke yamllint"
+ markdownlint:
+ runs-on: "ubuntu-22.04"
+ env:
+ INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True"
+ steps:
+ - name: "Check out repository code"
+ uses: "actions/checkout@v4"
+ - name: "Setup environment"
+ uses: "networktocode/gh-action-setup-poetry-environment@v6"
+ with:
+ poetry-version: "1.8.5"
+ - name: "Linting: markdownlint"
+ run: "poetry run invoke markdownlint"
check-in-docker:
needs:
- "ruff-format"
- "ruff-lint"
- "poetry"
- "yamllint"
+ - "markdownlint"
runs-on: "ubuntu-22.04"
strategy:
fail-fast: true
@@ -110,9 +124,9 @@ jobs:
run: "poetry run invoke lock --constrain-nautobot-ver --constrain-python-ver"
- name: "Set up Docker Buildx"
id: "buildx"
- uses: "docker/setup-buildx-action@v3"
+ uses: "docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2" # v3.10.0
- name: "Build"
- uses: "docker/build-push-action@v5"
+ uses: "docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25" # v5.4.0
with:
builder: "${{ steps.buildx.outputs.name }}"
context: "./"
@@ -126,8 +140,6 @@ jobs:
NAUTOBOT_VER=${{ matrix.nautobot-version }}
PYTHON_VER=${{ matrix.python-version }}
CI=true
- - name: "Copy credentials"
- run: "cp development/creds.example.env development/creds.env"
- name: "Linting: pylint"
run: "poetry run invoke pylint"
- name: "Checking: App Config"
@@ -140,7 +152,7 @@ jobs:
strategy:
fail-fast: true
matrix:
- python-version: ["3.9", "3.12"]
+ python-version: ["3.9"] # 3.12 stable is tested in unittest_report stage.
db-backend: ["postgresql"]
nautobot-version: ["stable"]
include:
@@ -167,9 +179,60 @@ jobs:
run: "poetry run invoke lock --constrain-nautobot-ver --constrain-python-ver"
- name: "Set up Docker Buildx"
id: "buildx"
- uses: "docker/setup-buildx-action@v3"
+ uses: "docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2" # v3.10.0
+ - name: "Build"
+ uses: "docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25" # v5.4.0
+ with:
+ builder: "${{ steps.buildx.outputs.name }}"
+ context: "./"
+ push: false
+ load: true
+ tags: "${{ env.APP_NAME }}/nautobot:${{ matrix.nautobot-version }}-py${{ matrix.python-version }}"
+ file: "./development/Dockerfile"
+ cache-from: "type=gha,scope=${{ matrix.nautobot-version }}-py${{ matrix.python-version }}"
+ cache-to: "type=gha,scope=${{ matrix.nautobot-version }}-py${{ matrix.python-version }}"
+ build-args: |
+ NAUTOBOT_VER=${{ matrix.nautobot-version }}
+ PYTHON_VER=${{ matrix.python-version }}
+ CI=true
+ - name: "Use Mysql invoke settings when needed"
+ run: "cp invoke.mysql.yml invoke.yml"
+ if: "matrix.db-backend == 'mysql'"
+ - name: "Run Tests"
+ run: "poetry run invoke unittest"
+
+ unittest_report:
+ needs:
+ - "check-in-docker"
+ strategy:
+ fail-fast: true
+ matrix:
+ python-version: ["3.12"]
+ db-backend: ["postgresql"]
+ nautobot-version: ["stable"]
+ runs-on: "ubuntu-22.04"
+ permissions:
+ pull-requests: "write"
+ contents: "write"
+ env:
+ INVOKE_NAUTOBOT_GOLDEN_CONFIG_PYTHON_VER: "${{ matrix.python-version }}"
+ INVOKE_NAUTOBOT_GOLDEN_CONFIG_NAUTOBOT_VER: "${{ matrix.nautobot-version }}"
+ steps:
+ - name: "Check out repository code"
+ uses: "actions/checkout@v4"
+ - name: "Setup environment"
+ uses: "networktocode/gh-action-setup-poetry-environment@v6"
+ with:
+ poetry-version: "1.8.5"
+ - name: "Constrain Nautobot version and regenerate lock file"
+ env:
+ INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "true"
+ run: "poetry run invoke lock --constrain-nautobot-ver --constrain-python-ver"
+ - name: "Set up Docker Buildx"
+ id: "buildx"
+ uses: "docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2" # v3.10.0
- name: "Build"
- uses: "docker/build-push-action@v5"
+ uses: "docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25" # v5.4.0
with:
builder: "${{ steps.buildx.outputs.name }}"
context: "./"
@@ -189,7 +252,28 @@ jobs:
run: "cp invoke.mysql.yml invoke.yml"
if: "matrix.db-backend == 'mysql'"
- name: "Run Tests"
- run: "poetry run invoke unittest"
+ run: "poetry run invoke unittest --coverage"
+ - name: "Generate Coverage Comment"
+ if: >
+ contains(fromJson('["develop","ltm-1.6"]'), github.base_ref) &&
+ (github.head_ref != 'main') && (!startsWith(github.head_ref, 'release'))
+ id: "coverage_comment"
+ uses: "py-cov-action/python-coverage-comment-action@d1ff8fbb5ff80feedb3faa0f6d7b424f417ad0e1" # v3.30
+ with:
+ GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+ MINIMUM_GREEN: 90
+ MINIMUM_ORANGE: 80
+ ANNOTATE_MISSING_LINES: true
+ ANNOTATION_TYPE: "warning"
+ - name: "Store Pull Request comment to be posted"
+ if: >
+ contains(fromJson('["develop","ltm-1.6"]'), github.base_ref) &&
+ (github.head_ref != 'main') && (!startsWith(github.head_ref, 'release'))
+ uses: "actions/upload-artifact@v4"
+ with:
+ name: "python-coverage-comment-action"
+ path: "python-coverage-comment-action.txt"
+
changelog:
if: >
contains(fromJson('["develop","ltm-1.6"]'), github.base_ref) &&
@@ -211,41 +295,39 @@ jobs:
publish_gh:
needs:
- "unittest"
+ - "unittest_report"
name: "Publish to GitHub"
runs-on: "ubuntu-22.04"
if: "startsWith(github.ref, 'refs/tags/v')"
env:
INVOKE_NAUTOBOT_GOLDEN_CONFIG_LOCAL: "True"
+ permissions:
+ contents: "write"
steps:
- name: "Check out repository code"
uses: "actions/checkout@v4"
- - name: "Set up Python"
- uses: "actions/setup-python@v5"
+ - name: "Setup environment"
+ uses: "networktocode/gh-action-setup-poetry-environment@v6"
with:
+ poetry-version: "1.8.5"
python-version: "3.12"
- - name: "Install Python Packages"
- run: "pip install poetry"
+ poetry-install-options: "--no-root"
- name: "Set env"
run: "echo RELEASE_VERSION=${GITHUB_REF:10} >> $GITHUB_ENV"
- name: "Run Poetry Version"
run: "poetry version $RELEASE_VERSION"
- - name: "Install Dependencies (needed for mkdocs)"
- run: "poetry install --no-root"
- name: "Build Documentation"
run: "poetry run invoke build-and-check-docs"
- name: "Run Poetry Build"
run: "poetry build"
- name: "Upload binaries to release"
- uses: "svenstaro/upload-release-action@v2"
- with:
- repo_token: "${{ secrets.GH_NAUTOBOT_BOT_TOKEN }}"
- file: "dist/*"
- tag: "${{ github.ref }}"
- overwrite: true
- file_glob: true
+ run: "gh release upload ${{ github.ref_name }} dist/*.{tar.gz,whl}"
+ env:
+ GH_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
publish_pypi:
needs:
- "unittest"
+ - "unittest_report"
name: "Push Package to PyPI"
runs-on: "ubuntu-22.04"
if: "startsWith(github.ref, 'refs/tags/v')"
@@ -254,24 +336,22 @@ jobs:
steps:
- name: "Check out repository code"
uses: "actions/checkout@v4"
- - name: "Set up Python"
- uses: "actions/setup-python@v5"
+ - name: "Setup environment"
+ uses: "networktocode/gh-action-setup-poetry-environment@v6"
with:
+ poetry-version: "1.8.5"
python-version: "3.12"
- - name: "Install Python Packages"
- run: "pip install poetry"
+ poetry-install-options: "--no-root"
- name: "Set env"
run: "echo RELEASE_VERSION=${GITHUB_REF:10} >> $GITHUB_ENV"
- name: "Run Poetry Version"
run: "poetry version $RELEASE_VERSION"
- - name: "Install Dependencies (needed for mkdocs)"
- run: "poetry install --no-root"
- name: "Build Documentation"
run: "poetry run invoke build-and-check-docs"
- name: "Run Poetry Build"
run: "poetry build"
- name: "Push to PyPI"
- uses: "pypa/gh-action-pypi-publish@release/v1"
+ uses: "pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc" # v1.12.4
with:
user: "__token__"
password: "${{ secrets.PYPI_API_TOKEN }}"
@@ -292,7 +372,7 @@ jobs:
# ENVs cannot be used directly in job.if. This is a workaround to check
# if SLACK_WEBHOOK_URL is present.
if: "env.SLACK_WEBHOOK_URL != ''"
- uses: "slackapi/slack-github-action@v1"
+ uses: "slackapi/slack-github-action@fcfb566f8b0aab22203f066d80ca1d7e4b5d05b3" # v1.27.1
with:
payload: |
{
diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml
new file mode 100644
index 000000000..089283094
--- /dev/null
+++ b/.github/workflows/coverage.yml
@@ -0,0 +1,36 @@
+---
+name: "Post coverage comment"
+
+on: # yamllint disable-line rule:truthy rule:comments
+ workflow_run:
+ workflows: ["CI"]
+ types:
+ - "completed"
+
+jobs:
+ test:
+ name: "Post coverage comment to PR"
+ runs-on: "ubuntu-latest"
+ if: github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' # yamllint disable-line rule:quoted-strings rule:comments
+ permissions:
+ # Gives the action the necessary permissions for publishing new
+ # comments in pull requests.
+ pull-requests: "write"
+ # Gives the action the necessary permissions for editing existing
+ # comments (to avoid publishing multiple comments in the same PR)
+ contents: "write" # yamllint disable-line rule:indentation rule:comments
+ # Gives the action the necessary permissions for looking up the
+ # workflow that launched this workflow, and download the related
+ # artifact that contains the comment to be published
+ actions: "read"
+ steps:
+ # DO NOT run actions/checkout here, for security reasons
+ # For details, refer to https://securitylab.github.com/research/github-actions-preventing-pwn-requests/
+ - name: "Post comment"
+      uses: "py-cov-action/python-coverage-comment-action@d1ff8fbb5ff80feedb3faa0f6d7b424f417ad0e1" # v3.30
+ with:
+ GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}"
+ GITHUB_PR_RUN_ID: "${{ github.event.workflow_run.id }}"
+ # Update those if you changed the default values:
+ # COMMENT_ARTIFACT_NAME: python-coverage-comment-action
+ # COMMENT_FILENAME: python-coverage-comment-action.txt
diff --git a/.gitignore b/.gitignore
index c812ea13e..0a1803598 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,9 @@ nautobot_golden_config/transposer.py
golden-config/
nautobot.sql
+# Avoid accidental commit of analytics template override generated via ./docs/insert-analytics.sh
+docs/assets/overrides/main.html
+
# Ansible Retry Files
*.retry
@@ -60,6 +63,7 @@ coverage.xml
*.py,cover
.hypothesis/
.pytest_cache/
+lcov.info
# Translations
*.mo
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 1aa8ad220..e18fecfb8 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -11,6 +11,9 @@ build:
os: "ubuntu-22.04"
tools:
python: "3.11"
+ jobs:
+ pre_build:
+ - "/bin/bash ./docs/insert-analytics.sh"
mkdocs:
configuration: "mkdocs.yml"
diff --git a/changes/+dynamic-form.changed b/changes/+dynamic-form.changed
new file mode 100644
index 000000000..dbe85d0bb
--- /dev/null
+++ b/changes/+dynamic-form.changed
@@ -0,0 +1 @@
+Changed the Golden Config Setting form to use dynamic dropdowns for the related models.
\ No newline at end of file
diff --git a/changes/+nautobot-app-v2-4-2.housekeeping b/changes/+nautobot-app-v2-4-2.housekeeping
deleted file mode 100644
index c3274daf2..000000000
--- a/changes/+nautobot-app-v2-4-2.housekeeping
+++ /dev/null
@@ -1 +0,0 @@
-Rebaked from the cookie `nautobot-app-v2.4.2`.
diff --git a/changes/+nautobot-app-v2.5.0.housekeeping b/changes/+nautobot-app-v2.5.0.housekeeping
new file mode 100644
index 000000000..c6d9601ed
--- /dev/null
+++ b/changes/+nautobot-app-v2.5.0.housekeeping
@@ -0,0 +1 @@
+Rebaked from the cookie `nautobot-app-v2.5.0`.
diff --git a/changes/+nautobot-app-v2.5.1.housekeeping b/changes/+nautobot-app-v2.5.1.housekeeping
new file mode 100644
index 000000000..2211b4807
--- /dev/null
+++ b/changes/+nautobot-app-v2.5.1.housekeeping
@@ -0,0 +1 @@
+Rebaked from the cookie `nautobot-app-v2.5.1`.
diff --git a/changes/870.added b/changes/870.added
new file mode 100644
index 000000000..f24f54f66
--- /dev/null
+++ b/changes/870.added
@@ -0,0 +1 @@
+Added the ability to sync remediation settings from a Git repository.
\ No newline at end of file
diff --git a/changes/881.fixed b/changes/881.fixed
deleted file mode 100644
index f59794407..000000000
--- a/changes/881.fixed
+++ /dev/null
@@ -1 +0,0 @@
-Fixed generate intended config view to use Golden Config `sot_agg_transposer`.
diff --git a/changes/886.changed b/changes/886.changed
deleted file mode 100644
index 048c0e679..000000000
--- a/changes/886.changed
+++ /dev/null
@@ -1 +0,0 @@
-Updated the generate intended config api to perform a shallow git clone.
diff --git a/changes/898.added b/changes/898.added
new file mode 100644
index 000000000..eb133ad49
--- /dev/null
+++ b/changes/898.added
@@ -0,0 +1 @@
+Added tests to ensure that mkdocs release notes are properly set.
\ No newline at end of file
diff --git a/changes/899.fixed b/changes/899.fixed
new file mode 100644
index 000000000..21a308f00
--- /dev/null
+++ b/changes/899.fixed
@@ -0,0 +1 @@
+Fixed template include errors during intended config rendering due to incorrect Jinja root path.
\ No newline at end of file
diff --git a/changes/902.changed b/changes/902.changed
new file mode 100644
index 000000000..ae718bc23
--- /dev/null
+++ b/changes/902.changed
@@ -0,0 +1 @@
+Upgraded from hier_config v2.2.2 to v3.2.2, which is a breaking change from the hier_config side. The hier_config implementation was updated to reflect hier_config v3.
\ No newline at end of file
diff --git a/changes/921.added b/changes/921.added
new file mode 100644
index 000000000..8f5a33b41
--- /dev/null
+++ b/changes/921.added
@@ -0,0 +1 @@
+Add testing for the two issues with mkdocs versus markdown rendering and associated fixes.
\ No newline at end of file
diff --git a/changes/940.fixed b/changes/940.fixed
new file mode 100644
index 000000000..b1e66fa12
--- /dev/null
+++ b/changes/940.fixed
@@ -0,0 +1 @@
+Implement a more performant ORM/DB query in-place of the existing for loop logic for device_to_settings_map.
diff --git a/changes/951.fixed b/changes/951.fixed
new file mode 100644
index 000000000..331d5dbc6
--- /dev/null
+++ b/changes/951.fixed
@@ -0,0 +1 @@
+Fixed potential duplicates in data migration by adding a check to validate time uniqueness.
\ No newline at end of file
diff --git a/changes/955.added b/changes/955.added
new file mode 100644
index 000000000..7568dd594
--- /dev/null
+++ b/changes/955.added
@@ -0,0 +1 @@
+Added an option to fail the Config Plan Deployment Job if any tasks for any device fails.
\ No newline at end of file
diff --git a/changes/961.housekeeping b/changes/961.housekeeping
new file mode 100644
index 000000000..2d3fa81db
--- /dev/null
+++ b/changes/961.housekeeping
@@ -0,0 +1 @@
+Migrate Golden Config Setting, Config Plan, Golden Config models to UI Component Framework.
diff --git a/changes/969.documentation b/changes/969.documentation
new file mode 100644
index 000000000..d29c88f59
--- /dev/null
+++ b/changes/969.documentation
@@ -0,0 +1 @@
+Added Analytics GTM template override only to the public ReadTheDocs build.
diff --git a/changes/970.housekeeping b/changes/970.housekeeping
new file mode 100644
index 000000000..59b48abde
--- /dev/null
+++ b/changes/970.housekeeping
@@ -0,0 +1 @@
+Regenerate lock file with poetry 1.8 version.
diff --git a/development/Dockerfile b/development/Dockerfile
index b391dcb55..c59cc81ae 100644
--- a/development/Dockerfile
+++ b/development/Dockerfile
@@ -9,8 +9,8 @@
# Accepts a desired Nautobot version as build argument, default to 2.4.2
ARG NAUTOBOT_VER="2.4.2"
-# Accepts a desired Python version as build argument, default to 3.12
-ARG PYTHON_VER="3.12"
+# Accepts a desired Python version as build argument, default to 3.11
+ARG PYTHON_VER="3.11"
# Retrieve published development image of Nautobot base which should include most CI dependencies
FROM ghcr.io/nautobot/nautobot-dev:${NAUTOBOT_VER}-py${PYTHON_VER}
diff --git a/development/development.env b/development/development.env
index 175bf91af..d7b57daf4 100644
--- a/development/development.env
+++ b/development/development.env
@@ -38,6 +38,9 @@ MYSQL_USER=${NAUTOBOT_DB_USER}
MYSQL_DATABASE=${NAUTOBOT_DB_NAME}
MYSQL_ROOT_HOST=%
+# Use a less verbose log level for Celery Beat
+NAUTOBOT_BEAT_LOG_LEVEL=INFO
+
# Golden Configuration specific
PER_FEATURE_WIDTH=13
PER_FEATURE_HEIGHT=4
diff --git a/development/docker-compose.base.yml b/development/docker-compose.base.yml
index 0b6ac1a28..338bc27ab 100644
--- a/development/docker-compose.base.yml
+++ b/development/docker-compose.base.yml
@@ -41,8 +41,8 @@ services:
beat:
entrypoint:
- "sh"
- - "-c" # this is to evaluate the $NAUTOBOT_LOG_LEVEL from the env
- - "nautobot-server celery beat -l $$NAUTOBOT_LOG_LEVEL" ## $$ because of docker-compose
+ - "-c" # this is to evaluate the $NAUTOBOT_BEAT_LOG_LEVEL from the env
+ - "nautobot-server celery beat -l $$NAUTOBOT_BEAT_LOG_LEVEL" ## $$ because of docker-compose
depends_on:
nautobot:
condition: "service_healthy"
diff --git a/development/docker-compose.dev.yml b/development/docker-compose.dev.yml
index 2a1987278..c2e1e0179 100644
--- a/development/docker-compose.dev.yml
+++ b/development/docker-compose.dev.yml
@@ -15,7 +15,7 @@ services:
# have all of your projects in the same directory. Uncomment out as required.
# - "../../netutils/netutils:/usr/local/lib/python3.11/site-packages/netutils"
# - "../../nornir-nautobot/nornir_nautobot:/usr/local/lib/python3.11/site-packages/nornir_nautobot"
- # - "../../nautobot-plugin-nornir/nautobot_plugin_nornir:/usr/local/lib/python3.11/site-packages/nautobot_plugin_nornir"
+ - "../../nautobot-plugin-nornir/nautobot_plugin_nornir:/usr/local/lib/python3.11/site-packages/nautobot_plugin_nornir"
# - "../../nautobot/nautobot:/usr/local/lib/python3.11/site-packages/nautobot"
healthcheck:
@@ -39,7 +39,7 @@ services:
- "sh"
- "-c" # this is to evaluate the $NAUTOBOT_LOG_LEVEL from the env
- "watchmedo auto-restart --directory './' --pattern '*.py' --recursive -- nautobot-server celery worker -l $$NAUTOBOT_LOG_LEVEL --events" ## $$ because of docker-compose
- # - "watchmedo auto-restart --directory './' --directory '/usr/local/lib/python3.11/site-packages/' --pattern '*.py' --recursive -- nautobot-server celery worker -l $$NAUTOBOT_LOG_LEVEL --events" ## $$ because of docker-compose
+ - "watchmedo auto-restart --directory './' --directory '/usr/local/lib/python3.11/site-packages/' --pattern '*.py' --recursive -- nautobot-server celery worker -l $$NAUTOBOT_LOG_LEVEL --events" ## $$ because of docker-compose
volumes:
- "./nautobot_config.py:/opt/nautobot/nautobot_config.py"
- "../:/source"
@@ -47,15 +47,15 @@ services:
# have all of your projects in the same directory. Uncomment out as required.
# - "../../netutils/netutils:/usr/local/lib/python3.11/site-packages/netutils"
# - "../../nornir-nautobot/nornir_nautobot:/usr/local/lib/python3.11/site-packages/nornir_nautobot"
- # - "../../nautobot-plugin-nornir/nautobot_plugin_nornir:/usr/local/lib/python3.11/site-packages/nautobot_plugin_nornir"
+ - "../../nautobot-plugin-nornir/nautobot_plugin_nornir:/usr/local/lib/python3.11/site-packages/nautobot_plugin_nornir"
# - "../../nautobot/nautobot:/usr/local/lib/python3.11/site-packages/nautobot"
healthcheck:
test: ["CMD", "true"] # Due to layering, disable: true won't work. Instead, change the test
beat:
entrypoint:
- "sh"
- - "-c" # this is to evaluate the $NAUTOBOT_LOG_LEVEL from the env
- - "watchmedo auto-restart --directory './' --pattern '*.py' --recursive -- nautobot-server celery beat -l $$NAUTOBOT_LOG_LEVEL" ## $$ because of docker-compose
+ - "-c" # this is to evaluate the $NAUTOBOT_BEAT_LOG_LEVEL from the env
+ - "watchmedo auto-restart --directory './' --pattern '*.py' --recursive -- nautobot-server celery beat -l $$NAUTOBOT_BEAT_LOG_LEVEL" ## $$ because of docker-compose
volumes:
- "./nautobot_config.py:/opt/nautobot/nautobot_config.py"
- "../:/source"
diff --git a/development/docker-compose.mysql.yml b/development/docker-compose.mysql.yml
index 6751d7207..77190d89e 100644
--- a/development/docker-compose.mysql.yml
+++ b/development/docker-compose.mysql.yml
@@ -22,7 +22,7 @@ services:
- "creds.env"
- "development_mysql.env"
db:
- image: "mysql:8"
+ image: "mysql:lts"
command:
- "--max_connections=1000"
env_file:
diff --git a/development/docker-compose.postgres.yml b/development/docker-compose.postgres.yml
index 8d96fdba9..12b8f85d3 100644
--- a/development/docker-compose.postgres.yml
+++ b/development/docker-compose.postgres.yml
@@ -4,7 +4,7 @@ services:
environment:
- "NAUTOBOT_DB_ENGINE=django.db.backends.postgresql"
db:
- image: "postgres:13-alpine"
+ image: "postgres:17-alpine"
command:
- "-c"
- "max_connections=200"
diff --git a/development/nautobot_config.py b/development/nautobot_config.py
index 45b81386b..2a43f5cb1 100644
--- a/development/nautobot_config.py
+++ b/development/nautobot_config.py
@@ -135,6 +135,7 @@
},
},
},
+ "use_config_context": {"connection_options": True},
},
"nautobot_golden_config": {
"per_feature_bar_width": float(os.environ.get("PER_FEATURE_BAR_WIDTH", 0.15)),
diff --git a/development/towncrier_template.j2 b/development/towncrier_template.j2
index 8b0f7f872..61903f817 100644
--- a/development/towncrier_template.j2
+++ b/development/towncrier_template.j2
@@ -1,9 +1,7 @@
# v{{ versiondata.version.split(".")[:2] | join(".") }} Release Notes
-This document describes all new features and changes in the release. The format is based on [Keep a
-Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic
-Versioning](https://semver.org/spec/v2.0.0.html).
+This document describes all new features and changes in the release. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## Release Overview
diff --git a/docs/admin/install.md b/docs/admin/install.md
index 10549ed14..fde63f572 100644
--- a/docs/admin/install.md
+++ b/docs/admin/install.md
@@ -99,9 +99,9 @@ The app behavior can be controlled with the following list of settings. All of t
You can easily manage these feature toggles in the UI:
- 1. Navigate to **Golden Config > Golden Config Settings**.
- 2. Select **Default Settings** (or your chosen Setting if multiple exist).
- 3. Click **Edit** and adjust the relevant toggles (e.g., **Enable Backup**, **Enable Compliance**, **Enable Intended**, **Enable Plan**, **Enable Deploy**).
+1. Navigate to **Golden Config > Golden Config Settings**.
+2. Select **Default Settings** (or your chosen Setting if multiple exist).
+3. Click **Edit** and adjust the relevant toggles (e.g., **Enable Backup**, **Enable Compliance**, **Enable Intended**, **Enable Plan**, **Enable Deploy**).

diff --git a/docs/admin/release_notes/version_0.9.md b/docs/admin/release_notes/version_0.9.md
index 36470912d..7149e7768 100644
--- a/docs/admin/release_notes/version_0.9.md
+++ b/docs/admin/release_notes/version_0.9.md
@@ -90,9 +90,9 @@
### Added
- - [#81](https://github.com/nautobot/nautobot-app-golden-config/issues/85) Added changelog
- - [#74](https://github.com/nautobot/nautobot-app-golden-config/issues/74) Added hover text to icon on app home screen
- - [#84](https://github.com/nautobot/nautobot-app-golden-config/issues/84) Added auto-deploy to PyPI
+- [#81](https://github.com/nautobot/nautobot-app-golden-config/issues/85) Added changelog
+- [#74](https://github.com/nautobot/nautobot-app-golden-config/issues/74) Added hover text to icon on app home screen
+- [#84](https://github.com/nautobot/nautobot-app-golden-config/issues/84) Added auto-deploy to PyPI
### Fixed
diff --git a/docs/admin/release_notes/version_2.0.md b/docs/admin/release_notes/version_2.0.md
index c59f0ee38..2916ba1eb 100755
--- a/docs/admin/release_notes/version_2.0.md
+++ b/docs/admin/release_notes/version_2.0.md
@@ -26,6 +26,7 @@
### Housekeeping
- [#741](https://github.com/nautobot/nautobot-app-golden-config/issues/741) - Re-baked from the latest template.
+
## v2.0.3 2024-03
### Added
diff --git a/docs/admin/release_notes/version_2.4.md b/docs/admin/release_notes/version_2.4.md
index 84e89525b..013b583be 100644
--- a/docs/admin/release_notes/version_2.4.md
+++ b/docs/admin/release_notes/version_2.4.md
@@ -16,6 +16,29 @@ Versioning](https://semver.org/spec/v2.0.0.html).
- Updated nautobot-plugin-nornir dependency minimum to 2.2.1.
- Changed multiple detail views to use new component UI functionality.
+## [v2.4.1 (2025-05-09)](https://github.com/nautobot/nautobot-app-golden-config/releases/tag/v2.4.1)
+
+### Added
+
+- [#898](https://github.com/nautobot/nautobot-app-golden-config/issues/898) - Added tests to ensure that mkdocs release notes are properly set.
+- [#921](https://github.com/nautobot/nautobot-app-golden-config/issues/921) - Add testing for the two issues with mkdocs versus markdown rendering and associated fixes.
+
+### Changed
+
+- [#886](https://github.com/nautobot/nautobot-app-golden-config/issues/886) - Updated the generate intended config api to perform a shallow git clone.
+
+### Fixed
+
+- [#794](https://github.com/nautobot/nautobot-app-golden-config/issues/794) - Fixed Git Repo Sync issue when multiple platforms use the same network_driver.
+- [#881](https://github.com/nautobot/nautobot-app-golden-config/issues/881) - Fixed generate intended config view to use Golden Config `sot_agg_transposer`.
+- [#887](https://github.com/nautobot/nautobot-app-golden-config/issues/887) - Fixed copy button not hidden on diff tab in generate intended config tool.
+- [#924](https://github.com/nautobot/nautobot-app-golden-config/issues/924) - Fixed copy button not working on detail views.
+- [#906](https://github.com/nautobot/nautobot-app-golden-config/issues/906) - Fix missing post processing enable check in deploy task.
+
+### Housekeeping
+
+- Rebaked from the cookie `nautobot-app-v2.4.2`.
+
## [v2.4.0 (2025-02-20)](https://github.com/nautobot/nautobot-app-golden-config/releases/tag/v2.4.0)
diff --git a/docs/admin/troubleshooting/E3032.md b/docs/admin/troubleshooting/E3032.md
index 8b3a9c169..883383ed8 100644
--- a/docs/admin/troubleshooting/E3032.md
+++ b/docs/admin/troubleshooting/E3032.md
@@ -2,21 +2,16 @@
## Message emitted:
-`E3032: Disabled Golden Config setting.`
+`E3032: Reference to {yaml_attr_name}: {yaml_attr_value}, is not unique. Please use platform_name key instead.`
## Description:
-This error occurs when a required feature is disabled in Golden Config, preventing the task from executing properly.
-
-The features affected by this error include:
-* Configuration backup
-* Intended configuration
-* Compliance execution
+Syncing Golden Config properties using the Datasource feature, but using a non-unique key.
## Troubleshooting:
-Review the exception message to identify the cause of the failure.
+Check the YAML file for the `platform_slug` or `platform_network_driver` key. If it is not unique, then you need to use the `platform_name` key instead.
## Recommendation:
-Enable the feature in the Golden Configuration Settings to execute the task.
+Migrate the YAML file keys from `platform_slug` or `platform_network_driver` to `platform_name`.
diff --git a/docs/admin/troubleshooting/E3033.md b/docs/admin/troubleshooting/E3033.md
index 1b17c7e9a..7735e0e33 100644
--- a/docs/admin/troubleshooting/E3033.md
+++ b/docs/admin/troubleshooting/E3033.md
@@ -2,16 +2,16 @@
## Message emitted:
-`E3033: Missing required settings.`
+`E3033: Reference to {yaml_attr_name}: {yaml_attr_value} is not available.`
## Description:
-This error occurs when a feature is missing in Golden Config, but is required to execute the task. Currently, this applies to the intended feature, which requires an SoT Agg (GraphQL) query to render templates from device data.
+Searching for the platform key in the YAML file and it cannot be found in the database.
## Troubleshooting:
-Review the exception message to determine the cause of the failure.
+The platform key used in the YAML file cannot be found.
## Recommendation:
-Double-check the intended and template configurations to ensure all required fields are populated and enabled.
+Check the YAML file for misspellings or incorrect values, if using `platform_slug` or `platform_network_driver`, then migrate to `platform_name` key instead.
diff --git a/docs/admin/troubleshooting/E3038.md b/docs/admin/troubleshooting/E3038.md
new file mode 100644
index 000000000..165ce83b4
--- /dev/null
+++ b/docs/admin/troubleshooting/E3038.md
@@ -0,0 +1,23 @@
+# E3038 Details
+
+## Message emitted:
+
+`E3038: Disabled Golden Config setting.`
+
+## Description:
+
+This error occurs when a required feature is disabled in Golden Config, preventing the task from executing properly.
+
+The features affected by this error include:
+
+* Configuration backup
+* Intended configuration
+* Compliance execution
+
+## Troubleshooting:
+
+Review the exception message to identify the cause of the failure.
+
+## Recommendation:
+
+Enable the feature in the Golden Configuration Settings to execute the task.
\ No newline at end of file
diff --git a/docs/admin/troubleshooting/E3039.md b/docs/admin/troubleshooting/E3039.md
new file mode 100644
index 000000000..14431facf
--- /dev/null
+++ b/docs/admin/troubleshooting/E3039.md
@@ -0,0 +1,17 @@
+# E3039 Details
+
+## Message emitted:
+
+`E3039: Missing required settings.`
+
+## Description:
+
+This error occurs when a feature is missing in Golden Config, but is required to execute the task. Currently, this applies to the intended feature, which requires an SoT Agg (GraphQL) query to render templates from device data.
+
+## Troubleshooting:
+
+Review the exception message to determine the cause of the failure.
+
+## Recommendation:
+
+Double-check the intended and template configurations to ensure all required fields are populated and enabled.
diff --git a/docs/admin/troubleshooting/E3XXX.md b/docs/admin/troubleshooting/E3XXX.md
new file mode 100755
index 000000000..08859bbf4
--- /dev/null
+++ b/docs/admin/troubleshooting/E3XXX.md
@@ -0,0 +1,17 @@
+# E3XXX Details
+
+## Message emitted:
+
+`E3XXX: Un-Registered Error Code used.`
+
+## Description:
+
+This means a code snippet was calling get_error_code() with an error code that is not registered.
+
+## Troubleshooting:
+
+Find the error code in the traceback, and search for it in the codebase.
+
+## Recommendation:
+
+Add the error code to the `error_codes.py` file.
\ No newline at end of file
diff --git a/docs/assets/overrides/partials/copyright.html b/docs/assets/overrides/partials/copyright.html
index b92cf5e3f..d213bc5bf 100644
--- a/docs/assets/overrides/partials/copyright.html
+++ b/docs/assets/overrides/partials/copyright.html
@@ -10,7 +10,7 @@
Made with Material for MkDocs
- | Join Nautobot Slack
+ | Join #Nautobot on the Network to Code Slack
{% if config.extra.ntc_sponsor == true %}
| Sponsored by
diff --git a/docs/dev/dev_environment.md b/docs/dev/dev_environment.md
index 05b174235..f50f44362 100644
--- a/docs/dev/dev_environment.md
+++ b/docs/dev/dev_environment.md
@@ -38,7 +38,6 @@ Once you have Poetry and Docker installed you can run the following commands (in
```shell
poetry shell
poetry install
-cp development/creds.example.env development/creds.env
invoke build
invoke start
```
@@ -125,6 +124,7 @@ Each command can be executed with `invoke `. All commands support the a
```
ruff Run ruff to perform code formatting and/or linting.
pylint Run pylint code analysis.
+ markdownlint Run pymarkdown linting.
tests Run all tests for this app.
unittest Run Django unit tests for the app.
```
@@ -159,7 +159,7 @@ This project is set up with a number of **Invoke** tasks consumed as simple CLI
### Copy the credentials file for Nautobot
-First, you need to create the `development/creds.env` file - it stores a bunch of private information such as passwords and tokens for your local Nautobot install. You can make a copy of the `development/creds.example.env` and modify it to suit you.
+First, you may create/overwrite the `development/creds.env` file - it stores a bunch of private information such as passwords and tokens for your local Nautobot install. You can make a copy of the `development/creds.example.env` and modify it to suit you.
```shell
cp development/creds.example.env development/creds.env
diff --git a/docs/index.md b/docs/index.md
index f32fd72b7..1c72e14f7 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,6 +1,8 @@
---
hide:
- - navigation
+
+ - navigation
+
---
--8<-- "README.md"
diff --git a/docs/insert-analytics.sh b/docs/insert-analytics.sh
new file mode 100755
index 000000000..b77ac1124
--- /dev/null
+++ b/docs/insert-analytics.sh
@@ -0,0 +1,36 @@
+#! /bin/bash
+
+# !!! IMPORTANT - READ THIS FIRST !!!
+# This script is to be used ONLY when building PUBLIC documentation hosted
+# on ReadTheDocs! It's executed in the pipeline defined in .readthedocs.yaml
+# and inserts a Google Tag Manager tracking code for web analytics.
+# Do NOT reuse this to insert analytics code in your development environments
+# or into the release process (i.e. built packages).
+
+cat > ./docs/assets/overrides/main.html << EOL
+
+
+
+{{ super() }}
+{% endblock %}
+
+{% block header %}
+
+
+
+
+{{ super() }}
+{% endblock %}
+EOL
diff --git a/docs/user/app_feature_compliancejson.md b/docs/user/app_feature_compliancejson.md
index 2ce4ba4be..21cc91d58 100644
--- a/docs/user/app_feature_compliancejson.md
+++ b/docs/user/app_feature_compliancejson.md
@@ -3,6 +3,7 @@
JSON based compliance provides a mechanism to understand device configurations stored in JSON format and compare between them.
## Caveats
+
- The `Compliance Rule` need to be defined as JSON `config-type`.
- When creating `Compliance Rules` with the config type of JSON, the `config to match` field is used to specify individual top-level JSON keys, or it can be left blank to compare all keys.
- Uses Git repositories for backup and intended configurations.
@@ -11,15 +12,15 @@ JSON based compliance provides a mechanism to understand device configurations s
1. First, the compliance feature needs to be created, the feature name needs to be unique for a Platform and can not be shared between CLI and JSON types.
-
+ 
-3. Link the feature that was just created to a rule definition.
+1. Link the feature that was just created to a rule definition.
-
+ 
-4. Now that the definitions are created and the rule is created and mapped to a Platform, execute compliance job under Jobs.
+1. Now that the definitions are created and the rule is created and mapped to a Platform, execute compliance job under Jobs.
-5. Verify the compliance results
+1. Verify the compliance results
In the navigation menu: `Golden Config -> Configuration Compliance`.
diff --git a/docs/user/app_feature_intended.md b/docs/user/app_feature_intended.md
index 38a8c5233..f0c72a17c 100644
--- a/docs/user/app_feature_intended.md
+++ b/docs/user/app_feature_intended.md
@@ -126,15 +126,6 @@ In order to generate the intended configurations at least two repositories are n
3. The [intended_path_template](./app_use_cases.md#application-settings) configuration parameter.
4. The [jinja_path_template](./app_use_cases.md#application-settings) configuration parameter.
-### Intended Repository Matching Rule
-
-!!! note
- Only use a Intended Repository Matching Rule if you have **more than one** intended repository. It is **not needed** if you only have one repository. The operator is expected to ensure that every device results in a successful matching rule (or that device will fail to render a config).
-
-The `intended_match_rule` setting allows you to match a given `Device` Django ORM object to a backup Git repository. This field should contain a Jinja2-formatted template. The app populates the variables in the Jinja2 template via the GraphQL query configured on the app.
-
-This is exactly the same concept as described in [Backup Repository Matching Rule](./app_feature_backup.md#repository-matching-rule), and better described there.
-
## Data
The data provided while rendering the configuration of a device is described in the [SoT Aggregation](./app_feature_sotagg.md) overview.
diff --git a/docs/user/app_feature_remediation.md b/docs/user/app_feature_remediation.md
index 4cde54f00..bc6e6c6fb 100644
--- a/docs/user/app_feature_remediation.md
+++ b/docs/user/app_feature_remediation.md
@@ -46,7 +46,7 @@ Default Hier config options can be used or customized on a per platform basis, a

For additional information on how to customize Hier Config options, please refer to the Hierarchical Configuration development guide:
-https://netdevops.io/hier_config/advanced-topics/
+https://hier-config.readthedocs.io/en/latest/
### Custom Config Remediation Type
diff --git a/docs/user/app_getting_started.md b/docs/user/app_getting_started.md
index 7aafc6b93..faeb56bda 100644
--- a/docs/user/app_getting_started.md
+++ b/docs/user/app_getting_started.md
@@ -59,14 +59,14 @@ Follow the steps below to get up and running for the intended configuration elem
2. Add any git repositories that will be used to house the intended configurations.
1. In the UI `Extensibility -> Git Repositories`. Click Add.
- 2. Populate the Git Repository data for the intended. [Git Settings](./app_feature_backup.md#git-settings)
+ 2. Populate the Git Repository data for the intended. [Git Settings](./app_use_cases.md#git-settings)
3. Make sure to select the **Provides** called `intended configs`.
4. Click Create.
3. Add the git repository that will be used to house the Jinja2 templates.
1. In the UI `Extensibility -> Git Repositories`. Click Add.
- 2. Populate the Git Repository data for the jinja2 templates. [Git Settings](./app_feature_backup.md#git-settings)
+ 2. Populate the Git Repository data for the jinja2 templates. [Git Settings](./app_use_cases.md#git-settings)
3. Make sure to select the **Provides** called `jinja templates`.
4. Click Create.
@@ -75,7 +75,7 @@ Follow the steps below to get up and running for the intended configuration elem
1. Navigate to `Golden Config -> Settings` under the Golden Configuration Section.
2. Create new or select one of the existing `Settings` objects
3. Fill out the Intended Repository. (The dropdown will show the repository that was just created.)
- 4. Fill out Intended Path Template. Typically `{{obj.location.name|slugify}}/{{obj.name}}.cfg`, see [Setting Details](./app_feature_backup.md#application-settings)
+ 4. Fill out Intended Path Template. Typically `{{obj.location.name|slugify}}/{{obj.name}}.cfg`, see [Setting Details](./app_use_cases.md#application-settings)
5. Fill out Jinja Repository. (The dropdown will show the repository that was just created.)
6. Fill out Jinja Path Template. Typically `{{obj.platform.network_driver}}.j2`.
@@ -181,6 +181,7 @@ Golden Config properties include: Compliance Features, Compliance Rules, Config
│ ├── compliance_rules
│ ├── config_removes
│ ├── config_replaces
+│ ├── remediation_settings
```
The files within these folders can follow any naming pattern or nested folder structure, all of them will be recursively taken into account. So it's up to you to decide how to you prefer to organize these files (within the previously stated directory structure):
@@ -200,9 +201,12 @@ The files within these folders can follow any naming pattern or nested folder st
│ ├── config_replaces
│ │ ├── cisco_ios.yml
│ │ └── juniper_junos.yml
+│ ├── remediation_settings
+│ │ ├── cisco_ios.yml
+│ │ └── juniper_junos.yml
```
-The `YAML` files will contain all the attributes necessary to identify an object (for instance, a `ComplianceRule` is identified by the `feature_slug` and the `platform_network_driver` together) and the other attributes (the ones that are not used to identify the object). For example:
+The `YAML` files will contain all the attributes necessary to identify an object (for instance, a `ComplianceRule` is identified by the `feature_slug` and the `platform_name` together) and the other attributes (the ones that are not used to identify the object). For example:
`compliance_features` example:
@@ -218,7 +222,7 @@ The `YAML` files will contain all the attributes necessary to identify an object
```yaml
---
- feature_slug: "aaa"
- platform_network_driver: "Cisco IOS"
+ platform_name: "Cisco IOS"
config_ordered: true
match_config: |
aaa
@@ -233,7 +237,7 @@ The `YAML` files will contain all the attributes necessary to identify an object
```yaml
---
-- platform_network_driver: "Cisco IOS"
+- platform_name: "Cisco IOS"
name: "Build config"
regex: '^Building\s+configuration.*\n'
```
@@ -243,12 +247,28 @@ The `YAML` files will contain all the attributes necessary to identify an object
```yaml
---
- name: "username"
- platform_network_driver: "Cisco IOS"
+ platform_name: "Cisco IOS"
description: "username"
regex: '(username\s+\S+\spassword\s+5\s+)\S+(\s+role\s+\S+)'
replace: '\1\2'
```
+`remediation_settings` example:
+
+```yaml
+---
+- platform_name: "Cisco IOS"
+ remediation_type: "hierconfig"
+ remediation_options:
+ style: ios
+    negation: "no"
+ idempotent_commands:
+ - lineage:
+ - startswith: vlan
+ - startswith: name
+...
+```
+
CustomField data can be added using the `_custom_field_data` attribute, that takes a dictionary mapping custom_field names to their values:
```yaml
@@ -262,7 +282,7 @@ CustomField data can be added using the `_custom_field_data` attribute, that tak
```
!!! note
- For Foreign Key references to `ComplianceFeature` and `Platform` we use the keywords `feature_slug` and `platform_network_driver` respectively.
+ For Foreign Key references to `ComplianceFeature` and `Platform` we use the keywords `feature_slug` and `platform_name` respectively.
1. Add the Git repository that will be used to sync Git properties.
diff --git a/docs/user/faq.md b/docs/user/faq.md
index 80da33366..70a569d99 100644
--- a/docs/user/faq.md
+++ b/docs/user/faq.md
@@ -115,9 +115,9 @@ With the original Git Data Source implementation, passwords were stored in the d
Applying ipam.0006_ipaddress_nat_outside_list... OK
Applying ipam.0007_add_natural_indexing... OK
Applying nautobot_golden_config.0006_multi_repo_support_temp_field...Traceback (most recent call last):
- File "/usr/local/lib/python3.12/site-packages/django/db/models/fields/related_descriptors.py", line 173, in __get__
+ File "/usr/local/lib/python3.11/site-packages/django/db/models/fields/related_descriptors.py", line 173, in __get__
rel_obj = self.field.get_cached_value(instance)
- File "/usr/local/lib/python3.12/site-packages/django/db/models/fields/mixins.py", line 15, in get_cached_value
+ File "/usr/local/lib/python3.11/site-packages/django/db/models/fields/mixins.py", line 15, in get_cached_value
return instance._state.fields_cache[cache_name]
KeyError: 'backup_repository'
@@ -125,7 +125,7 @@ During handling of the above exception, another exception occurred:
Traceback (most recent call last):
- File "/usr/local/lib/python3.12/site-packages/cryptography/hazmat/backends/openssl/hmac.py", line 85, in verify
+ File "/usr/local/lib/python3.11/site-packages/cryptography/hazmat/backends/openssl/hmac.py", line 85, in verify
raise InvalidSignature("Signature did not match digest.")
cryptography.exceptions.InvalidSignature: Signature did not match digest.
@@ -133,7 +133,7 @@ During handling of the above exception, another exception occurred:
Traceback (most recent call last):
- File "/usr/local/lib/python3.12/site-packages/django_cryptography/core/signing.py", line 239, in unsign
+ File "/usr/local/lib/python3.11/site-packages/django_cryptography/core/signing.py", line 239, in unsign
raise BadSignature(
django.core.signing.BadSignature: Signature "b'A9QMEEeCk2+tAc6naf2KDiZBvACNWGNHGMPJ/SHOYY8=\n'" does not match
ERROR: 1
diff --git a/error_code_template.j2 b/error_code_template.j2
new file mode 100644
index 000000000..be34e16ea
--- /dev/null
+++ b/error_code_template.j2
@@ -0,0 +1,17 @@
+# {{ error_code }} Details
+
+## Message emitted:
+
+`{{ error_code }}: {{ error.error_message }}`
+
+## Description:
+
+{{ error.description }}
+
+## Troubleshooting:
+
+{{ error.troubleshooting }}
+
+## Recommendation:
+
+{{ error.recommendation }}
\ No newline at end of file
diff --git a/get_troubleshooting_docs.py b/get_troubleshooting_docs.py
new file mode 100644
index 000000000..53fd1e53c
--- /dev/null
+++ b/get_troubleshooting_docs.py
@@ -0,0 +1,36 @@
+"""Generate troubleshooting documentation for each error code."""
+
+import os
+
+from jinja2 import Environment, FileSystemLoader
+
+from nautobot_golden_config.error_codes import ERROR_CODES
+
+
+def generate_files_from_template(template_file):
+ """
+ Generates files from a Jinja2 template in the same directory as the script.
+
+ Args:
+ template_file (str): Name of the Jinja2 template file.
+
+    One Markdown file per error code in ERROR_CODES is written to the
+    docs/admin/troubleshooting/ directory.
+ """
+ data = {}
+ template_dir = os.path.dirname(os.path.abspath(__file__))
+ env = Environment(loader=FileSystemLoader(template_dir), autoescape=True)
+ template = env.get_template(template_file)
+ for error_code, error in ERROR_CODES.items():
+ data["error_code"] = error_code
+ data["error"] = error
+ output_filename = f"{error_code}.md" # Customize the filename as needed
+ output_filepath = os.path.join(template_dir, "docs", "admin", "troubleshooting", output_filename)
+ output_content = template.render(data)
+
+ with open(output_filepath, "w", encoding="utf-8") as doc_file:
+ doc_file.write(output_content)
+
+
+if __name__ == "__main__":
+ generate_files_from_template("error_code_template.j2")
diff --git a/invoke.example.yml b/invoke.example.yml
index d23d7d95f..1a043e634 100644
--- a/invoke.example.yml
+++ b/invoke.example.yml
@@ -1,7 +1,7 @@
---
nautobot_golden_config:
nautobot_ver: "2.4.2"
- python_ver: "3.12"
+ python_ver: "3.11"
# local: false
# compose_dir: "/full/path/to/nautobot-app-golden-config/development"
diff --git a/invoke.mysql.yml b/invoke.mysql.yml
index d995ab191..46269e95a 100644
--- a/invoke.mysql.yml
+++ b/invoke.mysql.yml
@@ -3,7 +3,7 @@ nautobot_golden_config:
project_name: "nautobot-golden-config"
nautobot_ver: "2.4.2"
local: false
- python_ver: "3.12"
+ python_ver: "3.11"
compose_dir: "development"
compose_files:
- "docker-compose.base.yml"
diff --git a/mkdocs.yml b/mkdocs.yml
index 48d36852a..7e9dd5120 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -46,12 +46,6 @@ theme:
extra_css:
- "assets/extra.css"
-# needed for RTD version flyout menu
-# jquery is not (yet) injected by RTD automatically and it might be dropped
-# as a dependency in the future
-extra_javascript:
- - "https://code.jquery.com/jquery-3.6.0.min.js"
-
extra:
generator: false
ntc_sponsor: true
@@ -87,7 +81,7 @@ markdown_extensions:
custom_fences:
- name: "mermaid"
class: "mermaid"
- format: !!python/name:pymdownx.superfences.fence_code_format
+ format: "!!python/name:pymdownx.superfences.fence_code_format"
- "footnotes"
plugins:
- "search"
@@ -101,6 +95,12 @@ plugins:
watch:
- "README.md"
+validation:
+ omitted_files: "warn"
+ absolute_links: "warn"
+ unrecognized_links: "warn"
+ anchors: "warn"
+
nav:
- Overview: "index.md"
- User Guide:
@@ -131,6 +131,7 @@ nav:
- Compatibility Matrix: "admin/compatibility_matrix.md"
- Troubleshooting:
- "admin/troubleshooting/index.md"
+ - E3XXX: "admin/troubleshooting/E3XXX.md"
- E3001: "admin/troubleshooting/E3001.md"
- E3002: "admin/troubleshooting/E3002.md"
- E3003: "admin/troubleshooting/E3003.md"
@@ -168,6 +169,8 @@ nav:
- E3035: "admin/troubleshooting/E3035.md"
- E3036: "admin/troubleshooting/E3036.md"
- E3037: "admin/troubleshooting/E3037.md"
+ - E3038: "admin/troubleshooting/E3038.md"
+ - E3039: "admin/troubleshooting/E3039.md"
- Migrating To v2: "admin/migrating_to_v2.md"
- Release Notes:
- "admin/release_notes/index.md"
diff --git a/nautobot_golden_config/api/urls.py b/nautobot_golden_config/api/urls.py
index 884b1d6b6..f710b66f9 100644
--- a/nautobot_golden_config/api/urls.py
+++ b/nautobot_golden_config/api/urls.py
@@ -6,7 +6,6 @@
from nautobot_golden_config.api import views
router = OrderedDefaultRouter()
-# add the name of your api endpoint, usually hyphenated model name in plural, e.g. "my-model-classes"
router.APIRootView = views.GoldenConfigRootView
router.register("compliance-feature", views.ComplianceFeatureViewSet)
router.register("compliance-rule", views.ComplianceRuleViewSet)
diff --git a/nautobot_golden_config/api/views.py b/nautobot_golden_config/api/views.py
index d7c2c1a21..2eb3a4301 100644
--- a/nautobot_golden_config/api/views.py
+++ b/nautobot_golden_config/api/views.py
@@ -356,16 +356,16 @@ def get(self, request, *args, **kwargs): # pylint: disable=too-many-locals, too
)
intended_config = self._render_config_nornir_serial(
device=device,
- jinja_template=filesystem_path.name,
- jinja_root_path=filesystem_path.parent,
+ jinja_template=str(filesystem_path.relative_to(git_repo_path)),
+ jinja_root_path=git_repo_path,
graphql_data=graphql_data,
)
else:
filesystem_path = self._get_jinja_template_path(settings, device, git_repository)
intended_config = self._render_config_nornir_serial(
device=device,
- jinja_template=filesystem_path.name,
- jinja_root_path=filesystem_path.parent,
+ jinja_template=str(filesystem_path.relative_to(git_repository.filesystem_path)),
+ jinja_root_path=git_repository.filesystem_path,
graphql_data=graphql_data,
)
except Exception as exc:
diff --git a/nautobot_golden_config/datasources.py b/nautobot_golden_config/datasources.py
index d4e04275b..8d53f3c45 100644
--- a/nautobot_golden_config/datasources.py
+++ b/nautobot_golden_config/datasources.py
@@ -8,11 +8,17 @@
from nautobot.extras.choices import LogLevelChoices
from nautobot.extras.registry import DatasourceContent
-from nautobot_golden_config.exceptions import MissingReference
-from nautobot_golden_config.models import ComplianceFeature, ComplianceRule, ConfigRemove, ConfigReplace
-from nautobot_golden_config.utilities.helper import get_golden_config_settings
+from nautobot_golden_config.exceptions import MissingReference, MultipleReferences
+from nautobot_golden_config.models import (
+ ComplianceFeature,
+ ComplianceRule,
+ ConfigRemove,
+ ConfigReplace,
+ RemediationSetting,
+)
+from nautobot_golden_config.utilities.helper import get_error_message
-settings = get_golden_config_settings()
+# settings = get_golden_config_settings()
def refresh_git_jinja(repository_record, job_result, delete=False): # pylint: disable=unused-argument
@@ -48,6 +54,7 @@ def refresh_git_gc_properties(repository_record, job_result, delete=False): # p
│ ├── compliance_rules
│ ├── config_removes
│ ├── config_replaces
+ │ ├── remediation_settings
"""
if "nautobot_golden_config.pluginproperties" not in repository_record.provided_contents:
@@ -81,23 +88,23 @@ def refresh_git_gc_properties(repository_record, job_result, delete=False): # p
"id_keys": (
("feature", "feature_slug"),
("platform", "platform_network_driver"),
+ ("platform", "platform_name"),
),
},
{
"directory_name": "config_removes",
"class": ConfigRemove,
- "id_keys": (
- ("name", "name"),
- ("platform", "platform_network_driver"),
- ),
+ "id_keys": (("name", "name"), ("platform", "platform_network_driver"), ("platform", "platform_name")),
},
{
"directory_name": "config_replaces",
"class": ConfigReplace,
- "id_keys": (
- ("name", "name"),
- ("platform", "platform_network_driver"),
- ),
+ "id_keys": (("name", "name"), ("platform", "platform_network_driver"), ("platform", "platform_name")),
+ },
+ {
+ "directory_name": "remediation_settings",
+ "class": RemediationSetting,
+ "id_keys": (("platform", "platform_name"),),
},
)
@@ -117,6 +124,8 @@ def get_id_kwargs(gc_config_item_dict, id_keys, job_result):
if "platform_slug" in gc_config_item_dict.keys():
gc_config_item_dict["platform_network_driver"] = gc_config_item_dict.pop("platform_slug")
+ if "platform_name" in gc_config_item_dict.keys() and "platform_network_driver" in gc_config_item_dict.keys():
+ gc_config_item_dict.pop("platform_network_driver")
id_kwargs = {}
for id_key in id_keys:
@@ -127,19 +136,26 @@ def get_id_kwargs(gc_config_item_dict, id_keys, job_result):
if actual_attr_name in fk_class_mapping:
if "network_driver" in yaml_attr_name:
field_name = "network_driver"
+ elif "platform_name" in yaml_attr_name:
+ field_name = "name"
else:
_, field_name = yaml_attr_name.split("_")
- kwargs = {field_name: gc_config_item_dict[yaml_attr_name]}
+ if not gc_config_item_dict.get(yaml_attr_name):
+ continue
+ kwargs = {field_name: gc_config_item_dict.get(yaml_attr_name, "")}
try:
id_kwargs[actual_attr_name] = fk_class_mapping[actual_attr_name].objects.get(**kwargs)
+ except fk_class_mapping[actual_attr_name].MultipleObjectsReturned:
+ error_msg = get_error_message(
+ "E3032", yaml_attr_name=yaml_attr_name, yaml_attr_value=gc_config_item_dict[yaml_attr_name]
+ )
+ job_result.log(error_msg, level_choice=LogLevelChoices.LOG_WARNING)
+ raise MultipleReferences from fk_class_mapping[actual_attr_name].MultipleObjectsReturned
except fk_class_mapping[actual_attr_name].DoesNotExist:
- job_result.log(
- (
- f"Reference to {yaml_attr_name}: {gc_config_item_dict[yaml_attr_name]}",
- "is not available.",
- ),
- level_choice=LogLevelChoices.LOG_WARNING,
+ error_msg = get_error_message(
+ "E3033", yaml_attr_name=yaml_attr_name, yaml_attr_value=gc_config_item_dict[yaml_attr_name]
)
+ job_result.log(error_msg, level_choice=LogLevelChoices.LOG_WARNING)
raise MissingReference from fk_class_mapping[actual_attr_name].DoesNotExist
else:
id_kwargs[actual_attr_name] = gc_config_item_dict[yaml_attr_name]
@@ -217,42 +233,42 @@ def update_git_gc_properties(golden_config_path, job_result, gc_config_item): #
datasource_contents = []
-if settings.intended_enabled or settings.compliance_enabled:
- datasource_contents.append(
- (
- "extras.gitrepository",
- DatasourceContent(
- name="intended configs",
- content_identifier="nautobot_golden_config.intendedconfigs",
- icon="mdi-file-document-outline",
- callback=refresh_git_intended,
- ),
- )
+# if settings.intended_enabled or settings.compliance_enabled:
+datasource_contents.append(
+ (
+ "extras.gitrepository",
+ DatasourceContent(
+ name="intended configs",
+ content_identifier="nautobot_golden_config.intendedconfigs",
+ icon="mdi-file-document-outline",
+ callback=refresh_git_intended,
+ ),
)
-if settings.intended_enabled:
- datasource_contents.append(
- (
- "extras.gitrepository",
- DatasourceContent(
- name="jinja templates",
- content_identifier="nautobot_golden_config.jinjatemplate",
- icon="mdi-text-box-check-outline",
- callback=refresh_git_jinja,
- ),
- )
+)
+# if settings.intended_enabled:
+datasource_contents.append(
+ (
+ "extras.gitrepository",
+ DatasourceContent(
+ name="jinja templates",
+ content_identifier="nautobot_golden_config.jinjatemplate",
+ icon="mdi-text-box-check-outline",
+ callback=refresh_git_jinja,
+ ),
)
-if settings.backup_enabled or settings.compliance_enabled:
- datasource_contents.append(
- (
- "extras.gitrepository",
- DatasourceContent(
- name="backup configs",
- content_identifier="nautobot_golden_config.backupconfigs",
- icon="mdi-file-code",
- callback=refresh_git_backup,
- ),
- )
+)
+# if settings.backup_enabled or settings.compliance_enabled:
+datasource_contents.append(
+ (
+ "extras.gitrepository",
+ DatasourceContent(
+ name="backup configs",
+ content_identifier="nautobot_golden_config.backupconfigs",
+ icon="mdi-file-code",
+ callback=refresh_git_backup,
+ ),
)
+)
datasource_contents.append(
(
diff --git a/nautobot_golden_config/details.py b/nautobot_golden_config/details.py
index 073d197b1..b013baf15 100644
--- a/nautobot_golden_config/details.py
+++ b/nautobot_golden_config/details.py
@@ -1,8 +1,25 @@
"""Object Detail components for golden config."""
+from django.utils.html import format_html
from nautobot.apps import ui
from nautobot.core.templatetags import helpers
+
+def get_model_instances(m2m_object):
+ """Return a unordered bullet list of model instances from a m2m object."""
+ if m2m_object.count() == 0:
+ return None
+ ul_elements = []
+ for obj in m2m_object.all():
+ ul_elements.append(f"
{helpers.hyperlinked_object(obj)}
")
+ return format_html(f"
{''.join(ul_elements)}
")
+
+
+def hyperlinked_field_with_icon(url, title, icon_class="mdi mdi-text-box-check-outline"):
+ """Render a redirect link with custom icon."""
+    return format_html('<a href="{}"><i class="{}" title="{}"></i></a>', url, icon_class, title)
+
+
compliance_feature = ui.ObjectDetailContent(
panels=(
ui.ObjectFieldsPanel(
@@ -65,3 +82,169 @@
),
),
)
+
+golden_config_setting = ui.ObjectDetailContent(
+ panels=(
+ ui.ObjectFieldsPanel(
+ label="General Settings",
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=100,
+ fields=(
+ "weight",
+ "description",
+ "backup_enabled",
+ "intended_enabled",
+ "compliance_enabled",
+ "plan_enabled",
+ "deploy_enabled",
+ ),
+ ),
+ ui.KeyValueTablePanel(
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=100,
+ context_data_key="dg_data",
+ label="Device Scope Details",
+ value_transforms={
+ "Filter Query Logic": [lambda v: helpers.render_json(v, pretty_print=True)],
+ "Scope of Devices": [lambda v: helpers.hyperlinked_field(v.members.count(), v.get_group_members_url())],
+ },
+ ),
+ ui.ObjectFieldsPanel(
+ label="Backup Configuration",
+ section=ui.SectionChoices.RIGHT_HALF,
+ weight=100,
+ fields=("backup_repository", "backup_path_template", "backup_test_connectivity"),
+ value_transforms={
+ "backup_path_template": [helpers.pre_tag],
+ },
+ ),
+ ui.ObjectFieldsPanel(
+ label="Intended Configuration",
+ section=ui.SectionChoices.RIGHT_HALF,
+ weight=200,
+ fields=("intended_repository", "intended_path_template"),
+ value_transforms={
+ "intended_path_template": [helpers.pre_tag],
+ },
+ ),
+ ui.ObjectFieldsPanel(
+ label="Templates Configuration",
+ section=ui.SectionChoices.RIGHT_HALF,
+ weight=300,
+ fields=("jinja_repository", "jinja_path_template", "sot_agg_query"),
+ value_transforms={
+ "jinja_path_template": [helpers.pre_tag],
+ },
+ ),
+ )
+)
+
+golden_config = ui.ObjectDetailContent(
+ panels=(
+ ui.KeyValueTablePanel(
+ section=ui.SectionChoices.RIGHT_HALF,
+ weight=100,
+ label="Configuration Links",
+ context_data_key="device_object",
+ value_transforms={
+ "Backup Config": [
+ lambda v: hyperlinked_field_with_icon(v, title="Backup Configuration"),
+ ],
+ "Intended Config": [
+ lambda v: hyperlinked_field_with_icon(v, title="Intended Configuration"),
+ ],
+ "Compliance Config": [
+ lambda v: hyperlinked_field_with_icon(v, title="Compliance"),
+ ],
+ },
+ ),
+ ui.ObjectFieldsPanel(
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=100,
+ fields=("device",),
+ ),
+ ui.ObjectFieldsPanel(
+ label="Backup Configuration",
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=200,
+ fields=("backup_last_attempt_date", "backup_last_success_date"),
+ ),
+ ui.ObjectFieldsPanel(
+ label="Intended Configuration",
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=300,
+ fields=("intended_last_attempt_date", "intended_last_success_date"),
+ ),
+ ui.ObjectFieldsPanel(
+ label="Compliance Details",
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=400,
+ fields=("compliance_last_attempt_date", "compliance_last_success_date"),
+ ),
+ ),
+)
+
+config_plan = ui.ObjectDetailContent(
+ panels=(
+ ui.ObjectFieldsPanel(
+ section=ui.SectionChoices.LEFT_HALF,
+ label="Config Plan Details",
+ weight=100,
+ fields=(
+ "device",
+ "status",
+ "created",
+ "plan_type",
+ "feature",
+ "plan_result",
+ ),
+ value_transforms={
+ "plan_result": [lambda v: helpers.hyperlinked_field(getattr(v, "status", v))],
+ "feature": [get_model_instances, helpers.placeholder],
+ },
+ ),
+ ui.ObjectFieldsPanel(
+ section=ui.SectionChoices.RIGHT_HALF,
+ label="Config Deployment Details",
+ weight=100,
+ fields=(
+ "change_control_id",
+ "change_control_url",
+ "deploy_result",
+ ),
+ value_transforms={
+ "deploy_result": [lambda v: helpers.hyperlinked_field(getattr(v, "status", v))],
+ },
+ ),
+ ui.Panel(
+ label="Postprocessed Config Set",
+ weight=100,
+ section=ui.SectionChoices.RIGHT_HALF,
+ body_content_template_path="nautobot_golden_config/configplan_postprocessing.html",
+ ),
+ ui.ObjectTextPanel(
+ weight=200,
+ label="Config Set",
+ section=ui.SectionChoices.FULL_WIDTH,
+ object_field="config_set",
+ render_as=ui.TextPanel.RenderOptions.CODE,
+ render_placeholder=True,
+ ),
+ ),
+)
+
+
+config_compliance = ui.ObjectDetailContent(
+ panels=(
+ ui.ObjectFieldsPanel(
+ section=ui.SectionChoices.LEFT_HALF,
+ weight=100,
+ fields=("device", "rule", "compliance"),
+ ),
+ ui.ObjectFieldsPanel(
+ section=ui.SectionChoices.RIGHT_HALF,
+ weight=100,
+ fields=("actual", "intended", "remediation", "missing", "extra"),
+ ),
+ ),
+)
diff --git a/nautobot_golden_config/error_codes.py b/nautobot_golden_config/error_codes.py
new file mode 100644
index 000000000..14e8c6060
--- /dev/null
+++ b/nautobot_golden_config/error_codes.py
@@ -0,0 +1,26 @@
+"""Error codes used in Stacktrace and generated docs."""
+
+from collections import namedtuple
+
+ErrorCode = namedtuple("ErrorCode", ["troubleshooting", "description", "error_message", "recommendation"])
+
+ERROR_CODES = {
+ "E3XXX": ErrorCode(
+ troubleshooting="Find the error code in the traceback, and search for it in the codebase.",
+ description="This means a code snippet was calling get_error_code() with an error code that is not registered.",
+ error_message="Un-Registered Error Code used.",
+ recommendation="Add the error code to the `error_codes.py` file.",
+ ),
+ "E3032": ErrorCode(
+ troubleshooting="Check the YAML file for the `platform_slug` or `platform_network_driver` key. If it is not unique, then you need to use the `platform_name` key instead.",
+ description="Syncing Golden Config properties using Datasource feature, but using non-unique key..",
+ error_message="Reference to {yaml_attr_name}: {yaml_attr_value}, is not unique. Please use platform_name key instead.",
+ recommendation="Migrate the YAML file keys from `platform_slug` or `platform_network_driver` to `platform_name`.",
+ ),
+ "E3033": ErrorCode(
+ troubleshooting="The platform key used in the YAML file cannot be found.",
+ description="Searching for the platform key in the YAML file and it cannot be found in the database.",
+ error_message="Reference to {yaml_attr_name}: {yaml_attr_value} is not available.",
+ recommendation="Check the YAML file for misspellings or incorrect values, if using `platform_slug` or `platform_network_driver`, then migrate to `platform_name` key instead.",
+ ),
+}
diff --git a/nautobot_golden_config/exceptions.py b/nautobot_golden_config/exceptions.py
index 25983c229..18c1fa62c 100644
--- a/nautobot_golden_config/exceptions.py
+++ b/nautobot_golden_config/exceptions.py
@@ -9,6 +9,10 @@ class MissingReference(GoldenConfigError):
"""Custom error to signal a missing FK reference when looking up."""
+class MultipleReferences(GoldenConfigError):
+ """Custom error to signal a get() returned more than 1 FK reference when looking up."""
+
+
class RenderConfigToPushError(GoldenConfigError):
"""Exception related to Render Configuration Postprocessing operations."""
@@ -23,3 +27,7 @@ class IntendedGenerationFailure(GoldenConfigError):
class ComplianceFailure(GoldenConfigError):
"""Custom error for when there's a failure in Compliance Job."""
+
+
+class ConfigPlanDeploymentFailure(GoldenConfigError):
+ """Custom error for when there's a failure in Config Plan Deployment Job."""
diff --git a/nautobot_golden_config/forms.py b/nautobot_golden_config/forms.py
index e6c98e98f..a5ab9c2c6 100644
--- a/nautobot_golden_config/forms.py
+++ b/nautobot_golden_config/forms.py
@@ -345,13 +345,30 @@ class Meta:
class GoldenConfigSettingForm(NautobotModelForm):
- """Filter Form for GoldenConfigSettingForm instances."""
+ """Form for GoldenConfigSetting instances."""
slug = forms.SlugField()
- dynamic_group = django_forms.ModelChoiceField(queryset=DynamicGroup.objects.all())
+ # Should filter model and this by dynamic groups of content type devices
+ dynamic_group = forms.DynamicModelChoiceField(queryset=DynamicGroup.objects.all())
+ backup_repository = forms.DynamicModelChoiceField(
+ queryset=GitRepository.objects.all(),
+ query_params={"provided_contents": "nautobot_golden_config.backupconfigs"},
+ required=False,
+ )
+ intended_repository = forms.DynamicModelChoiceField(
+ queryset=GitRepository.objects.all(),
+ query_params={"provided_contents": "nautobot_golden_config.intendedconfigs"},
+ required=False,
+ )
+ jinja_repository = forms.DynamicModelChoiceField(
+ queryset=GitRepository.objects.all(),
+ query_params={"provided_contents": "nautobot_golden_config.jinjatemplate"},
+ required=False,
+ )
+ sot_agg_query = forms.DynamicModelChoiceField(queryset=GraphQLQuery.objects.all(), required=False)
class Meta:
- """Filter Form Meta Data for GoldenConfigSettingForm instances."""
+ """Form Meta Data for GoldenConfigSetting instances."""
model = models.GoldenConfigSetting
fields = "__all__"
diff --git a/nautobot_golden_config/jobs.py b/nautobot_golden_config/jobs.py
index dcb1973fe..97978c095 100644
--- a/nautobot_golden_config/jobs.py
+++ b/nautobot_golden_config/jobs.py
@@ -20,7 +20,7 @@
StringVar,
TextVar,
)
-from nautobot.extras.models import DynamicGroup, Role, Status, Tag
+from nautobot.extras.models import Role, Status, Tag
from nautobot.tenancy.models import Tenant, TenantGroup
from nautobot_plugin_nornir.plugins.inventory.nautobot_orm import NautobotORMInventory
from nornir.core.plugins.inventory import InventoryPluginRegister
@@ -28,7 +28,7 @@
from nautobot_golden_config.choices import ConfigPlanTypeChoice
from nautobot_golden_config.exceptions import BackupFailure, ComplianceFailure, IntendedGenerationFailure
-from nautobot_golden_config.models import ComplianceFeature, ConfigPlan, GoldenConfig
+from nautobot_golden_config.models import ComplianceFeature, ConfigPlan, GoldenConfig, GoldenConfigSetting
from nautobot_golden_config.nornir_plays.config_backup import config_backup
from nautobot_golden_config.nornir_plays.config_compliance import config_compliance
from nautobot_golden_config.nornir_plays.config_deployment import config_deployment
@@ -38,10 +38,12 @@
generate_config_set_from_compliance_feature,
generate_config_set_from_manual,
)
+from nautobot_golden_config.utilities.constant import JOB_FUNCTION_MAP
from nautobot_golden_config.utilities.git import GitRepo
from nautobot_golden_config.utilities.helper import (
get_device_to_settings_map,
- get_golden_config_settings,
+ # get_golden_config_settings,
+ get_inscope_settings_from_device_qs,
get_job_filter,
update_dynamic_groups_cache,
verify_config_plan_eligibility,
@@ -56,37 +58,53 @@
def get_repo_types_for_job(job_name):
"""Logic to determine which repo_types are needed based on job + plugin settings."""
repo_types = []
- settings = get_golden_config_settings()
-
- if settings.backup_enabled and job_name == "nautobot_golden_config.jobs.BackupJob":
+ if job_name == "backup":
repo_types.append("backup_repository")
- if settings.intended_enabled and job_name == "nautobot_golden_config.jobs.IntendedJob":
+ if job_name == "intended":
repo_types.extend(["jinja_repository", "intended_repository"])
- if settings.compliance_enabled and job_name == "nautobot_golden_config.jobs.ComplianceJob":
+ if job_name == "compliance":
repo_types.extend(["intended_repository", "backup_repository"])
- if "All" in job_name:
+ if "all" in job_name.lower():
repo_types.extend(["backup_repository", "jinja_repository", "intended_repository"])
return repo_types
-def get_refreshed_repos(job_obj, repo_types, data=None):
- """Small wrapper to pull latest branch, and return a GitRepo app specific object."""
- settings = get_golden_config_settings()
- dynamic_groups = DynamicGroup.objects.exclude(golden_config_setting__isnull=True)
- repository_records = set()
- for group in dynamic_groups:
- # Make sure the data(device qs) device exist in the dg first.
- if data.filter(group.generate_query()).exists():
- for repo_type in repo_types:
- repo = getattr(group.golden_config_setting, repo_type, None)
- if repo:
- repository_records.add(repo)
-
- repositories = {}
+# def get_refreshed_repos(job_obj, repository_records, gc_setting):
+# """Small wrapper to pull latest branch, and return a GitRepo app specific object."""
+# repositories = {}
+# for repository_record in repository_records:
+# ensure_git_repository(repository_record, job_obj.logger)
+# # TODO: Should this not point to non-nautobot.core import
+# # We should ask in nautobot core for the `from_url` constructor to be it's own function
+# git_info = get_repo_from_url_to_path_and_from_branch(repository_record)
+# git_repo = GitRepo(
+# repository_record.filesystem_path,
+# git_info.from_url,
+# clone_initially=False,
+# base_url=repository_record.remote_url,
+# nautobot_repo_obj=repository_record,
+# )
+# commit = False
+
+# if (
+# gc_setting.intended_enabled
+# and "nautobot_golden_config.intendedconfigs" in git_repo.nautobot_repo_obj.provided_contents
+# ):
+# commit = True
+# if (
+# gc_setting.backup_enabled
+# and "nautobot_golden_config.backupconfigs" in git_repo.nautobot_repo_obj.provided_contents
+# ):
+# commit = True
+
+# repositories[str(git_repo.nautobot_repo_obj.id)] = {"repo_obj": git_repo, "to_commit": commit}
+# return repositories
+
+
+def get_refreshed_reposv2(repository_records):
+ """Small wrapper to pull latest branch, and return a list of GitRepo app specific objects."""
+ gitrepo_obj = []
for repository_record in repository_records:
- ensure_git_repository(repository_record, job_obj.logger)
- # TODO: Should this not point to non-nautobot.core import
- # We should ask in nautobot core for the `from_url` constructor to be it's own function
git_info = get_repo_from_url_to_path_and_from_branch(repository_record)
git_repo = GitRepo(
repository_record.filesystem_path,
@@ -95,48 +113,66 @@ def get_refreshed_repos(job_obj, repo_types, data=None):
base_url=repository_record.remote_url,
nautobot_repo_obj=repository_record,
)
- commit = False
-
- if (
- settings.intended_enabled
- and "nautobot_golden_config.intendedconfigs" in git_repo.nautobot_repo_obj.provided_contents
- ):
- commit = True
- if (
- settings.backup_enabled
- and "nautobot_golden_config.backupconfigs" in git_repo.nautobot_repo_obj.provided_contents
- ):
- commit = True
-
- repositories[str(git_repo.nautobot_repo_obj.id)] = {"repo_obj": git_repo, "to_commit": commit}
- return repositories
-
-
-def gc_repo_prep(job, data):
- """Prepare Golden Config git repos for work.
-
- Args:
- job (Job): Nautobot Job object with logger and other vars.
- data (dict): Data being passed from Job.
-
- Returns:
- List[GitRepo]: List of GitRepos to be used with Job(s).
- """
- job.logger.debug("Compiling device data for GC job.", extra={"grouping": "Get Job Filter"})
- job.qs = get_job_filter(data)
- job.logger.debug(f"In scope device count for this job: {job.qs.count()}", extra={"grouping": "Get Job Filter"})
- job.logger.debug("Mapping device(s) to GC Settings.", extra={"grouping": "Device to Settings Map"})
- job.device_to_settings_map = get_device_to_settings_map(queryset=job.qs)
- gitrepo_types = list(set(get_repo_types_for_job(job.class_path)))
- job.logger.debug(
- f"Repository types to sync: {', '.join(sorted(gitrepo_types))}",
- extra={"grouping": "GC Repo Syncs"},
- )
- current_repos = get_refreshed_repos(job_obj=job, repo_types=gitrepo_types, data=job.qs)
- return current_repos
-
-
-def gc_repo_push(job, current_repos, commit_message=""):
+ gitrepo_obj.append(git_repo)
+ return gitrepo_obj
+
+
+# def gc_repo_prep(job, inscope_gc_settings):
+# """Prepare Golden Config git repos for work.
+
+# Args:
+# job (Job): Nautobot Job object with logger and other vars.
+# data (dict): Data being passed from Job.
+
+# Returns:
+# List[GitRepo]: List of GitRepos to be used with Job(s).
+# """
+# gitrepo_types = list(set(get_repo_types_for_job(job.class_path)))
+# if inscope_gc_settings:
+# for gcs in inscope_gc_settings:
+# repos = GoldenConfigSetting.objects.get_repos_for_setting(setting=gcs, repo_types=gitrepo_types)
+# job.logger.debug(
+# f"Repositories to sync for GC Setting {gcs.name}: {', '.join(sorted([repo.name for repo in repos]))}",
+# extra={"grouping": "GC Repo Syncs"},
+# )
+# current_repos = get_refreshed_repos(job_obj=job, repository_records=repos, gc_setting=gcs)
+# return current_repos
+# return []
+
+
+# def gc_repo_push(job, current_repos, commit_message=""):
+# """Push any work from worker to git repos in Job.
+
+# Args:
+# job (Job): Nautobot Job with logger and other attributes.
+# current_repos (List[GitRepo]): List of GitRepos to be used with Job(s).
+# """
+# now = make_aware(datetime.now())
+# job.logger.debug(
+# f"Finished the {job.Meta.name} job execution.",
+# extra={"grouping": "GC After Run"},
+# )
+# if current_repos:
+# for _, repo in current_repos.items():
+# if repo["to_commit"]:
+# job.logger.debug(
+# f"Pushing {job.Meta.name} results to repo {repo['repo_obj'].base_url}.",
+# extra={"grouping": "GC Repo Commit and Push"},
+# )
+# if not commit_message:
+# commit_message = f"{job.Meta.name.upper()} JOB {now}"
+# repo["repo_obj"].commit_with_added(commit_message)
+# repo["repo_obj"].push()
+# job.logger.info(
+# f'{repo["repo_obj"].nautobot_repo_obj.name}: the new Git repository hash is "{repo["repo_obj"].head}"',
+# extra={
+# "grouping": "GC Repo Commit and Push",
+# "object": repo["repo_obj"].nautobot_repo_obj,
+# },
+# )
+
+
+def gc_repo_pushv2(job, current_repos, commit_message=""):
"""Push any work from worker to git repos in Job.
Args:
@@ -149,31 +185,55 @@ def gc_repo_push(job, current_repos, commit_message=""):
extra={"grouping": "GC After Run"},
)
if current_repos:
- for _, repo in current_repos.items():
- if repo["to_commit"]:
- job.logger.debug(
- f"Pushing {job.Meta.name} results to repo {repo['repo_obj'].base_url}.",
- extra={"grouping": "GC Repo Commit and Push"},
- )
- if not commit_message:
- commit_message = f"{job.Meta.name.upper()} JOB {now}"
- repo["repo_obj"].commit_with_added(commit_message)
- repo["repo_obj"].push()
- job.logger.info(
- f'{repo["repo_obj"].nautobot_repo_obj.name}: the new Git repository hash is "{repo["repo_obj"].head}"',
- extra={
- "grouping": "GC Repo Commit and Push",
- "object": repo["repo_obj"].nautobot_repo_obj,
- },
- )
+ for repo in current_repos:
+ job.logger.debug(
+ f"Pushing {job.Meta.name} results to repo {repo.base_url}.",
+ extra={"grouping": "GC Repo Commit and Push"},
+ )
+ if not commit_message:
+ commit_message = f"{job.Meta.name.upper()} JOB {now}"
+ repo.commit_with_added(commit_message)
+ repo.push()
+ job.logger.info(
+ f'{repo.nautobot_repo_obj.name}: the new Git repository hash is "{repo.head}"',
+ extra={
+ "grouping": "GC Repo Commit and Push",
+ "object": repo.nautobot_repo_obj,
+ },
+ )
+
+
+# def gc_repos(func):
+# """Decorator used for handle repo syncing, commiting, and pushing."""
+
+# def gc_repo_wrapper(self, *args, **kwargs):
+# """Decorator used for handle repo syncing, commiting, and pushing."""
+# self.qs = get_job_filter(data=kwargs)
+# # self.gc_advanced_filter = GCSettingsDeviceFilterSet(self.qs)
+# self.gc_advanced_filter = get_device_to_settings_map(self.qs, self.name)
+# active_settings = set(list(self.gc_advanced_filter[JOB_FUNCTION_MAP[self.name]][True].values()))
+# current_repos = gc_repo_prep(job=self, inscope_gc_settings=active_settings)
+# # This is where the specific jobs run method runs via this decorator.
+# try:
+# func(self, *args, **kwargs)
+# except Exception as error: # pylint: disable=broad-exception-caught
+# error_msg = f"`E3001:` General Exception handler, original error message ```{error}```"
+# # Raise error only if the job kwarg (checkbox) is selected to do so on the job execution form.
+# if kwargs.get("fail_job_on_task_failure"):
+# raise NornirNautobotException(error_msg) from error
+# finally:
+# gc_repo_push(job=self, current_repos=current_repos, commit_message=kwargs.get("commit_message", ""))
+# return gc_repo_wrapper
-def gc_repos(func):
+
+def gc_job_helper(func):
"""Decorator used for handle repo syncing, commiting, and pushing."""
- def gc_repo_wrapper(self, *args, **kwargs):
- """Decorator used for handle repo syncing, commiting, and pushing."""
- current_repos = gc_repo_prep(job=self, data=kwargs)
+ def gc_job_wrapper(self, *args, **kwargs):
+ """Decorator used for GC job setup, repo syncing, commiting, and pushing."""
+ # self.gc_job_setup(data=kwargs, all_job=False)
+ self.gc_job_setup(data=kwargs)
# This is where the specific jobs run method runs via this decorator.
try:
func(self, *args, **kwargs)
@@ -183,9 +243,13 @@ def gc_repo_wrapper(self, *args, **kwargs):
if kwargs.get("fail_job_on_task_failure"):
raise NornirNautobotException(error_msg) from error
finally:
- gc_repo_push(job=self, current_repos=current_repos, commit_message=kwargs.get("commit_message"))
+ gc_repo_pushv2(
+ job=self,
+ current_repos=get_refreshed_reposv2(self.repos_to_push),
+ commit_message=kwargs.get("commit_message", ""),
+ )
- return gc_repo_wrapper
+ return gc_job_wrapper
class FormEntry: # pylint disable=too-few-public-method
@@ -229,8 +293,62 @@ class GoldenConfigJobMixin(Job): # pylint: disable=abstract-method
def __init__(self, *args, **kwargs):
"""Initialize the job."""
super().__init__(*args, **kwargs)
- self.qs = None
- self.device_to_settings_map = {}
+ self.qs = Device.objects.none()
+ self.task_qs = Device.objects.none()
+ self.gc_advanced_settings_filter = {}
+ self.job_function = ""
+ self.repos_to_push = []
+
+ def gc_job_setup(self, data):
+ """Handles the setup for the Golden Config job."""
+ self.job_function = JOB_FUNCTION_MAP[self.name]
+ self.qs = get_job_filter(data=data)
+ self.gc_advanced_settings_filter = get_device_to_settings_map(self.qs, self.job_function)
+ if self.job_function.lower() == "all":
+ # If the job is "all", we need to set the job_function to each individual job.
+ # If the job is one of the all jobs, we need to loop through each job and run the setup for each.
+ return
+ enabled_qs, disabled_qs = self._get_filtered_queryset(self.job_function)
+ self._log_out_of_scope_devices(disabled_qs)
+ if enabled_qs.count() == 0:
+ self.logger.warning(
+ f"E3039: No devices found with Golden Config settings enabled for the {self.job_function} job."
+ )
+ return
+ inscope_gcs = get_inscope_settings_from_device_qs(enabled_qs)
+ repos_to_sync, self.repos_to_push = GoldenConfigSetting.objects.get_repos_for_settings(
+ inscope_gcs, get_repo_types_for_job(self.job_function)
+ )
+ if repos_to_sync:
+ for repository_record in repos_to_sync:
+ ensure_git_repository(repository_record, self.logger)
+
+ def _log_out_of_scope_devices(self, disabled_devices_qs):
+ """Log devices that are out of scope for the job."""
+ if disabled_devices_qs.count() > 0:
+ for device in disabled_devices_qs:
+ self.logger.warning(
+ f"E3038: Device {device.name} does not have the required settings to run the job. Skipping device.",
+ extra={"object": device},
+ )
+
+ def _get_filtered_queryset(self, job_function):
+ """Helper for gc_advanced_settings_filter to get filtered queryset."""
+ enabled_devs = list(self.gc_advanced_settings_filter[job_function][True].keys())
+ disabled_devs = list(self.gc_advanced_settings_filter[job_function][False].keys())
+ enabled_qs = self.qs.filter(pk__in=enabled_devs)
+ disabled_qs = self.qs.filter(pk__in=disabled_devs)
+
+ self.logger.debug(
+ f"Device(s) with settings enabled for {job_function} job: {enabled_qs.count()}",
+ extra={"grouping": "Get Filtered Queryset"},
+ )
+ self.logger.debug(
+ f"Device(s) with settings disabled for {job_function} job: {disabled_qs.count()}",
+ extra={"grouping": "Get Filtered Queryset"},
+ )
+ self.task_qs = enabled_qs
+ return enabled_qs, disabled_qs
class ComplianceJob(GoldenConfigJobMixin, FormEntry):
@@ -243,11 +361,13 @@ class Meta:
description = "Run configuration compliance on your network infrastructure."
has_sensitive_variables = False
- @gc_repos
+ @gc_job_helper
def run(self, *args, **data): # pylint: disable=unused-argument
"""Run config compliance report script."""
+ if self.task_qs.count() == 0:
+ return
try:
- self.logger.warning("Starting config compliance nornir play.")
+ self.logger.debug("Starting config compliance nornir play.")
config_compliance(self)
except NornirNautobotException as error:
error_msg = str(error)
@@ -265,9 +385,11 @@ class Meta:
description = "Generate the configuration for your intended state."
has_sensitive_variables = False
- @gc_repos
+ @gc_job_helper
def run(self, *args, **data): # pylint: disable=unused-argument
"""Run config generation script."""
+ if self.task_qs.count() == 0:
+ return
try:
self.logger.debug("Building device settings mapping and running intended config nornir play.")
config_intended(self)
@@ -287,9 +409,11 @@ class Meta:
description = "Backup the configurations of your network devices."
has_sensitive_variables = False
- @gc_repos
+ @gc_job_helper
def run(self, *args, **data): # pylint: disable=unused-argument
"""Run config backup process."""
+ if self.task_qs.count() == 0:
+ return
try:
self.logger.debug("Starting config backup nornir play.")
config_backup(self)
@@ -314,18 +438,21 @@ class Meta:
def run(self, *args, **data): # pylint: disable=unused-argument, too-many-branches
"""Run all jobs on a single device."""
- current_repos = gc_repo_prep(job=self, data=data)
failed_jobs = []
error_msg, jobs_list = "", "All"
- settings = get_golden_config_settings()
- for enabled, play in [
- (settings.intended_enabled, config_intended),
- (settings.backup_enabled, config_backup),
- (settings.compliance_enabled, config_compliance),
- ]:
+ self.gc_job_setup(data)
+ gc_setting = GoldenConfigSetting.objects.get_for_device(data["device"])
+ repos_to_sync, self.repos_to_push = GoldenConfigSetting.objects.get_repos_for_settings(
+ gc_setting,
+ get_repo_types_for_job(self.job_function),
+ )
+ if repos_to_sync:
+ for repository_record in repos_to_sync:
+ ensure_git_repository(repository_record, self.logger)
+ for nornir_play in [config_intended, config_backup, config_compliance]:
+ self.task_qs, _ = self._get_filtered_queryset(nornir_play.__name__.split("_")[1])
try:
- if enabled:
- play(self)
+ nornir_play(self)
except BackupFailure:
self.logger.error("Backup failure occurred!")
failed_jobs.append("Backup")
@@ -337,7 +464,11 @@ def run(self, *args, **data): # pylint: disable=unused-argument, too-many-branc
failed_jobs.append("Compliance")
except Exception as error: # pylint: disable=broad-exception-caught
error_msg = f"`E3001:` General Exception handler, original error message ```{error}```"
- gc_repo_push(job=self, current_repos=current_repos, commit_message=data.get("commit_message"))
+ gc_repo_pushv2(
+ job=self,
+ current_repos=get_refreshed_reposv2(self.repos_to_push),
+ commit_message=data.get("commit_message", ""),
+ )
if len(failed_jobs) > 1:
jobs_list = ", ".join(failed_jobs)
elif len(failed_jobs) == 1:
@@ -364,18 +495,21 @@ class Meta:
def run(self, *args, **data): # pylint: disable=unused-argument, too-many-branches
"""Run all jobs on multiple devices."""
- current_repos = gc_repo_prep(job=self, data=data)
+ self.gc_job_setup(data)
failed_jobs = []
error_msg, jobs_list = "", "All"
- settings = get_golden_config_settings()
- for enabled, play in [
- (settings.intended_enabled, config_intended),
- (settings.backup_enabled, config_backup),
- (settings.compliance_enabled, config_compliance),
- ]:
+ inscope_gcs = get_inscope_settings_from_device_qs(self.qs)
+ repos_to_sync, self.repos_to_push = GoldenConfigSetting.objects.get_repos_for_settings(
+ inscope_gcs,
+ get_repo_types_for_job(self.job_function),
+ )
+ if repos_to_sync:
+ for repository_record in repos_to_sync:
+ ensure_git_repository(repository_record, self.logger)
+ for nornir_play in [config_intended, config_backup, config_compliance]:
+ self.task_qs, _ = self._get_filtered_queryset(nornir_play.__name__.split("_")[1])
try:
- if enabled:
- play(self)
+ nornir_play(self)
except BackupFailure:
self.logger.error("Backup failure occurred!")
failed_jobs.append("Backup")
@@ -387,7 +521,11 @@ def run(self, *args, **data): # pylint: disable=unused-argument, too-many-branc
failed_jobs.append("Compliance")
except Exception as error: # pylint: disable=broad-exception-caught
error_msg = f"`E3001:` General Exception handler, original error message ```{error}```"
- gc_repo_push(job=self, current_repos=current_repos, commit_message=data.get("commit_message"))
+ gc_repo_pushv2(
+ job=self,
+ current_repos=get_refreshed_reposv2(self.repos_to_push),
+ commit_message=data.get("commit_message", ""),
+ )
if len(failed_jobs) > 1:
jobs_list = ", ".join(failed_jobs)
elif len(failed_jobs) == 1:
@@ -518,7 +656,7 @@ def _generate_config_plan_from_manual(self):
def run(self, **data):
"""Run config plan generation process."""
self.logger.debug("Starting config plan generation job.")
- settings = get_golden_config_settings()
+ settings = None
self._validate_inputs(data)
try:
@@ -549,6 +687,7 @@ class DeployConfigPlans(Job):
"""Job to deploy config plans."""
config_plan = MultiObjectVar(model=ConfigPlan, required=True)
+ fail_job_on_task_failure = BooleanVar(description="If any tasks for any device fails, fail the entire job result.")
debug = BooleanVar(description="Enable for more verbose debug logging")
class Meta:
@@ -569,13 +708,19 @@ def run(self, **data): # pylint: disable=arguments-differ
update_dynamic_groups_cache()
self.logger.debug("Starting config plan deployment job.")
self.data = data
- settings = get_golden_config_settings()
+ settings = None
# Verify deployment eligibility for each config plan
for config_plan in self.data["config_plan"]:
verify_deployment_eligibility(self.logger, config_plan, settings)
- config_deployment(self)
+ try:
+ config_deployment(self)
+ except Exception as error: # pylint: disable=broad-exception-caught
+ error_msg = f"`E3001:` General Exception handler, original error message ```{error}```"
+ self.logger.error(error_msg)
+ if data.get("fail_job_on_task_failure"):
+ raise NornirNautobotException(error_msg) from error
class DeployConfigPlanJobButtonReceiver(JobButtonReceiver):
diff --git a/nautobot_golden_config/migrations/0029_alter_configplan_unique_together.py b/nautobot_golden_config/migrations/0029_alter_configplan_unique_together.py
index b4ba6672e..89cbbfe7e 100644
--- a/nautobot_golden_config/migrations/0029_alter_configplan_unique_together.py
+++ b/nautobot_golden_config/migrations/0029_alter_configplan_unique_together.py
@@ -24,7 +24,16 @@ def ensure_config_plan_created_timestamps_are_unique(apps, schema_editor):
for duplicate_record in duplicate_records:
duplicate_record.pop("count")
for record in ConfigPlan.objects.filter(**duplicate_record):
- record.created += timedelta(milliseconds=secrets.randbelow(1000))
+ new_time = record.created + timedelta(milliseconds=secrets.randbelow(1000))
+
+ while (
+                ConfigPlan.objects.filter(plan_type=record.plan_type, device=record.device, created=new_time).count()
+ > 0
+ ):
+ # Make sure there are no other lines conflicting with the new time
+ new_time = record.created + timedelta(milliseconds=secrets.randbelow(1000))
+
+ record.created = new_time
record.save()
diff --git a/nautobot_golden_config/models.py b/nautobot_golden_config/models.py
index 456b4234f..478c66bd4 100644
--- a/nautobot_golden_config/models.py
+++ b/nautobot_golden_config/models.py
@@ -9,7 +9,8 @@
from django.db import models
from django.db.models.manager import BaseManager
from django.utils.module_loading import import_string
-from hier_config import Host as HierConfigHost
+from hier_config import WorkflowRemediation, get_hconfig
+from hier_config.utils import hconfig_v2_os_v3_platform_mapper, load_hconfig_v2_options
from nautobot.apps.models import RestrictedQuerySet
from nautobot.apps.utils import render_jinja2
from nautobot.core.models.generics import PrimaryModel
@@ -28,7 +29,6 @@
ENABLE_DEPLOY,
ENABLE_INTENDED,
ENABLE_PLAN,
- ENABLE_SOTAGG,
PLUGIN_CFG,
)
@@ -184,33 +184,62 @@ def _verify_get_custom_compliance_data(compliance_details):
def _get_hierconfig_remediation(obj):
- """Returns the remediating config."""
- hierconfig_os = obj.device.platform.network_driver_mappings["hier_config"]
+ """
+ Generate the remediation configuration for a device using HierConfig.
+
+ This function determines the remediating configuration required to bring a device's actual configuration
+ in line with its intended configuration, using the HierConfig library. It performs the following steps:
+
+ 1. Retrieves the HierConfig OS type for the device's platform from the device's network driver mappings.
+ 2. Validates that the platform is supported by HierConfig.
+ 3. Fetches the RemediationSetting object for the platform associated with the compliance rule.
+ 4. Loads any remediation options defined for the platform into the HierConfig OS object.
+ 5. Instantiates HierConfig objects for both the actual and intended configurations.
+ 6. Uses WorkflowRemediation to compute the remediation configuration needed.
+ 7. Returns the filtered remediation configuration as text.
+
+ Raises:
+ ValidationError: If the platform is not supported or remediation settings are missing.
+ Exception: If HierConfig cannot be instantiated due to device, platform, or option issues.
+
+ Args:
+ obj: The ConfigCompliance instance containing device, rule, actual, and intended configuration data.
+
+ Returns:
+ str: The remediation configuration as a string.
+ """
+ hierconfig_os = obj.device.platform.network_driver_mappings.get("hier_config")
+
if not hierconfig_os:
- raise ValidationError(f"platform {obj.network_driver} is not supported by hierconfig.")
+ raise ValidationError(f"platform {obj.device.platform.name} is not supported by hierconfig.")
try:
remediation_setting_obj = RemediationSetting.objects.get(platform=obj.rule.platform)
except Exception as err: # pylint: disable=broad-except:
- raise ValidationError(f"Platform {obj.network_driver} has no Remediation Settings defined.") from err
+ raise ValidationError(f"Platform {obj.device.platform.name} has no Remediation Settings defined.") from err
remediation_options = remediation_setting_obj.remediation_options
try:
- hc_kwargs = {"hostname": obj.device.name, "os": hierconfig_os}
+ hierconfig_os = hconfig_v2_os_v3_platform_mapper(hierconfig_os)
+
if remediation_options:
- hc_kwargs.update(hconfig_options=remediation_options)
- host = HierConfigHost(**hc_kwargs)
+ load_hconfig_v2_options(remediation_options, hierconfig_os)
+
+ hierconfig_running_config = get_hconfig(hierconfig_os, obj.actual)
+ hierconfig_intended_config = get_hconfig(hierconfig_os, obj.intended)
+ hierconfig_wfr = WorkflowRemediation(
+ hierconfig_running_config,
+ hierconfig_intended_config,
+ )
except Exception as err: # pylint: disable=broad-except:
raise Exception( # pylint: disable=broad-exception-raised
f"Cannot instantiate HierConfig on {obj.device.name}, check Device, Platform and Hier Options."
) from err
- host.load_generated_config(obj.intended)
- host.load_running_config(obj.actual)
- host.remediation_config()
- remediation_config = host.remediation_config_filtered_text(include_tags={}, exclude_tags={})
+ hierconfig_wfr.remediation_config # pylint: disable=pointless-statement
+ remediation_config = hierconfig_wfr.remediation_config_filtered_text(include_tags={}, exclude_tags={})
return remediation_config
@@ -228,7 +257,7 @@ def _get_hierconfig_remediation(obj):
try:
FUNC_MAPPER[custom_type] = import_string(PLUGIN_CFG[custom_function])
except Exception as error: # pylint: disable=broad-except
- msg = (
+ msg = ( # pylint: disable=invalid-name
"There was an issue attempting to import the custom function of"
f"{PLUGIN_CFG[custom_function]}, this is expected with a local configuration issue "
"and not related to the Golden Configuration App, please contact your system admin for further details"
@@ -523,6 +552,29 @@ def get_for_device(self, device):
return dynamic_group.order_by("-golden_config_setting__weight").first().golden_config_setting
return None
+ def get_repos_for_settings(self, gcs_queryset, job_types):
+ """Return all enabled repos for all settings in a restricted queryset."""
+ repos_to_sync, repos_to_push = [], []
+ if isinstance(gcs_queryset, GoldenConfigSetting):
+ gcs_queryset = [gcs_queryset]
+ for setting in gcs_queryset:
+ for job_type in job_types:
+ if job_type == "backup_repository":
+ if setting.backup_enabled and setting.backup_repository:
+ repos_to_sync.append(setting.backup_repository)
+ repos_to_push.append(setting.backup_repository)
+ if not setting.backup_enabled and setting.backup_repository:
+ repos_to_sync.append(setting.backup_repository)
+ if job_type == "intended_repository":
+ if setting.intended_enabled and setting.intended_repository:
+ repos_to_sync.append(setting.intended_repository)
+ repos_to_push.append(setting.intended_repository)
+ if setting.jinja_repository:
+ repos_to_sync.append(setting.jinja_repository)
+ if not setting.intended_enabled and setting.intended_repository:
+ repos_to_sync.append(setting.intended_repository)
+ return list(set(repos_to_sync)), list(set(repos_to_push))
+
@extras_features(
"graphql",
@@ -597,6 +649,7 @@ class GoldenConfigSetting(PrimaryModel): # pylint: disable=too-many-ancestors
sot_agg_query = models.ForeignKey(
to="extras.GraphQLQuery",
on_delete=models.PROTECT,
+ verbose_name="GraphQL Query",
null=True,
blank=True,
related_name="sot_aggregation",
@@ -625,6 +678,15 @@ class GoldenConfigSetting(PrimaryModel): # pylint: disable=too-many-ancestors
objects = GoldenConfigSettingManager()
+ clone_fields = [
+ "weight",
+ "backup_path_template",
+ "backup_test_connectivity",
+ "intended_path_template",
+ "jinja_path_template",
+ "sot_agg_query",
+ ]
+
def __str__(self):
"""Return a simple string if model is called."""
return f"Golden Config Setting - {self.name}"
@@ -644,8 +706,12 @@ def clean(self):
"""Validate the scope and GraphQL query."""
super().clean()
- if ENABLE_SOTAGG and not self.sot_agg_query:
- raise ValidationError("A GraphQL query must be defined when `ENABLE_SOTAGG` is True")
+ if self.intended_enabled and (
+ not self.jinja_repository or not self.sot_agg_query or not self.jinja_path_template
+ ):
+ raise ValidationError(
+                "When Intended is enabled, you must define a `Sot agg query`, `Jinja repository` and `Jinja Template Path`."
+ )
if self.sot_agg_query:
LOGGER.debug("GraphQL - test query start with: `%s`", GRAPHQL_STR_START)
diff --git a/nautobot_golden_config/nornir_plays/config_backup.py b/nautobot_golden_config/nornir_plays/config_backup.py
index 4f2ffdd48..6415faa06 100644
--- a/nautobot_golden_config/nornir_plays/config_backup.py
+++ b/nautobot_golden_config/nornir_plays/config_backup.py
@@ -3,6 +3,7 @@
# pylint: disable=relative-beyond-top-level
import logging
import os
+import traceback
from datetime import datetime
from django.utils.timezone import make_aware
@@ -19,7 +20,6 @@
from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig
from nautobot_golden_config.utilities.db_management import close_threaded_db_connections
from nautobot_golden_config.utilities.helper import (
- CustomFilterSettings,
dispatch_params,
render_jinja_template,
)
@@ -100,10 +100,6 @@ def config_backup(job):
"""
now = make_aware(datetime.now())
logger = NornirLogger(job.job_result, job.logger.getEffectiveLevel())
- device_filter = CustomFilterSettings(job.qs)
-
- # Verify backup feature is enabled and has required settings
- device_filter.verify_feature_enabled(logger, "backup", required_settings=["backup_path_template"])
# Build a dictionary, with keys of platform.network_driver, and the regex line in it for the netutils func.
remove_regex_dict = {}
@@ -127,7 +123,8 @@ def config_backup(job):
"options": {
"credentials_class": NORNIR_SETTINGS.get("credentials"),
"params": NORNIR_SETTINGS.get("inventory_params"),
- "queryset": device_filter.filtered_queryset,
+ "queryset": job.task_qs,
+ # "queryset": job.settings_filters["backup"][True].keys(),
"defaults": {"now": now},
},
},
@@ -139,15 +136,16 @@ def config_backup(job):
task=run_backup,
name="BACKUP CONFIG",
logger=logger,
- device_to_settings_map=job.device_to_settings_map,
+ device_to_settings_map=job.gc_advanced_settings_filter["backup"][True],
remove_regex_dict=remove_regex_dict,
replace_regex_dict=replace_regex_dict,
)
logger.debug("Completed configuration from devices.")
except NornirNautobotException as err:
- logger.error(
- f"`E3027:` NornirNautobotException raised during backup tasks. Original exception message: ```{err}```"
- )
+ if job.job_result.task_kwargs["debug"]:
+ logger.error(
+ f"`E3027:` NornirNautobotException raised during backup tasks. Original exception message: ```{traceback.format_exc()}```"
+ )
# re-raise Exception if it's raised from nornir-nautobot or nautobot-app-nornir
if str(err).startswith("`E2") or str(err).startswith("`E1"):
raise NornirNautobotException(err) from err
diff --git a/nautobot_golden_config/nornir_plays/config_compliance.py b/nautobot_golden_config/nornir_plays/config_compliance.py
index ce064117d..0d62b2d8f 100644
--- a/nautobot_golden_config/nornir_plays/config_compliance.py
+++ b/nautobot_golden_config/nornir_plays/config_compliance.py
@@ -23,7 +23,6 @@
from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig
from nautobot_golden_config.utilities.db_management import close_threaded_db_connections
from nautobot_golden_config.utilities.helper import (
- CustomFilterSettings,
get_json_config,
get_xml_config,
get_xml_subtree_with_full_path,
@@ -210,15 +209,6 @@ def config_compliance(job): # pylint: disable=unused-argument
now = make_aware(datetime.now())
logger = NornirLogger(job.job_result, job.logger.getEffectiveLevel())
rules = get_rules()
- device_filter = CustomFilterSettings(job.qs)
-
- # Verify compliance feature is enabled and has required settings
- device_filter.verify_feature_enabled(
- logger,
- "compliance",
- required_settings=["backup_path_template", "intended_path_template"],
- )
-
try:
with InitNornir(
runner=NORNIR_SETTINGS.get("runner"),
@@ -228,7 +218,7 @@ def config_compliance(job): # pylint: disable=unused-argument
"options": {
"credentials_class": NORNIR_SETTINGS.get("credentials"),
"params": NORNIR_SETTINGS.get("inventory_params"),
- "queryset": device_filter.filtered_queryset,
+ "queryset": job.task_qs,
"defaults": {"now": now},
},
},
@@ -240,7 +230,7 @@ def config_compliance(job): # pylint: disable=unused-argument
task=run_compliance,
name="RENDER COMPLIANCE TASK GROUP",
logger=logger,
- device_to_settings_map=job.device_to_settings_map,
+ device_to_settings_map=job.gc_advanced_settings_filter["compliance"][True],
rules=rules,
)
except NornirNautobotException as err:
diff --git a/nautobot_golden_config/nornir_plays/config_deployment.py b/nautobot_golden_config/nornir_plays/config_deployment.py
index b87887904..79f66d552 100644
--- a/nautobot_golden_config/nornir_plays/config_deployment.py
+++ b/nautobot_golden_config/nornir_plays/config_deployment.py
@@ -16,9 +16,10 @@
from nornir_nautobot.exceptions import NornirNautobotException
from nornir_nautobot.plugins.tasks.dispatcher import dispatcher
+from nautobot_golden_config.exceptions import ConfigPlanDeploymentFailure
from nautobot_golden_config.nornir_plays.processor import ProcessGoldenConfig
from nautobot_golden_config.utilities.config_postprocessing import get_config_postprocessing
-from nautobot_golden_config.utilities.constant import DEFAULT_DEPLOY_STATUS
+from nautobot_golden_config.utilities.constant import DEFAULT_DEPLOY_STATUS, ENABLE_POSTPROCESSING
from nautobot_golden_config.utilities.db_management import close_threaded_db_connections
from nautobot_golden_config.utilities.helper import dispatch_params
from nautobot_golden_config.utilities.logger import NornirLogger
@@ -34,8 +35,10 @@ def run_deployment(task: Task, logger: logging.Logger, config_plan_qs, deploy_jo
plans_to_deploy.update(deploy_result=deploy_job_result)
consolidated_config_set = "\n".join(plans_to_deploy.values_list("config_set", flat=True))
logger.debug(f"Consolidated config set: {consolidated_config_set}")
- logger.debug("Executing post-processing on the config set")
- post_config = get_config_postprocessing(plans_to_deploy, job_request)
+ post_config = consolidated_config_set
+ if ENABLE_POSTPROCESSING:
+ logger.debug("Executing post-processing on the config set")
+ post_config = get_config_postprocessing(plans_to_deploy, job_request)
plans_to_deploy.update(status=Status.objects.get(name="In Progress"))
try:
result = task.run(
@@ -113,7 +116,7 @@ def config_deployment(job):
) as nornir_obj:
nr_with_processors = nornir_obj.with_processors([ProcessGoldenConfig(logger)])
- nr_with_processors.run(
+ results = nr_with_processors.run(
task=run_deployment,
name="DEPLOY CONFIG",
logger=logger,
@@ -127,3 +130,5 @@ def config_deployment(job):
raise NornirNautobotException(error_msg) from error
logger.debug("Completed configuration deployment.")
+ if results.failed:
+ raise ConfigPlanDeploymentFailure()
diff --git a/nautobot_golden_config/nornir_plays/config_intended.py b/nautobot_golden_config/nornir_plays/config_intended.py
index bb552dd57..4cd7ef900 100644
--- a/nautobot_golden_config/nornir_plays/config_intended.py
+++ b/nautobot_golden_config/nornir_plays/config_intended.py
@@ -20,7 +20,6 @@
from nautobot_golden_config.utilities.db_management import close_threaded_db_connections
from nautobot_golden_config.utilities.graphql import graph_ql_query
from nautobot_golden_config.utilities.helper import (
- CustomFilterSettings,
dispatch_params,
get_django_env,
render_jinja_template,
@@ -107,15 +106,21 @@ def config_intended(job):
"""
now = make_aware(datetime.now())
logger = NornirLogger(job.job_result, job.logger.getEffectiveLevel())
- device_filter = CustomFilterSettings(job.qs)
+ # enabled_qs, disabled_qs = job.gc_advanced_filter.get_filtered_querysets("intended")
+ # device_filter = GCSettingsDeviceFilterSet(job.qs)
# Verify intended feature is enabled and has required settings
- device_filter.verify_feature_enabled(
- logger,
- "intended",
- required_settings=["jinja_path_template", "intended_path_template", "sot_agg_query"],
- )
-
+ # device_filter.verify_feature_enabled(
+ # logger,
+ # "intended",
+ # required_settings=["jinja_path_template", "intended_path_template", "sot_agg_query"],
+ # )
+ # if job.job_result.task_kwargs["debug"]:
+ # for device in disabled_qs:
+ # logger.warning(
+ # f"E3038: Device {device.name} does not have the required settings to run the intended job. Skipping device.",
+ # extra={"object": device},
+ # )
# Retrieve filters from the Django jinja template engine
jinja_env = get_django_env()
try:
@@ -127,7 +132,7 @@ def config_intended(job):
"options": {
"credentials_class": NORNIR_SETTINGS.get("credentials"),
"params": NORNIR_SETTINGS.get("inventory_params"),
- "queryset": device_filter.filtered_queryset,
+ "queryset": job.task_qs,
"defaults": {"now": now},
},
},
@@ -140,7 +145,7 @@ def config_intended(job):
task=run_template,
name="RENDER CONFIG",
logger=logger,
- device_to_settings_map=job.device_to_settings_map,
+ device_to_settings_map=job.gc_advanced_settings_filter["intended"][True],
job_class_instance=job,
jinja_env=jinja_env,
)
diff --git a/nautobot_golden_config/tables.py b/nautobot_golden_config/tables.py
index cc47a4946..458d8348b 100644
--- a/nautobot_golden_config/tables.py
+++ b/nautobot_golden_config/tables.py
@@ -7,18 +7,18 @@
from nautobot.extras.tables import StatusTableMixin
from nautobot_golden_config import models
-from nautobot_golden_config.utilities.constant import ENABLE_POSTPROCESSING, ENABLE_SOTAGG
-from nautobot_golden_config.utilities.helper import get_golden_config_settings
-
-settings = get_golden_config_settings()
-
-CONFIG_FEATURES = {
- "intended": settings.intended_enabled,
- "compliance": settings.compliance_enabled,
- "backup": settings.backup_enabled,
- "sotagg": ENABLE_SOTAGG,
- "postprocessing": ENABLE_POSTPROCESSING,
-}
+
+# from nautobot_golden_config.utilities.helper import get_golden_config_settings
+
+# settings = get_golden_config_settings()
+
+# CONFIG_FEATURES = {
+# "intended": settings.intended_enabled,
+# "compliance": settings.compliance_enabled,
+# "backup": settings.backup_enabled,
+# "sotagg": ENABLE_SOTAGG,
+# "postprocessing": ENABLE_POSTPROCESSING,
+# }
ALL_ACTIONS = """
{% if backup == True %}
diff --git a/nautobot_golden_config/template_content.py b/nautobot_golden_config/template_content.py
index 44db49783..236a1cbc9 100644
--- a/nautobot_golden_config/template_content.py
+++ b/nautobot_golden_config/template_content.py
@@ -5,19 +5,19 @@
from django.urls import reverse
from nautobot.extras.plugins import PluginTemplateExtension
-from nautobot_golden_config.models import ConfigCompliance, GoldenConfig
-from nautobot_golden_config.utilities.constant import ENABLE_POSTPROCESSING, ENABLE_SOTAGG
-from nautobot_golden_config.utilities.helper import get_golden_config_settings
+from nautobot_golden_config.models import ConfigCompliance, GoldenConfig, GoldenConfigSetting
-settings = get_golden_config_settings()
+# from nautobot_golden_config.utilities.helper import get_golden_config_settings
-CONFIG_FEATURES = {
- "intended": settings.intended_enabled,
- "compliance": settings.compliance_enabled,
- "backup": settings.backup_enabled,
- "sotagg": ENABLE_SOTAGG,
- "postprocessing": ENABLE_POSTPROCESSING,
-}
+# settings = get_golden_config_settings()
+
+# CONFIG_FEATURES = {
+# "intended": settings.intended_enabled,
+# "compliance": settings.compliance_enabled,
+# "backup": settings.backup_enabled,
+# "sotagg": ENABLE_SOTAGG,
+# "postprocessing": ENABLE_POSTPROCESSING,
+# }
class ConfigComplianceDeviceCheck(PluginTemplateExtension): # pylint: disable=abstract-method
@@ -117,7 +117,8 @@ def right_page(self):
"device": self.get_device(), # device,
"golden_config": golden_config,
"template_type": "device-configs",
- "config_features": CONFIG_FEATURES,
+ "config_features": GoldenConfigSetting.objects.get_for_device(device),
+ # "config_features": CONFIG_FEATURES,
}
return self.render(
"nautobot_golden_config/content_template.html",
@@ -157,10 +158,10 @@ def right_page(self):
extensions = [ConfigDeviceDetails]
-if settings.compliance_enabled:
- extensions.append(ConfigComplianceDeviceCheck)
- extensions.append(ConfigComplianceLocationCheck)
- extensions.append(ConfigComplianceTenantCheck)
+# if settings.compliance_enabled:
+extensions.append(ConfigComplianceDeviceCheck)
+extensions.append(ConfigComplianceLocationCheck)
+extensions.append(ConfigComplianceTenantCheck)
template_extensions = extensions
diff --git a/nautobot_golden_config/templates/nautobot_golden_config/configcompliance_retrieve.html b/nautobot_golden_config/templates/nautobot_golden_config/configcompliance_retrieve.html
deleted file mode 100644
index e544d072f..000000000
--- a/nautobot_golden_config/templates/nautobot_golden_config/configcompliance_retrieve.html
+++ /dev/null
@@ -1,99 +0,0 @@
-{% extends 'generic/object_detail.html' %}
-{% load helpers %}
-{% load static %}
-
-{% block extra_styles %}
-
-{% endblock extra_styles %}
-
-{% block content_left_page %}
-