Compare commits
No commits in common. "docs" and "main" have entirely different histories.
2
.dictionary
Normal file
2
.dictionary
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
Ansible
|
||||||
|
Kaussow
|
111
.gitignore
vendored
Normal file
111
.gitignore
vendored
Normal file
|
@ -0,0 +1,111 @@
|
||||||
|
# ---> Python
|
||||||
|
# Byte-compiled / optimized / DLL files
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
|
||||||
|
# C extensions
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
env/
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# PyInstaller
|
||||||
|
# Usually these files are written by a python script from a template
|
||||||
|
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||||
|
*.manifest
|
||||||
|
*.spec
|
||||||
|
|
||||||
|
# Installer logs
|
||||||
|
pip-log.txt
|
||||||
|
pip-delete-this-directory.txt
|
||||||
|
|
||||||
|
# Unit test / coverage reports
|
||||||
|
htmlcov/
|
||||||
|
.tox/
|
||||||
|
.coverage
|
||||||
|
.coverage.*
|
||||||
|
.cache
|
||||||
|
nosetests.xml
|
||||||
|
coverage.xml
|
||||||
|
*,cover
|
||||||
|
.hypothesis/
|
||||||
|
|
||||||
|
# Translations
|
||||||
|
*.mo
|
||||||
|
*.pot
|
||||||
|
|
||||||
|
# Django stuff:
|
||||||
|
*.log
|
||||||
|
local_settings.py
|
||||||
|
|
||||||
|
# Flask stuff:
|
||||||
|
instance/
|
||||||
|
.webassets-cache
|
||||||
|
|
||||||
|
# Scrapy stuff:
|
||||||
|
.scrapy
|
||||||
|
|
||||||
|
# Sphinx documentation
|
||||||
|
docs/_build/
|
||||||
|
|
||||||
|
# PyBuilder
|
||||||
|
target/
|
||||||
|
|
||||||
|
# Jupyter Notebook
|
||||||
|
.ipynb_checkpoints
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# celery beat schedule file
|
||||||
|
celerybeat-schedule
|
||||||
|
|
||||||
|
# SageMath parsed files
|
||||||
|
*.sage.py
|
||||||
|
|
||||||
|
# dotenv
|
||||||
|
.env
|
||||||
|
|
||||||
|
# virtualenv
|
||||||
|
.venv
|
||||||
|
venv/
|
||||||
|
ENV/
|
||||||
|
env/
|
||||||
|
env*/
|
||||||
|
|
||||||
|
# Spyder project settings
|
||||||
|
.spyderproject
|
||||||
|
|
||||||
|
# Rope project settings
|
||||||
|
.ropeproject
|
||||||
|
|
||||||
|
# Ignore ide addons
|
||||||
|
.server-script
|
||||||
|
.on-save.json
|
||||||
|
.vscode
|
||||||
|
.pytest_cache
|
||||||
|
|
||||||
|
pip-wheel-metadata
|
||||||
|
|
||||||
|
# Hugo documentation
|
||||||
|
docs/themes/
|
||||||
|
docs/public/
|
||||||
|
resources/_gen/
|
||||||
|
|
||||||
|
CHANGELOG.md
|
||||||
|
tests/output
|
47
.gitsv/config.yml
Normal file
47
.gitsv/config.yml
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
---
|
||||||
|
version: "1.1"
|
||||||
|
|
||||||
|
versioning:
|
||||||
|
update-major: []
|
||||||
|
update-minor: [feat]
|
||||||
|
update-patch: [fix, perf, refactor, chore, test, ci, docs]
|
||||||
|
|
||||||
|
tag:
|
||||||
|
pattern: "v%d.%d.%d"
|
||||||
|
|
||||||
|
release-notes:
|
||||||
|
sections:
|
||||||
|
- name: Features
|
||||||
|
commit-types: [feat]
|
||||||
|
section-type: commits
|
||||||
|
- name: Bug Fixes
|
||||||
|
commit-types: [fix]
|
||||||
|
section-type: commits
|
||||||
|
- name: Performance Improvements
|
||||||
|
commit-types: [perf]
|
||||||
|
section-type: commits
|
||||||
|
- name: Code Refactoring
|
||||||
|
commit-types: [refactor]
|
||||||
|
section-type: commits
|
||||||
|
- name: Others
|
||||||
|
commit-types: [chore]
|
||||||
|
section-type: commits
|
||||||
|
- name: Testing
|
||||||
|
commit-types: [test]
|
||||||
|
section-type: commits
|
||||||
|
- name: CI Pipeline
|
||||||
|
commit-types: [ci]
|
||||||
|
section-type: commits
|
||||||
|
- name: Documentation
|
||||||
|
commit-types: [docs]
|
||||||
|
section-type: commits
|
||||||
|
- name: Breaking Changes
|
||||||
|
section-type: breaking-changes
|
||||||
|
|
||||||
|
commit-message:
|
||||||
|
footer:
|
||||||
|
issue:
|
||||||
|
key: issue
|
||||||
|
add-value-prefix: "#"
|
||||||
|
issue:
|
||||||
|
regex: "#?[0-9]+"
|
7
.markdownlint.yml
Normal file
7
.markdownlint.yml
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
---
|
||||||
|
default: True
|
||||||
|
MD013: False
|
||||||
|
MD041: False
|
||||||
|
MD024: False
|
||||||
|
MD004:
|
||||||
|
style: dash
|
2
.prettierignore
Normal file
2
.prettierignore
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
*.tpl.md
|
||||||
|
LICENSE
|
46
.woodpecker/build-package.yml
Normal file
46
.woodpecker/build-package.yml
Normal file
|
@ -0,0 +1,46 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [pull_request, tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: build
|
||||||
|
image: docker.io/library/python:3.12
|
||||||
|
commands:
|
||||||
|
- GALAXY_VERSION=${CI_COMMIT_TAG##v}
|
||||||
|
- 'sed -i ''s/version: 0.0.0/version: ''"$${GALAXY_VERSION:-0.0.0}"''/g'' galaxy.yml'
|
||||||
|
- pip install poetry -qq
|
||||||
|
- poetry install --all-extras --no-root
|
||||||
|
- poetry run ansible-galaxy collection build --output-path dist/
|
||||||
|
|
||||||
|
- name: checksum
|
||||||
|
image: quay.io/thegeeklab/alpine-tools
|
||||||
|
commands:
|
||||||
|
- cd dist/ && sha256sum * > ../sha256sum.txt
|
||||||
|
|
||||||
|
- name: changelog
|
||||||
|
image: quay.io/thegeeklab/git-sv
|
||||||
|
commands:
|
||||||
|
- git sv current-version
|
||||||
|
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
|
||||||
|
- cat CHANGELOG.md
|
||||||
|
|
||||||
|
- name: publish-gitea
|
||||||
|
image: quay.io/thegeeklab/wp-gitea-release
|
||||||
|
settings:
|
||||||
|
api_key:
|
||||||
|
from_secret: gitea_token
|
||||||
|
base_url: https://gitea.rknet.org
|
||||||
|
files:
|
||||||
|
- dist/*
|
||||||
|
- sha256sum.txt
|
||||||
|
note: CHANGELOG.md
|
||||||
|
title: ${CI_COMMIT_TAG}
|
||||||
|
when:
|
||||||
|
- event: [tag]
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
- unit-test
|
||||||
|
- sanity-test
|
49
.woodpecker/docs.yml
Normal file
49
.woodpecker/docs.yml
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [pull_request, tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: markdownlint
|
||||||
|
image: quay.io/thegeeklab/markdownlint-cli
|
||||||
|
group: test
|
||||||
|
commands:
|
||||||
|
- markdownlint 'docs/**/*.md' 'README.md'
|
||||||
|
|
||||||
|
- name: spellcheck
|
||||||
|
image: quay.io/thegeeklab/alpine-tools
|
||||||
|
group: test
|
||||||
|
commands:
|
||||||
|
- spellchecker --files 'docs/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls frontmatter --frontmatter-keys title tags
|
||||||
|
environment:
|
||||||
|
FORCE_COLOR: "true"
|
||||||
|
|
||||||
|
- name: link-validation
|
||||||
|
image: docker.io/lycheeverse/lychee
|
||||||
|
group: test
|
||||||
|
commands:
|
||||||
|
- lychee --no-progress --format detailed docs/ README.md
|
||||||
|
|
||||||
|
- name: publish
|
||||||
|
image: quay.io/thegeeklab/wp-git-action
|
||||||
|
settings:
|
||||||
|
action:
|
||||||
|
- pages
|
||||||
|
author_email: shipper@rknet.org
|
||||||
|
author_name: shipper
|
||||||
|
branch: docs
|
||||||
|
message: auto-update documentation
|
||||||
|
netrc_machine: gitea.rknet.org
|
||||||
|
netrc_password:
|
||||||
|
from_secret: gitea_token
|
||||||
|
pages_directory: docs/
|
||||||
|
remote_url: https://gitea.rknet.org/ansible/${CI_REPO_NAME}
|
||||||
|
when:
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
- build-package
|
25
.woodpecker/lint.yml
Normal file
25
.woodpecker/lint.yml
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [pull_request, tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: check-format
|
||||||
|
image: docker.io/library/python:3.12
|
||||||
|
commands:
|
||||||
|
- pip install poetry -qq
|
||||||
|
- poetry install --all-extras --no-root
|
||||||
|
- poetry run ruff format --check --diff ./plugins
|
||||||
|
environment:
|
||||||
|
PY_COLORS: "1"
|
||||||
|
|
||||||
|
- name: check-coding
|
||||||
|
image: docker.io/library/python:3.12
|
||||||
|
commands:
|
||||||
|
- pip install poetry -qq
|
||||||
|
- poetry install --all-extras --no-root
|
||||||
|
- poetry run ruff ./plugins
|
||||||
|
environment:
|
||||||
|
PY_COLORS: "1"
|
26
.woodpecker/notify.yml
Normal file
26
.woodpecker/notify.yml
Normal file
|
@ -0,0 +1,26 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
runs_on: [success, failure]
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: matrix
|
||||||
|
image: quay.io/thegeeklab/wp-matrix
|
||||||
|
settings:
|
||||||
|
homeserver:
|
||||||
|
from_secret: matrix_homeserver
|
||||||
|
password:
|
||||||
|
from_secret: matrix_password
|
||||||
|
roomid:
|
||||||
|
from_secret: matrix_roomid
|
||||||
|
username:
|
||||||
|
from_secret: matrix_username
|
||||||
|
when:
|
||||||
|
- status: [success, failure]
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
- docs
|
45
.woodpecker/sanity-test.yml
Normal file
45
.woodpecker/sanity-test.yml
Normal file
|
@ -0,0 +1,45 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [pull_request, tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &ansible_base
|
||||||
|
image: docker.io/library/python:3.10
|
||||||
|
group: ansible
|
||||||
|
commands:
|
||||||
|
- pip install poetry -qq
|
||||||
|
- poetry install --all-extras --no-root
|
||||||
|
- poetry run pip install https://github.com/ansible/ansible/archive/$${ANSIBLE_VERSION}.tar.gz --disable-pip-version-check
|
||||||
|
- poetry run ansible --version
|
||||||
|
- poetry run ansible-test sanity --exclude .gitsv/ --exclude .woodpecker/ --python 3.10
|
||||||
|
- &ansible_env
|
||||||
|
PY_COLORS: "1"
|
||||||
|
|
||||||
|
workspace:
|
||||||
|
base: /woodpecker/src
|
||||||
|
path: ansible_collections/${CI_REPO_NAME/./\/}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: ansible-devel
|
||||||
|
<<: *ansible_base
|
||||||
|
environment:
|
||||||
|
ANSIBLE_VERSION: "devel"
|
||||||
|
<<: *ansible_env
|
||||||
|
|
||||||
|
- name: ansible-216
|
||||||
|
<<: *ansible_base
|
||||||
|
environment:
|
||||||
|
ANSIBLE_VERSION: "stable-2.16"
|
||||||
|
<<: *ansible_env
|
||||||
|
|
||||||
|
- name: ansible-215
|
||||||
|
<<: *ansible_base
|
||||||
|
environment:
|
||||||
|
ANSIBLE_VERSION: "stable-2.15"
|
||||||
|
<<: *ansible_env
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
- lint
|
36
.woodpecker/unit-test.yml
Normal file
36
.woodpecker/unit-test.yml
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
---
|
||||||
|
when:
|
||||||
|
- event: [pull_request, tag]
|
||||||
|
- event: [push, manual]
|
||||||
|
branch:
|
||||||
|
- ${CI_REPO_DEFAULT_BRANCH}
|
||||||
|
|
||||||
|
variables:
|
||||||
|
- &pytest_base
|
||||||
|
group: pytest
|
||||||
|
commands:
|
||||||
|
- pip install poetry -qq
|
||||||
|
- poetry install --all-extras --no-root
|
||||||
|
- poetry run pytest
|
||||||
|
environment:
|
||||||
|
PY_COLORS: "1"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: pyton-312
|
||||||
|
image: docker.io/library/python:3.12
|
||||||
|
<<: *pytest_base
|
||||||
|
|
||||||
|
- name: pyton-311
|
||||||
|
image: docker.io/library/python:3.11
|
||||||
|
<<: *pytest_base
|
||||||
|
|
||||||
|
- name: pyton-310
|
||||||
|
image: docker.io/library/python:3.10
|
||||||
|
<<: *pytest_base
|
||||||
|
|
||||||
|
- name: pyton-39
|
||||||
|
image: docker.io/library/python:3.9
|
||||||
|
<<: *pytest_base
|
||||||
|
|
||||||
|
depends_on:
|
||||||
|
- lint
|
21
LICENSE
Normal file
21
LICENSE
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2022 Robert Kaussow <mail@thegeeklab.de>
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is furnished
|
||||||
|
to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice (including the next
|
||||||
|
paragraph) shall be included in all copies or substantial portions of the
|
||||||
|
Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||||
|
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
|
||||||
|
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
|
||||||
|
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
|
||||||
|
OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
10
README.md
Normal file
10
README.md
Normal file
|
@ -0,0 +1,10 @@
|
||||||
|
# xoxys.general
|
||||||
|
|
||||||
|
[![Build Status](https://ci.rknet.org/api/badges/ansible/xoxys.general/status.svg)](https://ci.rknet.org/repos/ansible/xoxys.general)
|
||||||
|
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg?label=license)](https://gitea.rknet.org/ansible/xoxys.general/src/branch/main/LICENSE)
|
||||||
|
|
||||||
|
Custom general Ansible collection.
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
This project is licensed under the MIT License - see the [LICENSE](https://gitea.rknet.org/ansible/xoxys.general/src/branch/main/LICENSE) file for details.
|
24
galaxy.yml
Normal file
24
galaxy.yml
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
---
|
||||||
|
namespace: xoxys
|
||||||
|
name: general
|
||||||
|
# The version is generated during the release by Woocpecker CI.
|
||||||
|
version: 0.0.0
|
||||||
|
readme: README.md
|
||||||
|
authors:
|
||||||
|
- Robert Kaussow <mail@thegeeklab.de>
|
||||||
|
description: Custom general Ansible collection
|
||||||
|
license:
|
||||||
|
- MIT
|
||||||
|
license_file: "LICENSE"
|
||||||
|
tags:
|
||||||
|
- misc
|
||||||
|
repository: https://gitea.rknet.org/ansible/xoxys.general/
|
||||||
|
homepage: https://thegeeklab.de/
|
||||||
|
documentation: https://galaxy.geekdocs.de/collections/general/
|
||||||
|
build_ignore:
|
||||||
|
- ".*"
|
||||||
|
- "*requirements.txt"
|
||||||
|
- docs
|
||||||
|
- test
|
||||||
|
- dist
|
||||||
|
- setup.cfg
|
2
meta/runtime.yml
Normal file
2
meta/runtime.yml
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
---
|
||||||
|
requires_ansible: ">=2.10"
|
107
plugins/doc_fragments/hashivault.py
Normal file
107
plugins/doc_fragments/hashivault.py
Normal file
|
@ -0,0 +1,107 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
"""Implement documentation fragment for Hashivault module."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
class ModuleDocFragment: # noqa
|
||||||
|
# Standard documentation
|
||||||
|
DOCUMENTATION = r"""
|
||||||
|
requirements:
|
||||||
|
- hvac>=0.10.1
|
||||||
|
- ansible>=2.0.0
|
||||||
|
- requests
|
||||||
|
options:
|
||||||
|
url:
|
||||||
|
description:
|
||||||
|
- URL of the Vault server.
|
||||||
|
- You can use C(VAULT_ADDR) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
ca_cert:
|
||||||
|
description:
|
||||||
|
- Path to a PEM-encoded CA cert file to use to verify the Vault server
|
||||||
|
TLS certificate.
|
||||||
|
- You can use C(VAULT_CACERT) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
ca_path:
|
||||||
|
description:
|
||||||
|
- Path to a directory of PEM-encoded CA cert files to verify the Vault server
|
||||||
|
TLS certificate. If ca_cert is specified, its value will take precedence.
|
||||||
|
- You can use C(VAULT_CAPATH) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
client_cert:
|
||||||
|
description:
|
||||||
|
- Path to a PEM-encoded client certificate for TLS authentication to the Vault
|
||||||
|
server.
|
||||||
|
- You can use C(VAULT_CLIENT_CERT) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
client_key:
|
||||||
|
description:
|
||||||
|
- Path to an unencrypted PEM-encoded private key matching the client certificate.
|
||||||
|
- You can use C(VAULT_CLIENT_KEY) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
verify:
|
||||||
|
description:
|
||||||
|
- If set, do not verify presented TLS certificate before communicating with Vault
|
||||||
|
server. Setting this variable is not recommended except during testing.
|
||||||
|
- You can use C(VAULT_SKIP_VERIFY) environment variable.
|
||||||
|
default: false
|
||||||
|
type: bool
|
||||||
|
authtype:
|
||||||
|
description:
|
||||||
|
- Authentication type.
|
||||||
|
- You can use C(VAULT_AUTHTYPE) environment variable.
|
||||||
|
default: "token"
|
||||||
|
type: str
|
||||||
|
choices: ["token", "userpass", "github", "ldap", "approle"]
|
||||||
|
login_mount_point:
|
||||||
|
description:
|
||||||
|
- Authentication mount point.
|
||||||
|
- You can use C(VAULT_LOGIN_MOUNT_POINT) environment variable.
|
||||||
|
type: str
|
||||||
|
token:
|
||||||
|
description:
|
||||||
|
- Token for vault.
|
||||||
|
- You can use C(VAULT_TOKEN) environment variable.
|
||||||
|
type: str
|
||||||
|
username:
|
||||||
|
description:
|
||||||
|
- Username to login to vault.
|
||||||
|
- You can use C(VAULT_USER) environment variable.
|
||||||
|
default: ""
|
||||||
|
type: str
|
||||||
|
password:
|
||||||
|
description:
|
||||||
|
- Password to login to vault.
|
||||||
|
- You can use C(VAULT_PASSWORD) environment variable.
|
||||||
|
type: str
|
||||||
|
role_id:
|
||||||
|
description:
|
||||||
|
- Role id for vault.
|
||||||
|
- You can use C(VAULT_ROLE_ID) environment variable.
|
||||||
|
type: str
|
||||||
|
secret_id:
|
||||||
|
description:
|
||||||
|
- Secret id for vault.
|
||||||
|
- You can use C(VAULT_SECRET_ID) environment variable.
|
||||||
|
type: str
|
||||||
|
aws_header:
|
||||||
|
description:
|
||||||
|
- X-Vault-AWS-IAM-Server-ID Header value to prevent replay attacks.
|
||||||
|
- You can use C(VAULT_AWS_HEADER) environment variable.
|
||||||
|
type: str
|
||||||
|
namespace:
|
||||||
|
description:
|
||||||
|
- Namespace for vault.
|
||||||
|
- You can use C(VAULT_NAMESPACE) environment variable.
|
||||||
|
type: str
|
||||||
|
"""
|
14
plugins/filter/prefix.py
Normal file
14
plugins/filter/prefix.py
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
"""Filter to prefix all itams from a list."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
def prefix(value, prefix="--"):
|
||||||
|
return [prefix + x for x in value]
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object): # noqa
|
||||||
|
def filters(self):
|
||||||
|
return {"prefix": prefix}
|
14
plugins/filter/wrap.py
Normal file
14
plugins/filter/wrap.py
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
"""Filter to wrap all items from a list."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
|
||||||
|
def wrap(value, wrapper="'"):
|
||||||
|
return [wrapper + x + wrapper for x in value]
|
||||||
|
|
||||||
|
|
||||||
|
class FilterModule(object): # noqa
|
||||||
|
def filters(self):
|
||||||
|
return {"wrap": wrap}
|
323
plugins/inventory/proxmox.py
Normal file
323
plugins/inventory/proxmox.py
Normal file
|
@ -0,0 +1,323 @@
|
||||||
|
# Copyright (c) 2014, Mathieu GAUTHIER-LAFAYE <gauthierl@lapth.cnrs.fr>
|
||||||
|
# Copyright (c) 2016, Matt Harris <matthaeus.harris@gmail.com>
|
||||||
|
# Copyright (c) 2020, Robert Kaussow <mail@thegeeklab.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
"""Dynamic inventory plugin for Proxmox VE."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
---
|
||||||
|
name: proxmox
|
||||||
|
short_description: Proxmox VE inventory source
|
||||||
|
version_added: 1.1.0
|
||||||
|
description:
|
||||||
|
- Get inventory hosts from the proxmox service.
|
||||||
|
- "Uses a configuration file as an inventory source, it must end in C(.proxmox.yml) or C(.proxmox.yaml) and has a C(plugin: xoxys.general.proxmox) entry."
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- inventory_cache
|
||||||
|
options:
|
||||||
|
plugin:
|
||||||
|
description: The name of this plugin, it should always be set to C(xoxys.general.proxmox) for this plugin to recognize it as it's own.
|
||||||
|
required: yes
|
||||||
|
choices: ["xoxys.general.proxmox"]
|
||||||
|
api_host:
|
||||||
|
description:
|
||||||
|
- Specify the target host of the Proxmox VE cluster.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
env:
|
||||||
|
- name: PROXMOX_SERVER
|
||||||
|
api_user:
|
||||||
|
description:
|
||||||
|
- Specify the user to authenticate with.
|
||||||
|
type: str
|
||||||
|
required: true
|
||||||
|
env:
|
||||||
|
- name: PROXMOX_USER
|
||||||
|
api_password:
|
||||||
|
description:
|
||||||
|
- Specify the password to authenticate with.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: PROXMOX_PASSWORD
|
||||||
|
api_token_id:
|
||||||
|
description:
|
||||||
|
- Specify the token ID.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: PROXMOX_TOKEN_ID
|
||||||
|
api_token_secret:
|
||||||
|
description:
|
||||||
|
- Specify the token secret.
|
||||||
|
type: str
|
||||||
|
env:
|
||||||
|
- name: PROXMOX_TOKEN_SECRET
|
||||||
|
verify_ssl:
|
||||||
|
description:
|
||||||
|
- If C(false), SSL certificates will not be validated.
|
||||||
|
- This should only be used on personally controlled sites using self-signed certificates.
|
||||||
|
type: bool
|
||||||
|
default: True
|
||||||
|
auth_timeout:
|
||||||
|
description: Proxmox VE authentication timeout.
|
||||||
|
type: int
|
||||||
|
default: 5
|
||||||
|
exclude_vmid:
|
||||||
|
description: VMID's to exclude from inventory.
|
||||||
|
type: list
|
||||||
|
default: []
|
||||||
|
elements: str
|
||||||
|
exclude_state:
|
||||||
|
description: VM states to exclude from inventory.
|
||||||
|
type: list
|
||||||
|
default: []
|
||||||
|
elements: str
|
||||||
|
group:
|
||||||
|
description: Group to place all hosts into.
|
||||||
|
type: string
|
||||||
|
default: proxmox
|
||||||
|
want_facts:
|
||||||
|
description: Toggle, if C(true) the plugin will retrieve host facts from the server
|
||||||
|
type: boolean
|
||||||
|
default: True
|
||||||
|
requirements:
|
||||||
|
- "proxmoxer"
|
||||||
|
""" # noqa
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
# proxmox.yml
|
||||||
|
plugin: xoxys.general.proxmox
|
||||||
|
api_user: root@pam
|
||||||
|
api_password: secret
|
||||||
|
api_host: helldorado
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
import socket
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
from ansible.errors import AnsibleError
|
||||||
|
from ansible.module_utils._text import to_native
|
||||||
|
from ansible.module_utils.parsing.convert_bool import boolean
|
||||||
|
from ansible.module_utils.six import iteritems
|
||||||
|
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||||
|
from ansible_collections.xoxys.general.plugins.module_utils.version import LooseVersion
|
||||||
|
|
||||||
|
try:
|
||||||
|
from proxmoxer import ProxmoxAPI
|
||||||
|
|
||||||
|
HAS_PROXMOXER = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_PROXMOXER = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
from requests.packages import urllib3
|
||||||
|
|
||||||
|
HAS_URLLIB3 = True
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
import urllib3
|
||||||
|
|
||||||
|
HAS_URLLIB3 = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_URLLIB3 = False
|
||||||
|
|
||||||
|
|
||||||
|
class InventoryModule(BaseInventoryPlugin):
|
||||||
|
"""Provide Proxmox VE inventory."""
|
||||||
|
|
||||||
|
NAME = "xoxys.general.proxmox"
|
||||||
|
|
||||||
|
def _proxmox_auth(self):
|
||||||
|
auth_args = {"user": self.get_option("api_user")}
|
||||||
|
if not (self.get_option("api_token_id") and self.get_option("api_token_secret")):
|
||||||
|
auth_args["password"] = self.get_option("api_password")
|
||||||
|
else:
|
||||||
|
auth_args["token_name"] = self.get_option("api_token_id")
|
||||||
|
auth_args["token_value"] = self.get_option("api_token_secret")
|
||||||
|
|
||||||
|
verify_ssl = boolean(self.get_option("verify_ssl"), strict=False)
|
||||||
|
if not verify_ssl and HAS_URLLIB3:
|
||||||
|
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||||
|
|
||||||
|
self.client = ProxmoxAPI(
|
||||||
|
self.get_option("api_host"),
|
||||||
|
verify_ssl=verify_ssl,
|
||||||
|
timeout=self.get_option("auth_timeout"),
|
||||||
|
**auth_args,
|
||||||
|
)
|
||||||
|
|
||||||
|
def _get_version(self):
|
||||||
|
return LooseVersion(self.client.version.get()["version"])
|
||||||
|
|
||||||
|
def _get_major(self):
|
||||||
|
return LooseVersion(self.client.version.get()["release"])
|
||||||
|
|
||||||
|
def _get_names(self, pve_list, pve_type):
|
||||||
|
if pve_type == "node":
|
||||||
|
return [node["node"] for node in pve_list]
|
||||||
|
if pve_type == "pool":
|
||||||
|
return [pool["poolid"] for pool in pve_list]
|
||||||
|
|
||||||
|
return []
|
||||||
|
|
||||||
|
def _get_variables(self, pve_list, pve_type):
|
||||||
|
variables = {}
|
||||||
|
|
||||||
|
if pve_type in ["qemu", "container"]:
|
||||||
|
for vm in pve_list:
|
||||||
|
nested = {}
|
||||||
|
for key, value in iteritems(vm):
|
||||||
|
nested["proxmox_" + key] = value
|
||||||
|
variables[vm["name"]] = nested
|
||||||
|
|
||||||
|
return variables
|
||||||
|
|
||||||
|
def _get_ip_address(self, pve_type, pve_node, vmid):
|
||||||
|
def validate(address):
|
||||||
|
try:
|
||||||
|
# IP address validation
|
||||||
|
if socket.inet_aton(address) and address != "127.0.0.1":
|
||||||
|
return address
|
||||||
|
except OSError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
address = False
|
||||||
|
networks = False
|
||||||
|
if pve_type == "qemu":
|
||||||
|
# If qemu agent is enabled, try to gather the IP address
|
||||||
|
try:
|
||||||
|
if self.client.nodes(pve_node).get(pve_type, vmid, "agent", "info") is not None:
|
||||||
|
networks = self.client.nodes(pve_node).get(
|
||||||
|
"qemu", vmid, "agent", "network-get-interfaces"
|
||||||
|
)["result"]
|
||||||
|
except Exception: # noqa
|
||||||
|
pass
|
||||||
|
|
||||||
|
if networks and isinstance(networks, list):
|
||||||
|
for network in networks:
|
||||||
|
for ip_address in network.get("ip-addresses", []):
|
||||||
|
address = validate(ip_address["ip-address"])
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
config = self.client.nodes(pve_node).get(pve_type, vmid, "config")
|
||||||
|
address = re.search(r"ip=(\d*\.\d*\.\d*\.\d*)", config["net0"]).group(1)
|
||||||
|
except Exception: # noqa
|
||||||
|
pass
|
||||||
|
|
||||||
|
return address
|
||||||
|
|
||||||
|
def _exclude(self, pve_list):
|
||||||
|
filtered = []
|
||||||
|
for item in pve_list:
|
||||||
|
obj = defaultdict(dict, item)
|
||||||
|
if obj["template"] == 1:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if obj["status"] in self.get_option("exclude_state"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if obj["vmid"] in self.get_option("exclude_vmid"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
filtered.append(item.copy())
|
||||||
|
return filtered
|
||||||
|
|
||||||
|
def _propagate(self):
|
||||||
|
for node in self._get_names(self.client.nodes.get(), "node"):
|
||||||
|
try:
|
||||||
|
qemu_list = self._exclude(self.client.nodes(node).qemu.get())
|
||||||
|
container_list = self._exclude(self.client.nodes(node).lxc.get())
|
||||||
|
except Exception as e: # noqa
|
||||||
|
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||||
|
|
||||||
|
# Merge QEMU and Containers lists from this node
|
||||||
|
instances = self._get_variables(qemu_list, "qemu").copy()
|
||||||
|
instances.update(self._get_variables(container_list, "container"))
|
||||||
|
|
||||||
|
for host in instances:
|
||||||
|
vmid = instances[host]["proxmox_vmid"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
pve_type = instances[host]["proxmox_type"]
|
||||||
|
except KeyError:
|
||||||
|
pve_type = "qemu"
|
||||||
|
|
||||||
|
try:
|
||||||
|
description = self.client.nodes(node).get(pve_type, vmid, "config")[
|
||||||
|
"description"
|
||||||
|
]
|
||||||
|
except KeyError:
|
||||||
|
description = None
|
||||||
|
except Exception as e: # noqa
|
||||||
|
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||||
|
|
||||||
|
try:
|
||||||
|
metadata = json.loads(description)
|
||||||
|
except TypeError:
|
||||||
|
metadata = {}
|
||||||
|
except ValueError:
|
||||||
|
metadata = {"notes": description}
|
||||||
|
|
||||||
|
# Add hosts to default group
|
||||||
|
self.inventory.add_group(group=self.get_option("group"))
|
||||||
|
self.inventory.add_host(group=self.get_option("group"), host=host)
|
||||||
|
|
||||||
|
# Group hosts by status
|
||||||
|
self.inventory.add_group(group=instances[host]["proxmox_status"])
|
||||||
|
self.inventory.add_host(group=instances[host]["proxmox_status"], host=host)
|
||||||
|
|
||||||
|
if "groups" in metadata:
|
||||||
|
for group in metadata["groups"]:
|
||||||
|
self.inventory.add_group(group=group)
|
||||||
|
self.inventory.add_host(group=group, host=host)
|
||||||
|
|
||||||
|
if self.get_option("want_facts"):
|
||||||
|
for attr in instances[host]:
|
||||||
|
if attr not in ["proxmox_template"]:
|
||||||
|
self.inventory.set_variable(host, attr, instances[host][attr])
|
||||||
|
|
||||||
|
address = self._get_ip_address(pve_type, node, vmid)
|
||||||
|
if address:
|
||||||
|
self.inventory.set_variable(host, "ansible_host", address)
|
||||||
|
|
||||||
|
for pool in self._get_names(self.client.pools.get(), "pool"):
|
||||||
|
try:
|
||||||
|
pool_list = self._exclude(self.client.pool(pool).get()["members"])
|
||||||
|
except Exception as e: # noqa
|
||||||
|
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||||
|
|
||||||
|
members = [
|
||||||
|
member["name"]
|
||||||
|
for member in pool_list
|
||||||
|
if (member["type"] == "qemu" or member["type"] == "lxc")
|
||||||
|
]
|
||||||
|
|
||||||
|
for member in members:
|
||||||
|
self.inventory.add_host(group=pool, host=member)
|
||||||
|
|
||||||
|
def verify_file(self, path):
|
||||||
|
"""Verify the Proxmox VE configuration file."""
|
||||||
|
if super().verify_file(path):
|
||||||
|
endings = ("proxmox.yaml", "proxmox.yml")
|
||||||
|
if any(path.endswith(ending) for ending in endings):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def parse(self, inventory, loader, path, cache=True):  # noqa
    """Dynamically parse the Proxmox VE cloud inventory.

    :param inventory: Inventory object to populate.
    :param loader: Ansible DataLoader used to read the configuration file.
    :param path: Path to the inventory configuration file.
    :param cache: Accepted for interface compatibility; not used here.
    :raises AnsibleError: When the optional ``proxmoxer`` dependency is
        not installed.
    """
    # Fail early with an actionable message when proxmoxer is missing.
    if not HAS_PROXMOXER:
        raise AnsibleError(
            "The Proxmox VE dynamic inventory plugin requires proxmoxer: "
            "https://pypi.org/project/proxmoxer/"
        )

    super().parse(inventory, loader, path)

    # Order matters: read the plugin options first, then authenticate
    # against the Proxmox VE API, then populate the inventory.
    self._read_config_data(path)
    self._proxmox_auth()
    self._propagate()
|
402
plugins/module_utils/hashivault.py
Normal file
402
plugins/module_utils/hashivault.py
Normal file
|
@ -0,0 +1,402 @@
|
||||||
|
"""Provide helper functions for Hashivault module."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import functools
import os
import traceback

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.basic import missing_required_lib
|
||||||
|
|
||||||
|
HVAC_IMP_ERR = None
|
||||||
|
try:
|
||||||
|
import hvac
|
||||||
|
from hvac.exceptions import InvalidPath
|
||||||
|
|
||||||
|
HAS_HVAC = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_HVAC = False
|
||||||
|
HVAC_IMP_ERR = traceback.format_exc()
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_argspec():
    """Return the argument spec shared by all Hashicorp Vault modules.

    Connection and authentication options default to the standard
    ``VAULT_*`` environment variables used by the Vault CLI.
    """
    return dict(
        url=dict(required=False, default=os.environ.get("VAULT_ADDR", ""), type="str"),
        ca_cert=dict(required=False, default=os.environ.get("VAULT_CACERT", ""), type="str"),
        ca_path=dict(required=False, default=os.environ.get("VAULT_CAPATH", ""), type="str"),
        client_cert=dict(
            required=False, default=os.environ.get("VAULT_CLIENT_CERT", ""), type="str"
        ),
        client_key=dict(
            required=False, default=os.environ.get("VAULT_CLIENT_KEY", ""), type="str", no_log=True
        ),
        verify=dict(
            required=False,
            # Bug fix: the previous default, `not os.environ.get("VAULT_SKIP_VERIFY",
            # "False")`, was ALWAYS False because any non-empty string ("False"
            # included) is truthy — TLS verification was silently disabled by
            # default.  Verification is now enabled unless VAULT_SKIP_VERIFY is
            # explicitly set to a true-ish value.
            default=os.environ.get("VAULT_SKIP_VERIFY", "").strip().lower()
            not in ("1", "true", "yes", "on"),
            type="bool",
        ),
        authtype=dict(
            required=False,
            default=os.environ.get("VAULT_AUTHTYPE", "token"),
            type="str",
            choices=["token", "userpass", "github", "ldap", "approle"],
        ),
        login_mount_point=dict(
            required=False, default=os.environ.get("VAULT_LOGIN_MOUNT_POINT", None), type="str"
        ),
        token=dict(
            required=False,
            # hashivault_default_token also falls back to ~/.vault-token,
            # matching the Vault CLI behavior.
            fallback=(hashivault_default_token, ["VAULT_TOKEN"]),
            type="str",
            no_log=True,
        ),
        username=dict(required=False, default=os.environ.get("VAULT_USER", ""), type="str"),
        password=dict(
            required=False, fallback=(env_fallback, ["VAULT_PASSWORD"]), type="str", no_log=True
        ),
        role_id=dict(
            required=False, fallback=(env_fallback, ["VAULT_ROLE_ID"]), type="str", no_log=True
        ),
        secret_id=dict(
            required=False, fallback=(env_fallback, ["VAULT_SECRET_ID"]), type="str", no_log=True
        ),
        aws_header=dict(
            required=False, fallback=(env_fallback, ["VAULT_AWS_HEADER"]), type="str", no_log=True
        ),
        namespace=dict(
            required=False, default=os.environ.get("VAULT_NAMESPACE", None), type="str"
        ),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_init(
    argument_spec,
    supports_check_mode=False,
    required_if=None,
    required_together=None,
    required_one_of=None,
    mutually_exclusive=None,
):
    """Create the AnsibleModule for a Vault task and verify dependencies.

    :param argument_spec: Full argument spec for the module.
    :param supports_check_mode: Whether the module supports check mode.
    :param required_if: Conditional requirements passed to AnsibleModule.
    :param required_together: Parameters that must appear together.
    :param required_one_of: Parameter groups where one is required.
    :param mutually_exclusive: Parameters that may not be combined.
    :return: Configured AnsibleModule instance.
    """
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=supports_check_mode,
        required_if=required_if,
        required_together=required_together,
        required_one_of=required_one_of,
        mutually_exclusive=mutually_exclusive,
    )

    if not HAS_HVAC:
        module.fail_json(msg=missing_required_lib("hvac"), exception=HVAC_IMP_ERR)

    # Ansible censors every occurrence of no_log parameter values in the
    # output; drop a handful of trivially common values so regular result
    # text is not mangled by over-eager masking.
    for harmless in ("0", 0, "1", 1, True, False, "ttl"):
        module.no_log_values.discard(harmless)

    return module
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_client(params):
    """Build an unauthenticated hvac.Client from module parameters.

    :param params: Module parameter dict (url, certs, verify, namespace).
    :return: hvac.Client instance (not yet authenticated).
    """
    url = params.get("url")
    cert = (params.get("client_cert"), params.get("client_key"))
    check_verify = params.get("verify")
    namespace = params.get("namespace", None)

    # When verification is requested (or left as the empty string), an
    # explicit CA bundle or CA directory takes precedence over the plain
    # boolean flag.
    verify = check_verify
    if check_verify == "" or check_verify:
        verify = params.get("ca_cert") or params.get("ca_path") or check_verify

    return hvac.Client(url=url, cert=cert, verify=verify, namespace=namespace)
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_auth(client, params):
    """Authenticate *client* using the configured auth backend.

    :param client: Unauthenticated hvac.Client.
    :param params: Module parameter dict (authtype, credentials, mount point).
    :return: The authenticated client (an AppRoleClient wrapper for approle).
    """
    token = params.get("token")
    authtype = params.get("authtype")
    # Fall back to the backend name when no explicit mount point is given.
    mount = params.get("login_mount_point", authtype) or authtype
    username = params.get("username")
    password = params.get("password")
    secret_id = params.get("secret_id")
    role_id = params.get("role_id")

    if authtype == "github":
        client.auth.github.login(token, mount_point=mount)
    elif authtype == "userpass":
        client.auth_userpass(username, password, mount_point=mount)
    elif authtype == "ldap":
        client.auth.ldap.login(username, password, mount_point=mount)
    elif authtype == "approle":
        # Wrapped so every subsequent Vault call re-issues an approle token.
        client = AppRoleClient(client, role_id, secret_id, mount_point=mount)
    elif authtype == "tls":
        client.auth_tls()
    else:
        # Default ("token") auth: just install the static token.
        client.token = token
    return client
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_auth_client(params):
    """Return an hvac client that is already authenticated.

    Convenience wrapper combining hashivault_client and hashivault_auth.
    """
    return hashivault_auth(hashivault_client(params), params)
|
||||||
|
|
||||||
|
|
||||||
|
def hashiwrapper(function):
    """Decorate *function* so its result is merged into a base Ansible result.

    The wrapped callable returns a dict seeded with
    ``{"changed": False, "rc": 0}``; any keys returned by *function*
    override those defaults.

    :param function: Callable returning a dict of result fields.
    :return: Wrapped callable returning the merged result dict.
    """

    # functools.wraps preserves the wrapped function's metadata
    # (__name__, __doc__) so tracebacks and docs stay meaningful.
    @functools.wraps(function)
    def wrapper(*args, **kwargs):
        result = {"changed": False, "rc": 0}
        result.update(function(*args, **kwargs))
        return result

    return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
def hashivault_default_token(env):
    """Get a default Vault token from an environment variable or a file.

    :param env: Single-element list holding the environment variable name.
    :return: The token string, or "" when no token can be found.
    """
    token = os.environ.get(env[0])
    if token is not None:
        return token

    # Fall back to the token cached by `vault login` in the user's home.
    token_file = os.path.expanduser("~/.vault-token")
    if os.path.exists(token_file):
        with open(token_file) as handle:
            return handle.read().strip()
    return ""
|
||||||
|
|
||||||
|
|
||||||
|
@hashiwrapper
def hashivault_read(params):
    """Read a secret (or a single key of it) from Vault.

    Supports KV v1 and v2 engines.  On failure sets ``rc``/``failed``/``msg``
    in the result instead of raising; an optional ``default`` is returned
    when the secret or key is missing.

    :param params: Module parameters (version, mount_point, secret,
        secret_version, key, default, plus connection/auth options).
    :return: Result dict with ``value`` on success.
    """
    # NOTE: the wrapper already seeds changed/rc, but this function returns
    # early in several places, so it seeds its own base result too.
    result = {"changed": False, "rc": 0}
    client = hashivault_auth_client(params)
    version = params.get("version")
    mount_point = params.get("mount_point")
    secret = params.get("secret")
    secret_version = params.get("secret_version")

    key = params.get("key")
    default = params.get("default")

    # A leading "/" means the caller supplied a full path: strip it and
    # bypass the configured mount point.
    if secret.startswith("/"):
        secret = secret.lstrip("/")
        mount_point = ""

    # Human-readable path used only in error messages.
    secret_path = f"{mount_point}/{secret}" if mount_point else secret

    try:
        if version == 2:
            response = client.secrets.kv.v2.read_secret_version(
                secret, mount_point=mount_point, version=secret_version
            )
        else:
            response = client.secrets.kv.v1.read_secret(secret, mount_point=mount_point)
    except InvalidPath:
        # Missing secret: handled below via the default/failure path.
        response = None
    except Exception as e:  # noqa: BLE001
        result["rc"] = 1
        result["failed"] = True
        error_string = f"{e.__class__.__name__}({e})"
        result["msg"] = f"Error {error_string} reading {secret_path}"
        return result
    if not response:
        if default is not None:
            result["value"] = default
            return result
        result["rc"] = 1
        result["failed"] = True
        result["msg"] = f"Secret {secret_path} is not in vault"
        return result
    if version == 2:
        # KV v2 nests the payload one level deeper: data -> data.
        try:
            data = response.get("data", {})
            data = data.get("data", {})
        except Exception:  # noqa: BLE001
            # Unexpected response shape: fall back to its string form.
            data = str(response)
    else:
        data = response["data"]
    # Propagate lease/wrap metadata when the server provided it.
    lease_duration = response.get("lease_duration", None)
    if lease_duration is not None:
        result["lease_duration"] = lease_duration
    lease_id = response.get("lease_id", None)
    if lease_id is not None:
        result["lease_id"] = lease_id
    renewable = response.get("renewable", None)
    if renewable is not None:
        result["renewable"] = renewable
    wrap_info = response.get("wrap_info", None)
    if wrap_info is not None:
        result["wrap_info"] = wrap_info
    if key and key not in data:
        if default is not None:
            result["value"] = default
            return result
        result["rc"] = 1
        result["failed"] = True
        result["msg"] = f"Key {key} is not in secret {secret_path}"
        return result
    # Either a single key's value or the whole secret payload.
    value = data[key] if key else data
    result["value"] = value
    return result
|
||||||
|
|
||||||
|
|
||||||
|
class AppRoleClient:
    """
    hvac.Client decorator generate and set a new approle token.

    This allows multiple calls to Vault without having to manually
    generate and set a token on every Vault call.
    """

    def __init__(self, client, role_id, secret_id, mount_point):
        # object.__setattr__ bypasses this class's own __setattr__
        # override, which would otherwise forward these writes onto the
        # wrapped client instead of storing them on the wrapper.
        object.__setattr__(self, "client", client)
        object.__setattr__(self, "role_id", role_id)
        object.__setattr__(self, "secret_id", secret_id)
        object.__setattr__(self, "login_mount_point", mount_point)

    def __setattr__(self, name, val):
        # All attribute writes are forwarded to the wrapped hvac client.
        client = object.__getattribute__(self, "client")
        client.__setattr__(name, val)

    def __getattribute__(self, name):
        # Every attribute read performs a fresh approle login and installs
        # the new token on the wrapped client before returning the
        # requested attribute.  NOTE(review): this re-authenticates on
        # *every* access, which matches the class docstring's intent but
        # is costly — confirm before changing.
        client = object.__getattribute__(self, "client")
        attr = client.__getattribute__(name)

        role_id = object.__getattribute__(self, "role_id")
        secret_id = object.__getattribute__(self, "secret_id")
        login_mount_point = object.__getattribute__(self, "login_mount_point")
        resp = client.auth_approle(role_id, secret_id=secret_id, mount_point=login_mount_point)
        client.token = str(resp["auth"]["client_token"])
        return attr
|
||||||
|
|
||||||
|
|
||||||
|
def _compare_state(desired_state, current_state, ignore=None):
|
||||||
|
"""
|
||||||
|
Compare desired state to current state.
|
||||||
|
|
||||||
|
Returns true if objects are equal.
|
||||||
|
|
||||||
|
Recursively walks dict object to compare all keys.
|
||||||
|
|
||||||
|
:param desired_state: The state user desires.
|
||||||
|
:param current_state: The state that currently exists.
|
||||||
|
:param ignore: Ignore these keys.
|
||||||
|
:type ignore: list
|
||||||
|
|
||||||
|
:return: True if the states are the same.
|
||||||
|
:rtype: bool
|
||||||
|
"""
|
||||||
|
|
||||||
|
if ignore is None:
|
||||||
|
ignore = []
|
||||||
|
if isinstance(desired_state, list):
|
||||||
|
if not isinstance(current_state, list) or (len(desired_state) != len(current_state)):
|
||||||
|
return False
|
||||||
|
return set(desired_state) == set(current_state)
|
||||||
|
|
||||||
|
if isinstance(desired_state, dict):
|
||||||
|
if not isinstance(current_state, dict):
|
||||||
|
return False
|
||||||
|
|
||||||
|
# iterate over dictionary keys
|
||||||
|
for key in desired_state:
|
||||||
|
if key in ignore:
|
||||||
|
continue
|
||||||
|
v = desired_state[key]
|
||||||
|
if (key not in current_state) or (not _compare_state(v, current_state.get(key))):
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Lots of things get handled as strings in ansible that aren"t necessarily strings,
|
||||||
|
# can extend this list later.
|
||||||
|
if isinstance(desired_state, str) and isinstance(current_state, int):
|
||||||
|
current_state = str(current_state)
|
||||||
|
|
||||||
|
return desired_state == current_state
|
||||||
|
|
||||||
|
|
||||||
|
def _convert_to_seconds(original_value):
|
||||||
|
try:
|
||||||
|
value = str(original_value)
|
||||||
|
seconds = 0
|
||||||
|
if "h" in value:
|
||||||
|
ray = value.split("h")
|
||||||
|
seconds = int(ray.pop(0)) * 3600
|
||||||
|
value = "".join(ray)
|
||||||
|
if "m" in value:
|
||||||
|
ray = value.split("m")
|
||||||
|
seconds += int(ray.pop(0)) * 60
|
||||||
|
value = "".join(ray)
|
||||||
|
if value:
|
||||||
|
ray = value.split("s")
|
||||||
|
seconds += int(ray.pop(0))
|
||||||
|
return seconds
|
||||||
|
except Exception: # noqa: BLE001,S110
|
||||||
|
pass
|
||||||
|
return original_value
|
||||||
|
|
||||||
|
|
||||||
|
def get_keys_updated(desired_state, current_state, ignore=None):
    """Return the keys of *desired_state* whose values differ in *current_state*.

    Recursively walks dict objects (via _compare_state) to compare values.

    :param desired_state: The state the user desires.
    :type desired_state: dict
    :param current_state: The state that currently exists.
    :type current_state: dict
    :param ignore: Keys to skip entirely.
    :type ignore: list

    :return: Keys that are missing or differ.
    :rtype: list
    """
    skip = ignore if ignore is not None else []

    differences = []
    for key, new_value in desired_state.items():
        if key in skip:
            continue
        if key not in current_state:
            differences.append(key)
            continue
        old_value = current_state[key]
        # TTL-like values may be expressed in different units ("1h" vs
        # 3600); normalize them to seconds before comparing.
        ttl_changed = "ttl" in key and (
            _convert_to_seconds(old_value) != _convert_to_seconds(new_value)
        )
        if ttl_changed or not _compare_state(new_value, old_value):
            differences.append(key)
    return differences
|
||||||
|
|
||||||
|
|
||||||
|
def is_state_changed(desired_state, current_state, ignore=None):
    """Return True when *desired_state* differs from *current_state*.

    Recursively walks dict objects to compare all keys.

    :param desired_state: The state the user desires.
    :type desired_state: dict
    :param current_state: The state that currently exists.
    :type current_state: dict
    :param ignore: Keys to ignore during the comparison.
    :type ignore: list

    :return: True when at least one (non-ignored) key differs.
    :rtype: bool
    """
    # Bug fix: `ignore` was accepted but never forwarded, so ignored keys
    # still counted as changes.  Pass it through to get_keys_updated.
    return len(get_keys_updated(desired_state, current_state, ignore)) > 0
|
24
plugins/module_utils/version.py
Normal file
24
plugins/module_utils/version.py
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# Copyright (c) 2021, Felix Fontein <felix@fontein.de>
|
||||||
|
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||||
|
"""Provide version object to compare version numbers."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
from ansible.module_utils.six import raise_from
|
||||||
|
|
||||||
|
try:
|
||||||
|
from ansible.module_utils.compat.version import LooseVersion # noqa: F401,E501 pylint: disable=unused-import
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
from distutils.version import LooseVersion # noqa: F401, pylint: disable=unused-import
|
||||||
|
except ImportError as exc:
|
||||||
|
msg = (
|
||||||
|
"To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 "
|
||||||
|
"with distutils.version present"
|
||||||
|
)
|
||||||
|
raise_from(ImportError(msg), exc)
|
72
plugins/modules/hashivault_unseal.py
Normal file
72
plugins/modules/hashivault_unseal.py
Normal file
|
@ -0,0 +1,72 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
"""Unseal Hashicorp Vault servers."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
ANSIBLE_METADATA = {"status": ["stableinterface"], "supported_by": "community", "version": "1.1"}
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
---
|
||||||
|
module: hashivault_unseal
|
||||||
|
short_description: Hashicorp Vault unseal module.
|
||||||
|
version_added: 1.2.0
|
||||||
|
description:
|
||||||
|
- "Module to unseal Hashicorp Vault."
|
||||||
|
options:
|
||||||
|
keys:
|
||||||
|
description:
|
||||||
|
- Vault key shard(s).
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
required: true
|
||||||
|
author:
|
||||||
|
- Robert Kaussow (@xoxys)
|
||||||
|
extends_documentation_fragment:
|
||||||
|
- xoxys.general.hashivault
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
---
|
||||||
|
- name: Unseal vault
|
||||||
|
hashivault_unseal:
|
||||||
|
keys:
|
||||||
|
- 26479cc0-54bc-4252-9c34-baca54aa5de7
|
||||||
|
- 47f942e3-8525-4b44-ba2f-84a4ae81db7d
|
||||||
|
- 2ee9c868-4275-4836-8747-4f8fb7611aa0
|
||||||
|
url: https://vault.example.com
|
||||||
|
"""
|
||||||
|
|
||||||
|
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_argspec
|
||||||
|
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_client
|
||||||
|
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_init
|
||||||
|
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashiwrapper
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Entry point: build the module, run the unseal, report the result."""
    argument_spec = hashivault_argspec()
    # The unseal key shards are secrets; keep them out of the logs.
    argument_spec["keys"] = dict(required=True, type="list", elements="str", no_log=True)

    module = hashivault_init(argument_spec)
    result = hashivault_unseal(module.params)

    # Both reporters terminate the module run with the result payload.
    reporter = module.fail_json if result.get("failed") else module.exit_json
    reporter(**result)
|
||||||
|
|
||||||
|
|
||||||
|
@hashiwrapper
def hashivault_unseal(params):
    """Submit unseal key shards to a sealed Vault server.

    Reports ``changed: False`` when the server is already unsealed;
    otherwise submits the shards and returns the unseal status.
    """
    client = hashivault_client(params)
    # Idempotence: an already-unsealed Vault needs no action.
    if not client.sys.is_sealed():
        return {"changed": False}
    status = client.sys.submit_unseal_keys(params.get("keys"))
    return {"changed": True, "status": status}
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
main()
|
1092
plugins/modules/iptables_raw.py
Normal file
1092
plugins/modules/iptables_raw.py
Normal file
File diff suppressed because it is too large
Load Diff
431
plugins/modules/openssl_pkcs12.py
Normal file
431
plugins/modules/openssl_pkcs12.py
Normal file
|
@ -0,0 +1,431 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
"""OpenSSL PKCS12 module."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
ANSIBLE_METADATA = {"metadata_version": "1.0", "status": ["preview"], "supported_by": "community"}
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
---
|
||||||
|
module: openssl_pkcs12
|
||||||
|
author: "Guillaume Delpierre (@gdelpierre)"
|
||||||
|
version_added: 1.1.0
|
||||||
|
short_description: Generate OpenSSL pkcs12 archive.
|
||||||
|
description:
|
||||||
|
- "This module allows one to (re-)generate PKCS#12."
|
||||||
|
requirements:
|
||||||
|
- "python-pyOpenSSL"
|
||||||
|
extends_documentation_fragment: files
|
||||||
|
options:
|
||||||
|
ca_certificates:
|
||||||
|
required: False
|
||||||
|
type: list
|
||||||
|
elements: str
|
||||||
|
description:
|
||||||
|
- List of CA certificate to include.
|
||||||
|
cert_path:
|
||||||
|
required: False
|
||||||
|
type: path
|
||||||
|
description:
|
||||||
|
- The path to read certificates and private keys from.
|
||||||
|
Must be in PEM format.
|
||||||
|
action:
|
||||||
|
required: False
|
||||||
|
default: "export"
|
||||||
|
choices: ["parse", "export"]
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- Create (export) or parse a PKCS#12.
|
||||||
|
src:
|
||||||
|
required: False
|
||||||
|
type: path
|
||||||
|
description:
|
||||||
|
- PKCS#12 file path to parse.
|
||||||
|
path:
|
||||||
|
required: True
|
||||||
|
type: path
|
||||||
|
description:
|
||||||
|
- Filename to write the PKCS#12 file to.
|
||||||
|
force:
|
||||||
|
required: False
|
||||||
|
default: False
|
||||||
|
type: bool
|
||||||
|
description:
|
||||||
|
- Should the file be regenerated even it it already exists.
|
||||||
|
friendly_name:
|
||||||
|
required: False
|
||||||
|
type: str
|
||||||
|
aliases:
|
||||||
|
- "name"
|
||||||
|
description:
|
||||||
|
- Specifies the friendly name for the certificate and private key.
|
||||||
|
iter_size:
|
||||||
|
required: False
|
||||||
|
default: 2048
|
||||||
|
type: int
|
||||||
|
description:
|
||||||
|
- Number of times to repeat the encryption step.
|
||||||
|
maciter_size:
|
||||||
|
required: False
|
||||||
|
default: 1
|
||||||
|
type: int
|
||||||
|
description:
|
||||||
|
- Number of times to repeat the MAC step.
|
||||||
|
mode:
|
||||||
|
required: False
|
||||||
|
default: "0400"
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- Default mode for the generated PKCS#12 file.
|
||||||
|
passphrase:
|
||||||
|
required: False
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- The PKCS#12 password.
|
||||||
|
privatekey_path:
|
||||||
|
required: False
|
||||||
|
type: path
|
||||||
|
description:
|
||||||
|
- File to read private key from.
|
||||||
|
privatekey_passphrase:
|
||||||
|
required: False
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- Passphrase source to decrypt any input private keys with.
|
||||||
|
state:
|
||||||
|
required: False
|
||||||
|
default: "present"
|
||||||
|
choices: ["present", "absent"]
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- Whether the file should exist or not.
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
- name: "Generate PKCS#12 file"
|
||||||
|
openssl_pkcs12:
|
||||||
|
path: "/opt/certs/ansible.p12"
|
||||||
|
friendly_name: "raclette"
|
||||||
|
privatekey_path: "/opt/certs/keys/key.pem"
|
||||||
|
cert_path: "/opt/certs/cert.pem"
|
||||||
|
ca_certificates: "/opt/certs/ca.pem"
|
||||||
|
state: present
|
||||||
|
|
||||||
|
- name: "Change PKCS#12 file permission"
|
||||||
|
openssl_pkcs12:
|
||||||
|
path: "/opt/certs/ansible.p12"
|
||||||
|
friendly_name: "raclette"
|
||||||
|
privatekey_path: "/opt/certs/keys/key.pem"
|
||||||
|
cert_path: "/opt/certs/cert.pem"
|
||||||
|
ca_certificates: "/opt/certs/ca.pem"
|
||||||
|
state: present
|
||||||
|
mode: 0600
|
||||||
|
|
||||||
|
- name: "Regen PKCS#12 file"
|
||||||
|
openssl_pkcs12:
|
||||||
|
path: "/opt/certs/ansible.p12"
|
||||||
|
friendly_name: "raclette"
|
||||||
|
privatekey_path: "/opt/certs/keys/key.pem"
|
||||||
|
cert_path: "/opt/certs/cert.pem"
|
||||||
|
ca_certificates: "/opt/certs/ca.pem"
|
||||||
|
state: present
|
||||||
|
mode: 0600
|
||||||
|
force: True
|
||||||
|
|
||||||
|
- name: "Dump/Parse PKCS#12 file"
|
||||||
|
openssl_pkcs12:
|
||||||
|
src: "/opt/certs/ansible.p12"
|
||||||
|
path: "/opt/certs/ansible.pem"
|
||||||
|
state: present
|
||||||
|
|
||||||
|
- name: "Remove PKCS#12 file"
|
||||||
|
openssl_pkcs12:
|
||||||
|
path: "/opt/certs/ansible.p12"
|
||||||
|
state: absent
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = """
|
||||||
|
filename:
|
||||||
|
description: Path to the generate PKCS#12 file.
|
||||||
|
returned: changed or success
|
||||||
|
type: str
|
||||||
|
sample: /opt/certs/ansible.p12
|
||||||
|
"""
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ansible.module_utils._text import to_native
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
try:
|
||||||
|
from OpenSSL import crypto
|
||||||
|
except ImportError:
|
||||||
|
pyopenssl_found = False
|
||||||
|
else:
|
||||||
|
pyopenssl_found = True
|
||||||
|
|
||||||
|
|
||||||
|
class PkcsError(Exception):  # noqa
    """Base exception for PKCS#12 handling failures in this module."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class Pkcs(object):  # noqa
    """Manage a PKCS#12 archive on disk: generate, parse, check, remove."""

    def __init__(self, module):
        self.path = module.params["path"]
        self.force = module.params["force"]
        self.state = module.params["state"]
        self.action = module.params["action"]
        self.check_mode = module.check_mode
        self.iter_size = module.params["iter_size"]
        self.maciter_size = module.params["maciter_size"]
        self.pkcs12 = None
        self.src = module.params["src"]
        self.privatekey_path = module.params["privatekey_path"]
        self.privatekey_passphrase = module.params["privatekey_passphrase"]
        self.cert_path = module.params["cert_path"]
        self.ca_certificates = module.params["ca_certificates"]
        self.friendly_name = module.params["friendly_name"]
        self.passphrase = module.params["passphrase"]
        self.mode = module.params["mode"]
        self.changed = False
        if not self.mode:
            # Fall back to owner-read-only when no mode was supplied.
            self.mode = int("0400", 8)

    def _read_file(self, path):
        """Read *path* as bytes, translating OSError into PkcsError."""
        # Bug fix: the original left file handles open (open(...).read()).
        try:
            with open(path, "rb") as handle:
                return handle.read()
        except OSError as exc:
            raise PkcsError(exc) from exc

    def load_privatekey(self, path, passphrase=None):
        """Load the specified OpenSSL private key (PEM)."""
        content = self._read_file(path)
        if passphrase:
            return crypto.load_privatekey(crypto.FILETYPE_PEM, content, passphrase)
        return crypto.load_privatekey(crypto.FILETYPE_PEM, content)

    def load_certificate(self, path):
        """Load the specified certificate (PEM)."""
        return crypto.load_certificate(crypto.FILETYPE_PEM, self._read_file(path))

    def load_pkcs12(self, path, passphrase=None):
        """Load a PKCS#12 archive from *path*."""
        content = self._read_file(path)
        if passphrase:
            return crypto.load_pkcs12(content, passphrase)
        return crypto.load_pkcs12(content)

    def dump_privatekey(self, path):
        """Return the private key of the archive at *path* as PEM bytes."""
        try:
            return crypto.dump_privatekey(
                crypto.FILETYPE_PEM, self.load_pkcs12(path).get_privatekey()
            )
        except OSError as exc:
            raise PkcsError(exc) from exc

    def dump_certificate(self, path):
        """Return the certificate of the archive at *path* as PEM bytes."""
        try:
            return crypto.dump_certificate(
                crypto.FILETYPE_PEM, self.load_pkcs12(path).get_certificate()
            )
        except OSError as exc:
            raise PkcsError(exc) from exc

    def generate(self, module):
        """Generate the PKCS#12 file archive (unless it exists and not force)."""
        if not os.path.exists(self.path) or self.force:
            self.pkcs12 = crypto.PKCS12()

            try:
                self.remove()
            except PkcsError as exc:
                module.fail_json(msg=to_native(exc))

            if self.ca_certificates:
                ca_certs = [self.load_certificate(ca_cert) for ca_cert in self.ca_certificates]
                self.pkcs12.set_ca_certificates(ca_certs)

            if self.cert_path:
                self.pkcs12.set_certificate(self.load_certificate(self.cert_path))

            if self.friendly_name:
                # NOTE(review): pyOpenSSL's set_friendlyname expects bytes;
                # the module param is a str — confirm encoding expectations.
                self.pkcs12.set_friendlyname(self.friendly_name)

            if self.privatekey_path:
                self.pkcs12.set_privatekey(
                    self.load_privatekey(self.privatekey_path, self.privatekey_passphrase)
                )

            try:
                # Bug fix: the original passed self.mode as open()'s third
                # (buffering) argument, which is a TypeError for a str mode
                # and nonsense for an int one.  Permissions are applied via
                # set_mode_if_different below instead.
                with open(self.path, "wb") as archive:
                    archive.write(
                        self.pkcs12.export(self.passphrase, self.iter_size, self.maciter_size)
                    )
                module.set_mode_if_different(self.path, self.mode, False)
                self.changed = True
            except OSError as exc:
                self.remove()
                raise PkcsError(exc) from exc

        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            module.set_mode_if_different(self.path, self.mode, False)
            self.changed = True

    def parse(self, module):
        """Parse the PKCS#12 archive at self.src into a PEM file at self.path."""
        if not os.path.exists(self.path) or self.force:
            try:
                self.remove()

                with open(self.path, "wb") as content:
                    # Bug fix: dump_* return bytes; the original formatted
                    # them into an f-string (b'...' repr) and wrote str to a
                    # binary-mode file, which raises TypeError.  Concatenate
                    # the raw bytes instead.
                    content.write(self.dump_privatekey(self.src) + self.dump_certificate(self.src))
                module.set_mode_if_different(self.path, self.mode, False)
                self.changed = True
            except OSError as exc:
                raise PkcsError(exc) from exc

        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
            module.set_mode_if_different(self.path, self.mode, False)
            self.changed = True

    def remove(self):
        """Remove the PKCS#12 file from the filesystem (idempotent)."""
        try:
            os.remove(self.path)
            self.changed = True
        except OSError as exc:
            # A missing file is fine (already absent); re-raise anything else.
            if exc.errno != errno.ENOENT:
                raise PkcsError(exc) from exc

    def check(self, module, perms_required=True):  # noqa
        """Return True when the archive exists and the key passphrase is valid."""

        def _check_pkey_passphrase():
            # NOTE(review): this loads self.path (the PKCS#12 file) as a PEM
            # key, mirroring the original code — verify this is intended.
            if self.privatekey_passphrase:
                try:
                    self.load_privatekey(self.path, self.privatekey_passphrase)
                    return True
                except crypto.Error:
                    return False
            return True

        if not os.path.exists(self.path):
            return False

        # Bug fix: the original returned the inner function object (always
        # truthy) instead of calling it.
        return _check_pkey_passphrase()

    def dump(self):
        """Serialize the object into a result dictionary."""
        result = {
            "changed": self.changed,
            "filename": self.path,
        }

        if self.privatekey_path:
            result["privatekey_path"] = self.privatekey_path

        return result
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Ansible module entry point: export or parse a PKCS#12 archive."""
    # Full argument specification; common file arguments (owner, group,
    # selinux context, ...) are merged in via add_file_common_args below.
    argument_spec = dict(
        action=dict(default="export", choices=["parse", "export"], type="str", required=False),
        ca_certificates=dict(type="list", elements="str", required=False),
        cert_path=dict(type="path"),
        force=dict(default=False, type="bool"),
        friendly_name=dict(type="str", aliases=["name"]),
        iter_size=dict(default=2048, type="int"),
        maciter_size=dict(default=1, type="int"),
        passphrase=dict(type="str", no_log=True),
        path=dict(type="path", required=True),
        privatekey_path=dict(type="path"),
        privatekey_passphrase=dict(type="str", no_log=True),
        state=dict(default="present", choices=["present", "absent"], type="str"),
        src=dict(type="path"),
        mode=dict(default="0400", type="str", required=False),
    )

    # "export" builds an archive (needs a friendly_name); "parse" reads an
    # existing one (needs a src archive).
    required_if = [
        ["action", "export", ["friendly_name"]],
        ["action", "parse", ["src"]],
    ]

    required_together = [
        ["privatekey_path", "friendly_name"],
    ]

    module = AnsibleModule(
        argument_spec=argument_spec,
        add_file_common_args=True,
        required_if=required_if,
        required_together=required_together,
        supports_check_mode=True,
    )

    if not pyopenssl_found:
        module.fail_json(msg="The python pyOpenSSL library is required")

    # The destination directory must already exist — fail early otherwise.
    base_dir = os.path.dirname(module.params["path"])
    if not os.path.isdir(base_dir):
        module.fail_json(
            name=base_dir,
            msg=f"The directory {base_dir} does not exist or the file is not a directory",
        )

    pkcs12 = Pkcs(module)

    if module.params["state"] == "present":
        if module.check_mode:
            # Dry run: report whether a real run would change anything.
            result = pkcs12.dump()
            result["changed"] = module.params["force"] or not pkcs12.check(module)
            module.exit_json(**result)

        try:
            if module.params["action"] == "export":
                pkcs12.generate(module)
            else:
                pkcs12.parse(module)
        except PkcsError as exc:
            module.fail_json(msg=to_native(exc))
    else:
        if module.check_mode:
            # Dry run for removal: changed iff the file currently exists.
            result = pkcs12.dump()
            result["changed"] = os.path.exists(module.params["path"])
            module.exit_json(**result)

        try:
            pkcs12.remove()
        except PkcsError as exc:
            module.fail_json(msg=to_native(exc))

    result = pkcs12.dump()

    module.exit_json(**result)


if __name__ == "__main__":
    main()
|
1658
plugins/modules/proxmox_kvm.py
Normal file
1658
plugins/modules/proxmox_kvm.py
Normal file
File diff suppressed because it is too large
Load Diff
135
plugins/modules/ucr.py
Normal file
135
plugins/modules/ucr.py
Normal file
|
@ -0,0 +1,135 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
"""Control Univention Corporate Registry."""
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"}
|
||||||
|
|
||||||
|
DOCUMENTATION = """
|
||||||
|
---
|
||||||
|
module: ucr
|
||||||
|
short_description: Manage variables in univention configuration registry.
|
||||||
|
version_added: 1.1.0
|
||||||
|
description:
|
||||||
|
- "This module allows managing variables inside the univention configuration registry
|
||||||
|
on a univention corporate server (UCS)."
|
||||||
|
options:
|
||||||
|
path:
|
||||||
|
description:
|
||||||
|
- Path for the variable
|
||||||
|
aliases:
|
||||||
|
- name
|
||||||
|
required: True
|
||||||
|
type: str
|
||||||
|
value:
|
||||||
|
description:
|
||||||
|
- New value of the variable
|
||||||
|
required: False
|
||||||
|
type: str
|
||||||
|
default: ""
|
||||||
|
state:
|
||||||
|
required: False
|
||||||
|
default: "present"
|
||||||
|
choices: ["present", "absent"]
|
||||||
|
type: str
|
||||||
|
description:
|
||||||
|
- Whether the variable should exist or not.
|
||||||
|
author:
|
||||||
|
- Robert Kaussow (@xoxys)
|
||||||
|
"""
|
||||||
|
|
||||||
|
EXAMPLES = """
|
||||||
|
# Set variable to force https in ucs frontend
|
||||||
|
- name: Force https
|
||||||
|
ucr:
|
||||||
|
path: apache2/force_https
|
||||||
|
value: yes
|
||||||
|
|
||||||
|
# Allow another user as root to login as ssh
|
||||||
|
- name: Add ssh user
|
||||||
|
ucr:
|
||||||
|
path: auth/sshd/user/myuser
|
||||||
|
value: yes
|
||||||
|
"""
|
||||||
|
|
||||||
|
RETURN = """
|
||||||
|
original_message:
|
||||||
|
description: The original name param that was passed in
|
||||||
|
type: str
|
||||||
|
returned: success
|
||||||
|
message:
|
||||||
|
description: The output message that the sample module generates
|
||||||
|
type: str
|
||||||
|
returned: success
|
||||||
|
"""
|
||||||
|
|
||||||
|
from ansible.module_utils.basic import AnsibleModule
|
||||||
|
|
||||||
|
try:
|
||||||
|
from univention.config_registry import ConfigRegistry
|
||||||
|
from univention.config_registry.frontend import ucr_update
|
||||||
|
|
||||||
|
HAS_UNIVENTION = True
|
||||||
|
except ImportError:
|
||||||
|
HAS_UNIVENTION = False
|
||||||
|
|
||||||
|
|
||||||
|
def get_variable(ucr, path):
    """Return the current registry value of *path*, or None when unset."""
    ucr.load()
    if path in ucr:
        return ucr.get(path)
    return None
|
||||||
|
|
||||||
|
|
||||||
|
def set_variable(ucr, path, value, result):  # noqa
    """Write *value* to *path* and report whether the stored value changed.

    The unused *result* parameter is kept for interface compatibility.
    """
    previous = get_variable(ucr, path)
    ucr_update(ucr, {path: value})
    return previous != get_variable(ucr, path)
|
||||||
|
|
||||||
|
|
||||||
|
def dry_variable(ucr, path, value, result):  # noqa
    """Predict whether setting *path* to *value* would change the registry.

    Used in check mode; the unused *result* parameter is kept for
    interface compatibility with set_variable.
    """
    return get_variable(ucr, path) != value
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Ansible module entry point for managing UCR variables."""
    module_args = dict(
        path=dict(type="str", required=True, aliases=["name"]),
        value=dict(type="str", required=False, default=""),
        state=dict(default="present", choices=["present", "absent"], type="str"),
    )

    # A value is only required when the variable should exist.
    required_if = [["state", "present", ["value"]]]

    module = AnsibleModule(
        argument_spec=module_args, supports_check_mode=True, required_if=required_if
    )

    if not HAS_UNIVENTION:
        module.fail_json(msg="univention required for this module")

    ucr = ConfigRegistry()

    result = dict(changed=False, original_message="", message="")

    path = module.params["path"]
    value = module.params["value"]
    # Normalize: "present" with no usable value stores an empty string;
    # "absent" passes None so the backend deletes the key.
    if module.params["state"] == "present" and (value is None or value == "None"):
        value = ""
    elif module.params["state"] == "absent":
        value = None

    if not module.check_mode:
        result["changed"] = set_variable(ucr, path, value, result)
    else:
        # Check mode: only predict the change, never write.
        result["changed"] = dry_variable(ucr, path, value, result)

    module.exit_json(**result)


if __name__ == "__main__":
    main()
|
1158
poetry.lock
generated
Normal file
1158
poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
152
pyproject.toml
Normal file
152
pyproject.toml
Normal file
|
@ -0,0 +1,152 @@
|
||||||
|
[tool.poetry]
|
||||||
|
authors = ["Robert Kaussow <mail@thegeeklab.de>"]
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 5 - Production/Stable",
|
||||||
|
"Environment :: Console",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"Intended Audience :: Information Technology",
|
||||||
|
"Intended Audience :: System Administrators",
|
||||||
|
"Natural Language :: English",
|
||||||
|
"Operating System :: POSIX",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.9",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Topic :: Utilities",
|
||||||
|
"Topic :: Software Development",
|
||||||
|
"Topic :: Software Development :: Documentation",
|
||||||
|
]
|
||||||
|
description = "Build environment for Ansible Collection."
|
||||||
|
license = "MIT"
|
||||||
|
name = "xoxys.general"
|
||||||
|
readme = "README.md"
|
||||||
|
repository = "https://gitea.rknet.org/ansible/xoxys.general"
|
||||||
|
version = "0.0.0"
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
python = "^3.9.0"
|
||||||
|
ansible-core = { version = "<=2.14.0", optional = true }
|
||||||
|
pyopenssl = "23.0.0"
|
||||||
|
proxmoxer = "2.0.1"
|
||||||
|
hcloud = "1.18.2"
|
||||||
|
|
||||||
|
[tool.poetry.extras]
|
||||||
|
ansible = ["ansible-core"]
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
ruff = "0.1.7"
|
||||||
|
pytest = "7.2.1"
|
||||||
|
pytest-mock = "3.10.0"
|
||||||
|
pytest-cov = "4.0.0"
|
||||||
|
toml = "0.10.2"
|
||||||
|
pycodestyle = "2.10.0"
|
||||||
|
yamllint = "1.29.0"
|
||||||
|
pylint = "2.15.0"
|
||||||
|
voluptuous = "0.13.1"
|
||||||
|
pytest-ansible = "3.1.5"
|
||||||
|
pytest-forked = "1.6.0"
|
||||||
|
pytest-xdist = "3.3.1"
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "--cov --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
|
||||||
|
pythonpath = [
|
||||||
|
"."
|
||||||
|
]
|
||||||
|
testpaths = [
|
||||||
|
"tests",
|
||||||
|
]
|
||||||
|
filterwarnings = [
|
||||||
|
"ignore::FutureWarning",
|
||||||
|
"ignore::DeprecationWarning",
|
||||||
|
"ignore:.*pep8.*:FutureWarning",
|
||||||
|
"ignore:AnsibleCollectionFinder.*:UserWarning"
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
omit = ["**/tests/*"]
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
build-backend = "poetry_dynamic_versioning.backend"
|
||||||
|
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
|
||||||
|
|
||||||
|
[tool.ruff]
|
||||||
|
exclude = [
|
||||||
|
".git",
|
||||||
|
"__pycache__",
|
||||||
|
"build",
|
||||||
|
"dist",
|
||||||
|
"tests",
|
||||||
|
"*.pyc",
|
||||||
|
"*.egg-info",
|
||||||
|
".cache",
|
||||||
|
".eggs",
|
||||||
|
"env*",
|
||||||
|
".venv",
|
||||||
|
"iptables_raw.py",
|
||||||
|
]
|
||||||
|
|
||||||
|
line-length = 99
|
||||||
|
indent-width = 4
|
||||||
|
|
||||||
|
# Explanation of errors
|
||||||
|
#
|
||||||
|
# D102: Missing docstring in public method
|
||||||
|
# D103: Missing docstring in public function
|
||||||
|
# D105: Missing docstring in magic method
|
||||||
|
# D107: Missing docstring in __init__
|
||||||
|
# D202: No blank lines allowed after function docstring
|
||||||
|
# D203: One blank line required before class docstring
|
||||||
|
# E402: Module level import not at top of file
|
||||||
|
# SIM105: Use `contextlib.suppress(Exception)` instead of try-except-pass
|
||||||
|
# C402: Unnecessary generator (rewrite as a `dict` comprehension)
|
||||||
|
# C408: Unnecessary `dict` call (rewrite as a literal)
|
||||||
|
# I001: Import block is un-sorted or un-formatted
|
||||||
|
# UP001: `__metaclass__ = type` is implied
|
||||||
|
# UP009: UTF-8 encoding declaration is unnecessary
|
||||||
|
# UP010: Unnecessary `__future__` imports `absolute_import`, `division`, `print_function` for target Python version
|
||||||
|
ignore = [
|
||||||
|
"D102",
|
||||||
|
"D103",
|
||||||
|
"D105",
|
||||||
|
"D107",
|
||||||
|
"D202",
|
||||||
|
"D203",
|
||||||
|
"D212",
|
||||||
|
"E402",
|
||||||
|
"SIM105",
|
||||||
|
"C402",
|
||||||
|
"C408",
|
||||||
|
"I001",
|
||||||
|
"UP001",
|
||||||
|
"UP009",
|
||||||
|
"UP010",
|
||||||
|
"RUF100",
|
||||||
|
]
|
||||||
|
select = [
|
||||||
|
"D",
|
||||||
|
"E",
|
||||||
|
"F",
|
||||||
|
"Q",
|
||||||
|
"W",
|
||||||
|
"I",
|
||||||
|
"S",
|
||||||
|
"BLE",
|
||||||
|
"N",
|
||||||
|
"UP",
|
||||||
|
"B",
|
||||||
|
"A",
|
||||||
|
"C4",
|
||||||
|
"T20",
|
||||||
|
"SIM",
|
||||||
|
"RET",
|
||||||
|
"ARG",
|
||||||
|
"ERA",
|
||||||
|
"RUF",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.ruff.format]
|
||||||
|
quote-style = "double"
|
||||||
|
indent-style = "space"
|
||||||
|
line-ending = "lf"
|
2
tests/config.yml
Normal file
2
tests/config.yml
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
modules:
|
||||||
|
python_requires: ">=3.9"
|
90
tests/unit/plugins/inventory/test_proxmox.py
Normal file
90
tests/unit/plugins/inventory/test_proxmox.py
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
"""Test inventory plugin proxmox."""
|
||||||
|
# Copyright (c) 2020, Robert Kaussow <mail@thegeeklab.de>
|
||||||
|
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||||
|
|
||||||
|
from __future__ import (absolute_import, division, print_function)
|
||||||
|
|
||||||
|
__metaclass__ = type
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
proxmox = pytest.importorskip("proxmoxer")
|
||||||
|
|
||||||
|
from ansible_collections.xoxys.general.plugins.inventory.proxmox import InventoryModule
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
def inventory():
    """Provide a fresh InventoryModule instance for each test."""
    module = InventoryModule()
    return module
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_names(inventory):
    """_get_names extracts node names and pool ids from PVE listings."""
    node_entries = [{
        "status": "online",
        "type": "node",
        "id": "node/testnode",
        "node": "testnode",
    }]
    pool_entries = [{"poolid": "testpool"}]

    assert inventory._get_names(node_entries, "node") == ["testnode"]
    assert inventory._get_names(pool_entries, "pool") == ["testpool"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_variables(inventory):
    """_get_variables maps guest names to proxmox_-prefixed host vars."""
    listing = [{"status": "running", "vmid": "100", "name": "test"}]
    expected = {
        "test": {
            "proxmox_status": "running",
            "proxmox_vmid": "100",
            "proxmox_name": "test",
        }
    }

    assert inventory._get_variables(listing, "qemu") == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_ip_address(inventory, mocker):
    """_get_ip_address returns the first IPv4 address reported by the agent."""
    agent_networks = {
        "result": [{
            "name": "eth0",
            "ip-addresses": [{
                "ip-address": "10.0.0.1",
                "prefix": 26,
                "ip-address-type": "ipv4",
            }],
        }]
    }
    inventory.client = mocker.MagicMock()
    inventory.client.nodes.return_value.get.return_value = agent_networks

    assert inventory._get_ip_address("qemu", None, None) == "10.0.0.1"
|
||||||
|
|
||||||
|
|
||||||
|
def test_exclude(inventory, mocker):
    """_exclude drops guests whose status matches the exclude_state option."""

    def get_option(name, *args, **kwargs):
        # Only exclude_state matters here; every other option is empty.
        return ["stopped"] if name == "exclude_state" else []

    inventory.get_option = mocker.MagicMock(side_effect=get_option)

    running_vm = {"status": "running", "vmid": "100", "name": "test"}
    stopped_vm = {"status": "stopped", "vmid": "101", "name": "stop"}

    assert inventory._exclude([running_vm, stopped_vm]) == [running_vm]
|
Loading…
Reference in New Issue
Block a user