Compare commits
No commits in common. "main" and "v2.0.0" have entirely different histories.
23 .chglog/CHANGELOG.tpl.md (Executable file)
@@ -0,0 +1,23 @@
# Changelog

{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})

{{ range .CommitGroups -}}
### {{ .Title }}

{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}

{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
24 .chglog/config.yml (Executable file)
@@ -0,0 +1,24 @@
style: github
template: CHANGELOG.tpl.md
info:
  title: CHANGELOG
  repository_url: https://gitea.rknet.org/ansible/xoxys.general
options:
  commit_groups:
    title_maps:
      feat: Features
      fix: Bug Fixes
      perf: Performance Improvements
      refactor: Code Refactoring
      chore: Others
      test: Testing
      ci: CI Pipeline
  header:
    pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
    pattern_maps:
      - Type
      - Scope
      - Subject
  notes:
    keywords:
      - BREAKING CHANGE
@@ -1,2 +0,0 @@
Ansible
Kaussow
189 .drone.jsonnet (Normal file)
@@ -0,0 +1,189 @@
|
||||
local PythonVersion(pyversion='3.6') = {
|
||||
name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
|
||||
image: 'python:' + pyversion,
|
||||
environment: {
|
||||
PY_COLORS: 1,
|
||||
},
|
||||
commands: [
|
||||
'pip install -r dev-requirements.txt -qq',
|
||||
'pip install -r test/unit/requirements.txt -qq',
|
||||
'python -m pytest --cov --cov-append --no-cov-on-fail',
|
||||
],
|
||||
depends_on: [
|
||||
'clone',
|
||||
],
|
||||
};
|
||||
|
||||
local PipelineLint = {
|
||||
kind: 'pipeline',
|
||||
name: 'lint',
|
||||
platform: {
|
||||
os: 'linux',
|
||||
arch: 'amd64',
|
||||
},
|
||||
steps: [
|
||||
{
|
||||
name: 'flake8',
|
||||
image: 'python:3.9',
|
||||
environment: {
|
||||
PY_COLORS: 1,
|
||||
},
|
||||
commands: [
|
||||
'pip install -r dev-requirements.txt -qq',
|
||||
'flake8',
|
||||
],
|
||||
},
|
||||
],
|
||||
trigger: {
|
||||
ref: ['refs/heads/master', 'refs/tags/**', 'refs/pull/**'],
|
||||
},
|
||||
};
|
||||
|
||||
local PipelineTest = {
|
||||
kind: 'pipeline',
|
||||
name: 'test',
|
||||
platform: {
|
||||
os: 'linux',
|
||||
arch: 'amd64',
|
||||
},
|
||||
steps: [
|
||||
PythonVersion(pyversion='3.6'),
|
||||
PythonVersion(pyversion='3.7'),
|
||||
PythonVersion(pyversion='3.8'),
|
||||
PythonVersion(pyversion='3.9'),
|
||||
],
|
||||
depends_on: [
|
||||
'lint',
|
||||
],
|
||||
trigger: {
|
||||
ref: ['refs/heads/master', 'refs/tags/**', 'refs/pull/**'],
|
||||
},
|
||||
};
|
||||
|
||||
local PipelineBuild = {
|
||||
kind: 'pipeline',
|
||||
name: 'build',
|
||||
platform: {
|
||||
os: 'linux',
|
||||
arch: 'amd64',
|
||||
},
|
||||
steps: [
|
||||
{
|
||||
name: 'build',
|
||||
image: 'python:3.9',
|
||||
commands: [
|
||||
'GALAXY_VERSION=${DRONE_TAG:##v}',
|
||||
"sed -i 's/version: 0.0.0/version: '\"$${GALAXY_VERSION:-0.0.0}\"'/g' galaxy.yml",
|
||||
'pip install ansible -qq',
|
||||
'ansible-galaxy collection build --output-path dist/',
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'checksum',
|
||||
image: 'alpine',
|
||||
commands: [
|
||||
'cd dist/ && sha256sum * > ../sha256sum.txt',
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'changelog',
|
||||
image: 'thegeeklab/git-chglog',
|
||||
commands: [
|
||||
'git fetch -tq',
|
||||
'git-chglog --no-color --no-emoji ${DRONE_TAG:---next-tag unreleased unreleased}',
|
||||
'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'publish-gitea',
|
||||
image: 'plugins/gitea-release',
|
||||
settings: {
|
||||
overwrite: true,
|
||||
api_key: { from_secret: 'gitea_token' },
|
||||
files: ['dist/*', 'sha256sum.txt'],
|
||||
base_url: 'https://gitea.rknet.org',
|
||||
title: '${DRONE_TAG}',
|
||||
note: 'CHANGELOG.md',
|
||||
},
|
||||
when: {
|
||||
ref: ['refs/tags/**'],
|
||||
},
|
||||
},
|
||||
],
|
||||
depends_on: [
|
||||
'test',
|
||||
],
|
||||
trigger: {
|
||||
ref: ['refs/heads/master', 'refs/tags/**', 'refs/pull/**'],
|
||||
},
|
||||
};
|
||||
|
||||
local PipelineDocumentation = {
|
||||
kind: 'pipeline',
|
||||
name: 'documentation',
|
||||
platform: {
|
||||
os: 'linux',
|
||||
arch: 'amd64',
|
||||
},
|
||||
steps: [
|
||||
{
|
||||
name: 'publish',
|
||||
image: 'plugins/gh-pages',
|
||||
settings: {
|
||||
netrc_machine: 'gitea.rknet.org',
|
||||
pages_directory: 'docs/',
|
||||
password: {
|
||||
from_secret: 'gitea_token',
|
||||
},
|
||||
remote_url: 'https://gitea.rknet.org/ansible/${DRONE_REPO_NAME}',
|
||||
target_branch: 'docs',
|
||||
username: {
|
||||
from_secret: 'gitea_username',
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
depends_on: [
|
||||
'build',
|
||||
],
|
||||
trigger: {
|
||||
ref: ['refs/heads/master', 'refs/tags/**'],
|
||||
},
|
||||
};
|
||||
|
||||
local PipelineNotifications = {
|
||||
kind: 'pipeline',
|
||||
name: 'notifications',
|
||||
platform: {
|
||||
os: 'linux',
|
||||
arch: 'amd64',
|
||||
},
|
||||
steps: [
|
||||
{
|
||||
name: 'matrix',
|
||||
image: 'plugins/matrix',
|
||||
settings: {
|
||||
homeserver: { from_secret: 'matrix_homeserver' },
|
||||
roomid: { from_secret: 'matrix_roomid' },
|
||||
template: 'Status: **{{ build.status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.link }}) ({{ build.branch }}) by {{ build.author }}<br/> Message: {{ build.message }}',
|
||||
username: { from_secret: 'matrix_username' },
|
||||
password: { from_secret: 'matrix_password' },
|
||||
},
|
||||
},
|
||||
],
|
||||
depends_on: [
|
||||
'documentation',
|
||||
],
|
||||
trigger: {
|
||||
ref: ['refs/heads/master', 'refs/tags/**'],
|
||||
status: ['success', 'failure'],
|
||||
},
|
||||
};
|
||||
|
||||
[
|
||||
PipelineLint,
|
||||
PipelineTest,
|
||||
PipelineBuild,
|
||||
PipelineDocumentation,
|
||||
PipelineNotifications,
|
||||
]
|
206 .drone.yml (Normal file)
@@ -0,0 +1,206 @@
|
||||
---
|
||||
kind: pipeline
|
||||
name: lint
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
steps:
|
||||
- name: flake8
|
||||
image: python:3.9
|
||||
commands:
|
||||
- pip install -r dev-requirements.txt -qq
|
||||
- flake8
|
||||
environment:
|
||||
PY_COLORS: 1
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- refs/tags/**
|
||||
- refs/pull/**
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: test
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
steps:
|
||||
- name: python36-pytest
|
||||
image: python:3.6
|
||||
commands:
|
||||
- pip install -r dev-requirements.txt -qq
|
||||
- pip install -r test/unit/requirements.txt -qq
|
||||
- python -m pytest --cov --cov-append --no-cov-on-fail
|
||||
environment:
|
||||
PY_COLORS: 1
|
||||
depends_on:
|
||||
- clone
|
||||
|
||||
- name: python37-pytest
|
||||
image: python:3.7
|
||||
commands:
|
||||
- pip install -r dev-requirements.txt -qq
|
||||
- pip install -r test/unit/requirements.txt -qq
|
||||
- python -m pytest --cov --cov-append --no-cov-on-fail
|
||||
environment:
|
||||
PY_COLORS: 1
|
||||
depends_on:
|
||||
- clone
|
||||
|
||||
- name: python38-pytest
|
||||
image: python:3.8
|
||||
commands:
|
||||
- pip install -r dev-requirements.txt -qq
|
||||
- pip install -r test/unit/requirements.txt -qq
|
||||
- python -m pytest --cov --cov-append --no-cov-on-fail
|
||||
environment:
|
||||
PY_COLORS: 1
|
||||
depends_on:
|
||||
- clone
|
||||
|
||||
- name: python39-pytest
|
||||
image: python:3.9
|
||||
commands:
|
||||
- pip install -r dev-requirements.txt -qq
|
||||
- pip install -r test/unit/requirements.txt -qq
|
||||
- python -m pytest --cov --cov-append --no-cov-on-fail
|
||||
environment:
|
||||
PY_COLORS: 1
|
||||
depends_on:
|
||||
- clone
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- refs/tags/**
|
||||
- refs/pull/**
|
||||
|
||||
depends_on:
|
||||
- lint
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: build
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
steps:
|
||||
- name: build
|
||||
image: python:3.9
|
||||
commands:
|
||||
- GALAXY_VERSION=${DRONE_TAG:##v}
|
||||
- "sed -i 's/version: 0.0.0/version: '\"$${GALAXY_VERSION:-0.0.0}\"'/g' galaxy.yml"
|
||||
- pip install ansible -qq
|
||||
- ansible-galaxy collection build --output-path dist/
|
||||
|
||||
- name: checksum
|
||||
image: alpine
|
||||
commands:
|
||||
- cd dist/ && sha256sum * > ../sha256sum.txt
|
||||
|
||||
- name: changelog
|
||||
image: thegeeklab/git-chglog
|
||||
commands:
|
||||
- git fetch -tq
|
||||
- git-chglog --no-color --no-emoji ${DRONE_TAG:---next-tag unreleased unreleased}
|
||||
- git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}
|
||||
|
||||
- name: publish-gitea
|
||||
image: plugins/gitea-release
|
||||
settings:
|
||||
api_key:
|
||||
from_secret: gitea_token
|
||||
base_url: https://gitea.rknet.org
|
||||
files:
|
||||
- dist/*
|
||||
- sha256sum.txt
|
||||
note: CHANGELOG.md
|
||||
overwrite: true
|
||||
title: ${DRONE_TAG}
|
||||
when:
|
||||
ref:
|
||||
- refs/tags/**
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- refs/tags/**
|
||||
- refs/pull/**
|
||||
|
||||
depends_on:
|
||||
- test
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: documentation
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
steps:
|
||||
- name: publish
|
||||
image: plugins/gh-pages
|
||||
settings:
|
||||
netrc_machine: gitea.rknet.org
|
||||
pages_directory: docs/
|
||||
password:
|
||||
from_secret: gitea_token
|
||||
remote_url: https://gitea.rknet.org/ansible/${DRONE_REPO_NAME}
|
||||
target_branch: docs
|
||||
username:
|
||||
from_secret: gitea_username
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- refs/tags/**
|
||||
|
||||
depends_on:
|
||||
- build
|
||||
|
||||
---
|
||||
kind: pipeline
|
||||
name: notifications
|
||||
|
||||
platform:
|
||||
os: linux
|
||||
arch: amd64
|
||||
|
||||
steps:
|
||||
- name: matrix
|
||||
image: plugins/matrix
|
||||
settings:
|
||||
homeserver:
|
||||
from_secret: matrix_homeserver
|
||||
password:
|
||||
from_secret: matrix_password
|
||||
roomid:
|
||||
from_secret: matrix_roomid
|
||||
template: "Status: **{{ build.status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.link }}) ({{ build.branch }}) by {{ build.author }}<br/> Message: {{ build.message }}"
|
||||
username:
|
||||
from_secret: matrix_username
|
||||
|
||||
trigger:
|
||||
ref:
|
||||
- refs/heads/master
|
||||
- refs/tags/**
|
||||
status:
|
||||
- success
|
||||
- failure
|
||||
|
||||
depends_on:
|
||||
- documentation
|
||||
|
||||
---
|
||||
kind: signature
|
||||
hmac: 3755090863d1c4215ec5fce44c2e8efa825bda82afac5f2de4132bf4bf18fc2e
|
||||
|
||||
...
|
1 .gitignore (vendored)
@@ -108,4 +108,3 @@ docs/public/
resources/_gen/

CHANGELOG.md
tests/output
@@ -1,47 +0,0 @@
|
||||
---
|
||||
version: "1.1"
|
||||
|
||||
versioning:
|
||||
update-major: []
|
||||
update-minor: [feat]
|
||||
update-patch: [fix, perf, refactor, chore, test, ci, docs]
|
||||
|
||||
tag:
|
||||
pattern: "v%d.%d.%d"
|
||||
|
||||
release-notes:
|
||||
sections:
|
||||
- name: Features
|
||||
commit-types: [feat]
|
||||
section-type: commits
|
||||
- name: Bug Fixes
|
||||
commit-types: [fix]
|
||||
section-type: commits
|
||||
- name: Performance Improvements
|
||||
commit-types: [perf]
|
||||
section-type: commits
|
||||
- name: Code Refactoring
|
||||
commit-types: [refactor]
|
||||
section-type: commits
|
||||
- name: Others
|
||||
commit-types: [chore]
|
||||
section-type: commits
|
||||
- name: Testing
|
||||
commit-types: [test]
|
||||
section-type: commits
|
||||
- name: CI Pipeline
|
||||
commit-types: [ci]
|
||||
section-type: commits
|
||||
- name: Documentation
|
||||
commit-types: [docs]
|
||||
section-type: commits
|
||||
- name: Breaking Changes
|
||||
section-type: breaking-changes
|
||||
|
||||
commit-message:
|
||||
footer:
|
||||
issue:
|
||||
key: issue
|
||||
add-value-prefix: "#"
|
||||
issue:
|
||||
regex: "#?[0-9]+"
|
@@ -1,7 +0,0 @@
---
default: True
MD013: False
MD041: False
MD024: False
MD004:
  style: dash
@@ -1,2 +0,0 @@
*.tpl.md
LICENSE
@@ -1,46 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [pull_request, tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
steps:
|
||||
- name: build
|
||||
image: docker.io/library/python:3.12
|
||||
commands:
|
||||
- GALAXY_VERSION=${CI_COMMIT_TAG##v}
|
||||
- 'sed -i ''s/version: 0.0.0/version: ''"$${GALAXY_VERSION:-0.0.0}"''/g'' galaxy.yml'
|
||||
- pip install poetry -qq
|
||||
- poetry install --all-extras --no-root
|
||||
- poetry run ansible-galaxy collection build --output-path dist/
|
||||
|
||||
- name: checksum
|
||||
image: quay.io/thegeeklab/alpine-tools
|
||||
commands:
|
||||
- cd dist/ && sha256sum * > ../sha256sum.txt
|
||||
|
||||
- name: changelog
|
||||
image: quay.io/thegeeklab/git-sv
|
||||
commands:
|
||||
- git sv current-version
|
||||
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
|
||||
- cat CHANGELOG.md
|
||||
|
||||
- name: publish-gitea
|
||||
image: quay.io/thegeeklab/wp-gitea-release
|
||||
settings:
|
||||
api_key:
|
||||
from_secret: gitea_token
|
||||
base_url: https://gitea.rknet.org
|
||||
files:
|
||||
- dist/*
|
||||
- sha256sum.txt
|
||||
note: CHANGELOG.md
|
||||
title: ${CI_COMMIT_TAG}
|
||||
when:
|
||||
- event: [tag]
|
||||
|
||||
depends_on:
|
||||
- unit-test
|
||||
- sanity-test
|
@@ -1,47 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [pull_request, tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
steps:
|
||||
- name: markdownlint
|
||||
image: quay.io/thegeeklab/markdownlint-cli
|
||||
commands:
|
||||
- markdownlint 'docs/**/*.md' 'README.md'
|
||||
|
||||
- name: spellcheck
|
||||
image: quay.io/thegeeklab/alpine-tools
|
||||
commands:
|
||||
- spellchecker --files 'docs/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls frontmatter --frontmatter-keys title tags
|
||||
environment:
|
||||
FORCE_COLOR: "true"
|
||||
|
||||
- name: link-validation
|
||||
image: docker.io/lycheeverse/lychee
|
||||
commands:
|
||||
- lychee --no-progress --format detailed docs/ README.md
|
||||
|
||||
- name: publish
|
||||
image: quay.io/thegeeklab/wp-git-action
|
||||
depends_on: [markdownlint, spellcheck, link-validation]
|
||||
settings:
|
||||
action:
|
||||
- pages
|
||||
author_email: shipper@rknet.org
|
||||
author_name: shipper
|
||||
branch: docs
|
||||
message: auto-update documentation
|
||||
netrc_machine: gitea.rknet.org
|
||||
netrc_password:
|
||||
from_secret: gitea_token
|
||||
pages_directory: docs/
|
||||
remote_url: https://gitea.rknet.org/ansible/${CI_REPO_NAME}
|
||||
when:
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
depends_on:
|
||||
- build-package
|
@@ -1,25 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [pull_request, tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
steps:
|
||||
- name: check-format
|
||||
image: docker.io/library/python:3.12
|
||||
commands:
|
||||
- pip install poetry -qq
|
||||
- poetry install --all-extras --no-root
|
||||
- poetry run ruff format --check --diff ./plugins
|
||||
environment:
|
||||
PY_COLORS: "1"
|
||||
|
||||
- name: check-coding
|
||||
image: docker.io/library/python:3.12
|
||||
commands:
|
||||
- pip install poetry -qq
|
||||
- poetry install --all-extras --no-root
|
||||
- poetry run ruff ./plugins
|
||||
environment:
|
||||
PY_COLORS: "1"
|
@@ -1,26 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
runs_on: [success, failure]
|
||||
|
||||
steps:
|
||||
- name: matrix
|
||||
image: quay.io/thegeeklab/wp-matrix
|
||||
settings:
|
||||
homeserver:
|
||||
from_secret: matrix_homeserver
|
||||
room_id:
|
||||
from_secret: matrix_room_id
|
||||
user_id:
|
||||
from_secret: matrix_user_id
|
||||
access_token:
|
||||
from_secret: matrix_access_token
|
||||
when:
|
||||
- status: [failure]
|
||||
|
||||
depends_on:
|
||||
- docs
|
@@ -1,45 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [pull_request, tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
variables:
|
||||
- &ansible_base
|
||||
image: docker.io/library/python:3.11
|
||||
depends_on: []
|
||||
commands:
|
||||
- pip install poetry -qq
|
||||
- poetry install --all-extras --no-root
|
||||
- poetry run pip install https://github.com/ansible/ansible/archive/$${ANSIBLE_VERSION}.tar.gz --disable-pip-version-check
|
||||
- poetry run ansible --version
|
||||
- poetry run ansible-test sanity --exclude .gitsv/ --exclude .woodpecker/ --python 3.11
|
||||
- &ansible_env
|
||||
PY_COLORS: "1"
|
||||
|
||||
workspace:
|
||||
base: /woodpecker/src
|
||||
path: ansible_collections/${CI_REPO_NAME/./\/}
|
||||
|
||||
steps:
|
||||
- name: ansible-devel
|
||||
<<: *ansible_base
|
||||
environment:
|
||||
ANSIBLE_VERSION: "devel"
|
||||
<<: *ansible_env
|
||||
|
||||
- name: ansible-217
|
||||
<<: *ansible_base
|
||||
environment:
|
||||
ANSIBLE_VERSION: "stable-2.17"
|
||||
<<: *ansible_env
|
||||
|
||||
- name: ansible-216
|
||||
<<: *ansible_base
|
||||
environment:
|
||||
ANSIBLE_VERSION: "stable-2.16"
|
||||
<<: *ansible_env
|
||||
|
||||
depends_on:
|
||||
- lint
|
@@ -1,36 +0,0 @@
|
||||
---
|
||||
when:
|
||||
- event: [pull_request, tag]
|
||||
- event: [push, manual]
|
||||
branch:
|
||||
- ${CI_REPO_DEFAULT_BRANCH}
|
||||
|
||||
variables:
|
||||
- &pytest_base
|
||||
depends_on: []
|
||||
commands:
|
||||
- pip install poetry -qq
|
||||
- poetry install --all-extras --no-root
|
||||
- poetry run pytest
|
||||
environment:
|
||||
PY_COLORS: "1"
|
||||
|
||||
steps:
|
||||
- name: python-313
|
||||
image: docker.io/library/python:3.13
|
||||
<<: *pytest_base
|
||||
|
||||
- name: python-312
|
||||
image: docker.io/library/python:3.12
|
||||
<<: *pytest_base
|
||||
|
||||
- name: python-311
|
||||
image: docker.io/library/python:3.11
|
||||
<<: *pytest_base
|
||||
|
||||
- name: python-310
|
||||
image: docker.io/library/python:3.10
|
||||
<<: *pytest_base
|
||||
|
||||
depends_on:
|
||||
- lint
|
2 LICENSE
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2022 Robert Kaussow <mail@thegeeklab.de>
Copyright (c) 2020 Robert Kaussow <mail@thegeeklab.de>

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
10 README.md
@@ -1,10 +1,14 @@
# xoxys.general

[![Build Status](https://ci.rknet.org/api/badges/ansible/xoxys.general/status.svg)](https://ci.rknet.org/repos/ansible/xoxys.general)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg?label=license)](https://gitea.rknet.org/ansible/xoxys.general/src/branch/main/LICENSE)
[![Build Status](https://img.shields.io/drone/build/ansible/xoxys.general?logo=drone&server=https%3A%2F%2Fdrone.rknet.org)](https://drone.rknet.org/ansible/xoxys.general)
[![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg?label=license)](LICENSE)

Custom general Ansible collection.

## License

This project is licensed under the MIT License - see the [LICENSE](https://gitea.rknet.org/ansible/xoxys.general/src/branch/main/LICENSE) file for details.
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.

## Maintainers and Contributors

[Robert Kaussow](https://gitea.rknet.org/xoxys)
18 dev-requirements.txt (Normal file)
@@ -0,0 +1,18 @@
pydocstyle
flake8
flake8-blind-except
flake8-builtins
flake8-docstrings
flake8-isort
flake8-logging-format
flake8-polyfill
flake8-quotes
flake8-pep3101
flake8-eradicate
pep8-naming
wheel
pytest
pytest-mock
pytest-cov
bandit
yapf
@@ -6,7 +6,5 @@ geekdocFlatSection: true
General custom content collection for Ansible.

<!-- spellchecker-disable -->

{{< toc-tree >}}

<!-- spellchecker-enable -->
@@ -14,7 +14,6 @@ my_list:
```

Or pass a custom prefix:

```Yaml
my_list:
- item1
@@ -14,7 +14,6 @@ my_list:
```

Or pass a custom wrapper:

```Yaml
my_list:
- item1
10 galaxy.yml
@@ -1,7 +1,7 @@
---
namespace: xoxys
name: general
# The version is generated during the release by Woodpecker CI.
# The version is generated during the release by Drone CI.
version: 0.0.0
readme: README.md
authors:
@@ -12,13 +12,7 @@ license:
license_file: "LICENSE"
tags:
  - misc
dependencies: {}
repository: https://gitea.rknet.org/ansible/xoxys.general/
homepage: https://thegeeklab.de/
documentation: https://galaxy.geekdocs.de/collections/general/
build_ignore:
  - ".*"
  - "*requirements.txt"
  - docs
  - test
  - dist
  - setup.cfg
@@ -1,107 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Implement documentation fragment for Hashivault module."""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
class ModuleDocFragment: # noqa
|
||||
# Standard documentation
|
||||
DOCUMENTATION = r"""
|
||||
requirements:
|
||||
- hvac>=0.10.1
|
||||
- ansible>=2.0.0
|
||||
- requests
|
||||
options:
|
||||
url:
|
||||
description:
|
||||
- URL of the Vault server.
|
||||
- You can use C(VAULT_ADDR) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
ca_cert:
|
||||
description:
|
||||
- Path to a PEM-encoded CA cert file to use to verify the Vault server
|
||||
TLS certificate.
|
||||
- You can use C(VAULT_CACERT) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
ca_path:
|
||||
description:
|
||||
- Path to a directory of PEM-encoded CA cert files to verify the Vault server
|
||||
TLS certificate. If ca_cert is specified, its value will take precedence.
|
||||
- You can use C(VAULT_CAPATH) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
client_cert:
|
||||
description:
|
||||
- Path to a PEM-encoded client certificate for TLS authentication to the Vault
|
||||
server.
|
||||
- You can use C(VAULT_CLIENT_CERT) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
client_key:
|
||||
description:
|
||||
- Path to an unencrypted PEM-encoded private key matching the client certificate.
|
||||
- You can use C(VAULT_CLIENT_KEY) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
verify:
|
||||
description:
|
||||
- If set, do not verify presented TLS certificate before communicating with Vault
|
||||
server. Setting this variable is not recommended except during testing.
|
||||
- You can use C(VAULT_SKIP_VERIFY) environment variable.
|
||||
default: false
|
||||
type: bool
|
||||
authtype:
|
||||
description:
|
||||
- Authentication type.
|
||||
- You can use C(VAULT_AUTHTYPE) environment variable.
|
||||
default: "token"
|
||||
type: str
|
||||
choices: ["token", "userpass", "github", "ldap", "approle"]
|
||||
login_mount_point:
|
||||
description:
|
||||
- Authentication mount point.
|
||||
- You can use C(VAULT_LOGIN_MOUNT_POINT) environment variable.
|
||||
type: str
|
||||
token:
|
||||
description:
|
||||
- Token for vault.
|
||||
- You can use C(VAULT_TOKEN) environment variable.
|
||||
type: str
|
||||
username:
|
||||
description:
|
||||
- Username to login to vault.
|
||||
- You can use C(VAULT_USER) environment variable.
|
||||
default: ""
|
||||
type: str
|
||||
password:
|
||||
description:
|
||||
- Password to login to vault.
|
||||
- You can use C(VAULT_PASSWORD) environment variable.
|
||||
type: str
|
||||
role_id:
|
||||
description:
|
||||
- Role id for vault.
|
||||
- You can use C(VAULT_ROLE_ID) environment variable.
|
||||
type: str
|
||||
secret_id:
|
||||
description:
|
||||
- Secret id for vault.
|
||||
- You can use C(VAULT_SECRET_ID) environment variable.
|
||||
type: str
|
||||
aws_header:
|
||||
description:
|
||||
- X-Vault-AWS-IAM-Server-ID Header value to prevent replay attacks.
|
||||
- You can use C(VAULT_AWS_HEADER) environment variable.
|
||||
type: str
|
||||
namespace:
|
||||
description:
|
||||
- Namespace for vault.
|
||||
- You can use C(VAULT_NAMESPACE) environment variable.
|
||||
type: str
|
||||
"""
|
@@ -1,14 +1,11 @@
"""Filter to prefix all items from a list."""

from __future__ import absolute_import, division, print_function

__metaclass__ = type


def prefix(value, prefix="--"):
    return [prefix + x for x in value]


class FilterModule(object):  # noqa
class FilterModule(object):

    def filters(self):
        return {"prefix": prefix}
@@ -1,14 +1,11 @@
"""Filter to wrap all items from a list."""

from __future__ import absolute_import, division, print_function

__metaclass__ = type


def wrap(value, wrapper="'"):
    return [wrapper + x + wrapper for x in value]


class FilterModule(object):  # noqa
class FilterModule(object):

    def filters(self):
        return {"wrap": wrap}
@@ -1,3 +1,4 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2014, Mathieu GAUTHIER-LAFAYE <gauthierl@lapth.cnrs.fr>
|
||||
# Copyright (c) 2016, Matt Harris <matthaeus.harris@gmail.com>
|
||||
# Copyright (c) 2020, Robert Kaussow <mail@thegeeklab.de>
|
||||
@@ -9,146 +10,104 @@ from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
DOCUMENTATION = """
|
||||
---
|
||||
name: proxmox
|
||||
short_description: Proxmox VE inventory source
|
||||
version_added: 1.1.0
|
||||
description:
|
||||
- Get inventory hosts from the proxmox service.
|
||||
- "Uses a configuration file as an inventory source, it must end in C(.proxmox.yml) or C(.proxmox.yaml) and has a C(plugin: xoxys.general.proxmox) entry."
|
||||
extends_documentation_fragment:
|
||||
- inventory_cache
|
||||
options:
|
||||
plugin:
|
||||
description: The name of this plugin, it should always be set to C(xoxys.general.proxmox) for this plugin to recognize it as its own.
|
||||
required: yes
|
||||
choices: ["xoxys.general.proxmox"]
|
||||
api_host:
|
||||
name: proxmox
|
||||
plugin_type: inventory
|
||||
short_description: Proxmox VE inventory source
|
||||
version_added: 1.0.0
|
||||
description:
|
||||
- Specify the target host of the Proxmox VE cluster.
|
||||
type: str
|
||||
required: true
|
||||
env:
|
||||
- name: PROXMOX_SERVER
|
||||
api_user:
|
||||
description:
|
||||
- Specify the user to authenticate with.
|
||||
type: str
|
||||
required: true
|
||||
env:
|
||||
- name: PROXMOX_USER
|
||||
api_password:
|
||||
description:
|
||||
- Specify the password to authenticate with.
|
||||
type: str
|
||||
env:
|
||||
- name: PROXMOX_PASSWORD
|
||||
api_token_id:
|
||||
description:
|
||||
- Specify the token ID.
|
||||
type: str
|
||||
env:
|
||||
- name: PROXMOX_TOKEN_ID
|
||||
api_token_secret:
|
||||
description:
|
||||
- Specify the token secret.
|
||||
type: str
|
||||
env:
|
||||
- name: PROXMOX_TOKEN_SECRET
|
||||
verify_ssl:
|
||||
description:
|
||||
- If C(false), SSL certificates will not be validated.
|
||||
- This should only be used on personally controlled sites using self-signed certificates.
|
||||
type: bool
|
||||
default: True
|
||||
auth_timeout:
|
||||
description: Proxmox VE authentication timeout.
|
||||
type: int
|
||||
default: 5
|
||||
exclude_vmid:
|
||||
description: VMID's to exclude from inventory.
|
||||
type: list
|
||||
default: []
|
||||
elements: str
|
||||
exclude_state:
|
||||
description: VM states to exclude from inventory.
|
||||
type: list
|
||||
default: []
|
||||
elements: str
|
||||
group:
|
||||
description: Group to place all hosts into.
|
||||
type: string
|
||||
default: proxmox
|
||||
want_facts:
|
||||
description: Toggle, if C(true) the plugin will retrieve host facts from the server.
|
||||
type: boolean
|
||||
default: True
|
||||
requirements:
|
||||
- "proxmoxer"
|
||||
""" # noqa
|
||||
- Get inventory hosts from the proxmox service.
|
||||
- "Uses a configuration file as an inventory source, it must end in C(.proxmox.yml) or C(.proxmox.yaml) and has a C(plugin: xoxys.general.proxmox) entry."
|
||||
extends_documentation_fragment:
|
||||
- inventory_cache
|
||||
options:
|
||||
plugin:
|
||||
description: The name of this plugin, it should always be set to C(xoxys.general.proxmox) for this plugin to recognize it as its own.
|
||||
required: yes
|
||||
choices: ["xoxys.general.proxmox"]
|
||||
server:
|
||||
description: Proxmox VE server url.
|
||||
default: "pve.example.com"
|
||||
type: string
|
||||
required: yes
|
||||
env:
|
||||
- name: PROXMOX_SERVER
|
||||
user:
|
||||
description: Proxmox VE authentication user.
|
||||
type: string
|
||||
required: yes
|
||||
env:
|
||||
- name: PROXMOX_USER
|
||||
password:
|
||||
description: Proxmox VE authentication password.
|
||||
type: string
|
||||
required: yes
|
||||
env:
|
||||
- name: PROXMOX_PASSWORD
|
||||
verify_ssl:
|
||||
description: Skip SSL certificate verification.
|
||||
type: boolean
|
||||
default: yes
|
||||
auth_timeout:
|
||||
description: Proxmox VE authentication timeout.
|
||||
type: int
|
||||
default: 5
|
||||
exclude_vmid:
|
||||
description: VMID's to exclude from inventory.
|
||||
type: list
|
||||
default: []
|
||||
elements: str
|
||||
exclude_state:
|
||||
description: VM states to exclude from inventory.
|
||||
type: list
|
||||
default: []
|
||||
elements: str
|
||||
group:
|
||||
description: Group to place all hosts into.
|
||||
type: string
|
||||
default: proxmox
|
||||
want_facts:
|
||||
description: Toggle, if C(true) the plugin will retrieve host facts from the server.
|
||||
type: boolean
|
||||
default: yes
|
||||
""" # noqa
|
||||
|
||||
EXAMPLES = """
|
||||
# proxmox.yml
|
||||
plugin: xoxys.general.proxmox
|
||||
api_user: root@pam
|
||||
api_password: secret
|
||||
api_host: helldorado
|
||||
server: pve.example.com
|
||||
user: admin@pve
|
||||
password: secure
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
import socket
|
||||
from collections import defaultdict
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils._text import to_native
|
||||
from ansible.module_utils.parsing.convert_bool import boolean
|
||||
from ansible.module_utils.six import iteritems
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.version import LooseVersion
|
||||
from collections import defaultdict
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
try:
|
||||
from proxmoxer import ProxmoxAPI
|
||||
|
||||
HAS_PROXMOXER = True
|
||||
except ImportError:
|
||||
HAS_PROXMOXER = False
|
||||
|
||||
try:
|
||||
from requests.packages import urllib3
|
||||
|
||||
HAS_URLLIB3 = True
|
||||
except ImportError:
|
||||
try:
|
||||
import urllib3
|
||||
|
||||
HAS_URLLIB3 = True
|
||||
except ImportError:
|
||||
HAS_URLLIB3 = False
|
||||
|
||||
|
||||
class InventoryModule(BaseInventoryPlugin):
|
||||
"""Provide Proxmox VE inventory."""
|
||||
|
||||
NAME = "xoxys.general.proxmox"
|
||||
|
||||
def _proxmox_auth(self):
|
||||
auth_args = {"user": self.get_option("api_user")}
|
||||
if not (self.get_option("api_token_id") and self.get_option("api_token_secret")):
|
||||
auth_args["password"] = self.get_option("api_password")
|
||||
else:
|
||||
auth_args["token_name"] = self.get_option("api_token_id")
|
||||
auth_args["token_value"] = self.get_option("api_token_secret")
|
||||
|
||||
verify_ssl = boolean(self.get_option("verify_ssl"), strict=False)
|
||||
if not verify_ssl and HAS_URLLIB3:
|
||||
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||
|
||||
self.client = ProxmoxAPI(
|
||||
self.get_option("api_host"),
|
||||
verify_ssl=verify_ssl,
|
||||
timeout=self.get_option("auth_timeout"),
|
||||
**auth_args,
|
||||
def _auth(self):
|
||||
return ProxmoxAPI(
|
||||
self.get_option("server"),
|
||||
user=self.get_option("user"),
|
||||
password=self.get_option("password"),
|
||||
verify_ssl=boolean(self.get_option("password"), strict=False),
|
||||
timeout=self.get_option("auth_timeout")
|
||||
)
|
||||
|
||||
def _get_version(self):
|
||||
@@ -158,12 +117,14 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
return LooseVersion(self.client.version.get()["release"])
|
||||
|
||||
def _get_names(self, pve_list, pve_type):
|
||||
if pve_type == "node":
|
||||
return [node["node"] for node in pve_list]
|
||||
if pve_type == "pool":
|
||||
return [pool["poolid"] for pool in pve_list]
|
||||
names = []
|
||||
|
||||
return []
|
||||
if pve_type == "node":
|
||||
names = [node["node"] for node in pve_list]
|
||||
elif pve_type == "pool":
|
||||
names = [pool["poolid"] for pool in pve_list]
|
||||
|
||||
return names
|
||||
|
||||
def _get_variables(self, pve_list, pve_type):
|
||||
variables = {}
|
||||
@@ -178,12 +139,15 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
return variables
|
||||
|
||||
def _get_ip_address(self, pve_type, pve_node, vmid):
|
||||
|
||||
def validate(address):
|
||||
try:
|
||||
# IP address validation
|
||||
if socket.inet_aton(address) and address != "127.0.0.1":
|
||||
return address
|
||||
except OSError:
|
||||
if socket.inet_aton(address):
|
||||
# Ignore localhost
|
||||
if address != "127.0.0.1":
|
||||
return address
|
||||
except socket.error:
|
||||
return False
|
||||
|
||||
address = False
|
||||
@@ -195,18 +159,19 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
networks = self.client.nodes(pve_node).get(
|
||||
"qemu", vmid, "agent", "network-get-interfaces"
|
||||
)["result"]
|
||||
except Exception: # noqa
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if networks and isinstance(networks, list):
|
||||
for network in networks:
|
||||
for ip_address in network.get("ip-addresses", []):
|
||||
address = validate(ip_address["ip-address"])
|
||||
if networks:
|
||||
if type(networks) is list:
|
||||
for network in networks:
|
||||
for ip_address in network["ip-addresses"]:
|
||||
address = validate(ip_address["ip-address"])
|
||||
else:
|
||||
try:
|
||||
config = self.client.nodes(pve_node).get(pve_type, vmid, "config")
|
||||
address = re.search(r"ip=(\d*\.\d*\.\d*\.\d*)", config["net0"]).group(1)
|
||||
except Exception: # noqa
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return address
|
||||
@@ -232,8 +197,8 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
try:
|
||||
qemu_list = self._exclude(self.client.nodes(node).qemu.get())
|
||||
container_list = self._exclude(self.client.nodes(node).lxc.get())
|
||||
except Exception as e: # noqa
|
||||
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||
except Exception as e:
|
||||
raise AnsibleError("Proxmoxer API error: {0}".format(to_native(e)))
|
||||
|
||||
# Merge QEMU and Containers lists from this node
|
||||
instances = self._get_variables(qemu_list, "qemu").copy()
|
||||
@@ -248,13 +213,12 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
pve_type = "qemu"
|
||||
|
||||
try:
|
||||
description = self.client.nodes(node).get(pve_type, vmid, "config")[
|
||||
"description"
|
||||
]
|
||||
description = self.client.nodes(node).get(pve_type, vmid,
|
||||
"config")["description"]
|
||||
except KeyError:
|
||||
description = None
|
||||
except Exception as e: # noqa
|
||||
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||
except Exception as e:
|
||||
raise AnsibleError("Proxmoxer API error: {0}".format(to_native(e)))
|
||||
|
||||
try:
|
||||
metadata = json.loads(description)
|
||||
@@ -288,8 +252,8 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
for pool in self._get_names(self.client.pools.get(), "pool"):
|
||||
try:
|
||||
pool_list = self._exclude(self.client.pool(pool).get()["members"])
|
||||
except Exception as e: # noqa
|
||||
raise AnsibleError(f"Proxmoxer API error: {to_native(e)}") from e
|
||||
except Exception as e:
|
||||
raise AnsibleError("Proxmoxer API error: {0}".format(to_native(e)))
|
||||
|
||||
members = [
|
||||
member["name"]
|
||||
@@ -302,13 +266,13 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
|
||||
def verify_file(self, path):
|
||||
"""Verify the Proxmox VE configuration file."""
|
||||
if super().verify_file(path):
|
||||
if super(InventoryModule, self).verify_file(path):
|
||||
endings = ("proxmox.yaml", "proxmox.yml")
|
||||
if any(path.endswith(ending) for ending in endings):
|
||||
if any((path.endswith(ending) for ending in endings)):
|
||||
return True
|
||||
return False
|
||||
|
||||
def parse(self, inventory, loader, path, cache=True): # noqa
|
||||
def parse(self, inventory, loader, path, cache=True):
|
||||
"""Dynamically parse the Proxmox VE cloud inventory."""
|
||||
if not HAS_PROXMOXER:
|
||||
raise AnsibleError(
|
||||
@@ -316,8 +280,8 @@ class InventoryModule(BaseInventoryPlugin):
|
||||
"https://pypi.org/project/proxmoxer/"
|
||||
)
|
||||
|
||||
super().parse(inventory, loader, path)
|
||||
super(InventoryModule, self).parse(inventory, loader, path)
|
||||
|
||||
self._read_config_data(path)
|
||||
self._proxmox_auth()
|
||||
self.client = self._auth()
|
||||
self._propagate()
|
||||
|
@@ -1,402 +0,0 @@
|
||||
"""Provide helper functions for Hashivault module."""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import missing_required_lib
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.basic import env_fallback
|
||||
|
||||
HVAC_IMP_ERR = None
|
||||
try:
|
||||
import hvac
|
||||
from hvac.exceptions import InvalidPath
|
||||
|
||||
HAS_HVAC = True
|
||||
except ImportError:
|
||||
HAS_HVAC = False
|
||||
HVAC_IMP_ERR = traceback.format_exc()
|
||||
|
||||
|
||||
def hashivault_argspec():
|
||||
return dict(
|
||||
url=dict(required=False, default=os.environ.get("VAULT_ADDR", ""), type="str"),
|
||||
ca_cert=dict(required=False, default=os.environ.get("VAULT_CACERT", ""), type="str"),
|
||||
ca_path=dict(required=False, default=os.environ.get("VAULT_CAPATH", ""), type="str"),
|
||||
client_cert=dict(
|
||||
required=False, default=os.environ.get("VAULT_CLIENT_CERT", ""), type="str"
|
||||
),
|
||||
client_key=dict(
|
||||
required=False, default=os.environ.get("VAULT_CLIENT_KEY", ""), type="str", no_log=True
|
||||
),
|
||||
verify=dict(
|
||||
required=False, default=(not os.environ.get("VAULT_SKIP_VERIFY", "False")), type="bool"
|
||||
),
|
||||
authtype=dict(
|
||||
required=False,
|
||||
default=os.environ.get("VAULT_AUTHTYPE", "token"),
|
||||
type="str",
|
||||
choices=["token", "userpass", "github", "ldap", "approle"],
|
||||
),
|
||||
login_mount_point=dict(
|
||||
required=False, default=os.environ.get("VAULT_LOGIN_MOUNT_POINT", None), type="str"
|
||||
),
|
||||
token=dict(
|
||||
required=False,
|
||||
fallback=(hashivault_default_token, ["VAULT_TOKEN"]),
|
||||
type="str",
|
||||
no_log=True,
|
||||
),
|
||||
username=dict(required=False, default=os.environ.get("VAULT_USER", ""), type="str"),
|
||||
password=dict(
|
||||
required=False, fallback=(env_fallback, ["VAULT_PASSWORD"]), type="str", no_log=True
|
||||
),
|
||||
role_id=dict(
|
||||
required=False, fallback=(env_fallback, ["VAULT_ROLE_ID"]), type="str", no_log=True
|
||||
),
|
||||
secret_id=dict(
|
||||
required=False, fallback=(env_fallback, ["VAULT_SECRET_ID"]), type="str", no_log=True
|
||||
),
|
||||
aws_header=dict(
|
||||
required=False, fallback=(env_fallback, ["VAULT_AWS_HEADER"]), type="str", no_log=True
|
||||
),
|
||||
namespace=dict(
|
||||
required=False, default=os.environ.get("VAULT_NAMESPACE", None), type="str"
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def hashivault_init(
|
||||
argument_spec,
|
||||
supports_check_mode=False,
|
||||
required_if=None,
|
||||
required_together=None,
|
||||
required_one_of=None,
|
||||
mutually_exclusive=None,
|
||||
):
|
||||
module = AnsibleModule(
|
||||
argument_spec=argument_spec,
|
||||
supports_check_mode=supports_check_mode,
|
||||
required_if=required_if,
|
||||
required_together=required_together,
|
||||
required_one_of=required_one_of,
|
||||
mutually_exclusive=mutually_exclusive,
|
||||
)
|
||||
|
||||
if not HAS_HVAC:
|
||||
module.fail_json(msg=missing_required_lib("hvac"), exception=HVAC_IMP_ERR)
|
||||
|
||||
module.no_log_values.discard("0")
|
||||
module.no_log_values.discard(0)
|
||||
module.no_log_values.discard("1")
|
||||
module.no_log_values.discard(1)
|
||||
module.no_log_values.discard(True)
|
||||
module.no_log_values.discard(False)
|
||||
module.no_log_values.discard("ttl")
|
||||
|
||||
return module
|
||||
|
||||
|
||||
def hashivault_client(params):
|
||||
url = params.get("url")
|
||||
ca_cert = params.get("ca_cert")
|
||||
ca_path = params.get("ca_path")
|
||||
client_cert = params.get("client_cert")
|
||||
client_key = params.get("client_key")
|
||||
cert = (client_cert, client_key)
|
||||
check_verify = params.get("verify")
|
||||
namespace = params.get("namespace", None)
|
||||
|
||||
if check_verify == "" or check_verify:
|
||||
if ca_cert:
|
||||
verify = ca_cert
|
||||
elif ca_path:
|
||||
verify = ca_path
|
||||
else:
|
||||
verify = check_verify
|
||||
else:
|
||||
verify = check_verify
|
||||
|
||||
return hvac.Client(url=url, cert=cert, verify=verify, namespace=namespace)
|
||||
|
||||
|
||||
def hashivault_auth(client, params):
|
||||
token = params.get("token")
|
||||
authtype = params.get("authtype")
|
||||
login_mount_point = params.get("login_mount_point", authtype)
|
||||
if not login_mount_point:
|
||||
login_mount_point = authtype
|
||||
username = params.get("username")
|
||||
password = params.get("password")
|
||||
secret_id = params.get("secret_id")
|
||||
role_id = params.get("role_id")
|
||||
|
||||
if authtype == "github":
|
||||
client.auth.github.login(token, mount_point=login_mount_point)
|
||||
elif authtype == "userpass":
|
||||
client.auth_userpass(username, password, mount_point=login_mount_point)
|
||||
elif authtype == "ldap":
|
||||
client.auth.ldap.login(username, password, mount_point=login_mount_point)
|
||||
elif authtype == "approle":
|
||||
client = AppRoleClient(client, role_id, secret_id, mount_point=login_mount_point)
|
||||
elif authtype == "tls":
|
||||
client.auth_tls()
|
||||
else:
|
||||
client.token = token
|
||||
return client
|
||||
|
||||
|
||||
def hashivault_auth_client(params):
|
||||
client = hashivault_client(params)
|
||||
return hashivault_auth(client, params)
|
||||
|
||||
|
||||
def hashiwrapper(function):
|
||||
def wrapper(*args, **kwargs):
|
||||
result = {"changed": False, "rc": 0}
|
||||
result.update(function(*args, **kwargs))
|
||||
return result
|
||||
|
||||
return wrapper
|
||||
|
||||
|
||||
def hashivault_default_token(env):
|
||||
"""Get a default Vault token from an environment variable or a file."""
|
||||
envvar = env[0]
|
||||
if envvar in os.environ:
|
||||
return os.environ[envvar]
|
||||
token_file = os.path.expanduser("~/.vault-token")
|
||||
if os.path.exists(token_file):
|
||||
with open(token_file) as f:
|
||||
return f.read().strip()
|
||||
return ""
|
||||
|
||||
|
||||
@hashiwrapper
|
||||
def hashivault_read(params):
|
||||
result = {"changed": False, "rc": 0}
|
||||
client = hashivault_auth_client(params)
|
||||
version = params.get("version")
|
||||
mount_point = params.get("mount_point")
|
||||
secret = params.get("secret")
|
||||
secret_version = params.get("secret_version")
|
||||
|
||||
key = params.get("key")
|
||||
default = params.get("default")
|
||||
|
||||
if secret.startswith("/"):
|
||||
secret = secret.lstrip("/")
|
||||
mount_point = ""
|
||||
|
||||
secret_path = f"{mount_point}/{secret}" if mount_point else secret
|
||||
|
||||
try:
|
||||
if version == 2:
|
||||
response = client.secrets.kv.v2.read_secret_version(
|
||||
secret, mount_point=mount_point, version=secret_version
|
||||
)
|
||||
else:
|
||||
response = client.secrets.kv.v1.read_secret(secret, mount_point=mount_point)
|
||||
except InvalidPath:
|
||||
response = None
|
||||
except Exception as e: # noqa: BLE001
|
||||
result["rc"] = 1
|
||||
result["failed"] = True
|
||||
error_string = f"{e.__class__.__name__}({e})"
|
||||
result["msg"] = f"Error {error_string} reading {secret_path}"
|
||||
return result
|
||||
if not response:
|
||||
if default is not None:
|
||||
result["value"] = default
|
||||
return result
|
||||
result["rc"] = 1
|
||||
result["failed"] = True
|
||||
result["msg"] = f"Secret {secret_path} is not in vault"
|
||||
return result
|
||||
if version == 2:
|
||||
try:
|
||||
data = response.get("data", {})
|
||||
data = data.get("data", {})
|
||||
except Exception: # noqa: BLE001
|
||||
data = str(response)
|
||||
else:
|
||||
data = response["data"]
|
||||
lease_duration = response.get("lease_duration", None)
|
||||
if lease_duration is not None:
|
||||
result["lease_duration"] = lease_duration
|
||||
lease_id = response.get("lease_id", None)
|
||||
if lease_id is not None:
|
||||
result["lease_id"] = lease_id
|
||||
renewable = response.get("renewable", None)
|
||||
if renewable is not None:
|
||||
result["renewable"] = renewable
|
||||
wrap_info = response.get("wrap_info", None)
|
||||
if wrap_info is not None:
|
||||
result["wrap_info"] = wrap_info
|
||||
if key and key not in data:
|
||||
if default is not None:
|
||||
result["value"] = default
|
||||
return result
|
||||
result["rc"] = 1
|
||||
result["failed"] = True
|
||||
result["msg"] = f"Key {key} is not in secret {secret_path}"
|
||||
return result
|
||||
value = data[key] if key else data
|
||||
result["value"] = value
|
||||
return result
|
||||
|
||||
|
||||
class AppRoleClient:
|
||||
"""
|
||||
hvac.Client decorator generate and set a new approle token.
|
||||
|
||||
This allows multiple calls to Vault without having to manually
|
||||
generate and set a token on every Vault call.
|
||||
"""
|
||||
|
||||
def __init__(self, client, role_id, secret_id, mount_point):
|
||||
object.__setattr__(self, "client", client)
|
||||
object.__setattr__(self, "role_id", role_id)
|
||||
object.__setattr__(self, "secret_id", secret_id)
|
||||
object.__setattr__(self, "login_mount_point", mount_point)
|
||||
|
||||
def __setattr__(self, name, val):
|
||||
client = object.__getattribute__(self, "client")
|
||||
client.__setattr__(name, val)
|
||||
|
||||
def __getattribute__(self, name):
|
||||
client = object.__getattribute__(self, "client")
|
||||
attr = client.__getattribute__(name)
|
||||
|
||||
role_id = object.__getattribute__(self, "role_id")
|
||||
secret_id = object.__getattribute__(self, "secret_id")
|
||||
login_mount_point = object.__getattribute__(self, "login_mount_point")
|
||||
resp = client.auth_approle(role_id, secret_id=secret_id, mount_point=login_mount_point)
|
||||
client.token = str(resp["auth"]["client_token"])
|
||||
return attr
|
||||
|
||||
|
||||
def _compare_state(desired_state, current_state, ignore=None):
|
||||
"""
|
||||
Compare desired state to current state.
|
||||
|
||||
Returns true if objects are equal.
|
||||
|
||||
Recursively walks dict object to compare all keys.
|
||||
|
||||
:param desired_state: The state user desires.
|
||||
:param current_state: The state that currently exists.
|
||||
:param ignore: Ignore these keys.
|
||||
:type ignore: list
|
||||
|
||||
:return: True if the states are the same.
|
||||
:rtype: bool
|
||||
"""
|
||||
|
||||
if ignore is None:
|
||||
ignore = []
|
||||
if isinstance(desired_state, list):
|
||||
if not isinstance(current_state, list) or (len(desired_state) != len(current_state)):
|
||||
return False
|
||||
return set(desired_state) == set(current_state)
|
||||
|
||||
if isinstance(desired_state, dict):
|
||||
if not isinstance(current_state, dict):
|
||||
return False
|
||||
|
||||
# iterate over dictionary keys
|
||||
for key in desired_state:
|
||||
if key in ignore:
|
||||
continue
|
||||
v = desired_state[key]
|
||||
if (key not in current_state) or (not _compare_state(v, current_state.get(key))):
|
||||
return False
|
||||
return True
|
||||
|
||||
# Lots of things get handled as strings in ansible that aren't necessarily strings,
|
||||
# can extend this list later.
|
||||
if isinstance(desired_state, str) and isinstance(current_state, int):
|
||||
current_state = str(current_state)
|
||||
|
||||
return desired_state == current_state
|
||||
|
||||
|
||||
def _convert_to_seconds(original_value):
|
||||
try:
|
||||
value = str(original_value)
|
||||
seconds = 0
|
||||
if "h" in value:
|
||||
ray = value.split("h")
|
||||
seconds = int(ray.pop(0)) * 3600
|
||||
value = "".join(ray)
|
||||
if "m" in value:
|
||||
ray = value.split("m")
|
||||
seconds += int(ray.pop(0)) * 60
|
||||
value = "".join(ray)
|
||||
if value:
|
||||
ray = value.split("s")
|
||||
seconds += int(ray.pop(0))
|
||||
return seconds
|
||||
except Exception: # noqa: BLE001,S110
|
||||
pass
|
||||
return original_value
|
||||
|
||||
|
||||
def get_keys_updated(desired_state, current_state, ignore=None):
|
||||
"""
|
||||
|
||||
Return list of keys that have different values.
|
||||
|
||||
Recursively walks dict object to compare all keys.
|
||||
|
||||
:param desired_state: The state user desires.
|
||||
:type desired_state: dict
|
||||
:param current_state: The state that currently exists.
|
||||
:type current_state: dict
|
||||
:param ignore: Ignore these keys.
|
||||
:type ignore: list
|
||||
|
||||
:return: Different items
|
||||
:rtype: list
|
||||
"""
|
||||
|
||||
if ignore is None:
|
||||
ignore = []
|
||||
|
||||
differences = []
|
||||
for key in desired_state:
|
||||
if key in ignore:
|
||||
continue
|
||||
if key not in current_state:
|
||||
differences.append(key)
|
||||
continue
|
||||
new_value = desired_state[key]
|
||||
old_value = current_state[key]
|
||||
if (
|
||||
"ttl" in key and (_convert_to_seconds(old_value) != _convert_to_seconds(new_value))
|
||||
) or not _compare_state(new_value, old_value):
|
||||
differences.append(key)
|
||||
return differences
|
||||
|
||||
|
||||
def is_state_changed(desired_state, current_state, ignore=None): # noqa: ARG001
|
||||
"""
|
||||
Return list of keys that have different values.
|
||||
|
||||
Recursively walks dict object to compare all keys.
|
||||
|
||||
:param desired_state: The state user desires.
|
||||
:type desired_state: dict
|
||||
:param current_state: The state that currently exists.
|
||||
:type current_state: dict
|
||||
:param ignore: Ignore these keys.
|
||||
:type ignore: list
|
||||
|
||||
:return: Different
|
||||
:rtype: bool
|
||||
"""
|
||||
return len(get_keys_updated(desired_state, current_state)) > 0
|
@@ -1,24 +0,0 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2021, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Provide version object to compare version numbers."""

from __future__ import absolute_import, division, print_function

__metaclass__ = type

from ansible.module_utils.six import raise_from

try:
    from ansible.module_utils.compat.version import LooseVersion  # noqa: F401,E501 pylint: disable=unused-import
except ImportError:
    try:
        from distutils.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
    except ImportError as exc:
        msg = (
            "To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 "
            "with distutils.version present"
        )
        raise_from(ImportError(msg), exc)
@@ -1,72 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Unseal Hashicorp Vault servers."""
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {"status": ["stableinterface"], "supported_by": "community", "version": "1.1"}
|
||||
|
||||
DOCUMENTATION = """
|
||||
---
|
||||
module: hashivault_unseal
|
||||
short_description: Hashicorp Vault unseal module.
|
||||
version_added: 1.2.0
|
||||
description:
|
||||
- "Module to unseal Hashicorp Vault."
|
||||
options:
|
||||
keys:
|
||||
description:
|
||||
- Vault key shard(s).
|
||||
type: list
|
||||
elements: str
|
||||
required: true
|
||||
author:
|
||||
- Robert Kaussow (@xoxys)
|
||||
extends_documentation_fragment:
|
||||
- xoxys.general.hashivault
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
---
|
||||
- name: Unseal vault
|
||||
hashivault_unseal:
|
||||
keys:
|
||||
- 26479cc0-54bc-4252-9c34-baca54aa5de7
|
||||
- 47f942e3-8525-4b44-ba2f-84a4ae81db7d
|
||||
- 2ee9c868-4275-4836-8747-4f8fb7611aa0
|
||||
url: https://vault.example.com
|
||||
"""
|
||||
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_argspec
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_client
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashivault_init
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.hashivault import hashiwrapper
|
||||
|
||||
|
||||
def main():
|
||||
argspec = hashivault_argspec()
|
||||
argspec["keys"] = dict(required=True, type="list", elements="str", no_log=True)
|
||||
module = hashivault_init(argspec)
|
||||
result = hashivault_unseal(module.params)
|
||||
if result.get("failed"):
|
||||
module.fail_json(**result)
|
||||
else:
|
||||
module.exit_json(**result)
|
||||
|
||||
|
||||
@hashiwrapper
|
||||
def hashivault_unseal(params):
|
||||
keys = params.get("keys")
|
||||
client = hashivault_client(params)
|
||||
if client.sys.is_sealed():
|
||||
return {"status": client.sys.submit_unseal_keys(keys), "changed": True}
|
||||
|
||||
return {"changed": False}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
"""
|
||||
IPtables raw module.
|
||||
@@ -19,20 +18,16 @@ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
"""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0'}
|
||||
|
||||
DOCUMENTATION = r'''
|
||||
---
|
||||
module: iptables_raw
|
||||
short_description: Manage iptables rules
|
||||
version_added: 1.1.0
|
||||
version_added: "2.4"
|
||||
description:
|
||||
- Add/remove iptables rules while keeping state.
|
||||
options:
|
||||
@@ -40,14 +35,13 @@ options:
|
||||
description:
|
||||
- Create a backup of the iptables state file before overwriting it.
|
||||
required: false
|
||||
type: bool
|
||||
default: False
|
||||
choices: ["yes", "no"]
|
||||
default: "no"
|
||||
ipversion:
|
||||
description:
|
||||
- Target the IP version this rule is for.
|
||||
required: false
|
||||
default: "4"
|
||||
type: str
|
||||
choices: ["4", "6"]
|
||||
keep_unmanaged:
|
||||
description:
|
||||
@ -60,8 +54,8 @@ options:
|
||||
first time, since if you don't specify correct rules, you can block
|
||||
yourself out of the managed host."
|
||||
required: false
|
||||
type: bool
|
||||
default: True
|
||||
choices: ["yes", "no"]
|
||||
default: "yes"
|
||||
name:
|
||||
description:
|
||||
- Name that will be used as an identifier for these rules. It can contain
|
||||
@ -70,21 +64,17 @@ options:
|
||||
C(state=absent) to flush all rules in the selected table, or even all
|
||||
tables with C(table=*).
|
||||
required: true
|
||||
type: str
|
||||
rules:
|
||||
description:
|
||||
- The rules that we want to add. Accepts multiline values.
|
||||
- "Note: You can only use C(-A)/C(--append), C(-N)/C(--new-chain), and
|
||||
C(-P)/C(--policy) to specify rules."
|
||||
required: false
|
||||
type: str
|
||||
default: ""
|
||||
state:
|
||||
description:
|
||||
- The state this rules fragment should be in.
|
||||
choices: ["present", "absent"]
|
||||
required: false
|
||||
type: str
|
||||
default: present
|
||||
table:
|
||||
description:
|
||||
@ -92,13 +82,12 @@ options:
|
||||
with C(name=*) and C(state=absent) to flush all rules in all tables.
|
||||
choices: ["filter", "nat", "mangle", "raw", "security", "*"]
|
||||
required: false
|
||||
type: str
|
||||
default: filter
|
||||
weight:
|
||||
description:
|
||||
- Determines the order of the rules. Lower C(weight) means higher
|
||||
priority. Supported range is C(0 - 99)
|
||||
type: int
|
||||
choices: ["0 - 99"]
|
||||
required: false
|
||||
default: 40
|
||||
notes:
|
||||
@ -127,7 +116,7 @@ EXAMPLES = '''
|
||||
- iptables_raw:
|
||||
name: default_rules
|
||||
weight: 10
|
||||
keep_unmanaged: false
|
||||
keep_unmanaged: no
|
||||
rules: |
|
||||
-A INPUT -m state --state RELATED,ESTABLISHED -j ACCEPT
|
||||
-A INPUT -i lo -j ACCEPT
|
||||
@ -167,12 +156,12 @@ RETURN = '''
|
||||
state:
|
||||
description: state of the rules
|
||||
returned: success
|
||||
type: str
|
||||
type: string
|
||||
sample: present
|
||||
name:
|
||||
description: name of the rules
|
||||
returned: success
|
||||
type: str
|
||||
type: string
|
||||
sample: open_tcp_80
|
||||
weight:
|
||||
description: weight of the rules
|
||||
@ -187,22 +176,22 @@ ipversion:
|
||||
rules:
|
||||
description: passed rules
|
||||
returned: success
|
||||
type: str
|
||||
type: string
|
||||
sample: "-A INPUT -p tcp -m tcp --dport 80 -j ACCEPT"
|
||||
table:
|
||||
description: iptables table used
|
||||
returned: success
|
||||
type: str
|
||||
type: string
|
||||
sample: filter
|
||||
backup:
|
||||
description: if the iptables file should backed up
|
||||
returned: success
|
||||
type: bool
|
||||
type: boolean
|
||||
sample: False
|
||||
keep_unmanaged:
|
||||
description: if it should keep unmanaged rules
|
||||
returned: success
|
||||
type: bool
|
||||
type: boolean
|
||||
sample: True
|
||||
'''
|
||||
|
||||
@ -216,7 +205,6 @@ from collections import defaultdict
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
from ansible.module_utils.basic import json
|
||||
from ansible_collections.xoxys.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
# Genereates a diff dictionary from an old and new table dump.
|
||||
@ -358,6 +346,7 @@ class Iptables:
|
||||
|
||||
# Checks if iptables is installed and if we have a correct version.
|
||||
def _check_compatibility(self):
|
||||
from distutils.version import StrictVersion
|
||||
cmd = [self.bins['iptables'], '--version']
|
||||
rc, stdout, stderr = Iptables.module.run_command(cmd, check_rc=False)
|
||||
if rc == 0:
|
||||
@ -366,7 +355,7 @@ class Iptables:
|
||||
version = result.group(1)
|
||||
# CentOS 5 ip6tables (v1.3.x) doesn't support comments,
|
||||
# which means it cannot be used with this module.
|
||||
if LooseVersion(version) < LooseVersion('1.4'):
|
||||
if StrictVersion(version) < StrictVersion('1.4'):
|
||||
Iptables.module.fail_json(
|
||||
msg="This module isn't compatible with ip6tables versions older than 1.4.x"
|
||||
)
|
||||
|
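The _check_compatibility logic above (run `iptables --version`, parse the version, and refuse anything older than 1.4) can be illustrated with a small standalone sketch. It uses a plain tuple comparison instead of the deprecated distutils classes and is only an approximation of the module's behaviour:

# Simplified version gate for iptables; not the module's exact implementation.
import re
import subprocess


def iptables_at_least(minimum=(1, 4)):
    out = subprocess.run(["iptables", "--version"], capture_output=True, text=True, check=True)
    match = re.search(r"v(\d+)\.(\d+)", out.stdout)
    if not match:
        raise RuntimeError("Could not parse iptables version output")
    return (int(match.group(1)), int(match.group(2))) >= minimum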
@ -1,109 +1,89 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""OpenSSL PKCS12 module."""

from __future__ import absolute_import, division, print_function

__metaclass__ = type

ANSIBLE_METADATA = {"metadata_version": "1.0", "status": ["preview"], "supported_by": "community"}

DOCUMENTATION = """
---
module: openssl_pkcs12
author: "Guillaume Delpierre (@gdelpierre)"
version_added: 1.1.0
version_added: "2.4"
short_description: Generate OpenSSL pkcs12 archive.
description:
  - "This module allows one to (re-)generate PKCS#12."
  - "This module allows one to (re-)generate PKCS#12."
requirements:
  - "python-pyOpenSSL"
extends_documentation_fragment: files
  - "python-pyOpenSSL"
options:
  ca_certificates:
    required: False
    type: list
    elements: str
    description:
      - List of CA certificate to include.
  cert_path:
    required: False
    type: path
    description:
      - The path to read certificates and private keys from.
        Must be in PEM format.
  action:
    required: False
    default: "export"
    choices: ["parse", "export"]
    type: str
    description:
      - Create (export) or parse a PKCS#12.
  src:
    required: False
    type: path
    description:
      - PKCS#12 file path to parse.
  path:
    required: True
    type: path
    description:
      - Filename to write the PKCS#12 file to.
  force:
    required: False
    default: False
    type: bool
    description:
      - Should the file be regenerated even it it already exists.
  friendly_name:
    required: False
    type: str
    aliases:
      - "name"
    description:
      - Specifies the friendly name for the certificate and private key.
  iter_size:
    required: False
    default: 2048
    type: int
    description:
      - Number of times to repeat the encryption step.
  maciter_size:
    required: False
    default: 1
    type: int
    description:
      - Number of times to repeat the MAC step.
  mode:
    required: False
    default: "0400"
    type: str
    description:
      - Default mode for the generated PKCS#12 file.
  passphrase:
    required: False
    type: str
    description:
      - The PKCS#12 password.
  privatekey_path:
    required: False
    type: path
    description:
      - File to read private key from.
  privatekey_passphrase:
    required: False
    type: str
    description:
      - Passphrase source to decrypt any input private keys with.
  state:
    required: False
    default: "present"
    choices: ["present", "absent"]
    type: str
    description:
      - Whether the file should exist or not.
  ca_certificates:
    required: False
    description:
      - List of CA certificate to include.
  cert_path:
    required: False
    description:
      - The path to read certificates and private keys from.
        Must be in PEM format.
  action:
    required: False
    default: "export"
    choices: ["parse", "export"]
    description:
      - Create (export) or parse a PKCS#12.
  src:
    required: False
    description:
      - PKCS#12 file path to parse.
  path:
    required: True
    default: null
    description:
      - Filename to write the PKCS#12 file to.
  force:
    required: False
    default: False
    description:
      - Should the file be regenerated even it it already exists.
  friendly_name:
    required: False
    default: null
    aliases: "name"
    description:
      - Specifies the friendly name for the certificate and private key.
  iter_size:
    required: False
    default: 2048
    description:
      - Number of times to repeat the encryption step.
  maciter_size:
    required: False
    default: 1
    description:
      - Number of times to repeat the MAC step.
  mode:
    required: False
    default: 0400
    description:
      - Default mode for the generated PKCS#12 file.
  passphrase:
    required: False
    default: null
    description:
      - The PKCS#12 password.
  privatekey_path:
    required: False
    description:
      - File to read private key from.
  privatekey_passphrase:
    required: False
    default: null
    description:
      - Passphrase source to decrypt any input private keys with.
  state:
    required: False
    default: "present"
    choices: ["present", "absent"]
    description:
      - Whether the file should exist or not.
"""

EXAMPLES = """
@ -151,10 +131,10 @@ EXAMPLES = """

RETURN = """
filename:
  description: Path to the generate PKCS#12 file.
  returned: changed or success
  type: str
  sample: /opt/certs/ansible.p12
  description: Path to the generate PKCS#12 file.
  returned: changed or success
  type: string
  sample: /opt/certs/ansible.p12
"""

import errno
@ -171,11 +151,12 @@ else:
    pyopenssl_found = True


class PkcsError(Exception):  # noqa
class PkcsError(Exception):
    pass


class Pkcs(object):  # noqa
class Pkcs(object):

    def __init__(self, module):
        self.path = module.params["path"]
        self.force = module.params["force"]
@ -200,56 +181,56 @@ class Pkcs(object):  # noqa
    def load_privatekey(self, path, passphrase=None):
        """Load the specified OpenSSL private key."""
        try:
            return (
                crypto.load_privatekey(
            if passphrase:
                privatekey = crypto.load_privatekey(
                    crypto.FILETYPE_PEM,
                    open(path, "rb").read(),  # noqa
                    passphrase,
                    open(path, "rb").read(), passphrase
                )
                if passphrase
                else crypto.load_privatekey(
                    crypto.FILETYPE_PEM,
                    open(path, "rb").read(),  # noqa
                )
            )
        except OSError as exc:
            raise PkcsError(exc) from exc
            else:
                privatekey = crypto.load_privatekey(crypto.FILETYPE_PEM, open(path, "rb").read())

            return privatekey
        except (IOError, OSError) as exc:
            raise PkcsError(exc)

    def load_certificate(self, path):
        """Load the specified certificate."""
        try:
            cert_content = open(path, "rb").read()  # noqa
            return crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
        except OSError as exc:
            raise PkcsError(exc) from exc
            cert_content = open(path, "rb").read()
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert_content)
            return cert
        except (IOError, OSError) as exc:
            raise PkcsError(exc)

    def load_pkcs12(self, path, passphrase=None):
        """Load pkcs12 file."""
        try:
            if passphrase:
                return crypto.load_pkcs12(open(path, "rb").read(), passphrase)  # noqa

            return crypto.load_pkcs12(open(path, "rb").read())  # noqa
        except OSError as exc:
            raise PkcsError(exc) from exc
                return crypto.load_pkcs12(open(path, "rb").read(), passphrase)
            else:
                return crypto.load_pkcs12(open(path, "rb").read())
        except (IOError, OSError) as exc:
            raise PkcsError(exc)

    def dump_privatekey(self, path):
        """Dump the specified OpenSSL private key."""
        try:
            return crypto.dump_privatekey(
                crypto.FILETYPE_PEM, self.load_pkcs12(path).get_privatekey()
                crypto.FILETYPE_PEM,
                self.load_pkcs12(path).get_privatekey()
            )
        except OSError as exc:
            raise PkcsError(exc) from exc
        except (IOError, OSError) as exc:
            raise PkcsError(exc)

    def dump_certificate(self, path):
        """Dump the specified certificate."""
        try:
            return crypto.dump_certificate(
                crypto.FILETYPE_PEM, self.load_pkcs12(path).get_certificate()
                crypto.FILETYPE_PEM,
                self.load_pkcs12(path).get_certificate()
            )
        except OSError as exc:
            raise PkcsError(exc) from exc
        except (IOError, OSError) as exc:
            raise PkcsError(exc)

    def generate(self, module):
        """Generate PKCS#12 file archive."""
@ -283,9 +264,9 @@ class Pkcs(object):  # noqa
            )
            module.set_mode_if_different(self.path, self.mode, False)
            self.changed = True
        except OSError as exc:
        except (IOError, OSError) as exc:
            self.remove()
            raise PkcsError(exc) from exc
            raise PkcsError(exc)

        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
@ -300,12 +281,14 @@ class Pkcs(object):  # noqa

            with open(self.path, "wb") as content:
                content.write(
                    f"{self.dump_privatekey(self.src)}{self.dump_certificate(self.src)}"
                    "{0}{1}".format(
                        self.dump_privatekey(self.src), self.dump_certificate(self.src)
                    )
                )
            module.set_mode_if_different(self.path, self.mode, False)
            self.changed = True
        except OSError as exc:
            raise PkcsError(exc) from exc
        except IOError as exc:
            raise PkcsError(exc)

        file_args = module.load_file_common_arguments(module.params)
        if module.set_fs_attributes_if_different(file_args, False):
@ -319,11 +302,12 @@ class Pkcs(object):  # noqa
            self.changed = True
        except OSError as exc:
            if exc.errno != errno.ENOENT:
                raise PkcsError(exc) from exc
                raise PkcsError(exc)
            else:
                pass

            pass
    def check(self, module, perms_required=True):

    def check(self, module, perms_required=True):  # noqa
        def _check_pkey_passphrase():
            if self.privatekey_passphrase:
                try:
@ -353,20 +337,19 @@ class Pkcs(object):  # noqa

def main():
    argument_spec = dict(
        action=dict(default="export", choices=["parse", "export"], type="str", required=False),
        ca_certificates=dict(type="list", elements="str", required=False),
        action=dict(default="export", choices=["parse", "export"], type="str"),
        ca_certificates=dict(type="list"),
        cert_path=dict(type="path"),
        force=dict(default=False, type="bool"),
        friendly_name=dict(type="str", aliases=["name"]),
        iter_size=dict(default=2048, type="int"),
        maciter_size=dict(default=1, type="int"),
        passphrase=dict(type="str", no_log=True),
        path=dict(type="path", required=True),
        path=dict(required=True, type="path"),
        privatekey_path=dict(type="path"),
        privatekey_passphrase=dict(type="str", no_log=True),
        state=dict(default="present", choices=["present", "absent"], type="str"),
        src=dict(type="path"),
        mode=dict(default="0400", type="str", required=False),
    )

    required_if = [
@ -393,7 +376,8 @@ def main():
    if not os.path.isdir(base_dir):
        module.fail_json(
            name=base_dir,
            msg=f"The directory {base_dir} does not exist or the file is not a directory",
            msg="The directory {0} does not exist or "
                "the file is not a directory".format(base_dir)
        )

    pkcs12 = Pkcs(module)
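The pyOpenSSL calls used above (load_privatekey, load_certificate, PKCS#12 export) can be condensed into a short sketch. The file paths and passphrase are placeholders, and it assumes a pyOpenSSL release that still ships the deprecated PKCS12 class:

# Illustrative sketch of a PKCS#12 export with pyOpenSSL; paths and passphrase are placeholders.
from OpenSSL import crypto

key = crypto.load_privatekey(crypto.FILETYPE_PEM, open("/opt/certs/key.pem", "rb").read())
cert = crypto.load_certificate(crypto.FILETYPE_PEM, open("/opt/certs/cert.pem", "rb").read())

p12 = crypto.PKCS12()
p12.set_privatekey(key)
p12.set_certificate(cert)
p12.set_friendlyname(b"ansible")

with open("/opt/certs/ansible.p12", "wb") as fh:
    fh.write(p12.export(passphrase=b"secret", iter=2048, maciter=1))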
File diff suppressed because it is too large
@ -1,12 +1,5 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-

# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""Control Univention Corporate Registry."""

from __future__ import absolute_import, division, print_function

__metaclass__ = type
"""Module to control Univention Corporate Registry."""

ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"}

@ -14,33 +7,28 @@ DOCUMENTATION = """
---
module: ucr
short_description: Manage variables in univention configuration registry.
version_added: 1.1.0
version_added: "2.6"
description:
  - "This module allows to manage variables inside the univention configuration registry
    on a univention corporate server (UCS)."
  - "This module allows to manage variables inside the univention configuration registry
    on a univention corporate server (UCS)."
options:
  path:
    description:
      - Path for the variable
    aliases:
      - name
    required: True
    type: str
  value:
    description:
      - New value of the variable
    required: False
    type: str
    default: ""
  state:
    required: False
    default: "present"
    choices: ["present", "absent"]
    type: str
    description:
      - Whether the variable should be exist or not.
  path:
    description:
      - Path for the variable
    required: True
    default: null
  value:
    description:
      - New value of the variable
    required: False
  state:
    required: False
    default: "present"
    choices: ["present", "absent"]
    description:
      - Whether the variable should be exist or not.
author:
  - Robert Kaussow (@xoxys)
  - Robert Kaussow (@xoxys)
"""

EXAMPLES = """
@ -59,48 +47,45 @@ EXAMPLES = """

RETURN = """
original_message:
  description: The original name param that was passed in
  type: str
  returned: success
  description: The original name param that was passed in
  type: str
message:
  description: The output message that the sample module generates
  type: str
  returned: success
  description: The output message that the sample module generates
"""

from ansible.module_utils.basic import AnsibleModule

try:
    from univention.config_registry import ConfigRegistry
    from univention.config_registry.frontend import ucr_update

    HAS_UNIVENTION = True
except ImportError:
    HAS_UNIVENTION = False
from univention.config_registry import ConfigRegistry  # noqa
from univention.config_registry.frontend import ucr_update  # noqa


def get_variable(ucr, path):
    ucr.load()
    return ucr.get(path) if path in ucr else None
    if path in ucr:
        value = ucr.get(path)
    else:
        value = None
    return value


def set_variable(ucr, path, value, result):  # noqa
def set_variable(ucr, path, value, result):
    org_value = get_variable(ucr, path)
    ucr_update(ucr, {path: value})
    new_value = get_variable(ucr, path)
    return org_value != new_value
    return not org_value == new_value


def dry_variable(ucr, path, value, result):  # noqa
def dry_variable(ucr, path, value, result):
    org_value = get_variable(ucr, path)
    return org_value != value
    return not org_value == value


def main():
    ucr = ConfigRegistry()

    module_args = dict(
        path=dict(type="str", required=True, aliases=["name"]),
        value=dict(type="str", required=False, default=""),
        state=dict(default="present", choices=["present", "absent"], type="str"),
        state=dict(default="present", choices=["present", "absent"], type="str")
    )

    required_if = [["state", "present", ["value"]]]
@ -109,17 +94,13 @@ def main():
        argument_spec=module_args, supports_check_mode=True, required_if=required_if
    )

    if not HAS_UNIVENTION:
        module.fail_json(msg="univention required for this module")

    ucr = ConfigRegistry()

    result = dict(changed=False, original_message="", message="")

    path = module.params["path"]
    value = module.params["value"]
    if module.params["state"] == "present" and (value is None or value == "None"):
        value = ""
    if module.params["state"] == "present":
        if value is None or value == "None":
            value = ""
    elif module.params["state"] == "absent":
        value = None
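Condensed, the registry round trip this module performs (load, read, update, re-read) looks roughly like the following; it requires the Univention Python bindings, and the variable name and value are placeholders:

# Rough sketch of the ucr get/set round trip; variable name and value are placeholders.
from univention.config_registry import ConfigRegistry
from univention.config_registry.frontend import ucr_update

ucr = ConfigRegistry()
ucr.load()

before = ucr.get("dns/forwarder1")
ucr_update(ucr, {"dns/forwarder1": "9.9.9.9"})  # state=present sets the value
# ucr_update(ucr, {"dns/forwarder1": None})     # state=absent unsets it
ucr.load()
changed = before != ucr.get("dns/forwarder1")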
1158
poetry.lock
generated
File diff suppressed because it is too large
152
pyproject.toml
@ -1,152 +0,0 @@
[tool.poetry]
authors = ["Robert Kaussow <mail@thegeeklab.de>"]
classifiers = [
  "Development Status :: 5 - Production/Stable",
  "Environment :: Console",
  "License :: OSI Approved :: MIT License",
  "Intended Audience :: Developers",
  "Intended Audience :: Information Technology",
  "Intended Audience :: System Administrators",
  "Natural Language :: English",
  "Operating System :: POSIX",
  "Programming Language :: Python :: 3",
  "Programming Language :: Python :: 3.9",
  "Programming Language :: Python :: 3.10",
  "Programming Language :: Python :: 3.11",
  "Programming Language :: Python :: 3.12",
  "Topic :: Utilities",
  "Topic :: Software Development",
  "Topic :: Software Development :: Documentation",
]
description = "Build environment for Ansible Collection."
license = "MIT"
name = "xoxys.general"
readme = "README.md"
repository = "https://gitea.rknet.org/ansible/xoxys.general"
version = "0.0.0"

[tool.poetry.dependencies]
python = "^3.9.0"
ansible-core = { version = "<=2.14.0", optional = true }
pyopenssl = "23.0.0"
proxmoxer = "2.0.1"
hcloud = "1.18.2"

[tool.poetry.extras]
ansible = ["ansible-core"]

[tool.poetry.group.dev.dependencies]
ruff = "0.1.7"
pytest = "7.2.1"
pytest-mock = "3.10.0"
pytest-cov = "4.0.0"
toml = "0.10.2"
pycodestyle = "2.10.0"
yamllint = "1.29.0"
pylint = "2.15.0"
voluptuous = "0.13.1"
pytest-ansible = "3.1.5"
pytest-forked = "1.6.0"
pytest-xdist = "3.3.1"

[tool.pytest.ini_options]
addopts = "--cov --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
pythonpath = [
  "."
]
testpaths = [
  "tests",
]
filterwarnings = [
  "ignore::FutureWarning",
  "ignore::DeprecationWarning",
  "ignore:.*pep8.*:FutureWarning",
  "ignore:AnsibleCollectionFinder.*:UserWarning"
]

[tool.coverage.run]
omit = ["**/tests/*"]

[build-system]
build-backend = "poetry_dynamic_versioning.backend"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]

[tool.ruff]
exclude = [
  ".git",
  "__pycache__",
  "build",
  "dist",
  "tests",
  "*.pyc",
  "*.egg-info",
  ".cache",
  ".eggs",
  "env*",
  ".venv",
  "iptables_raw.py",
]

line-length = 99
indent-width = 4

# Explanation of errors
#
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# D203: One blank line required before class docstring
# E402: Module level import not at top of file
# SIM105: Use `contextlib.suppress(Exception)` instead of try-except-pass
# C402: Unnecessary generator (rewrite as a `dict` comprehension)
# C408: Unnecessary `dict` call (rewrite as a literal)
# I001: Import block is un-sorted or un-formatted
# UP001: `__metaclass__ = type` is implied
# UP009: UTF-8 encoding declaration is unnecessary
# UP010: Unnecessary `__future__` imports `absolute_import`, `division`, `print_function` for target Python version
ignore = [
  "D102",
  "D103",
  "D105",
  "D107",
  "D202",
  "D203",
  "D212",
  "E402",
  "SIM105",
  "C402",
  "C408",
  "I001",
  "UP001",
  "UP009",
  "UP010",
  "RUF100",
]
select = [
  "D",
  "E",
  "F",
  "Q",
  "W",
  "I",
  "S",
  "BLE",
  "N",
  "UP",
  "B",
  "A",
  "C4",
  "T20",
  "SIM",
  "RET",
  "ARG",
  "ERA",
  "RUF",
]

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
line-ending = "lf"
4
requirements.txt
Normal file
@ -0,0 +1,4 @@
ansible-base
pyopenssl
proxmoxer
hcloud
42
setup.cfg
Normal file
@ -0,0 +1,42 @@
[isort]
default_section = THIRDPARTY
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
force_single_line = true
line_length = 99
skip_glob = **/.venv*,**/venv/*,**/docs/*,**/inventory/*,**/modules/*

[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true

[tool:pytest]
filterwarnings =
    ignore::FutureWarning
    ignore:.*collections.*:DeprecationWarning
    ignore:.*pep8.*:FutureWarning

[coverage:run]
omit =
    **/test/*
    **/.venv/*

[flake8]
ignore = D101, D102, D103, D105, D107, D202, E402, W503, B902
max-line-length = 99
inline-quotes = double
exclude =
    .git
    .tox
    __pycache__
    build
    dist
    test
    *.pyc
    *.egg-info
    .cache
    .eggs
    env*
    iptables_raw.py
1
test/unit/plugins/inventory/__init__.py
Normal file
@ -0,0 +1 @@
# noqa
@ -1,16 +1,16 @@
"""Test inventory plugin proxmox."""
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Robert Kaussow <mail@thegeeklab.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)

__metaclass__ = type

import pytest

proxmox = pytest.importorskip("proxmoxer")

from ansible_collections.xoxys.general.plugins.inventory.proxmox import InventoryModule
from ansible.errors import AnsibleError, AnsibleParserError  # noqa
from plugins.inventory.proxmox import InventoryModule


@pytest.fixture
@ -58,7 +58,7 @@ def test_get_ip_address(inventory, mocker):
    inventory.client = mocker.MagicMock()
    inventory.client.nodes.return_value.get.return_value = networks

    assert inventory._get_ip_address("qemu", None, None) == "10.0.0.1"
    assert "10.0.0.1" == inventory._get_ip_address("qemu", None, None)


def test_exclude(inventory, mocker):
11
test/unit/requirements.txt
Normal file
@ -0,0 +1,11 @@
ansible

# requirement for the proxmox modules
proxmoxer
requests

# requirement for the corenetworks modules
corenetworks

# requirement for the openssl_pkcs12 module
pyOpenSSL
@ -1,2 +0,0 @@
modules:
  python_requires: ">=3.9"